drm/radeon: use new cg/pg flags for SI
[linux-2.6-block.git] drivers/gpu/drm/radeon/si.c
1 /*
2  * Copyright 2011 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <linux/module.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "sid.h"
32 #include "atom.h"
33 #include "si_blit_shaders.h"
34 #include "clearstate_si.h"
35 #include "radeon_ucode.h"
36
37
38 MODULE_FIRMWARE("radeon/TAHITI_pfp.bin");
39 MODULE_FIRMWARE("radeon/TAHITI_me.bin");
40 MODULE_FIRMWARE("radeon/TAHITI_ce.bin");
41 MODULE_FIRMWARE("radeon/TAHITI_mc.bin");
42 MODULE_FIRMWARE("radeon/TAHITI_rlc.bin");
43 MODULE_FIRMWARE("radeon/TAHITI_smc.bin");
44 MODULE_FIRMWARE("radeon/PITCAIRN_pfp.bin");
45 MODULE_FIRMWARE("radeon/PITCAIRN_me.bin");
46 MODULE_FIRMWARE("radeon/PITCAIRN_ce.bin");
47 MODULE_FIRMWARE("radeon/PITCAIRN_mc.bin");
48 MODULE_FIRMWARE("radeon/PITCAIRN_rlc.bin");
49 MODULE_FIRMWARE("radeon/PITCAIRN_smc.bin");
50 MODULE_FIRMWARE("radeon/VERDE_pfp.bin");
51 MODULE_FIRMWARE("radeon/VERDE_me.bin");
52 MODULE_FIRMWARE("radeon/VERDE_ce.bin");
53 MODULE_FIRMWARE("radeon/VERDE_mc.bin");
54 MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
55 MODULE_FIRMWARE("radeon/VERDE_smc.bin");
56 MODULE_FIRMWARE("radeon/OLAND_pfp.bin");
57 MODULE_FIRMWARE("radeon/OLAND_me.bin");
58 MODULE_FIRMWARE("radeon/OLAND_ce.bin");
59 MODULE_FIRMWARE("radeon/OLAND_mc.bin");
60 MODULE_FIRMWARE("radeon/OLAND_rlc.bin");
61 MODULE_FIRMWARE("radeon/OLAND_smc.bin");
62 MODULE_FIRMWARE("radeon/HAINAN_pfp.bin");
63 MODULE_FIRMWARE("radeon/HAINAN_me.bin");
64 MODULE_FIRMWARE("radeon/HAINAN_ce.bin");
65 MODULE_FIRMWARE("radeon/HAINAN_mc.bin");
66 MODULE_FIRMWARE("radeon/HAINAN_rlc.bin");
67 MODULE_FIRMWARE("radeon/HAINAN_smc.bin");
68
69 static void si_pcie_gen3_enable(struct radeon_device *rdev);
70 static void si_program_aspm(struct radeon_device *rdev);
71 extern void sumo_rlc_fini(struct radeon_device *rdev);
72 extern int sumo_rlc_init(struct radeon_device *rdev);
73 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
74 extern void r600_ih_ring_fini(struct radeon_device *rdev);
75 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
76 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
77 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
78 extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
79 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
80 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
81 extern void si_dma_vm_set_page(struct radeon_device *rdev,
82                                struct radeon_ib *ib,
83                                uint64_t pe,
84                                uint64_t addr, unsigned count,
85                                uint32_t incr, uint32_t flags);
86
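/*
 * RLC save/restore register list for Verde (used with GFX power gating).
 * Entries appear to be (SE/instance select << 16) | (register byte
 * offset >> 2) pairs, each followed by a placeholder data dword, with a
 * trailing zero entry closing the list.
 */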
87 static const u32 verde_rlc_save_restore_register_list[] =
88 {
89         (0x8000 << 16) | (0x98f4 >> 2),
90         0x00000000,
91         (0x8040 << 16) | (0x98f4 >> 2),
92         0x00000000,
93         (0x8000 << 16) | (0xe80 >> 2),
94         0x00000000,
95         (0x8040 << 16) | (0xe80 >> 2),
96         0x00000000,
97         (0x8000 << 16) | (0x89bc >> 2),
98         0x00000000,
99         (0x8040 << 16) | (0x89bc >> 2),
100         0x00000000,
101         (0x8000 << 16) | (0x8c1c >> 2),
102         0x00000000,
103         (0x8040 << 16) | (0x8c1c >> 2),
104         0x00000000,
105         (0x9c00 << 16) | (0x98f0 >> 2),
106         0x00000000,
107         (0x9c00 << 16) | (0xe7c >> 2),
108         0x00000000,
109         (0x8000 << 16) | (0x9148 >> 2),
110         0x00000000,
111         (0x8040 << 16) | (0x9148 >> 2),
112         0x00000000,
113         (0x9c00 << 16) | (0x9150 >> 2),
114         0x00000000,
115         (0x9c00 << 16) | (0x897c >> 2),
116         0x00000000,
117         (0x9c00 << 16) | (0x8d8c >> 2),
118         0x00000000,
119         (0x9c00 << 16) | (0xac54 >> 2),
120         0x00000000,
121         0x3,
122         (0x9c00 << 16) | (0x98f8 >> 2),
123         0x00000000,
124         (0x9c00 << 16) | (0x9910 >> 2),
125         0x00000000,
126         (0x9c00 << 16) | (0x9914 >> 2),
127         0x00000000,
128         (0x9c00 << 16) | (0x9918 >> 2),
129         0x00000000,
130         (0x9c00 << 16) | (0x991c >> 2),
131         0x00000000,
132         (0x9c00 << 16) | (0x9920 >> 2),
133         0x00000000,
134         (0x9c00 << 16) | (0x9924 >> 2),
135         0x00000000,
136         (0x9c00 << 16) | (0x9928 >> 2),
137         0x00000000,
138         (0x9c00 << 16) | (0x992c >> 2),
139         0x00000000,
140         (0x9c00 << 16) | (0x9930 >> 2),
141         0x00000000,
142         (0x9c00 << 16) | (0x9934 >> 2),
143         0x00000000,
144         (0x9c00 << 16) | (0x9938 >> 2),
145         0x00000000,
146         (0x9c00 << 16) | (0x993c >> 2),
147         0x00000000,
148         (0x9c00 << 16) | (0x9940 >> 2),
149         0x00000000,
150         (0x9c00 << 16) | (0x9944 >> 2),
151         0x00000000,
152         (0x9c00 << 16) | (0x9948 >> 2),
153         0x00000000,
154         (0x9c00 << 16) | (0x994c >> 2),
155         0x00000000,
156         (0x9c00 << 16) | (0x9950 >> 2),
157         0x00000000,
158         (0x9c00 << 16) | (0x9954 >> 2),
159         0x00000000,
160         (0x9c00 << 16) | (0x9958 >> 2),
161         0x00000000,
162         (0x9c00 << 16) | (0x995c >> 2),
163         0x00000000,
164         (0x9c00 << 16) | (0x9960 >> 2),
165         0x00000000,
166         (0x9c00 << 16) | (0x9964 >> 2),
167         0x00000000,
168         (0x9c00 << 16) | (0x9968 >> 2),
169         0x00000000,
170         (0x9c00 << 16) | (0x996c >> 2),
171         0x00000000,
172         (0x9c00 << 16) | (0x9970 >> 2),
173         0x00000000,
174         (0x9c00 << 16) | (0x9974 >> 2),
175         0x00000000,
176         (0x9c00 << 16) | (0x9978 >> 2),
177         0x00000000,
178         (0x9c00 << 16) | (0x997c >> 2),
179         0x00000000,
180         (0x9c00 << 16) | (0x9980 >> 2),
181         0x00000000,
182         (0x9c00 << 16) | (0x9984 >> 2),
183         0x00000000,
184         (0x9c00 << 16) | (0x9988 >> 2),
185         0x00000000,
186         (0x9c00 << 16) | (0x998c >> 2),
187         0x00000000,
188         (0x9c00 << 16) | (0x8c00 >> 2),
189         0x00000000,
190         (0x9c00 << 16) | (0x8c14 >> 2),
191         0x00000000,
192         (0x9c00 << 16) | (0x8c04 >> 2),
193         0x00000000,
194         (0x9c00 << 16) | (0x8c08 >> 2),
195         0x00000000,
196         (0x8000 << 16) | (0x9b7c >> 2),
197         0x00000000,
198         (0x8040 << 16) | (0x9b7c >> 2),
199         0x00000000,
200         (0x8000 << 16) | (0xe84 >> 2),
201         0x00000000,
202         (0x8040 << 16) | (0xe84 >> 2),
203         0x00000000,
204         (0x8000 << 16) | (0x89c0 >> 2),
205         0x00000000,
206         (0x8040 << 16) | (0x89c0 >> 2),
207         0x00000000,
208         (0x8000 << 16) | (0x914c >> 2),
209         0x00000000,
210         (0x8040 << 16) | (0x914c >> 2),
211         0x00000000,
212         (0x8000 << 16) | (0x8c20 >> 2),
213         0x00000000,
214         (0x8040 << 16) | (0x8c20 >> 2),
215         0x00000000,
216         (0x8000 << 16) | (0x9354 >> 2),
217         0x00000000,
218         (0x8040 << 16) | (0x9354 >> 2),
219         0x00000000,
220         (0x9c00 << 16) | (0x9060 >> 2),
221         0x00000000,
222         (0x9c00 << 16) | (0x9364 >> 2),
223         0x00000000,
224         (0x9c00 << 16) | (0x9100 >> 2),
225         0x00000000,
226         (0x9c00 << 16) | (0x913c >> 2),
227         0x00000000,
228         (0x8000 << 16) | (0x90e0 >> 2),
229         0x00000000,
230         (0x8000 << 16) | (0x90e4 >> 2),
231         0x00000000,
232         (0x8000 << 16) | (0x90e8 >> 2),
233         0x00000000,
234         (0x8040 << 16) | (0x90e0 >> 2),
235         0x00000000,
236         (0x8040 << 16) | (0x90e4 >> 2),
237         0x00000000,
238         (0x8040 << 16) | (0x90e8 >> 2),
239         0x00000000,
240         (0x9c00 << 16) | (0x8bcc >> 2),
241         0x00000000,
242         (0x9c00 << 16) | (0x8b24 >> 2),
243         0x00000000,
244         (0x9c00 << 16) | (0x88c4 >> 2),
245         0x00000000,
246         (0x9c00 << 16) | (0x8e50 >> 2),
247         0x00000000,
248         (0x9c00 << 16) | (0x8c0c >> 2),
249         0x00000000,
250         (0x9c00 << 16) | (0x8e58 >> 2),
251         0x00000000,
252         (0x9c00 << 16) | (0x8e5c >> 2),
253         0x00000000,
254         (0x9c00 << 16) | (0x9508 >> 2),
255         0x00000000,
256         (0x9c00 << 16) | (0x950c >> 2),
257         0x00000000,
258         (0x9c00 << 16) | (0x9494 >> 2),
259         0x00000000,
260         (0x9c00 << 16) | (0xac0c >> 2),
261         0x00000000,
262         (0x9c00 << 16) | (0xac10 >> 2),
263         0x00000000,
264         (0x9c00 << 16) | (0xac14 >> 2),
265         0x00000000,
266         (0x9c00 << 16) | (0xae00 >> 2),
267         0x00000000,
268         (0x9c00 << 16) | (0xac08 >> 2),
269         0x00000000,
270         (0x9c00 << 16) | (0x88d4 >> 2),
271         0x00000000,
272         (0x9c00 << 16) | (0x88c8 >> 2),
273         0x00000000,
274         (0x9c00 << 16) | (0x88cc >> 2),
275         0x00000000,
276         (0x9c00 << 16) | (0x89b0 >> 2),
277         0x00000000,
278         (0x9c00 << 16) | (0x8b10 >> 2),
279         0x00000000,
280         (0x9c00 << 16) | (0x8a14 >> 2),
281         0x00000000,
282         (0x9c00 << 16) | (0x9830 >> 2),
283         0x00000000,
284         (0x9c00 << 16) | (0x9834 >> 2),
285         0x00000000,
286         (0x9c00 << 16) | (0x9838 >> 2),
287         0x00000000,
288         (0x9c00 << 16) | (0x9a10 >> 2),
289         0x00000000,
290         (0x8000 << 16) | (0x9870 >> 2),
291         0x00000000,
292         (0x8000 << 16) | (0x9874 >> 2),
293         0x00000000,
294         (0x8001 << 16) | (0x9870 >> 2),
295         0x00000000,
296         (0x8001 << 16) | (0x9874 >> 2),
297         0x00000000,
298         (0x8040 << 16) | (0x9870 >> 2),
299         0x00000000,
300         (0x8040 << 16) | (0x9874 >> 2),
301         0x00000000,
302         (0x8041 << 16) | (0x9870 >> 2),
303         0x00000000,
304         (0x8041 << 16) | (0x9874 >> 2),
305         0x00000000,
306         0x00000000
307 };
308
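/*
 * Per-ASIC "golden" register tables: { register offset, AND mask, value }
 * triplets applied via radeon_program_register_sequence() from
 * si_init_golden_registers() below.
 */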
309 static const u32 tahiti_golden_rlc_registers[] =
310 {
311         0xc424, 0xffffffff, 0x00601005,
312         0xc47c, 0xffffffff, 0x10104040,
313         0xc488, 0xffffffff, 0x0100000a,
314         0xc314, 0xffffffff, 0x00000800,
315         0xc30c, 0xffffffff, 0x800000f4,
316         0xf4a8, 0xffffffff, 0x00000000
317 };
318
319 static const u32 tahiti_golden_registers[] =
320 {
321         0x9a10, 0x00010000, 0x00018208,
322         0x9830, 0xffffffff, 0x00000000,
323         0x9834, 0xf00fffff, 0x00000400,
324         0x9838, 0x0002021c, 0x00020200,
325         0xc78, 0x00000080, 0x00000000,
326         0xd030, 0x000300c0, 0x00800040,
327         0xd830, 0x000300c0, 0x00800040,
328         0x5bb0, 0x000000f0, 0x00000070,
329         0x5bc0, 0x00200000, 0x50100000,
330         0x7030, 0x31000311, 0x00000011,
331         0x277c, 0x00000003, 0x000007ff,
332         0x240c, 0x000007ff, 0x00000000,
333         0x8a14, 0xf000001f, 0x00000007,
334         0x8b24, 0xffffffff, 0x00ffffff,
335         0x8b10, 0x0000ff0f, 0x00000000,
336         0x28a4c, 0x07ffffff, 0x4e000000,
337         0x28350, 0x3f3f3fff, 0x2a00126a,
338         0x30, 0x000000ff, 0x0040,
339         0x34, 0x00000040, 0x00004040,
340         0x9100, 0x07ffffff, 0x03000000,
341         0x8e88, 0x01ff1f3f, 0x00000000,
342         0x8e84, 0x01ff1f3f, 0x00000000,
343         0x9060, 0x0000007f, 0x00000020,
344         0x9508, 0x00010000, 0x00010000,
345         0xac14, 0x00000200, 0x000002fb,
346         0xac10, 0xffffffff, 0x0000543b,
347         0xac0c, 0xffffffff, 0xa9210876,
348         0x88d0, 0xffffffff, 0x000fff40,
349         0x88d4, 0x0000001f, 0x00000010,
350         0x1410, 0x20000000, 0x20fffed8,
351         0x15c0, 0x000c0fc0, 0x000c0400
352 };
353
354 static const u32 tahiti_golden_registers2[] =
355 {
356         0xc64, 0x00000001, 0x00000001
357 };
358
359 static const u32 pitcairn_golden_rlc_registers[] =
360 {
361         0xc424, 0xffffffff, 0x00601004,
362         0xc47c, 0xffffffff, 0x10102020,
363         0xc488, 0xffffffff, 0x01000020,
364         0xc314, 0xffffffff, 0x00000800,
365         0xc30c, 0xffffffff, 0x800000a4
366 };
367
368 static const u32 pitcairn_golden_registers[] =
369 {
370         0x9a10, 0x00010000, 0x00018208,
371         0x9830, 0xffffffff, 0x00000000,
372         0x9834, 0xf00fffff, 0x00000400,
373         0x9838, 0x0002021c, 0x00020200,
374         0xc78, 0x00000080, 0x00000000,
375         0xd030, 0x000300c0, 0x00800040,
376         0xd830, 0x000300c0, 0x00800040,
377         0x5bb0, 0x000000f0, 0x00000070,
378         0x5bc0, 0x00200000, 0x50100000,
379         0x7030, 0x31000311, 0x00000011,
380         0x2ae4, 0x00073ffe, 0x000022a2,
381         0x240c, 0x000007ff, 0x00000000,
382         0x8a14, 0xf000001f, 0x00000007,
383         0x8b24, 0xffffffff, 0x00ffffff,
384         0x8b10, 0x0000ff0f, 0x00000000,
385         0x28a4c, 0x07ffffff, 0x4e000000,
386         0x28350, 0x3f3f3fff, 0x2a00126a,
387         0x30, 0x000000ff, 0x0040,
388         0x34, 0x00000040, 0x00004040,
389         0x9100, 0x07ffffff, 0x03000000,
390         0x9060, 0x0000007f, 0x00000020,
391         0x9508, 0x00010000, 0x00010000,
392         0xac14, 0x000003ff, 0x000000f7,
393         0xac10, 0xffffffff, 0x00000000,
394         0xac0c, 0xffffffff, 0x32761054,
395         0x88d4, 0x0000001f, 0x00000010,
396         0x15c0, 0x000c0fc0, 0x000c0400
397 };
398
399 static const u32 verde_golden_rlc_registers[] =
400 {
401         0xc424, 0xffffffff, 0x033f1005,
402         0xc47c, 0xffffffff, 0x10808020,
403         0xc488, 0xffffffff, 0x00800008,
404         0xc314, 0xffffffff, 0x00001000,
405         0xc30c, 0xffffffff, 0x80010014
406 };
407
408 static const u32 verde_golden_registers[] =
409 {
410         0x9a10, 0x00010000, 0x00018208,
411         0x9830, 0xffffffff, 0x00000000,
412         0x9834, 0xf00fffff, 0x00000400,
413         0x9838, 0x0002021c, 0x00020200,
414         0xc78, 0x00000080, 0x00000000,
415         0xd030, 0x000300c0, 0x00800040,
416         0xd030, 0x000300c0, 0x00800040,
417         0xd830, 0x000300c0, 0x00800040,
418         0xd830, 0x000300c0, 0x00800040,
419         0x5bb0, 0x000000f0, 0x00000070,
420         0x5bc0, 0x00200000, 0x50100000,
421         0x7030, 0x31000311, 0x00000011,
422         0x2ae4, 0x00073ffe, 0x000022a2,
423         0x2ae4, 0x00073ffe, 0x000022a2,
424         0x2ae4, 0x00073ffe, 0x000022a2,
425         0x240c, 0x000007ff, 0x00000000,
426         0x240c, 0x000007ff, 0x00000000,
427         0x240c, 0x000007ff, 0x00000000,
428         0x8a14, 0xf000001f, 0x00000007,
429         0x8a14, 0xf000001f, 0x00000007,
430         0x8a14, 0xf000001f, 0x00000007,
431         0x8b24, 0xffffffff, 0x00ffffff,
432         0x8b10, 0x0000ff0f, 0x00000000,
433         0x28a4c, 0x07ffffff, 0x4e000000,
434         0x28350, 0x3f3f3fff, 0x0000124a,
435         0x28350, 0x3f3f3fff, 0x0000124a,
436         0x28350, 0x3f3f3fff, 0x0000124a,
437         0x30, 0x000000ff, 0x0040,
438         0x34, 0x00000040, 0x00004040,
439         0x9100, 0x07ffffff, 0x03000000,
440         0x9100, 0x07ffffff, 0x03000000,
441         0x8e88, 0x01ff1f3f, 0x00000000,
442         0x8e88, 0x01ff1f3f, 0x00000000,
443         0x8e88, 0x01ff1f3f, 0x00000000,
444         0x8e84, 0x01ff1f3f, 0x00000000,
445         0x8e84, 0x01ff1f3f, 0x00000000,
446         0x8e84, 0x01ff1f3f, 0x00000000,
447         0x9060, 0x0000007f, 0x00000020,
448         0x9508, 0x00010000, 0x00010000,
449         0xac14, 0x000003ff, 0x00000003,
450         0xac14, 0x000003ff, 0x00000003,
451         0xac14, 0x000003ff, 0x00000003,
452         0xac10, 0xffffffff, 0x00000000,
453         0xac10, 0xffffffff, 0x00000000,
454         0xac10, 0xffffffff, 0x00000000,
455         0xac0c, 0xffffffff, 0x00001032,
456         0xac0c, 0xffffffff, 0x00001032,
457         0xac0c, 0xffffffff, 0x00001032,
458         0x88d4, 0x0000001f, 0x00000010,
459         0x88d4, 0x0000001f, 0x00000010,
460         0x88d4, 0x0000001f, 0x00000010,
461         0x15c0, 0x000c0fc0, 0x000c0400
462 };
463
464 static const u32 oland_golden_rlc_registers[] =
465 {
466         0xc424, 0xffffffff, 0x00601005,
467         0xc47c, 0xffffffff, 0x10104040,
468         0xc488, 0xffffffff, 0x0100000a,
469         0xc314, 0xffffffff, 0x00000800,
470         0xc30c, 0xffffffff, 0x800000f4
471 };
472
473 static const u32 oland_golden_registers[] =
474 {
475         0x9a10, 0x00010000, 0x00018208,
476         0x9830, 0xffffffff, 0x00000000,
477         0x9834, 0xf00fffff, 0x00000400,
478         0x9838, 0x0002021c, 0x00020200,
479         0xc78, 0x00000080, 0x00000000,
480         0xd030, 0x000300c0, 0x00800040,
481         0xd830, 0x000300c0, 0x00800040,
482         0x5bb0, 0x000000f0, 0x00000070,
483         0x5bc0, 0x00200000, 0x50100000,
484         0x7030, 0x31000311, 0x00000011,
485         0x2ae4, 0x00073ffe, 0x000022a2,
486         0x240c, 0x000007ff, 0x00000000,
487         0x8a14, 0xf000001f, 0x00000007,
488         0x8b24, 0xffffffff, 0x00ffffff,
489         0x8b10, 0x0000ff0f, 0x00000000,
490         0x28a4c, 0x07ffffff, 0x4e000000,
491         0x28350, 0x3f3f3fff, 0x00000082,
492         0x30, 0x000000ff, 0x0040,
493         0x34, 0x00000040, 0x00004040,
494         0x9100, 0x07ffffff, 0x03000000,
495         0x9060, 0x0000007f, 0x00000020,
496         0x9508, 0x00010000, 0x00010000,
497         0xac14, 0x000003ff, 0x000000f3,
498         0xac10, 0xffffffff, 0x00000000,
499         0xac0c, 0xffffffff, 0x00003210,
500         0x88d4, 0x0000001f, 0x00000010,
501         0x15c0, 0x000c0fc0, 0x000c0400
502 };
503
504 static const u32 hainan_golden_registers[] =
505 {
506         0x9a10, 0x00010000, 0x00018208,
507         0x9830, 0xffffffff, 0x00000000,
508         0x9834, 0xf00fffff, 0x00000400,
509         0x9838, 0x0002021c, 0x00020200,
510         0xd0c0, 0xff000fff, 0x00000100,
511         0xd030, 0x000300c0, 0x00800040,
512         0xd8c0, 0xff000fff, 0x00000100,
513         0xd830, 0x000300c0, 0x00800040,
514         0x2ae4, 0x00073ffe, 0x000022a2,
515         0x240c, 0x000007ff, 0x00000000,
516         0x8a14, 0xf000001f, 0x00000007,
517         0x8b24, 0xffffffff, 0x00ffffff,
518         0x8b10, 0x0000ff0f, 0x00000000,
519         0x28a4c, 0x07ffffff, 0x4e000000,
520         0x28350, 0x3f3f3fff, 0x00000000,
521         0x30, 0x000000ff, 0x0040,
522         0x34, 0x00000040, 0x00004040,
523         0x9100, 0x03e00000, 0x03600000,
524         0x9060, 0x0000007f, 0x00000020,
525         0x9508, 0x00010000, 0x00010000,
526         0xac14, 0x000003ff, 0x000000f1,
527         0xac10, 0xffffffff, 0x00000000,
528         0xac0c, 0xffffffff, 0x00003210,
529         0x88d4, 0x0000001f, 0x00000010,
530         0x15c0, 0x000c0fc0, 0x000c0400
531 };
532
533 static const u32 hainan_golden_registers2[] =
534 {
535         0x98f8, 0xffffffff, 0x02010001
536 };
537
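/*
 * Medium and coarse grain clock gating (MGCG/CGCG) init sequences,
 * in the same { offset, mask, value } triplet format as the golden
 * register tables above.
 */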
538 static const u32 tahiti_mgcg_cgcg_init[] =
539 {
540         0xc400, 0xffffffff, 0xfffffffc,
541         0x802c, 0xffffffff, 0xe0000000,
542         0x9a60, 0xffffffff, 0x00000100,
543         0x92a4, 0xffffffff, 0x00000100,
544         0xc164, 0xffffffff, 0x00000100,
545         0x9774, 0xffffffff, 0x00000100,
546         0x8984, 0xffffffff, 0x06000100,
547         0x8a18, 0xffffffff, 0x00000100,
548         0x92a0, 0xffffffff, 0x00000100,
549         0xc380, 0xffffffff, 0x00000100,
550         0x8b28, 0xffffffff, 0x00000100,
551         0x9144, 0xffffffff, 0x00000100,
552         0x8d88, 0xffffffff, 0x00000100,
553         0x8d8c, 0xffffffff, 0x00000100,
554         0x9030, 0xffffffff, 0x00000100,
555         0x9034, 0xffffffff, 0x00000100,
556         0x9038, 0xffffffff, 0x00000100,
557         0x903c, 0xffffffff, 0x00000100,
558         0xad80, 0xffffffff, 0x00000100,
559         0xac54, 0xffffffff, 0x00000100,
560         0x897c, 0xffffffff, 0x06000100,
561         0x9868, 0xffffffff, 0x00000100,
562         0x9510, 0xffffffff, 0x00000100,
563         0xaf04, 0xffffffff, 0x00000100,
564         0xae04, 0xffffffff, 0x00000100,
565         0x949c, 0xffffffff, 0x00000100,
566         0x802c, 0xffffffff, 0xe0000000,
567         0x9160, 0xffffffff, 0x00010000,
568         0x9164, 0xffffffff, 0x00030002,
569         0x9168, 0xffffffff, 0x00040007,
570         0x916c, 0xffffffff, 0x00060005,
571         0x9170, 0xffffffff, 0x00090008,
572         0x9174, 0xffffffff, 0x00020001,
573         0x9178, 0xffffffff, 0x00040003,
574         0x917c, 0xffffffff, 0x00000007,
575         0x9180, 0xffffffff, 0x00060005,
576         0x9184, 0xffffffff, 0x00090008,
577         0x9188, 0xffffffff, 0x00030002,
578         0x918c, 0xffffffff, 0x00050004,
579         0x9190, 0xffffffff, 0x00000008,
580         0x9194, 0xffffffff, 0x00070006,
581         0x9198, 0xffffffff, 0x000a0009,
582         0x919c, 0xffffffff, 0x00040003,
583         0x91a0, 0xffffffff, 0x00060005,
584         0x91a4, 0xffffffff, 0x00000009,
585         0x91a8, 0xffffffff, 0x00080007,
586         0x91ac, 0xffffffff, 0x000b000a,
587         0x91b0, 0xffffffff, 0x00050004,
588         0x91b4, 0xffffffff, 0x00070006,
589         0x91b8, 0xffffffff, 0x0008000b,
590         0x91bc, 0xffffffff, 0x000a0009,
591         0x91c0, 0xffffffff, 0x000d000c,
592         0x91c4, 0xffffffff, 0x00060005,
593         0x91c8, 0xffffffff, 0x00080007,
594         0x91cc, 0xffffffff, 0x0000000b,
595         0x91d0, 0xffffffff, 0x000a0009,
596         0x91d4, 0xffffffff, 0x000d000c,
597         0x91d8, 0xffffffff, 0x00070006,
598         0x91dc, 0xffffffff, 0x00090008,
599         0x91e0, 0xffffffff, 0x0000000c,
600         0x91e4, 0xffffffff, 0x000b000a,
601         0x91e8, 0xffffffff, 0x000e000d,
602         0x91ec, 0xffffffff, 0x00080007,
603         0x91f0, 0xffffffff, 0x000a0009,
604         0x91f4, 0xffffffff, 0x0000000d,
605         0x91f8, 0xffffffff, 0x000c000b,
606         0x91fc, 0xffffffff, 0x000f000e,
607         0x9200, 0xffffffff, 0x00090008,
608         0x9204, 0xffffffff, 0x000b000a,
609         0x9208, 0xffffffff, 0x000c000f,
610         0x920c, 0xffffffff, 0x000e000d,
611         0x9210, 0xffffffff, 0x00110010,
612         0x9214, 0xffffffff, 0x000a0009,
613         0x9218, 0xffffffff, 0x000c000b,
614         0x921c, 0xffffffff, 0x0000000f,
615         0x9220, 0xffffffff, 0x000e000d,
616         0x9224, 0xffffffff, 0x00110010,
617         0x9228, 0xffffffff, 0x000b000a,
618         0x922c, 0xffffffff, 0x000d000c,
619         0x9230, 0xffffffff, 0x00000010,
620         0x9234, 0xffffffff, 0x000f000e,
621         0x9238, 0xffffffff, 0x00120011,
622         0x923c, 0xffffffff, 0x000c000b,
623         0x9240, 0xffffffff, 0x000e000d,
624         0x9244, 0xffffffff, 0x00000011,
625         0x9248, 0xffffffff, 0x0010000f,
626         0x924c, 0xffffffff, 0x00130012,
627         0x9250, 0xffffffff, 0x000d000c,
628         0x9254, 0xffffffff, 0x000f000e,
629         0x9258, 0xffffffff, 0x00100013,
630         0x925c, 0xffffffff, 0x00120011,
631         0x9260, 0xffffffff, 0x00150014,
632         0x9264, 0xffffffff, 0x000e000d,
633         0x9268, 0xffffffff, 0x0010000f,
634         0x926c, 0xffffffff, 0x00000013,
635         0x9270, 0xffffffff, 0x00120011,
636         0x9274, 0xffffffff, 0x00150014,
637         0x9278, 0xffffffff, 0x000f000e,
638         0x927c, 0xffffffff, 0x00110010,
639         0x9280, 0xffffffff, 0x00000014,
640         0x9284, 0xffffffff, 0x00130012,
641         0x9288, 0xffffffff, 0x00160015,
642         0x928c, 0xffffffff, 0x0010000f,
643         0x9290, 0xffffffff, 0x00120011,
644         0x9294, 0xffffffff, 0x00000015,
645         0x9298, 0xffffffff, 0x00140013,
646         0x929c, 0xffffffff, 0x00170016,
647         0x9150, 0xffffffff, 0x96940200,
648         0x8708, 0xffffffff, 0x00900100,
649         0xc478, 0xffffffff, 0x00000080,
650         0xc404, 0xffffffff, 0x0020003f,
651         0x30, 0xffffffff, 0x0000001c,
652         0x34, 0x000f0000, 0x000f0000,
653         0x160c, 0xffffffff, 0x00000100,
654         0x1024, 0xffffffff, 0x00000100,
655         0x102c, 0x00000101, 0x00000000,
656         0x20a8, 0xffffffff, 0x00000104,
657         0x264c, 0x000c0000, 0x000c0000,
658         0x2648, 0x000c0000, 0x000c0000,
659         0x55e4, 0xff000fff, 0x00000100,
660         0x55e8, 0x00000001, 0x00000001,
661         0x2f50, 0x00000001, 0x00000001,
662         0x30cc, 0xc0000fff, 0x00000104,
663         0xc1e4, 0x00000001, 0x00000001,
664         0xd0c0, 0xfffffff0, 0x00000100,
665         0xd8c0, 0xfffffff0, 0x00000100
666 };
667
668 static const u32 pitcairn_mgcg_cgcg_init[] =
669 {
670         0xc400, 0xffffffff, 0xfffffffc,
671         0x802c, 0xffffffff, 0xe0000000,
672         0x9a60, 0xffffffff, 0x00000100,
673         0x92a4, 0xffffffff, 0x00000100,
674         0xc164, 0xffffffff, 0x00000100,
675         0x9774, 0xffffffff, 0x00000100,
676         0x8984, 0xffffffff, 0x06000100,
677         0x8a18, 0xffffffff, 0x00000100,
678         0x92a0, 0xffffffff, 0x00000100,
679         0xc380, 0xffffffff, 0x00000100,
680         0x8b28, 0xffffffff, 0x00000100,
681         0x9144, 0xffffffff, 0x00000100,
682         0x8d88, 0xffffffff, 0x00000100,
683         0x8d8c, 0xffffffff, 0x00000100,
684         0x9030, 0xffffffff, 0x00000100,
685         0x9034, 0xffffffff, 0x00000100,
686         0x9038, 0xffffffff, 0x00000100,
687         0x903c, 0xffffffff, 0x00000100,
688         0xad80, 0xffffffff, 0x00000100,
689         0xac54, 0xffffffff, 0x00000100,
690         0x897c, 0xffffffff, 0x06000100,
691         0x9868, 0xffffffff, 0x00000100,
692         0x9510, 0xffffffff, 0x00000100,
693         0xaf04, 0xffffffff, 0x00000100,
694         0xae04, 0xffffffff, 0x00000100,
695         0x949c, 0xffffffff, 0x00000100,
696         0x802c, 0xffffffff, 0xe0000000,
697         0x9160, 0xffffffff, 0x00010000,
698         0x9164, 0xffffffff, 0x00030002,
699         0x9168, 0xffffffff, 0x00040007,
700         0x916c, 0xffffffff, 0x00060005,
701         0x9170, 0xffffffff, 0x00090008,
702         0x9174, 0xffffffff, 0x00020001,
703         0x9178, 0xffffffff, 0x00040003,
704         0x917c, 0xffffffff, 0x00000007,
705         0x9180, 0xffffffff, 0x00060005,
706         0x9184, 0xffffffff, 0x00090008,
707         0x9188, 0xffffffff, 0x00030002,
708         0x918c, 0xffffffff, 0x00050004,
709         0x9190, 0xffffffff, 0x00000008,
710         0x9194, 0xffffffff, 0x00070006,
711         0x9198, 0xffffffff, 0x000a0009,
712         0x919c, 0xffffffff, 0x00040003,
713         0x91a0, 0xffffffff, 0x00060005,
714         0x91a4, 0xffffffff, 0x00000009,
715         0x91a8, 0xffffffff, 0x00080007,
716         0x91ac, 0xffffffff, 0x000b000a,
717         0x91b0, 0xffffffff, 0x00050004,
718         0x91b4, 0xffffffff, 0x00070006,
719         0x91b8, 0xffffffff, 0x0008000b,
720         0x91bc, 0xffffffff, 0x000a0009,
721         0x91c0, 0xffffffff, 0x000d000c,
722         0x9200, 0xffffffff, 0x00090008,
723         0x9204, 0xffffffff, 0x000b000a,
724         0x9208, 0xffffffff, 0x000c000f,
725         0x920c, 0xffffffff, 0x000e000d,
726         0x9210, 0xffffffff, 0x00110010,
727         0x9214, 0xffffffff, 0x000a0009,
728         0x9218, 0xffffffff, 0x000c000b,
729         0x921c, 0xffffffff, 0x0000000f,
730         0x9220, 0xffffffff, 0x000e000d,
731         0x9224, 0xffffffff, 0x00110010,
732         0x9228, 0xffffffff, 0x000b000a,
733         0x922c, 0xffffffff, 0x000d000c,
734         0x9230, 0xffffffff, 0x00000010,
735         0x9234, 0xffffffff, 0x000f000e,
736         0x9238, 0xffffffff, 0x00120011,
737         0x923c, 0xffffffff, 0x000c000b,
738         0x9240, 0xffffffff, 0x000e000d,
739         0x9244, 0xffffffff, 0x00000011,
740         0x9248, 0xffffffff, 0x0010000f,
741         0x924c, 0xffffffff, 0x00130012,
742         0x9250, 0xffffffff, 0x000d000c,
743         0x9254, 0xffffffff, 0x000f000e,
744         0x9258, 0xffffffff, 0x00100013,
745         0x925c, 0xffffffff, 0x00120011,
746         0x9260, 0xffffffff, 0x00150014,
747         0x9150, 0xffffffff, 0x96940200,
748         0x8708, 0xffffffff, 0x00900100,
749         0xc478, 0xffffffff, 0x00000080,
750         0xc404, 0xffffffff, 0x0020003f,
751         0x30, 0xffffffff, 0x0000001c,
752         0x34, 0x000f0000, 0x000f0000,
753         0x160c, 0xffffffff, 0x00000100,
754         0x1024, 0xffffffff, 0x00000100,
755         0x102c, 0x00000101, 0x00000000,
756         0x20a8, 0xffffffff, 0x00000104,
757         0x55e4, 0xff000fff, 0x00000100,
758         0x55e8, 0x00000001, 0x00000001,
759         0x2f50, 0x00000001, 0x00000001,
760         0x30cc, 0xc0000fff, 0x00000104,
761         0xc1e4, 0x00000001, 0x00000001,
762         0xd0c0, 0xfffffff0, 0x00000100,
763         0xd8c0, 0xfffffff0, 0x00000100
764 };
765
766 static const u32 verde_mgcg_cgcg_init[] =
767 {
768         0xc400, 0xffffffff, 0xfffffffc,
769         0x802c, 0xffffffff, 0xe0000000,
770         0x9a60, 0xffffffff, 0x00000100,
771         0x92a4, 0xffffffff, 0x00000100,
772         0xc164, 0xffffffff, 0x00000100,
773         0x9774, 0xffffffff, 0x00000100,
774         0x8984, 0xffffffff, 0x06000100,
775         0x8a18, 0xffffffff, 0x00000100,
776         0x92a0, 0xffffffff, 0x00000100,
777         0xc380, 0xffffffff, 0x00000100,
778         0x8b28, 0xffffffff, 0x00000100,
779         0x9144, 0xffffffff, 0x00000100,
780         0x8d88, 0xffffffff, 0x00000100,
781         0x8d8c, 0xffffffff, 0x00000100,
782         0x9030, 0xffffffff, 0x00000100,
783         0x9034, 0xffffffff, 0x00000100,
784         0x9038, 0xffffffff, 0x00000100,
785         0x903c, 0xffffffff, 0x00000100,
786         0xad80, 0xffffffff, 0x00000100,
787         0xac54, 0xffffffff, 0x00000100,
788         0x897c, 0xffffffff, 0x06000100,
789         0x9868, 0xffffffff, 0x00000100,
790         0x9510, 0xffffffff, 0x00000100,
791         0xaf04, 0xffffffff, 0x00000100,
792         0xae04, 0xffffffff, 0x00000100,
793         0x949c, 0xffffffff, 0x00000100,
794         0x802c, 0xffffffff, 0xe0000000,
795         0x9160, 0xffffffff, 0x00010000,
796         0x9164, 0xffffffff, 0x00030002,
797         0x9168, 0xffffffff, 0x00040007,
798         0x916c, 0xffffffff, 0x00060005,
799         0x9170, 0xffffffff, 0x00090008,
800         0x9174, 0xffffffff, 0x00020001,
801         0x9178, 0xffffffff, 0x00040003,
802         0x917c, 0xffffffff, 0x00000007,
803         0x9180, 0xffffffff, 0x00060005,
804         0x9184, 0xffffffff, 0x00090008,
805         0x9188, 0xffffffff, 0x00030002,
806         0x918c, 0xffffffff, 0x00050004,
807         0x9190, 0xffffffff, 0x00000008,
808         0x9194, 0xffffffff, 0x00070006,
809         0x9198, 0xffffffff, 0x000a0009,
810         0x919c, 0xffffffff, 0x00040003,
811         0x91a0, 0xffffffff, 0x00060005,
812         0x91a4, 0xffffffff, 0x00000009,
813         0x91a8, 0xffffffff, 0x00080007,
814         0x91ac, 0xffffffff, 0x000b000a,
815         0x91b0, 0xffffffff, 0x00050004,
816         0x91b4, 0xffffffff, 0x00070006,
817         0x91b8, 0xffffffff, 0x0008000b,
818         0x91bc, 0xffffffff, 0x000a0009,
819         0x91c0, 0xffffffff, 0x000d000c,
820         0x9200, 0xffffffff, 0x00090008,
821         0x9204, 0xffffffff, 0x000b000a,
822         0x9208, 0xffffffff, 0x000c000f,
823         0x920c, 0xffffffff, 0x000e000d,
824         0x9210, 0xffffffff, 0x00110010,
825         0x9214, 0xffffffff, 0x000a0009,
826         0x9218, 0xffffffff, 0x000c000b,
827         0x921c, 0xffffffff, 0x0000000f,
828         0x9220, 0xffffffff, 0x000e000d,
829         0x9224, 0xffffffff, 0x00110010,
830         0x9228, 0xffffffff, 0x000b000a,
831         0x922c, 0xffffffff, 0x000d000c,
832         0x9230, 0xffffffff, 0x00000010,
833         0x9234, 0xffffffff, 0x000f000e,
834         0x9238, 0xffffffff, 0x00120011,
835         0x923c, 0xffffffff, 0x000c000b,
836         0x9240, 0xffffffff, 0x000e000d,
837         0x9244, 0xffffffff, 0x00000011,
838         0x9248, 0xffffffff, 0x0010000f,
839         0x924c, 0xffffffff, 0x00130012,
840         0x9250, 0xffffffff, 0x000d000c,
841         0x9254, 0xffffffff, 0x000f000e,
842         0x9258, 0xffffffff, 0x00100013,
843         0x925c, 0xffffffff, 0x00120011,
844         0x9260, 0xffffffff, 0x00150014,
845         0x9150, 0xffffffff, 0x96940200,
846         0x8708, 0xffffffff, 0x00900100,
847         0xc478, 0xffffffff, 0x00000080,
848         0xc404, 0xffffffff, 0x0020003f,
849         0x30, 0xffffffff, 0x0000001c,
850         0x34, 0x000f0000, 0x000f0000,
851         0x160c, 0xffffffff, 0x00000100,
852         0x1024, 0xffffffff, 0x00000100,
853         0x102c, 0x00000101, 0x00000000,
854         0x20a8, 0xffffffff, 0x00000104,
855         0x264c, 0x000c0000, 0x000c0000,
856         0x2648, 0x000c0000, 0x000c0000,
857         0x55e4, 0xff000fff, 0x00000100,
858         0x55e8, 0x00000001, 0x00000001,
859         0x2f50, 0x00000001, 0x00000001,
860         0x30cc, 0xc0000fff, 0x00000104,
861         0xc1e4, 0x00000001, 0x00000001,
862         0xd0c0, 0xfffffff0, 0x00000100,
863         0xd8c0, 0xfffffff0, 0x00000100
864 };
865
866 static const u32 oland_mgcg_cgcg_init[] =
867 {
868         0xc400, 0xffffffff, 0xfffffffc,
869         0x802c, 0xffffffff, 0xe0000000,
870         0x9a60, 0xffffffff, 0x00000100,
871         0x92a4, 0xffffffff, 0x00000100,
872         0xc164, 0xffffffff, 0x00000100,
873         0x9774, 0xffffffff, 0x00000100,
874         0x8984, 0xffffffff, 0x06000100,
875         0x8a18, 0xffffffff, 0x00000100,
876         0x92a0, 0xffffffff, 0x00000100,
877         0xc380, 0xffffffff, 0x00000100,
878         0x8b28, 0xffffffff, 0x00000100,
879         0x9144, 0xffffffff, 0x00000100,
880         0x8d88, 0xffffffff, 0x00000100,
881         0x8d8c, 0xffffffff, 0x00000100,
882         0x9030, 0xffffffff, 0x00000100,
883         0x9034, 0xffffffff, 0x00000100,
884         0x9038, 0xffffffff, 0x00000100,
885         0x903c, 0xffffffff, 0x00000100,
886         0xad80, 0xffffffff, 0x00000100,
887         0xac54, 0xffffffff, 0x00000100,
888         0x897c, 0xffffffff, 0x06000100,
889         0x9868, 0xffffffff, 0x00000100,
890         0x9510, 0xffffffff, 0x00000100,
891         0xaf04, 0xffffffff, 0x00000100,
892         0xae04, 0xffffffff, 0x00000100,
893         0x949c, 0xffffffff, 0x00000100,
894         0x802c, 0xffffffff, 0xe0000000,
895         0x9160, 0xffffffff, 0x00010000,
896         0x9164, 0xffffffff, 0x00030002,
897         0x9168, 0xffffffff, 0x00040007,
898         0x916c, 0xffffffff, 0x00060005,
899         0x9170, 0xffffffff, 0x00090008,
900         0x9174, 0xffffffff, 0x00020001,
901         0x9178, 0xffffffff, 0x00040003,
902         0x917c, 0xffffffff, 0x00000007,
903         0x9180, 0xffffffff, 0x00060005,
904         0x9184, 0xffffffff, 0x00090008,
905         0x9188, 0xffffffff, 0x00030002,
906         0x918c, 0xffffffff, 0x00050004,
907         0x9190, 0xffffffff, 0x00000008,
908         0x9194, 0xffffffff, 0x00070006,
909         0x9198, 0xffffffff, 0x000a0009,
910         0x919c, 0xffffffff, 0x00040003,
911         0x91a0, 0xffffffff, 0x00060005,
912         0x91a4, 0xffffffff, 0x00000009,
913         0x91a8, 0xffffffff, 0x00080007,
914         0x91ac, 0xffffffff, 0x000b000a,
915         0x91b0, 0xffffffff, 0x00050004,
916         0x91b4, 0xffffffff, 0x00070006,
917         0x91b8, 0xffffffff, 0x0008000b,
918         0x91bc, 0xffffffff, 0x000a0009,
919         0x91c0, 0xffffffff, 0x000d000c,
920         0x91c4, 0xffffffff, 0x00060005,
921         0x91c8, 0xffffffff, 0x00080007,
922         0x91cc, 0xffffffff, 0x0000000b,
923         0x91d0, 0xffffffff, 0x000a0009,
924         0x91d4, 0xffffffff, 0x000d000c,
925         0x9150, 0xffffffff, 0x96940200,
926         0x8708, 0xffffffff, 0x00900100,
927         0xc478, 0xffffffff, 0x00000080,
928         0xc404, 0xffffffff, 0x0020003f,
929         0x30, 0xffffffff, 0x0000001c,
930         0x34, 0x000f0000, 0x000f0000,
931         0x160c, 0xffffffff, 0x00000100,
932         0x1024, 0xffffffff, 0x00000100,
933         0x102c, 0x00000101, 0x00000000,
934         0x20a8, 0xffffffff, 0x00000104,
935         0x264c, 0x000c0000, 0x000c0000,
936         0x2648, 0x000c0000, 0x000c0000,
937         0x55e4, 0xff000fff, 0x00000100,
938         0x55e8, 0x00000001, 0x00000001,
939         0x2f50, 0x00000001, 0x00000001,
940         0x30cc, 0xc0000fff, 0x00000104,
941         0xc1e4, 0x00000001, 0x00000001,
942         0xd0c0, 0xfffffff0, 0x00000100,
943         0xd8c0, 0xfffffff0, 0x00000100
944 };
945
946 static const u32 hainan_mgcg_cgcg_init[] =
947 {
948         0xc400, 0xffffffff, 0xfffffffc,
949         0x802c, 0xffffffff, 0xe0000000,
950         0x9a60, 0xffffffff, 0x00000100,
951         0x92a4, 0xffffffff, 0x00000100,
952         0xc164, 0xffffffff, 0x00000100,
953         0x9774, 0xffffffff, 0x00000100,
954         0x8984, 0xffffffff, 0x06000100,
955         0x8a18, 0xffffffff, 0x00000100,
956         0x92a0, 0xffffffff, 0x00000100,
957         0xc380, 0xffffffff, 0x00000100,
958         0x8b28, 0xffffffff, 0x00000100,
959         0x9144, 0xffffffff, 0x00000100,
960         0x8d88, 0xffffffff, 0x00000100,
961         0x8d8c, 0xffffffff, 0x00000100,
962         0x9030, 0xffffffff, 0x00000100,
963         0x9034, 0xffffffff, 0x00000100,
964         0x9038, 0xffffffff, 0x00000100,
965         0x903c, 0xffffffff, 0x00000100,
966         0xad80, 0xffffffff, 0x00000100,
967         0xac54, 0xffffffff, 0x00000100,
968         0x897c, 0xffffffff, 0x06000100,
969         0x9868, 0xffffffff, 0x00000100,
970         0x9510, 0xffffffff, 0x00000100,
971         0xaf04, 0xffffffff, 0x00000100,
972         0xae04, 0xffffffff, 0x00000100,
973         0x949c, 0xffffffff, 0x00000100,
974         0x802c, 0xffffffff, 0xe0000000,
975         0x9160, 0xffffffff, 0x00010000,
976         0x9164, 0xffffffff, 0x00030002,
977         0x9168, 0xffffffff, 0x00040007,
978         0x916c, 0xffffffff, 0x00060005,
979         0x9170, 0xffffffff, 0x00090008,
980         0x9174, 0xffffffff, 0x00020001,
981         0x9178, 0xffffffff, 0x00040003,
982         0x917c, 0xffffffff, 0x00000007,
983         0x9180, 0xffffffff, 0x00060005,
984         0x9184, 0xffffffff, 0x00090008,
985         0x9188, 0xffffffff, 0x00030002,
986         0x918c, 0xffffffff, 0x00050004,
987         0x9190, 0xffffffff, 0x00000008,
988         0x9194, 0xffffffff, 0x00070006,
989         0x9198, 0xffffffff, 0x000a0009,
990         0x919c, 0xffffffff, 0x00040003,
991         0x91a0, 0xffffffff, 0x00060005,
992         0x91a4, 0xffffffff, 0x00000009,
993         0x91a8, 0xffffffff, 0x00080007,
994         0x91ac, 0xffffffff, 0x000b000a,
995         0x91b0, 0xffffffff, 0x00050004,
996         0x91b4, 0xffffffff, 0x00070006,
997         0x91b8, 0xffffffff, 0x0008000b,
998         0x91bc, 0xffffffff, 0x000a0009,
999         0x91c0, 0xffffffff, 0x000d000c,
1000         0x91c4, 0xffffffff, 0x00060005,
1001         0x91c8, 0xffffffff, 0x00080007,
1002         0x91cc, 0xffffffff, 0x0000000b,
1003         0x91d0, 0xffffffff, 0x000a0009,
1004         0x91d4, 0xffffffff, 0x000d000c,
1005         0x9150, 0xffffffff, 0x96940200,
1006         0x8708, 0xffffffff, 0x00900100,
1007         0xc478, 0xffffffff, 0x00000080,
1008         0xc404, 0xffffffff, 0x0020003f,
1009         0x30, 0xffffffff, 0x0000001c,
1010         0x34, 0x000f0000, 0x000f0000,
1011         0x160c, 0xffffffff, 0x00000100,
1012         0x1024, 0xffffffff, 0x00000100,
1013         0x20a8, 0xffffffff, 0x00000104,
1014         0x264c, 0x000c0000, 0x000c0000,
1015         0x2648, 0x000c0000, 0x000c0000,
1016         0x2f50, 0x00000001, 0x00000001,
1017         0x30cc, 0xc0000fff, 0x00000104,
1018         0xc1e4, 0x00000001, 0x00000001,
1019         0xd0c0, 0xfffffff0, 0x00000100,
1020         0xd8c0, 0xfffffff0, 0x00000100
1021 };
1022
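/*
 * GFX power gating init sequence for Verde, programmed through
 * radeon_program_register_sequence() along with the golden registers.
 */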
1023 static u32 verde_pg_init[] =
1024 {
1025         0x353c, 0xffffffff, 0x40000,
1026         0x3538, 0xffffffff, 0x200010ff,
1027         0x353c, 0xffffffff, 0x0,
1028         0x353c, 0xffffffff, 0x0,
1029         0x353c, 0xffffffff, 0x0,
1030         0x353c, 0xffffffff, 0x0,
1031         0x353c, 0xffffffff, 0x0,
1032         0x353c, 0xffffffff, 0x7007,
1033         0x3538, 0xffffffff, 0x300010ff,
1034         0x353c, 0xffffffff, 0x0,
1035         0x353c, 0xffffffff, 0x0,
1036         0x353c, 0xffffffff, 0x0,
1037         0x353c, 0xffffffff, 0x0,
1038         0x353c, 0xffffffff, 0x0,
1039         0x353c, 0xffffffff, 0x400000,
1040         0x3538, 0xffffffff, 0x100010ff,
1041         0x353c, 0xffffffff, 0x0,
1042         0x353c, 0xffffffff, 0x0,
1043         0x353c, 0xffffffff, 0x0,
1044         0x353c, 0xffffffff, 0x0,
1045         0x353c, 0xffffffff, 0x0,
1046         0x353c, 0xffffffff, 0x120200,
1047         0x3538, 0xffffffff, 0x500010ff,
1048         0x353c, 0xffffffff, 0x0,
1049         0x353c, 0xffffffff, 0x0,
1050         0x353c, 0xffffffff, 0x0,
1051         0x353c, 0xffffffff, 0x0,
1052         0x353c, 0xffffffff, 0x0,
1053         0x353c, 0xffffffff, 0x1e1e16,
1054         0x3538, 0xffffffff, 0x600010ff,
1055         0x353c, 0xffffffff, 0x0,
1056         0x353c, 0xffffffff, 0x0,
1057         0x353c, 0xffffffff, 0x0,
1058         0x353c, 0xffffffff, 0x0,
1059         0x353c, 0xffffffff, 0x0,
1060         0x353c, 0xffffffff, 0x171f1e,
1061         0x3538, 0xffffffff, 0x700010ff,
1062         0x353c, 0xffffffff, 0x0,
1063         0x353c, 0xffffffff, 0x0,
1064         0x353c, 0xffffffff, 0x0,
1065         0x353c, 0xffffffff, 0x0,
1066         0x353c, 0xffffffff, 0x0,
1067         0x353c, 0xffffffff, 0x0,
1068         0x3538, 0xffffffff, 0x9ff,
1069         0x3500, 0xffffffff, 0x0,
1070         0x3504, 0xffffffff, 0x10000800,
1071         0x3504, 0xffffffff, 0xf,
1072         0x3504, 0xffffffff, 0xf,
1073         0x3500, 0xffffffff, 0x4,
1074         0x3504, 0xffffffff, 0x1000051e,
1075         0x3504, 0xffffffff, 0xffff,
1076         0x3504, 0xffffffff, 0xffff,
1077         0x3500, 0xffffffff, 0x8,
1078         0x3504, 0xffffffff, 0x80500,
1079         0x3500, 0xffffffff, 0x12,
1080         0x3504, 0xffffffff, 0x9050c,
1081         0x3500, 0xffffffff, 0x1d,
1082         0x3504, 0xffffffff, 0xb052c,
1083         0x3500, 0xffffffff, 0x2a,
1084         0x3504, 0xffffffff, 0x1053e,
1085         0x3500, 0xffffffff, 0x2d,
1086         0x3504, 0xffffffff, 0x10546,
1087         0x3500, 0xffffffff, 0x30,
1088         0x3504, 0xffffffff, 0xa054e,
1089         0x3500, 0xffffffff, 0x3c,
1090         0x3504, 0xffffffff, 0x1055f,
1091         0x3500, 0xffffffff, 0x3f,
1092         0x3504, 0xffffffff, 0x10567,
1093         0x3500, 0xffffffff, 0x42,
1094         0x3504, 0xffffffff, 0x1056f,
1095         0x3500, 0xffffffff, 0x45,
1096         0x3504, 0xffffffff, 0x10572,
1097         0x3500, 0xffffffff, 0x48,
1098         0x3504, 0xffffffff, 0x20575,
1099         0x3500, 0xffffffff, 0x4c,
1100         0x3504, 0xffffffff, 0x190801,
1101         0x3500, 0xffffffff, 0x67,
1102         0x3504, 0xffffffff, 0x1082a,
1103         0x3500, 0xffffffff, 0x6a,
1104         0x3504, 0xffffffff, 0x1b082d,
1105         0x3500, 0xffffffff, 0x87,
1106         0x3504, 0xffffffff, 0x310851,
1107         0x3500, 0xffffffff, 0xba,
1108         0x3504, 0xffffffff, 0x891,
1109         0x3500, 0xffffffff, 0xbc,
1110         0x3504, 0xffffffff, 0x893,
1111         0x3500, 0xffffffff, 0xbe,
1112         0x3504, 0xffffffff, 0x20895,
1113         0x3500, 0xffffffff, 0xc2,
1114         0x3504, 0xffffffff, 0x20899,
1115         0x3500, 0xffffffff, 0xc6,
1116         0x3504, 0xffffffff, 0x2089d,
1117         0x3500, 0xffffffff, 0xca,
1118         0x3504, 0xffffffff, 0x8a1,
1119         0x3500, 0xffffffff, 0xcc,
1120         0x3504, 0xffffffff, 0x8a3,
1121         0x3500, 0xffffffff, 0xce,
1122         0x3504, 0xffffffff, 0x308a5,
1123         0x3500, 0xffffffff, 0xd3,
1124         0x3504, 0xffffffff, 0x6d08cd,
1125         0x3500, 0xffffffff, 0x142,
1126         0x3504, 0xffffffff, 0x2000095a,
1127         0x3504, 0xffffffff, 0x1,
1128         0x3500, 0xffffffff, 0x144,
1129         0x3504, 0xffffffff, 0x301f095b,
1130         0x3500, 0xffffffff, 0x165,
1131         0x3504, 0xffffffff, 0xc094d,
1132         0x3500, 0xffffffff, 0x173,
1133         0x3504, 0xffffffff, 0xf096d,
1134         0x3500, 0xffffffff, 0x184,
1135         0x3504, 0xffffffff, 0x15097f,
1136         0x3500, 0xffffffff, 0x19b,
1137         0x3504, 0xffffffff, 0xc0998,
1138         0x3500, 0xffffffff, 0x1a9,
1139         0x3504, 0xffffffff, 0x409a7,
1140         0x3500, 0xffffffff, 0x1af,
1141         0x3504, 0xffffffff, 0xcdc,
1142         0x3500, 0xffffffff, 0x1b1,
1143         0x3504, 0xffffffff, 0x800,
1144         0x3508, 0xffffffff, 0x6c9b2000,
1145         0x3510, 0xfc00, 0x2000,
1146         0x3544, 0xffffffff, 0xfc0,
1147         0x28d4, 0x00000100, 0x100
1148 };
1149
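/**
 * si_init_golden_registers - program golden register settings
 *
 * @rdev: radeon_device pointer
 *
 * Applies the per-ASIC golden register, RLC and clock gating init
 * sequences (plus the power gating sequence on Verde) at init time (SI).
 */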
1150 static void si_init_golden_registers(struct radeon_device *rdev)
1151 {
1152         switch (rdev->family) {
1153         case CHIP_TAHITI:
1154                 radeon_program_register_sequence(rdev,
1155                                                  tahiti_golden_registers,
1156                                                  (const u32)ARRAY_SIZE(tahiti_golden_registers));
1157                 radeon_program_register_sequence(rdev,
1158                                                  tahiti_golden_rlc_registers,
1159                                                  (const u32)ARRAY_SIZE(tahiti_golden_rlc_registers));
1160                 radeon_program_register_sequence(rdev,
1161                                                  tahiti_mgcg_cgcg_init,
1162                                                  (const u32)ARRAY_SIZE(tahiti_mgcg_cgcg_init));
1163                 radeon_program_register_sequence(rdev,
1164                                                  tahiti_golden_registers2,
1165                                                  (const u32)ARRAY_SIZE(tahiti_golden_registers2));
1166                 break;
1167         case CHIP_PITCAIRN:
1168                 radeon_program_register_sequence(rdev,
1169                                                  pitcairn_golden_registers,
1170                                                  (const u32)ARRAY_SIZE(pitcairn_golden_registers));
1171                 radeon_program_register_sequence(rdev,
1172                                                  pitcairn_golden_rlc_registers,
1173                                                  (const u32)ARRAY_SIZE(pitcairn_golden_rlc_registers));
1174                 radeon_program_register_sequence(rdev,
1175                                                  pitcairn_mgcg_cgcg_init,
1176                                                  (const u32)ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
1177                 break;
1178         case CHIP_VERDE:
1179                 radeon_program_register_sequence(rdev,
1180                                                  verde_golden_registers,
1181                                                  (const u32)ARRAY_SIZE(verde_golden_registers));
1182                 radeon_program_register_sequence(rdev,
1183                                                  verde_golden_rlc_registers,
1184                                                  (const u32)ARRAY_SIZE(verde_golden_rlc_registers));
1185                 radeon_program_register_sequence(rdev,
1186                                                  verde_mgcg_cgcg_init,
1187                                                  (const u32)ARRAY_SIZE(verde_mgcg_cgcg_init));
1188                 radeon_program_register_sequence(rdev,
1189                                                  verde_pg_init,
1190                                                  (const u32)ARRAY_SIZE(verde_pg_init));
1191                 break;
1192         case CHIP_OLAND:
1193                 radeon_program_register_sequence(rdev,
1194                                                  oland_golden_registers,
1195                                                  (const u32)ARRAY_SIZE(oland_golden_registers));
1196                 radeon_program_register_sequence(rdev,
1197                                                  oland_golden_rlc_registers,
1198                                                  (const u32)ARRAY_SIZE(oland_golden_rlc_registers));
1199                 radeon_program_register_sequence(rdev,
1200                                                  oland_mgcg_cgcg_init,
1201                                                  (const u32)ARRAY_SIZE(oland_mgcg_cgcg_init));
1202                 break;
1203         case CHIP_HAINAN:
1204                 radeon_program_register_sequence(rdev,
1205                                                  hainan_golden_registers,
1206                                                  (const u32)ARRAY_SIZE(hainan_golden_registers));
1207                 radeon_program_register_sequence(rdev,
1208                                                  hainan_golden_registers2,
1209                                                  (const u32)ARRAY_SIZE(hainan_golden_registers2));
1210                 radeon_program_register_sequence(rdev,
1211                                                  hainan_mgcg_cgcg_init,
1212                                                  (const u32)ARRAY_SIZE(hainan_mgcg_cgcg_init));
1213                 break;
1214         default:
1215                 break;
1216         }
1217 }
1218
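/* Clock constants for si_get_xclk(); values appear to be in 10 kHz units. */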
1219 #define PCIE_BUS_CLK                10000
1220 #define TCLK                        (PCIE_BUS_CLK / 10)
1221
1222 /**
1223  * si_get_xclk - get the xclk
1224  *
1225  * @rdev: radeon_device pointer
1226  *
1227  * Returns the reference clock used by the gfx engine
1228  * (SI).
1229  */
1230 u32 si_get_xclk(struct radeon_device *rdev)
1231 {
1232         u32 reference_clock = rdev->clock.spll.reference_freq;
1233         u32 tmp;
1234
1235         tmp = RREG32(CG_CLKPIN_CNTL_2);
1236         if (tmp & MUX_TCLK_TO_XCLK)
1237                 return TCLK;
1238
1239         tmp = RREG32(CG_CLKPIN_CNTL);
1240         if (tmp & XTALIN_DIVIDE)
1241                 return reference_clock / 4;
1242
1243         return reference_clock;
1244 }
1245
1246 /* get temperature in millidegrees */
1247 int si_get_temp(struct radeon_device *rdev)
1248 {
1249         u32 temp;
1250         int actual_temp = 0;
1251
1252         temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >>
1253                 CTF_TEMP_SHIFT;
1254
1255         if (temp & 0x200)
1256                 actual_temp = 255;
1257         else
1258                 actual_temp = temp & 0x1ff;
1259
1260         actual_temp = (actual_temp * 1000);
1261
1262         return actual_temp;
1263 }
1264
1265 #define TAHITI_IO_MC_REGS_SIZE 36
1266
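/*
 * MC IO register init tables: { MC IO debug register index, value } pairs
 * written out via MC_SEQ_IO_DEBUG_INDEX/DATA before loading the MC ucode
 * in si_mc_load_microcode() below.
 */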
1267 static const u32 tahiti_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1268         {0x0000006f, 0x03044000},
1269         {0x00000070, 0x0480c018},
1270         {0x00000071, 0x00000040},
1271         {0x00000072, 0x01000000},
1272         {0x00000074, 0x000000ff},
1273         {0x00000075, 0x00143400},
1274         {0x00000076, 0x08ec0800},
1275         {0x00000077, 0x040000cc},
1276         {0x00000079, 0x00000000},
1277         {0x0000007a, 0x21000409},
1278         {0x0000007c, 0x00000000},
1279         {0x0000007d, 0xe8000000},
1280         {0x0000007e, 0x044408a8},
1281         {0x0000007f, 0x00000003},
1282         {0x00000080, 0x00000000},
1283         {0x00000081, 0x01000000},
1284         {0x00000082, 0x02000000},
1285         {0x00000083, 0x00000000},
1286         {0x00000084, 0xe3f3e4f4},
1287         {0x00000085, 0x00052024},
1288         {0x00000087, 0x00000000},
1289         {0x00000088, 0x66036603},
1290         {0x00000089, 0x01000000},
1291         {0x0000008b, 0x1c0a0000},
1292         {0x0000008c, 0xff010000},
1293         {0x0000008e, 0xffffefff},
1294         {0x0000008f, 0xfff3efff},
1295         {0x00000090, 0xfff3efbf},
1296         {0x00000094, 0x00101101},
1297         {0x00000095, 0x00000fff},
1298         {0x00000096, 0x00116fff},
1299         {0x00000097, 0x60010000},
1300         {0x00000098, 0x10010000},
1301         {0x00000099, 0x00006000},
1302         {0x0000009a, 0x00001000},
1303         {0x0000009f, 0x00a77400}
1304 };
1305
1306 static const u32 pitcairn_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1307         {0x0000006f, 0x03044000},
1308         {0x00000070, 0x0480c018},
1309         {0x00000071, 0x00000040},
1310         {0x00000072, 0x01000000},
1311         {0x00000074, 0x000000ff},
1312         {0x00000075, 0x00143400},
1313         {0x00000076, 0x08ec0800},
1314         {0x00000077, 0x040000cc},
1315         {0x00000079, 0x00000000},
1316         {0x0000007a, 0x21000409},
1317         {0x0000007c, 0x00000000},
1318         {0x0000007d, 0xe8000000},
1319         {0x0000007e, 0x044408a8},
1320         {0x0000007f, 0x00000003},
1321         {0x00000080, 0x00000000},
1322         {0x00000081, 0x01000000},
1323         {0x00000082, 0x02000000},
1324         {0x00000083, 0x00000000},
1325         {0x00000084, 0xe3f3e4f4},
1326         {0x00000085, 0x00052024},
1327         {0x00000087, 0x00000000},
1328         {0x00000088, 0x66036603},
1329         {0x00000089, 0x01000000},
1330         {0x0000008b, 0x1c0a0000},
1331         {0x0000008c, 0xff010000},
1332         {0x0000008e, 0xffffefff},
1333         {0x0000008f, 0xfff3efff},
1334         {0x00000090, 0xfff3efbf},
1335         {0x00000094, 0x00101101},
1336         {0x00000095, 0x00000fff},
1337         {0x00000096, 0x00116fff},
1338         {0x00000097, 0x60010000},
1339         {0x00000098, 0x10010000},
1340         {0x00000099, 0x00006000},
1341         {0x0000009a, 0x00001000},
1342         {0x0000009f, 0x00a47400}
1343 };
1344
1345 static const u32 verde_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1346         {0x0000006f, 0x03044000},
1347         {0x00000070, 0x0480c018},
1348         {0x00000071, 0x00000040},
1349         {0x00000072, 0x01000000},
1350         {0x00000074, 0x000000ff},
1351         {0x00000075, 0x00143400},
1352         {0x00000076, 0x08ec0800},
1353         {0x00000077, 0x040000cc},
1354         {0x00000079, 0x00000000},
1355         {0x0000007a, 0x21000409},
1356         {0x0000007c, 0x00000000},
1357         {0x0000007d, 0xe8000000},
1358         {0x0000007e, 0x044408a8},
1359         {0x0000007f, 0x00000003},
1360         {0x00000080, 0x00000000},
1361         {0x00000081, 0x01000000},
1362         {0x00000082, 0x02000000},
1363         {0x00000083, 0x00000000},
1364         {0x00000084, 0xe3f3e4f4},
1365         {0x00000085, 0x00052024},
1366         {0x00000087, 0x00000000},
1367         {0x00000088, 0x66036603},
1368         {0x00000089, 0x01000000},
1369         {0x0000008b, 0x1c0a0000},
1370         {0x0000008c, 0xff010000},
1371         {0x0000008e, 0xffffefff},
1372         {0x0000008f, 0xfff3efff},
1373         {0x00000090, 0xfff3efbf},
1374         {0x00000094, 0x00101101},
1375         {0x00000095, 0x00000fff},
1376         {0x00000096, 0x00116fff},
1377         {0x00000097, 0x60010000},
1378         {0x00000098, 0x10010000},
1379         {0x00000099, 0x00006000},
1380         {0x0000009a, 0x00001000},
1381         {0x0000009f, 0x00a37400}
1382 };
1383
1384 static const u32 oland_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1385         {0x0000006f, 0x03044000},
1386         {0x00000070, 0x0480c018},
1387         {0x00000071, 0x00000040},
1388         {0x00000072, 0x01000000},
1389         {0x00000074, 0x000000ff},
1390         {0x00000075, 0x00143400},
1391         {0x00000076, 0x08ec0800},
1392         {0x00000077, 0x040000cc},
1393         {0x00000079, 0x00000000},
1394         {0x0000007a, 0x21000409},
1395         {0x0000007c, 0x00000000},
1396         {0x0000007d, 0xe8000000},
1397         {0x0000007e, 0x044408a8},
1398         {0x0000007f, 0x00000003},
1399         {0x00000080, 0x00000000},
1400         {0x00000081, 0x01000000},
1401         {0x00000082, 0x02000000},
1402         {0x00000083, 0x00000000},
1403         {0x00000084, 0xe3f3e4f4},
1404         {0x00000085, 0x00052024},
1405         {0x00000087, 0x00000000},
1406         {0x00000088, 0x66036603},
1407         {0x00000089, 0x01000000},
1408         {0x0000008b, 0x1c0a0000},
1409         {0x0000008c, 0xff010000},
1410         {0x0000008e, 0xffffefff},
1411         {0x0000008f, 0xfff3efff},
1412         {0x00000090, 0xfff3efbf},
1413         {0x00000094, 0x00101101},
1414         {0x00000095, 0x00000fff},
1415         {0x00000096, 0x00116fff},
1416         {0x00000097, 0x60010000},
1417         {0x00000098, 0x10010000},
1418         {0x00000099, 0x00006000},
1419         {0x0000009a, 0x00001000},
1420         {0x0000009f, 0x00a17730}
1421 };
1422
1423 static const u32 hainan_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
1424         {0x0000006f, 0x03044000},
1425         {0x00000070, 0x0480c018},
1426         {0x00000071, 0x00000040},
1427         {0x00000072, 0x01000000},
1428         {0x00000074, 0x000000ff},
1429         {0x00000075, 0x00143400},
1430         {0x00000076, 0x08ec0800},
1431         {0x00000077, 0x040000cc},
1432         {0x00000079, 0x00000000},
1433         {0x0000007a, 0x21000409},
1434         {0x0000007c, 0x00000000},
1435         {0x0000007d, 0xe8000000},
1436         {0x0000007e, 0x044408a8},
1437         {0x0000007f, 0x00000003},
1438         {0x00000080, 0x00000000},
1439         {0x00000081, 0x01000000},
1440         {0x00000082, 0x02000000},
1441         {0x00000083, 0x00000000},
1442         {0x00000084, 0xe3f3e4f4},
1443         {0x00000085, 0x00052024},
1444         {0x00000087, 0x00000000},
1445         {0x00000088, 0x66036603},
1446         {0x00000089, 0x01000000},
1447         {0x0000008b, 0x1c0a0000},
1448         {0x0000008c, 0xff010000},
1449         {0x0000008e, 0xffffefff},
1450         {0x0000008f, 0xfff3efff},
1451         {0x00000090, 0xfff3efbf},
1452         {0x00000094, 0x00101101},
1453         {0x00000095, 0x00000fff},
1454         {0x00000096, 0x00116fff},
1455         {0x00000097, 0x60010000},
1456         {0x00000098, 0x10010000},
1457         {0x00000099, 0x00006000},
1458         {0x0000009a, 0x00001000},
1459         {0x0000009f, 0x00a07730}
1460 };
1461
1462 /* ucode loading */
1463 static int si_mc_load_microcode(struct radeon_device *rdev)
1464 {
1465         const __be32 *fw_data;
1466         u32 running, blackout = 0;
1467         u32 *io_mc_regs;
1468         int i, ucode_size, regs_size;
1469
1470         if (!rdev->mc_fw)
1471                 return -EINVAL;
1472
1473         switch (rdev->family) {
1474         case CHIP_TAHITI:
1475                 io_mc_regs = (u32 *)&tahiti_io_mc_regs;
1476                 ucode_size = SI_MC_UCODE_SIZE;
1477                 regs_size = TAHITI_IO_MC_REGS_SIZE;
1478                 break;
1479         case CHIP_PITCAIRN:
1480                 io_mc_regs = (u32 *)&pitcairn_io_mc_regs;
1481                 ucode_size = SI_MC_UCODE_SIZE;
1482                 regs_size = TAHITI_IO_MC_REGS_SIZE;
1483                 break;
1484         case CHIP_VERDE:
1485         default:
1486                 io_mc_regs = (u32 *)&verde_io_mc_regs;
1487                 ucode_size = SI_MC_UCODE_SIZE;
1488                 regs_size = TAHITI_IO_MC_REGS_SIZE;
1489                 break;
1490         case CHIP_OLAND:
1491                 io_mc_regs = (u32 *)&oland_io_mc_regs;
1492                 ucode_size = OLAND_MC_UCODE_SIZE;
1493                 regs_size = TAHITI_IO_MC_REGS_SIZE;
1494                 break;
1495         case CHIP_HAINAN:
1496                 io_mc_regs = (u32 *)&hainan_io_mc_regs;
1497                 ucode_size = OLAND_MC_UCODE_SIZE;
1498                 regs_size = TAHITI_IO_MC_REGS_SIZE;
1499                 break;
1500         }
1501
1502         running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK;
1503
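             /* only load the io regs and MC ucode if the MC firmware is not already running */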
1504         if (running == 0) {
1505                 if (running) {
1506                         blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1507                         WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1508                 }
1509
1510                 /* reset the engine and set to writable */
1511                 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1512                 WREG32(MC_SEQ_SUP_CNTL, 0x00000010);
1513
1514                 /* load mc io regs */
1515                 for (i = 0; i < regs_size; i++) {
1516                         WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
1517                         WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
1518                 }
1519                 /* load the MC ucode */
1520                 fw_data = (const __be32 *)rdev->mc_fw->data;
1521                 for (i = 0; i < ucode_size; i++)
1522                         WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));
1523
1524                 /* put the engine back into the active state */
1525                 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1526                 WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
1527                 WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
1528
1529                 /* wait for training to complete */
1530                 for (i = 0; i < rdev->usec_timeout; i++) {
1531                         if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
1532                                 break;
1533                         udelay(1);
1534                 }
1535                 for (i = 0; i < rdev->usec_timeout; i++) {
1536                         if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1)
1537                                 break;
1538                         udelay(1);
1539                 }
1540
1541                 if (running)
1542                         WREG32(MC_SHARED_BLACKOUT_CNTL, blackout);
1543         }
1544
1545         return 0;
1546 }
1547
1548 static int si_init_microcode(struct radeon_device *rdev)
1549 {
1550         const char *chip_name;
1551         const char *rlc_chip_name;
1552         size_t pfp_req_size, me_req_size, ce_req_size, rlc_req_size, mc_req_size;
1553         size_t smc_req_size;
1554         char fw_name[30];
1555         int err;
1556
1557         DRM_DEBUG("\n");
1558
1559         switch (rdev->family) {
1560         case CHIP_TAHITI:
1561                 chip_name = "TAHITI";
1562                 rlc_chip_name = "TAHITI";
1563                 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1564                 me_req_size = SI_PM4_UCODE_SIZE * 4;
1565                 ce_req_size = SI_CE_UCODE_SIZE * 4;
1566                 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1567                 mc_req_size = SI_MC_UCODE_SIZE * 4;
1568                 smc_req_size = ALIGN(TAHITI_SMC_UCODE_SIZE, 4);
1569                 break;
1570         case CHIP_PITCAIRN:
1571                 chip_name = "PITCAIRN";
1572                 rlc_chip_name = "PITCAIRN";
1573                 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1574                 me_req_size = SI_PM4_UCODE_SIZE * 4;
1575                 ce_req_size = SI_CE_UCODE_SIZE * 4;
1576                 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1577                 mc_req_size = SI_MC_UCODE_SIZE * 4;
1578                 smc_req_size = ALIGN(PITCAIRN_SMC_UCODE_SIZE, 4);
1579                 break;
1580         case CHIP_VERDE:
1581                 chip_name = "VERDE";
1582                 rlc_chip_name = "VERDE";
1583                 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1584                 me_req_size = SI_PM4_UCODE_SIZE * 4;
1585                 ce_req_size = SI_CE_UCODE_SIZE * 4;
1586                 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1587                 mc_req_size = SI_MC_UCODE_SIZE * 4;
1588                 smc_req_size = ALIGN(VERDE_SMC_UCODE_SIZE, 4);
1589                 break;
1590         case CHIP_OLAND:
1591                 chip_name = "OLAND";
1592                 rlc_chip_name = "OLAND";
1593                 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1594                 me_req_size = SI_PM4_UCODE_SIZE * 4;
1595                 ce_req_size = SI_CE_UCODE_SIZE * 4;
1596                 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1597                 mc_req_size = OLAND_MC_UCODE_SIZE * 4;
1598                 smc_req_size = ALIGN(OLAND_SMC_UCODE_SIZE, 4);
1599                 break;
1600         case CHIP_HAINAN:
1601                 chip_name = "HAINAN";
1602                 rlc_chip_name = "HAINAN";
1603                 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1604                 me_req_size = SI_PM4_UCODE_SIZE * 4;
1605                 ce_req_size = SI_CE_UCODE_SIZE * 4;
1606                 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1607                 mc_req_size = OLAND_MC_UCODE_SIZE * 4;
1608                 smc_req_size = ALIGN(HAINAN_SMC_UCODE_SIZE, 4);
1609                 break;
1610         default: BUG();
1611         }
1612
1613         DRM_INFO("Loading %s Microcode\n", chip_name);
1614
1615         snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
1616         err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
1617         if (err)
1618                 goto out;
1619         if (rdev->pfp_fw->size != pfp_req_size) {
1620                 printk(KERN_ERR
1621                        "si_cp: Bogus length %zu in firmware \"%s\"\n",
1622                        rdev->pfp_fw->size, fw_name);
1623                 err = -EINVAL;
1624                 goto out;
1625         }
1626
1627         snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
1628         err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
1629         if (err)
1630                 goto out;
1631         if (rdev->me_fw->size != me_req_size) {
1632                 printk(KERN_ERR
1633                        "si_cp: Bogus length %zu in firmware \"%s\"\n",
1634                        rdev->me_fw->size, fw_name);
1635                 err = -EINVAL;
1636         }
1637
1638         snprintf(fw_name, sizeof(fw_name), "radeon/%s_ce.bin", chip_name);
1639         err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
1640         if (err)
1641                 goto out;
1642         if (rdev->ce_fw->size != ce_req_size) {
1643                 printk(KERN_ERR
1644                        "si_cp: Bogus length %zu in firmware \"%s\"\n",
1645                        rdev->ce_fw->size, fw_name);
1646                 err = -EINVAL;
1647         }
1648
1649         snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
1650         err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
1651         if (err)
1652                 goto out;
1653         if (rdev->rlc_fw->size != rlc_req_size) {
1654                 printk(KERN_ERR
1655                        "si_rlc: Bogus length %zu in firmware \"%s\"\n",
1656                        rdev->rlc_fw->size, fw_name);
1657                 err = -EINVAL;
1658         }
1659
1660         snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc.bin", chip_name);
1661         err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1662         if (err)
1663                 goto out;
1664         if (rdev->mc_fw->size != mc_req_size) {
1665                 printk(KERN_ERR
1666                        "si_mc: Bogus length %zu in firmware \"%s\"\n",
1667                        rdev->mc_fw->size, fw_name);
1668                 err = -EINVAL;
1669         }
1670
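             /* the SMC firmware is only needed for DPM; treat a missing image as non-fatal */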
1671         snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", chip_name);
1672         err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
1673         if (err) {
1674                 printk(KERN_ERR
1675                        "smc: error loading firmware \"%s\"\n",
1676                        fw_name);
1677                 release_firmware(rdev->smc_fw);
1678                 rdev->smc_fw = NULL;
                     err = 0;
1679         } else if (rdev->smc_fw->size != smc_req_size) {
1680                 printk(KERN_ERR
1681                        "si_smc: Bogus length %zu in firmware \"%s\"\n",
1682                        rdev->smc_fw->size, fw_name);
1683                 err = -EINVAL;
1684         }
1685
1686 out:
1687         if (err) {
1688                 if (err != -EINVAL)
1689                         printk(KERN_ERR
1690                                "si_cp: Failed to load firmware \"%s\"\n",
1691                                fw_name);
1692                 release_firmware(rdev->pfp_fw);
1693                 rdev->pfp_fw = NULL;
1694                 release_firmware(rdev->me_fw);
1695                 rdev->me_fw = NULL;
1696                 release_firmware(rdev->ce_fw);
1697                 rdev->ce_fw = NULL;
1698                 release_firmware(rdev->rlc_fw);
1699                 rdev->rlc_fw = NULL;
1700                 release_firmware(rdev->mc_fw);
1701                 rdev->mc_fw = NULL;
1702                 release_firmware(rdev->smc_fw);
1703                 rdev->smc_fw = NULL;
1704         }
1705         return err;
1706 }
1707
1708 /* watermark setup */
1709 static u32 dce6_line_buffer_adjust(struct radeon_device *rdev,
1710                                    struct radeon_crtc *radeon_crtc,
1711                                    struct drm_display_mode *mode,
1712                                    struct drm_display_mode *other_mode)
1713 {
1714         u32 tmp;
1715         /*
1716          * Line Buffer Setup
1717          * There are 3 line buffers, each one shared by 2 display controllers.
1718          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1719          * the display controllers.  The partitioning is done via one of four
1720          * preset allocations specified in bits 21:20:
1721          *  0 - half lb
1722          *  2 - whole lb, other crtc must be disabled
1723          */
1724         /* this can get tricky if we have two large displays on a paired group
1725          * of crtcs.  Ideally for multiple large displays we'd assign them to
1726          * non-linked crtcs for maximum line buffer allocation.
1727          */
1728         if (radeon_crtc->base.enabled && mode) {
1729                 if (other_mode)
1730                         tmp = 0; /* 1/2 */
1731                 else
1732                         tmp = 2; /* whole */
1733         } else
1734                 tmp = 0;
1735
1736         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset,
1737                DC_LB_MEMORY_CONFIG(tmp));
1738
1739         if (radeon_crtc->base.enabled && mode) {
1740                 switch (tmp) {
1741                 case 0:
1742                 default:
1743                         return 4096 * 2;
1744                 case 2:
1745                         return 8192 * 2;
1746                 }
1747         }
1748
1749         /* controller not enabled, so no lb used */
1750         return 0;
1751 }
1752
1753 static u32 si_get_number_of_dram_channels(struct radeon_device *rdev)
1754 {
1755         u32 tmp = RREG32(MC_SHARED_CHMAP);
1756
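             /* decode the NOOFCHAN field into the number of physical DRAM channels */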
1757         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1758         case 0:
1759         default:
1760                 return 1;
1761         case 1:
1762                 return 2;
1763         case 2:
1764                 return 4;
1765         case 3:
1766                 return 8;
1767         case 4:
1768                 return 3;
1769         case 5:
1770                 return 6;
1771         case 6:
1772                 return 10;
1773         case 7:
1774                 return 12;
1775         case 8:
1776                 return 16;
1777         }
1778 }
1779
1780 struct dce6_wm_params {
1781         u32 dram_channels; /* number of dram channels */
1782         u32 yclk;          /* bandwidth per dram data pin in kHz */
1783         u32 sclk;          /* engine clock in kHz */
1784         u32 disp_clk;      /* display clock in kHz */
1785         u32 src_width;     /* viewport width */
1786         u32 active_time;   /* active display time in ns */
1787         u32 blank_time;    /* blank time in ns */
1788         bool interlaced;    /* mode is interlaced */
1789         fixed20_12 vsc;    /* vertical scale ratio */
1790         u32 num_heads;     /* number of active crtcs */
1791         u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1792         u32 lb_size;       /* line buffer allocated to pipe */
1793         u32 vtaps;         /* vertical scaler taps */
1794 };
1795
1796 static u32 dce6_dram_bandwidth(struct dce6_wm_params *wm)
1797 {
1798         /* Calculate raw DRAM Bandwidth */
1799         fixed20_12 dram_efficiency; /* 0.7 */
1800         fixed20_12 yclk, dram_channels, bandwidth;
1801         fixed20_12 a;
1802
1803         a.full = dfixed_const(1000);
1804         yclk.full = dfixed_const(wm->yclk);
1805         yclk.full = dfixed_div(yclk, a);
1806         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1807         a.full = dfixed_const(10);
1808         dram_efficiency.full = dfixed_const(7);
1809         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1810         bandwidth.full = dfixed_mul(dram_channels, yclk);
1811         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1812
1813         return dfixed_trunc(bandwidth);
1814 }
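     /* A rough worked example of the calculation above, assuming a hypothetical
      * board with yclk = 500000 kHz and 8 DRAM channels:
      *   (500000 / 1000) * (8 * 4) * 0.7 = 11200
      * i.e. raw bandwidth scales with memory clock and channel count, derated
      * by the fixed 0.7 DRAM efficiency.
      */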
1815
1816 static u32 dce6_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1817 {
1818         /* Calculate DRAM Bandwidth and the part allocated to display. */
1819         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1820         fixed20_12 yclk, dram_channels, bandwidth;
1821         fixed20_12 a;
1822
1823         a.full = dfixed_const(1000);
1824         yclk.full = dfixed_const(wm->yclk);
1825         yclk.full = dfixed_div(yclk, a);
1826         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1827         a.full = dfixed_const(10);
1828         disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
1829         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1830         bandwidth.full = dfixed_mul(dram_channels, yclk);
1831         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1832
1833         return dfixed_trunc(bandwidth);
1834 }
1835
1836 static u32 dce6_data_return_bandwidth(struct dce6_wm_params *wm)
1837 {
1838         /* Calculate the display Data return Bandwidth */
1839         fixed20_12 return_efficiency; /* 0.8 */
1840         fixed20_12 sclk, bandwidth;
1841         fixed20_12 a;
1842
1843         a.full = dfixed_const(1000);
1844         sclk.full = dfixed_const(wm->sclk);
1845         sclk.full = dfixed_div(sclk, a);
1846         a.full = dfixed_const(10);
1847         return_efficiency.full = dfixed_const(8);
1848         return_efficiency.full = dfixed_div(return_efficiency, a);
1849         a.full = dfixed_const(32);
1850         bandwidth.full = dfixed_mul(a, sclk);
1851         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1852
1853         return dfixed_trunc(bandwidth);
1854 }
1855
1856 static u32 dce6_get_dmif_bytes_per_request(struct dce6_wm_params *wm)
1857 {
1858         return 32;
1859 }
1860
1861 static u32 dce6_dmif_request_bandwidth(struct dce6_wm_params *wm)
1862 {
1863         /* Calculate the DMIF Request Bandwidth */
1864         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1865         fixed20_12 disp_clk, sclk, bandwidth;
1866         fixed20_12 a, b1, b2;
1867         u32 min_bandwidth;
1868
1869         a.full = dfixed_const(1000);
1870         disp_clk.full = dfixed_const(wm->disp_clk);
1871         disp_clk.full = dfixed_div(disp_clk, a);
1872         a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm) / 2);
1873         b1.full = dfixed_mul(a, disp_clk);
1874
1875         a.full = dfixed_const(1000);
1876         sclk.full = dfixed_const(wm->sclk);
1877         sclk.full = dfixed_div(sclk, a);
1878         a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm));
1879         b2.full = dfixed_mul(a, sclk);
1880
1881         a.full = dfixed_const(10);
1882         disp_clk_request_efficiency.full = dfixed_const(8);
1883         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1884
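             /* the DMIF request rate is capped by the slower of the disp_clk and sclk paths */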
1885         min_bandwidth = min(dfixed_trunc(b1), dfixed_trunc(b2));
1886
1887         a.full = dfixed_const(min_bandwidth);
1888         bandwidth.full = dfixed_mul(a, disp_clk_request_efficiency);
1889
1890         return dfixed_trunc(bandwidth);
1891 }
1892
1893 static u32 dce6_available_bandwidth(struct dce6_wm_params *wm)
1894 {
1895         /* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
1896         u32 dram_bandwidth = dce6_dram_bandwidth(wm);
1897         u32 data_return_bandwidth = dce6_data_return_bandwidth(wm);
1898         u32 dmif_req_bandwidth = dce6_dmif_request_bandwidth(wm);
1899
1900         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1901 }
1902
1903 static u32 dce6_average_bandwidth(struct dce6_wm_params *wm)
1904 {
1905         /* Calculate the display mode Average Bandwidth
1906          * DisplayMode should contain the source and destination dimensions,
1907          * timing, etc.
1908          */
1909         fixed20_12 bpp;
1910         fixed20_12 line_time;
1911         fixed20_12 src_width;
1912         fixed20_12 bandwidth;
1913         fixed20_12 a;
1914
1915         a.full = dfixed_const(1000);
1916         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1917         line_time.full = dfixed_div(line_time, a);
1918         bpp.full = dfixed_const(wm->bytes_per_pixel);
1919         src_width.full = dfixed_const(wm->src_width);
1920         bandwidth.full = dfixed_mul(src_width, bpp);
1921         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1922         bandwidth.full = dfixed_div(bandwidth, line_time);
1923
1924         return dfixed_trunc(bandwidth);
1925 }
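     /* As a rough hypothetical example: a 1920 pixel wide, 32bpp source with
      * no vertical scaling and a ~15 us total line time averages about
      *   (1920 * 4) / 15 ~= 512 bytes per us.
      */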
1926
1927 static u32 dce6_latency_watermark(struct dce6_wm_params *wm)
1928 {
1929         /* First calculate the latency in ns */
1930         u32 mc_latency = 2000; /* 2000 ns. */
1931         u32 available_bandwidth = dce6_available_bandwidth(wm);
1932         u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
1933         u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
1934         u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
1935         u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
1936                 (wm->num_heads * cursor_line_pair_return_time);
1937         u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
1938         u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
1939         u32 tmp, dmif_size = 12288;
1940         fixed20_12 a, b, c;
1941
1942         if (wm->num_heads == 0)
1943                 return 0;
1944
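             /* heavily scaled or interlaced modes fetch up to 4 source lines per destination line */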
1945         a.full = dfixed_const(2);
1946         b.full = dfixed_const(1);
1947         if ((wm->vsc.full > a.full) ||
1948             ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
1949             (wm->vtaps >= 5) ||
1950             ((wm->vsc.full >= a.full) && wm->interlaced))
1951                 max_src_lines_per_dst_line = 4;
1952         else
1953                 max_src_lines_per_dst_line = 2;
1954
1955         a.full = dfixed_const(available_bandwidth);
1956         b.full = dfixed_const(wm->num_heads);
1957         a.full = dfixed_div(a, b);
1958
1959         b.full = dfixed_const(mc_latency + 512);
1960         c.full = dfixed_const(wm->disp_clk);
1961         b.full = dfixed_div(b, c);
1962
1963         c.full = dfixed_const(dmif_size);
1964         b.full = dfixed_div(c, b);
1965
1966         tmp = min(dfixed_trunc(a), dfixed_trunc(b));
1967
1968         b.full = dfixed_const(1000);
1969         c.full = dfixed_const(wm->disp_clk);
1970         b.full = dfixed_div(c, b);
1971         c.full = dfixed_const(wm->bytes_per_pixel);
1972         b.full = dfixed_mul(b, c);
1973
1974         lb_fill_bw = min(tmp, dfixed_trunc(b));
1975
1976         a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
1977         b.full = dfixed_const(1000);
1978         c.full = dfixed_const(lb_fill_bw);
1979         b.full = dfixed_div(c, b);
1980         a.full = dfixed_div(a, b);
1981         line_fill_time = dfixed_trunc(a);
1982
1983         if (line_fill_time < wm->active_time)
1984                 return latency;
1985         else
1986                 return latency + (line_fill_time - wm->active_time);
1987
1988 }
1989
1990 static bool dce6_average_bandwidth_vs_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1991 {
1992         if (dce6_average_bandwidth(wm) <=
1993             (dce6_dram_bandwidth_for_display(wm) / wm->num_heads))
1994                 return true;
1995         else
1996                 return false;
1997 }
1998
1999 static bool dce6_average_bandwidth_vs_available_bandwidth(struct dce6_wm_params *wm)
2000 {
2001         if (dce6_average_bandwidth(wm) <=
2002             (dce6_available_bandwidth(wm) / wm->num_heads))
2003                 return true;
2004         else
2005                 return false;
2006 }
2007
2008 static bool dce6_check_latency_hiding(struct dce6_wm_params *wm)
2009 {
2010         u32 lb_partitions = wm->lb_size / wm->src_width;
2011         u32 line_time = wm->active_time + wm->blank_time;
2012         u32 latency_tolerant_lines;
2013         u32 latency_hiding;
2014         fixed20_12 a;
2015
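             /* the latency watermark must be hidden by the buffered line(s) plus the blanking time */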
2016         a.full = dfixed_const(1);
2017         if (wm->vsc.full > a.full)
2018                 latency_tolerant_lines = 1;
2019         else {
2020                 if (lb_partitions <= (wm->vtaps + 1))
2021                         latency_tolerant_lines = 1;
2022                 else
2023                         latency_tolerant_lines = 2;
2024         }
2025
2026         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2027
2028         if (dce6_latency_watermark(wm) <= latency_hiding)
2029                 return true;
2030         else
2031                 return false;
2032 }
2033
2034 static void dce6_program_watermarks(struct radeon_device *rdev,
2035                                          struct radeon_crtc *radeon_crtc,
2036                                          u32 lb_size, u32 num_heads)
2037 {
2038         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2039         struct dce6_wm_params wm_low, wm_high;
2040         u32 dram_channels;
2041         u32 pixel_period;
2042         u32 line_time = 0;
2043         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2044         u32 priority_a_mark = 0, priority_b_mark = 0;
2045         u32 priority_a_cnt = PRIORITY_OFF;
2046         u32 priority_b_cnt = PRIORITY_OFF;
2047         u32 tmp, arb_control3;
2048         fixed20_12 a, b, c;
2049
2050         if (radeon_crtc->base.enabled && num_heads && mode) {
2051                 pixel_period = 1000000 / (u32)mode->clock;
2052                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2053                 priority_a_cnt = 0;
2054                 priority_b_cnt = 0;
2055
2056                 if (rdev->family == CHIP_ARUBA)
2057                         dram_channels = evergreen_get_number_of_dram_channels(rdev);
2058                 else
2059                         dram_channels = si_get_number_of_dram_channels(rdev);
2060
2061                 /* watermark for high clocks */
2062                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2063                         wm_high.yclk =
2064                                 radeon_dpm_get_mclk(rdev, false) * 10;
2065                         wm_high.sclk =
2066                                 radeon_dpm_get_sclk(rdev, false) * 10;
2067                 } else {
2068                         wm_high.yclk = rdev->pm.current_mclk * 10;
2069                         wm_high.sclk = rdev->pm.current_sclk * 10;
2070                 }
2071
2072                 wm_high.disp_clk = mode->clock;
2073                 wm_high.src_width = mode->crtc_hdisplay;
2074                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2075                 wm_high.blank_time = line_time - wm_high.active_time;
2076                 wm_high.interlaced = false;
2077                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2078                         wm_high.interlaced = true;
2079                 wm_high.vsc = radeon_crtc->vsc;
2080                 wm_high.vtaps = 1;
2081                 if (radeon_crtc->rmx_type != RMX_OFF)
2082                         wm_high.vtaps = 2;
2083                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2084                 wm_high.lb_size = lb_size;
2085                 wm_high.dram_channels = dram_channels;
2086                 wm_high.num_heads = num_heads;
2087
2088                 /* watermark for low clocks */
2089                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2090                         wm_low.yclk =
2091                                 radeon_dpm_get_mclk(rdev, true) * 10;
2092                         wm_low.sclk =
2093                                 radeon_dpm_get_sclk(rdev, true) * 10;
2094                 } else {
2095                         wm_low.yclk = rdev->pm.current_mclk * 10;
2096                         wm_low.sclk = rdev->pm.current_sclk * 10;
2097                 }
2098
2099                 wm_low.disp_clk = mode->clock;
2100                 wm_low.src_width = mode->crtc_hdisplay;
2101                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2102                 wm_low.blank_time = line_time - wm_low.active_time;
2103                 wm_low.interlaced = false;
2104                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2105                         wm_low.interlaced = true;
2106                 wm_low.vsc = radeon_crtc->vsc;
2107                 wm_low.vtaps = 1;
2108                 if (radeon_crtc->rmx_type != RMX_OFF)
2109                         wm_low.vtaps = 2;
2110                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2111                 wm_low.lb_size = lb_size;
2112                 wm_low.dram_channels = dram_channels;
2113                 wm_low.num_heads = num_heads;
2114
2115                 /* set for high clocks */
2116                 latency_watermark_a = min(dce6_latency_watermark(&wm_high), (u32)65535);
2117                 /* set for low clocks */
2118                 latency_watermark_b = min(dce6_latency_watermark(&wm_low), (u32)65535);
2119
2120                 /* possibly force display priority to high */
2121                 /* should really do this at mode validation time... */
2122                 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2123                     !dce6_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2124                     !dce6_check_latency_hiding(&wm_high) ||
2125                     (rdev->disp_priority == 2)) {
2126                         DRM_DEBUG_KMS("force priority to high\n");
2127                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2128                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2129                 }
2130                 if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2131                     !dce6_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2132                     !dce6_check_latency_hiding(&wm_low) ||
2133                     (rdev->disp_priority == 2)) {
2134                         DRM_DEBUG_KMS("force priority to high\n");
2135                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2136                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2137                 }
2138
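                     /* convert the watermark A/B latencies into priority marks for this crtc */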
2139                 a.full = dfixed_const(1000);
2140                 b.full = dfixed_const(mode->clock);
2141                 b.full = dfixed_div(b, a);
2142                 c.full = dfixed_const(latency_watermark_a);
2143                 c.full = dfixed_mul(c, b);
2144                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2145                 c.full = dfixed_div(c, a);
2146                 a.full = dfixed_const(16);
2147                 c.full = dfixed_div(c, a);
2148                 priority_a_mark = dfixed_trunc(c);
2149                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2150
2151                 a.full = dfixed_const(1000);
2152                 b.full = dfixed_const(mode->clock);
2153                 b.full = dfixed_div(b, a);
2154                 c.full = dfixed_const(latency_watermark_b);
2155                 c.full = dfixed_mul(c, b);
2156                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2157                 c.full = dfixed_div(c, a);
2158                 a.full = dfixed_const(16);
2159                 c.full = dfixed_div(c, a);
2160                 priority_b_mark = dfixed_trunc(c);
2161                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2162         }
2163
2164         /* select wm A */
2165         arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2166         tmp = arb_control3;
2167         tmp &= ~LATENCY_WATERMARK_MASK(3);
2168         tmp |= LATENCY_WATERMARK_MASK(1);
2169         WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2170         WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2171                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2172                 LATENCY_HIGH_WATERMARK(line_time)));
2173         /* select wm B */
2174         tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
2175         tmp &= ~LATENCY_WATERMARK_MASK(3);
2176         tmp |= LATENCY_WATERMARK_MASK(2);
2177         WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
2178         WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
2179                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2180                 LATENCY_HIGH_WATERMARK(line_time)));
2181         /* restore original selection */
2182         WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, arb_control3);
2183
2184         /* write the priority marks */
2185         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2186         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2187
2188         /* save values for DPM */
2189         radeon_crtc->line_time = line_time;
2190         radeon_crtc->wm_high = latency_watermark_a;
2191         radeon_crtc->wm_low = latency_watermark_b;
2192 }
2193
2194 void dce6_bandwidth_update(struct radeon_device *rdev)
2195 {
2196         struct drm_display_mode *mode0 = NULL;
2197         struct drm_display_mode *mode1 = NULL;
2198         u32 num_heads = 0, lb_size;
2199         int i;
2200
2201         radeon_update_display_priority(rdev);
2202
2203         for (i = 0; i < rdev->num_crtc; i++) {
2204                 if (rdev->mode_info.crtcs[i]->base.enabled)
2205                         num_heads++;
2206         }
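             /* crtcs are handled in pairs since each pair shares a line buffer */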
2207         for (i = 0; i < rdev->num_crtc; i += 2) {
2208                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2209                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2210                 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2211                 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2212                 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2213                 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2214         }
2215 }
2216
2217 /*
2218  * Core functions
2219  */
2220 static void si_tiling_mode_table_init(struct radeon_device *rdev)
2221 {
2222         const u32 num_tile_mode_states = 32;
2223         u32 reg_offset, gb_tile_moden, split_equal_to_row_size;
2224
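             /* pick the tile split encoding that matches the DRAM row size */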
2225         switch (rdev->config.si.mem_row_size_in_kb) {
2226         case 1:
2227                 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_1KB;
2228                 break;
2229         case 2:
2230         default:
2231                 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_2KB;
2232                 break;
2233         case 4:
2234                 split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_4KB;
2235                 break;
2236         }
2237
2238         if ((rdev->family == CHIP_TAHITI) ||
2239             (rdev->family == CHIP_PITCAIRN)) {
2240                 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2241                         switch (reg_offset) {
2242                         case 0:  /* non-AA compressed depth or any compressed stencil */
2243                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2244                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2245                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2246                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2247                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2248                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2249                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2250                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2251                                 break;
2252                         case 1:  /* 2xAA/4xAA compressed depth only */
2253                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2254                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2255                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2256                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2257                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2258                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2259                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2260                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2261                                 break;
2262                         case 2:  /* 8xAA compressed depth only */
2263                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2264                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2265                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2266                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2267                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2268                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2269                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2270                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2271                                 break;
2272                         case 3:  /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2273                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2274                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2275                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2276                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2277                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2278                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2279                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2280                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2281                                 break;
2282                         case 4:  /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2283                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2284                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2285                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2286                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2287                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2288                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2289                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2290                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2291                                 break;
2292                         case 5:  /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2293                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2294                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2295                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2296                                                  TILE_SPLIT(split_equal_to_row_size) |
2297                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2298                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2299                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2300                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2301                                 break;
2302                         case 6:  /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2303                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2304                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2305                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2306                                                  TILE_SPLIT(split_equal_to_row_size) |
2307                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2308                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2309                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2310                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2311                                 break;
2312                         case 7:  /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2313                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2314                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2315                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2316                                                  TILE_SPLIT(split_equal_to_row_size) |
2317                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2318                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2319                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2320                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2321                                 break;
2322                         case 8:  /* 1D and 1D Array Surfaces */
2323                                 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2324                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2325                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2326                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2327                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2328                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2329                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2330                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2331                                 break;
2332                         case 9:  /* Displayable maps. */
2333                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2334                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2335                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2336                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2337                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2338                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2339                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2340                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2341                                 break;
2342                         case 10:  /* Display 8bpp. */
2343                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2344                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2345                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2346                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2347                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2348                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2349                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2350                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2351                                 break;
2352                         case 11:  /* Display 16bpp. */
2353                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2354                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2355                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2356                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2357                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2358                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2359                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2360                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2361                                 break;
2362                         case 12:  /* Display 32bpp. */
2363                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2364                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2365                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2366                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2367                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2368                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2369                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2370                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2371                                 break;
2372                         case 13:  /* Thin. */
2373                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2374                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2375                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2376                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2377                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2378                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2379                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2380                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2381                                 break;
2382                         case 14:  /* Thin 8 bpp. */
2383                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2384                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2385                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2386                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2387                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2388                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2389                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2390                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2391                                 break;
2392                         case 15:  /* Thin 16 bpp. */
2393                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2394                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2395                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2396                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2397                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2398                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2399                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2400                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2401                                 break;
2402                         case 16:  /* Thin 32 bpp. */
2403                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2404                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2405                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2406                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2407                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2408                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2409                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2410                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2411                                 break;
2412                         case 17:  /* Thin 64 bpp. */
2413                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2414                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2415                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2416                                                  TILE_SPLIT(split_equal_to_row_size) |
2417                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2418                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2419                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2420                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2421                                 break;
2422                         case 21:  /* 8 bpp PRT. */
2423                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2424                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2425                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2426                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2427                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2428                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2429                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2430                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2431                                 break;
2432                         case 22:  /* 16 bpp PRT */
2433                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2434                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2435                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2436                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2437                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2438                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2439                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2440                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2441                                 break;
2442                         case 23:  /* 32 bpp PRT */
2443                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2444                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2445                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2446                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2447                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2448                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2449                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2450                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2451                                 break;
2452                         case 24:  /* 64 bpp PRT */
2453                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2454                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2455                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2456                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2457                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2458                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2459                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2460                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2461                                 break;
2462                         case 25:  /* 128 bpp PRT */
2463                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2464                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2465                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2466                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2467                                                  NUM_BANKS(ADDR_SURF_8_BANK) |
2468                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2469                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2470                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2471                                 break;
2472                         default:
2473                                 gb_tile_moden = 0;
2474                                 break;
2475                         }
2476                         rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2477                         WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
2478                 }
2479         } else if ((rdev->family == CHIP_VERDE) ||
2480                    (rdev->family == CHIP_OLAND) ||
2481                    (rdev->family == CHIP_HAINAN)) {
2482                 for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
2483                         switch (reg_offset) {
2484                         case 0:  /* non-AA compressed depth or any compressed stencil */
2485                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2486                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2487                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2488                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2489                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2490                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2491                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2492                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2493                                 break;
2494                         case 1:  /* 2xAA/4xAA compressed depth only */
2495                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2496                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2497                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2498                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2499                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2500                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2501                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2502                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2503                                 break;
2504                         case 2:  /* 8xAA compressed depth only */
2505                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2506                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2507                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2508                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2509                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2510                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2511                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2512                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2513                                 break;
2514                         case 3:  /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
2515                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2516                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2517                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2518                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
2519                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2520                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2521                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2522                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2523                                 break;
2524                         case 4:  /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
2525                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2526                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2527                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2528                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2529                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2530                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2531                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2532                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2533                                 break;
2534                         case 5:  /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
2535                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2536                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2537                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2538                                                  TILE_SPLIT(split_equal_to_row_size) |
2539                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2540                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2541                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2542                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2543                                 break;
2544                         case 6:  /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
2545                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2546                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2547                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2548                                                  TILE_SPLIT(split_equal_to_row_size) |
2549                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2550                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2551                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2552                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2553                                 break;
2554                         case 7:  /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
2555                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2556                                                  MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
2557                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2558                                                  TILE_SPLIT(split_equal_to_row_size) |
2559                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2560                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2561                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2562                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2563                                 break;
2564                         case 8:  /* 1D and 1D Array Surfaces */
2565                                 gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
2566                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2567                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2568                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2569                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2570                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2571                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2572                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2573                                 break;
2574                         case 9:  /* Displayable maps. */
2575                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2576                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2577                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2578                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2579                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2580                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2581                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2582                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2583                                 break;
2584                         case 10:  /* Display 8bpp. */
2585                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2586                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2587                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2588                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2589                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2590                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2591                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2592                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2593                                 break;
2594                         case 11:  /* Display 16bpp. */
2595                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2596                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2597                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2598                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2599                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2600                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2601                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2602                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2603                                 break;
2604                         case 12:  /* Display 32bpp. */
2605                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2606                                                  MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
2607                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2608                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2609                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2610                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2611                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2612                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2613                                 break;
2614                         case 13:  /* Thin. */
2615                                 gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
2616                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2617                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2618                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
2619                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2620                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2621                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2622                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2623                                 break;
2624                         case 14:  /* Thin 8 bpp. */
2625                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2626                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2627                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2628                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2629                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2630                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2631                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2632                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2633                                 break;
2634                         case 15:  /* Thin 16 bpp. */
2635                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2636                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2637                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2638                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2639                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2640                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2641                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2642                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2643                                 break;
2644                         case 16:  /* Thin 32 bpp. */
2645                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2646                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2647                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2648                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2649                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2650                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2651                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2652                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2653                                 break;
2654                         case 17:  /* Thin 64 bpp. */
2655                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2656                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2657                                                  PIPE_CONFIG(ADDR_SURF_P4_8x16) |
2658                                                  TILE_SPLIT(split_equal_to_row_size) |
2659                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2660                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2661                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2662                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2663                                 break;
2664                         case 21:  /* 8 bpp PRT. */
2665                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2666                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2667                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2668                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2669                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2670                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
2671                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2672                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2673                                 break;
2674                         case 22:  /* 16 bpp PRT */
2675                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2676                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2677                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2678                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2679                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2680                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2681                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
2682                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
2683                                 break;
2684                         case 23:  /* 32 bpp PRT */
2685                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2686                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2687                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2688                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
2689                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2690                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2691                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
2692                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2693                                 break;
2694                         case 24:  /* 64 bpp PRT */
2695                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2696                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2697                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2698                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
2699                                                  NUM_BANKS(ADDR_SURF_16_BANK) |
2700                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2701                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2702                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
2703                                 break;
2704                         case 25:  /* 128 bpp PRT */
2705                                 gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
2706                                                  MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
2707                                                  PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
2708                                                  TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
2709                                                  NUM_BANKS(ADDR_SURF_8_BANK) |
2710                                                  BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
2711                                                  BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
2712                                                  MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
2713                                 break;
2714                         default:
2715                                 gb_tile_moden = 0;
2716                                 break;
2717                         }
2718                         rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
2719                         WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
2720                 }
2721         } else
2722                 DRM_ERROR("unknown asic: 0x%x\n", rdev->family);
2723 }
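
/*
 * Illustrative sketch, not part of the driver: every GB_TILE_MODEn entry
 * written above is a single dword built by OR-ing the sid.h field macros
 * (ARRAY_MODE, MICRO_TILE_MODE, PIPE_CONFIG, TILE_SPLIT, NUM_BANKS,
 * BANK_WIDTH, BANK_HEIGHT, MACRO_TILE_ASPECT), and the same value is
 * cached in rdev->config.si.tile_mode_array[] so it can be reported to
 * userspace without touching the hardware again.  A hypothetical sanity
 * check comparing the cache against the registers could look like this
 * (si_tiling_table_verify is an assumed name, not a driver function):
 */
#if 0	/* example only, never compiled */
static void si_tiling_table_verify(struct radeon_device *rdev)
{
	u32 i, reg, cached;

	for (i = 0; i < ARRAY_SIZE(rdev->config.si.tile_mode_array); i++) {
		reg = RREG32(GB_TILE_MODE0 + (i * 4));
		cached = rdev->config.si.tile_mode_array[i];
		if (reg != cached)
			DRM_ERROR("tile mode %u: cached 0x%08x != reg 0x%08x\n",
				  i, cached, reg);
	}
}
#endif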
2724
2725 static void si_select_se_sh(struct radeon_device *rdev,
2726                             u32 se_num, u32 sh_num)
2727 {
2728         u32 data = INSTANCE_BROADCAST_WRITES;
2729
2730         if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
2731                 data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
2732         else if (se_num == 0xffffffff)
2733                 data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
2734         else if (sh_num == 0xffffffff)
2735                 data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
2736         else
2737                 data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
2738         WREG32(GRBM_GFX_INDEX, data);
2739 }
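
/*
 * Illustrative sketch, not part of the driver: GRBM_GFX_INDEX banks the
 * per-SE/per-SH registers, so a read of e.g. CC_RB_BACKEND_DISABLE is only
 * meaningful after indexing one shader engine / shader array, and
 * broadcast must be restored afterwards so later writes reach every
 * instance again (si_read_one_sh is an assumed name for this example):
 */
#if 0	/* example only, never compiled */
static u32 si_read_one_sh(struct radeon_device *rdev, u32 se, u32 sh)
{
	u32 val;

	si_select_se_sh(rdev, se, sh);			/* index one SE/SH */
	val = RREG32(CC_RB_BACKEND_DISABLE);		/* banked register */
	si_select_se_sh(rdev, 0xffffffff, 0xffffffff);	/* back to broadcast */
	return val;
}
#endif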
2740
2741 static u32 si_create_bitmask(u32 bit_width)
2742 {
2743         u32 i, mask = 0;
2744
2745         for (i = 0; i < bit_width; i++) {
2746                 mask <<= 1;
2747                 mask |= 1;
2748         }
2749         return mask;
2750 }
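
/*
 * Note (not in the original source): si_create_bitmask(n) returns a mask
 * with the n least significant bits set, e.g. si_create_bitmask(5) is
 * 0x1f.  For bit_width < 32 the loop is equivalent to the closed form
 * below; the loop form also handles bit_width == 32 without relying on an
 * undefined 32-bit shift (si_create_bitmask_alt is an assumed name):
 */
#if 0	/* example only, never compiled */
static u32 si_create_bitmask_alt(u32 bit_width)
{
	return (1u << bit_width) - 1;	/* valid for bit_width < 32 */
}
#endif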
2751
2752 static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh)
2753 {
2754         u32 data, mask;
2755
2756         data = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
2757         if (data & 1)
2758                 data &= INACTIVE_CUS_MASK;
2759         else
2760                 data = 0;
2761         data |= RREG32(GC_USER_SHADER_ARRAY_CONFIG);
2762
2763         data >>= INACTIVE_CUS_SHIFT;
2764
2765         mask = si_create_bitmask(cu_per_sh);
2766
2767         return ~data & mask;
2768 }
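
/*
 * Note (not in the original source): the INACTIVE_CUS field from
 * CC_GC_SHADER_ARRAY_CONFIG (taken only when bit 0 of that register reads
 * back set) is OR-ed with GC_USER_SHADER_ARRAY_CONFIG, shifted down to
 * bit zero and inverted against a cu_per_sh-wide mask, so only compute
 * units that are both present and enabled stay set.  For example, with
 * cu_per_sh = 5 and an inactive field of 0b11000 the result is
 * ~0b11000 & 0x1f = 0b00111, i.e. CUs 0-2 are usable.  The same
 * fuse-plus-user-override pattern is used for render backends in
 * si_get_rb_disabled() below.  A hypothetical consumer could simply count
 * the surviving CUs (si_count_active_cus is an assumed name):
 */
#if 0	/* example only, never compiled */
static u32 si_count_active_cus(struct radeon_device *rdev, u32 cu_per_sh)
{
	return hweight32(si_get_cu_enabled(rdev, cu_per_sh));
}
#endif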
2769
2770 static void si_setup_spi(struct radeon_device *rdev,
2771                          u32 se_num, u32 sh_per_se,
2772                          u32 cu_per_sh)
2773 {
2774         int i, j, k;
2775         u32 data, mask, active_cu;
2776
2777         for (i = 0; i < se_num; i++) {
2778                 for (j = 0; j < sh_per_se; j++) {
2779                         si_select_se_sh(rdev, i, j);
2780                         data = RREG32(SPI_STATIC_THREAD_MGMT_3);
2781                         active_cu = si_get_cu_enabled(rdev, cu_per_sh);
2782
2783                         mask = 1;
2784                         for (k = 0; k < 16; k++) {
2785                                 mask <<= k;
2786                                 if (active_cu & mask) {
2787                                         data &= ~mask;
2788                                         WREG32(SPI_STATIC_THREAD_MGMT_3, data);
2789                                         break;
2790                                 }
2791                         }
2792                 }
2793         }
2794         si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2795 }
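
/*
 * Note (not in the original source): for every shader engine / shader
 * array the loop above locates one active compute unit and clears its
 * enable bit in the banked SPI_STATIC_THREAD_MGMT_3 register, i.e. it
 * takes a single CU out of that mask per SH.  A sketch of the same idea
 * using ffs() on the active-CU bitmap, assuming the SE/SH has already
 * been selected (si_reserve_lowest_cu is an assumed name):
 */
#if 0	/* example only, never compiled */
static void si_reserve_lowest_cu(struct radeon_device *rdev, u32 cu_per_sh)
{
	u32 data = RREG32(SPI_STATIC_THREAD_MGMT_3);
	u32 active_cu = si_get_cu_enabled(rdev, cu_per_sh);

	if (active_cu) {
		data &= ~(1u << (ffs(active_cu) - 1));
		WREG32(SPI_STATIC_THREAD_MGMT_3, data);
	}
}
#endif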
2796
2797 static u32 si_get_rb_disabled(struct radeon_device *rdev,
2798                               u32 max_rb_num, u32 se_num,
2799                               u32 sh_per_se)
2800 {
2801         u32 data, mask;
2802
2803         data = RREG32(CC_RB_BACKEND_DISABLE);
2804         if (data & 1)
2805                 data &= BACKEND_DISABLE_MASK;
2806         else
2807                 data = 0;
2808         data |= RREG32(GC_USER_RB_BACKEND_DISABLE);
2809
2810         data >>= BACKEND_DISABLE_SHIFT;
2811
2812         mask = si_create_bitmask(max_rb_num / se_num / sh_per_se);
2813
2814         return data & mask;
2815 }
2816
2817 static void si_setup_rb(struct radeon_device *rdev,
2818                         u32 se_num, u32 sh_per_se,
2819                         u32 max_rb_num)
2820 {
2821         int i, j;
2822         u32 data, mask;
2823         u32 disabled_rbs = 0;
2824         u32 enabled_rbs = 0;
2825
2826         for (i = 0; i < se_num; i++) {
2827                 for (j = 0; j < sh_per_se; j++) {
2828                         si_select_se_sh(rdev, i, j);
2829                         data = si_get_rb_disabled(rdev, max_rb_num, se_num, sh_per_se);
2830                         disabled_rbs |= data << ((i * sh_per_se + j) * TAHITI_RB_BITMAP_WIDTH_PER_SH);
2831                 }
2832         }
2833         si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2834
2835         mask = 1;
2836         for (i = 0; i < max_rb_num; i++) {
2837                 if (!(disabled_rbs & mask))
2838                         enabled_rbs |= mask;
2839                 mask <<= 1;
2840         }
2841
2842         for (i = 0; i < se_num; i++) {
2843                 si_select_se_sh(rdev, i, 0xffffffff);
2844                 data = 0;
2845                 for (j = 0; j < sh_per_se; j++) {
2846                         switch (enabled_rbs & 3) {
2847                         case 1:
2848                                 data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
2849                                 break;
2850                         case 2:
2851                                 data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
2852                                 break;
2853                         case 3:
2854                         default:
2855                                 data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
2856                                 break;
2857                         }
2858                         enabled_rbs >>= 2;
2859                 }
2860                 WREG32(PA_SC_RASTER_CONFIG, data);
2861         }
2862         si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2863 }
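
/*
 * Note (not in the original source): the function above works in two
 * passes.  First it walks every SE/SH pair, asks si_get_rb_disabled()
 * which render backends are fused off or user-disabled, and packs the
 * answers into one global bitmap (TAHITI_RB_BITMAP_WIDTH_PER_SH bits per
 * shader array); the bits of that bitmap that are clear, up to max_rb_num
 * of them, become enabled_rbs.  Second, per shader engine, each shader
 * array consumes two bits of enabled_rbs and is given a 2-bit RB_MAP code
 * in PA_SC_RASTER_CONFIG: 01 (only RB0 alive) -> RASTER_CONFIG_RB_MAP_0,
 * 10 (only RB1 alive) -> RASTER_CONFIG_RB_MAP_3, 11 (both alive) ->
 * RASTER_CONFIG_RB_MAP_2.  For example, with two SHs per SE and nothing
 * disabled, enabled_rbs starts as 0b1111 and every field is programmed
 * with RASTER_CONFIG_RB_MAP_2.
 */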
2864
2865 static void si_gpu_init(struct radeon_device *rdev)
2866 {
2867         u32 gb_addr_config = 0;
2868         u32 mc_shared_chmap, mc_arb_ramcfg;
2869         u32 sx_debug_1;
2870         u32 hdp_host_path_cntl;
2871         u32 tmp;
2872         int i, j;
2873
2874         switch (rdev->family) {
2875         case CHIP_TAHITI:
2876                 rdev->config.si.max_shader_engines = 2;
2877                 rdev->config.si.max_tile_pipes = 12;
2878                 rdev->config.si.max_cu_per_sh = 8;
2879                 rdev->config.si.max_sh_per_se = 2;
2880                 rdev->config.si.max_backends_per_se = 4;
2881                 rdev->config.si.max_texture_channel_caches = 12;
2882                 rdev->config.si.max_gprs = 256;
2883                 rdev->config.si.max_gs_threads = 32;
2884                 rdev->config.si.max_hw_contexts = 8;
2885
2886                 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2887                 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2888                 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2889                 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2890                 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2891                 break;
2892         case CHIP_PITCAIRN:
2893                 rdev->config.si.max_shader_engines = 2;
2894                 rdev->config.si.max_tile_pipes = 8;
2895                 rdev->config.si.max_cu_per_sh = 5;
2896                 rdev->config.si.max_sh_per_se = 2;
2897                 rdev->config.si.max_backends_per_se = 4;
2898                 rdev->config.si.max_texture_channel_caches = 8;
2899                 rdev->config.si.max_gprs = 256;
2900                 rdev->config.si.max_gs_threads = 32;
2901                 rdev->config.si.max_hw_contexts = 8;
2902
2903                 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2904                 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2905                 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2906                 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2907                 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
2908                 break;
2909         case CHIP_VERDE:
2910         default:
2911                 rdev->config.si.max_shader_engines = 1;
2912                 rdev->config.si.max_tile_pipes = 4;
2913                 rdev->config.si.max_cu_per_sh = 5;
2914                 rdev->config.si.max_sh_per_se = 2;
2915                 rdev->config.si.max_backends_per_se = 4;
2916                 rdev->config.si.max_texture_channel_caches = 4;
2917                 rdev->config.si.max_gprs = 256;
2918                 rdev->config.si.max_gs_threads = 32;
2919                 rdev->config.si.max_hw_contexts = 8;
2920
2921                 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2922                 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2923                 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2924                 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2925                 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2926                 break;
2927         case CHIP_OLAND:
2928                 rdev->config.si.max_shader_engines = 1;
2929                 rdev->config.si.max_tile_pipes = 4;
2930                 rdev->config.si.max_cu_per_sh = 6;
2931                 rdev->config.si.max_sh_per_se = 1;
2932                 rdev->config.si.max_backends_per_se = 2;
2933                 rdev->config.si.max_texture_channel_caches = 4;
2934                 rdev->config.si.max_gprs = 256;
2935                 rdev->config.si.max_gs_threads = 16;
2936                 rdev->config.si.max_hw_contexts = 8;
2937
2938                 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2939                 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2940                 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2941                 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2942                 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2943                 break;
2944         case CHIP_HAINAN:
2945                 rdev->config.si.max_shader_engines = 1;
2946                 rdev->config.si.max_tile_pipes = 4;
2947                 rdev->config.si.max_cu_per_sh = 5;
2948                 rdev->config.si.max_sh_per_se = 1;
2949                 rdev->config.si.max_backends_per_se = 1;
2950                 rdev->config.si.max_texture_channel_caches = 2;
2951                 rdev->config.si.max_gprs = 256;
2952                 rdev->config.si.max_gs_threads = 16;
2953                 rdev->config.si.max_hw_contexts = 8;
2954
2955                 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2956                 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2957                 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2958                 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2959                 gb_addr_config = HAINAN_GB_ADDR_CONFIG_GOLDEN;
2960                 break;
2961         }
2962
2963         /* Initialize HDP */
2964         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2965                 WREG32((0x2c14 + j), 0x00000000);
2966                 WREG32((0x2c18 + j), 0x00000000);
2967                 WREG32((0x2c1c + j), 0x00000000);
2968                 WREG32((0x2c20 + j), 0x00000000);
2969                 WREG32((0x2c24 + j), 0x00000000);
2970         }
2971
2972         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
2973
2974         evergreen_fix_pci_max_read_req_size(rdev);
2975
2976         WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2977
2978         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
2979         mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
2980
2981         rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes;
2982         rdev->config.si.mem_max_burst_length_bytes = 256;
2983         tmp = (mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT;
2984         rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
2985         if (rdev->config.si.mem_row_size_in_kb > 4)
2986                 rdev->config.si.mem_row_size_in_kb = 4;
2987         /* XXX use MC settings? */
2988         rdev->config.si.shader_engine_tile_size = 32;
2989         rdev->config.si.num_gpus = 1;
2990         rdev->config.si.multi_gpu_tile_size = 64;
2991
2992         /* fix up row size */
2993         gb_addr_config &= ~ROW_SIZE_MASK;
2994         switch (rdev->config.si.mem_row_size_in_kb) {
2995         case 1:
2996         default:
2997                 gb_addr_config |= ROW_SIZE(0);
2998                 break;
2999         case 2:
3000                 gb_addr_config |= ROW_SIZE(1);
3001                 break;
3002         case 4:
3003                 gb_addr_config |= ROW_SIZE(2);
3004                 break;
3005         }
3006
3007         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3008          * not have bank info, so create a custom tiling dword.
3009          * bits 3:0   num_pipes
3010          * bits 7:4   num_banks
3011          * bits 11:8  group_size
3012          * bits 15:12 row_size
3013          */
3014         rdev->config.si.tile_config = 0;
3015         switch (rdev->config.si.num_tile_pipes) {
3016         case 1:
3017                 rdev->config.si.tile_config |= (0 << 0);
3018                 break;
3019         case 2:
3020                 rdev->config.si.tile_config |= (1 << 0);
3021                 break;
3022         case 4:
3023                 rdev->config.si.tile_config |= (2 << 0);
3024                 break;
3025         case 8:
3026         default:
3027                 /* XXX what about 12? */
3028                 rdev->config.si.tile_config |= (3 << 0);
3029                 break;
3030         }
3031         switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3032         case 0: /* four banks */
3033                 rdev->config.si.tile_config |= 0 << 4;
3034                 break;
3035         case 1: /* eight banks */
3036                 rdev->config.si.tile_config |= 1 << 4;
3037                 break;
3038         case 2: /* sixteen banks */
3039         default:
3040                 rdev->config.si.tile_config |= 2 << 4;
3041                 break;
3042         }
3043         rdev->config.si.tile_config |=
3044                 ((gb_addr_config & PIPE_INTERLEAVE_SIZE_MASK) >> PIPE_INTERLEAVE_SIZE_SHIFT) << 8;
3045         rdev->config.si.tile_config |=
3046                 ((gb_addr_config & ROW_SIZE_MASK) >> ROW_SIZE_SHIFT) << 12;
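#if 0	/* example only, never compiled */
	/*
	 * Note (not in the original source): the dword packed above can be
	 * decoded with the layout from the comment (bits 3:0 pipe code,
	 * 7:4 bank code, 11:8 group size code, 15:12 row size code), where
	 * pipe code 0/1/2/3 means 1/2/4/8+ pipes and bank code 0/1/2 means
	 * 4/8/16 banks:
	 */
	DRM_DEBUG("tile_config 0x%08x: pipes code %u, banks code %u, "
		  "group size code %u, row size code %u\n",
		  rdev->config.si.tile_config,
		  (rdev->config.si.tile_config >> 0) & 0xf,
		  (rdev->config.si.tile_config >> 4) & 0xf,
		  (rdev->config.si.tile_config >> 8) & 0xf,
		  (rdev->config.si.tile_config >> 12) & 0xf);
#endif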
3047
3048         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3049         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3050         WREG32(DMIF_ADDR_CALC, gb_addr_config);
3051         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3052         WREG32(DMA_TILING_CONFIG + DMA0_REGISTER_OFFSET, gb_addr_config);
3053         WREG32(DMA_TILING_CONFIG + DMA1_REGISTER_OFFSET, gb_addr_config);
3054         if (rdev->has_uvd) {
3055                 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3056                 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3057                 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3058         }
3059
3060         si_tiling_mode_table_init(rdev);
3061
3062         si_setup_rb(rdev, rdev->config.si.max_shader_engines,
3063                     rdev->config.si.max_sh_per_se,
3064                     rdev->config.si.max_backends_per_se);
3065
3066         si_setup_spi(rdev, rdev->config.si.max_shader_engines,
3067                      rdev->config.si.max_sh_per_se,
3068                      rdev->config.si.max_cu_per_sh);
3069
3070
3071         /* set HW defaults for 3D engine */
3072         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3073                                      ROQ_IB2_START(0x2b)));
3074         WREG32(CP_MEQ_THRESHOLDS, MEQ1_START(0x30) | MEQ2_START(0x60));
3075
3076         sx_debug_1 = RREG32(SX_DEBUG_1);
3077         WREG32(SX_DEBUG_1, sx_debug_1);
3078
3079         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3080
3081         WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) |
3082                                  SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) |
3083                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) |
3084                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size)));
3085
3086         WREG32(VGT_NUM_INSTANCES, 1);
3087
3088         WREG32(CP_PERFMON_CNTL, 0);
3089
3090         WREG32(SQ_CONFIG, 0);
3091
3092         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3093                                           FORCE_EOV_MAX_REZ_CNT(255)));
3094
3095         WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC) |
3096                AUTO_INVLD_EN(ES_AND_GS_AUTO));
3097
3098         WREG32(VGT_GS_VERTEX_REUSE, 16);
3099         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3100
3101         WREG32(CB_PERFCOUNTER0_SELECT0, 0);
3102         WREG32(CB_PERFCOUNTER0_SELECT1, 0);
3103         WREG32(CB_PERFCOUNTER1_SELECT0, 0);
3104         WREG32(CB_PERFCOUNTER1_SELECT1, 0);
3105         WREG32(CB_PERFCOUNTER2_SELECT0, 0);
3106         WREG32(CB_PERFCOUNTER2_SELECT1, 0);
3107         WREG32(CB_PERFCOUNTER3_SELECT0, 0);
3108         WREG32(CB_PERFCOUNTER3_SELECT1, 0);
3109
3110         tmp = RREG32(HDP_MISC_CNTL);
3111         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3112         WREG32(HDP_MISC_CNTL, tmp);
3113
3114         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3115         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3116
3117         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3118
3119         udelay(50);
3120 }
3121
3122 /*
3123  * GPU scratch registers helper functions.
3124  */
3125 static void si_scratch_init(struct radeon_device *rdev)
3126 {
3127         int i;
3128
3129         rdev->scratch.num_reg = 7;
3130         rdev->scratch.reg_base = SCRATCH_REG0;
3131         for (i = 0; i < rdev->scratch.num_reg; i++) {
3132                 rdev->scratch.free[i] = true;
3133                 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3134         }
3135 }
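
/*
 * Illustrative sketch, not part of the driver: the pool initialized above
 * is what radeon_scratch_get()/radeon_scratch_free() hand out, e.g. for
 * the ring tests elsewhere in the driver, which write a marker value into
 * a scratch register through the CP and poll it back.  si_scratch_example
 * below is an assumed name that only mirrors that pattern:
 */
#if 0	/* example only, never compiled */
static int si_scratch_example(struct radeon_device *rdev)
{
	u32 scratch;
	int r;

	r = radeon_scratch_get(rdev, &scratch);
	if (r)
		return r;
	WREG32(scratch, 0xCAFEDEAD);	/* known pattern */
	/* ... emit a CP packet that writes a new value to 'scratch' ... */
	radeon_scratch_free(rdev, scratch);
	return 0;
}
#endif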
3136
3137 void si_fence_ring_emit(struct radeon_device *rdev,
3138                         struct radeon_fence *fence)
3139 {
3140         struct radeon_ring *ring = &rdev->ring[fence->ring];
3141         u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3142
3143         /* flush read cache over gart */
3144         radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3145         radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3146         radeon_ring_write(ring, 0);
3147         radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3148         radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3149                           PACKET3_TC_ACTION_ENA |
3150                           PACKET3_SH_KCACHE_ACTION_ENA |
3151                           PACKET3_SH_ICACHE_ACTION_ENA);
3152         radeon_ring_write(ring, 0xFFFFFFFF);
3153         radeon_ring_write(ring, 0);
3154         radeon_ring_write(ring, 10); /* poll interval */
3155         /* EVENT_WRITE_EOP - flush caches, send int */
3156         radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
3157         radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
3158         radeon_ring_write(ring, addr & 0xffffffff);
3159         radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
3160         radeon_ring_write(ring, fence->seq);
3161         radeon_ring_write(ring, 0);
3162 }
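
/*
 * Note (not in the original source): the EVENT_WRITE_EOP packet above
 * makes the CP write fence->seq to fence_drv[ring].gpu_addr and signal an
 * interrupt (per the "flush caches, send int" comment).  That address is
 * the GPU view of the same write-back memory the host reads through
 * fence_drv[ring].cpu_addr (cf. radeon_fence_read()), so checking for
 * completion on the CPU side is roughly just a read:
 */
#if 0	/* example only, never compiled */
static u32 si_fence_peek(struct radeon_device *rdev, int ring)
{
	return le32_to_cpu(*rdev->fence_drv[ring].cpu_addr);
}
#endif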
3163
3164 /*
3165  * IB stuff
3166  */
3167 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3168 {
3169         struct radeon_ring *ring = &rdev->ring[ib->ring];
3170         u32 header;
3171
3172         if (ib->is_const_ib) {
3173                 /* set switch buffer packet before const IB */
3174                 radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
3175                 radeon_ring_write(ring, 0);
3176
3177                 header = PACKET3(PACKET3_INDIRECT_BUFFER_CONST, 2);
3178         } else {
3179                 u32 next_rptr;
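		/*
		 * Note (not in the original source): next_rptr must point
		 * just past everything this function emits for a gfx IB:
		 * 3 dwords (SET_CONFIG_REG path) or 5 dwords (WRITE_DATA
		 * path) of read-pointer bookkeeping, 4 dwords for the
		 * INDIRECT_BUFFER packet, and 8 dwords for the cache-flush
		 * sequence at the bottom of the function - hence the
		 * "+ 3 + 4 + 8" and "+ 5 + 4 + 8" below.
		 */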
3180                 if (ring->rptr_save_reg) {
3181                         next_rptr = ring->wptr + 3 + 4 + 8;
3182                         radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3183                         radeon_ring_write(ring, ((ring->rptr_save_reg -
3184                                                   PACKET3_SET_CONFIG_REG_START) >> 2));
3185                         radeon_ring_write(ring, next_rptr);
3186                 } else if (rdev->wb.enabled) {
3187                         next_rptr = ring->wptr + 5 + 4 + 8;
3188                         radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
3189                         radeon_ring_write(ring, (1 << 8));
3190                         radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3191                         radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xffffffff);
3192                         radeon_ring_write(ring, next_rptr);
3193                 }
3194
3195                 header = PACKET3(PACKET3_INDIRECT_BUFFER, 2);
3196         }
3197
3198         radeon_ring_write(ring, header);
3199         radeon_ring_write(ring,
3200 #ifdef __BIG_ENDIAN
3201                           (2 << 0) |
3202 #endif
3203                           (ib->gpu_addr & 0xFFFFFFFC));
3204         radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
3205         radeon_ring_write(ring, ib->length_dw |
3206                           (ib->vm ? (ib->vm->id << 24) : 0));
3207
3208         if (!ib->is_const_ib) {
3209                 /* flush read cache over gart for this vmid */
3210                 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3211                 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3212                 radeon_ring_write(ring, ib->vm ? ib->vm->id : 0);
3213                 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3214                 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3215                                   PACKET3_TC_ACTION_ENA |
3216                                   PACKET3_SH_KCACHE_ACTION_ENA |
3217                                   PACKET3_SH_ICACHE_ACTION_ENA);
3218                 radeon_ring_write(ring, 0xFFFFFFFF);
3219                 radeon_ring_write(ring, 0);
3220                 radeon_ring_write(ring, 10); /* poll interval */
3221         }
3222 }
3223
3224 /*
3225  * CP.
3226  */
3227 static void si_cp_enable(struct radeon_device *rdev, bool enable)
3228 {
3229         if (enable)
3230                 WREG32(CP_ME_CNTL, 0);
3231         else {
3232                 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3233                 WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT));
3234                 WREG32(SCRATCH_UMSK, 0);
3235                 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3236                 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3237                 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3238         }
3239         udelay(50);
3240 }
3241
3242 static int si_cp_load_microcode(struct radeon_device *rdev)
3243 {
3244         const __be32 *fw_data;
3245         int i;
3246
3247         if (!rdev->me_fw || !rdev->pfp_fw)
3248                 return -EINVAL;
3249
3250         si_cp_enable(rdev, false);
3251
3252         /* PFP */
3253         fw_data = (const __be32 *)rdev->pfp_fw->data;
3254         WREG32(CP_PFP_UCODE_ADDR, 0);
3255         for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
3256                 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3257         WREG32(CP_PFP_UCODE_ADDR, 0);
3258
3259         /* CE */
3260         fw_data = (const __be32 *)rdev->ce_fw->data;
3261         WREG32(CP_CE_UCODE_ADDR, 0);
3262         for (i = 0; i < SI_CE_UCODE_SIZE; i++)
3263                 WREG32(CP_CE_UCODE_DATA, be32_to_cpup(fw_data++));
3264         WREG32(CP_CE_UCODE_ADDR, 0);
3265
3266         /* ME */
3267         fw_data = (const __be32 *)rdev->me_fw->data;
3268         WREG32(CP_ME_RAM_WADDR, 0);
3269         for (i = 0; i < SI_PM4_UCODE_SIZE; i++)
3270                 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3271         WREG32(CP_ME_RAM_WADDR, 0);
3272
3273         WREG32(CP_PFP_UCODE_ADDR, 0);
3274         WREG32(CP_CE_UCODE_ADDR, 0);
3275         WREG32(CP_ME_RAM_WADDR, 0);
3276         WREG32(CP_ME_RAM_RADDR, 0);
3277         return 0;
3278 }
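
/*
 * Illustrative sketch, not part of the driver: the PFP, CE and ME uploads
 * above all follow the same pattern - reset the ucode write address,
 * stream the big-endian firmware words through the data register, then
 * reset the address again.  A hypothetical common helper (si_load_ucode
 * is an assumed name, not a driver function):
 */
#if 0	/* example only, never compiled */
static void si_load_ucode(struct radeon_device *rdev, u32 addr_reg,
			  u32 data_reg, const __be32 *fw_data, u32 size)
{
	u32 i;

	WREG32(addr_reg, 0);
	for (i = 0; i < size; i++)
		WREG32(data_reg, be32_to_cpup(fw_data++));
	WREG32(addr_reg, 0);
}
#endif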
3279
3280 static int si_cp_start(struct radeon_device *rdev)
3281 {
3282         struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3283         int r, i;
3284
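	/*
	 * Note (not in the original source): the 7 + 4 dwords reserved
	 * below are the PACKET3_ME_INITIALIZE packet (header + 6 values)
	 * plus the CE PACKET3_SET_BASE packet (header + 3 values).
	 */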
3285         r = radeon_ring_lock(rdev, ring, 7 + 4);
3286         if (r) {
3287                 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3288                 return r;
3289         }
3290         /* init the CP */
3291         radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3292         radeon_ring_write(ring, 0x1);
3293         radeon_ring_write(ring, 0x0);
3294         radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
3295         radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3296         radeon_ring_write(ring, 0);
3297         radeon_ring_write(ring, 0);
3298
3299         /* init the CE partitions */
3300         radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
3301         radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
3302         radeon_ring_write(ring, 0xc000);
3303         radeon_ring_write(ring, 0xe000);
3304         radeon_ring_unlock_commit(rdev, ring);
3305
3306         si_cp_enable(rdev, true);
3307
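	/*
	 * Note (not in the original source): si_default_size dwords of
	 * golden context-register state plus 10 dwords of framing: two
	 * PREAMBLE_CNTL packets (2 dwords each), CLEAR_STATE (2 dwords)
	 * and a SET_CONTEXT_REG write of three values (4 dwords).
	 */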
3308         r = radeon_ring_lock(rdev, ring, si_default_size + 10);
3309         if (r) {
3310                 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3311                 return r;
3312         }
3313
3314         /* setup clear context state */
3315         radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3316         radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3317
3318         for (i = 0; i < si_default_size; i++)
3319                 radeon_ring_write(ring, si_default_state[i]);
3320
3321         radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3322         radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3323
3324         /* set clear context state */
3325         radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3326         radeon_ring_write(ring, 0);
3327
3328         radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
3329         radeon_ring_write(ring, 0x00000316);
3330         radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3331         radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3332
3333         radeon_ring_unlock_commit(rdev, ring);
3334
3335         for (i = RADEON_RING_TYPE_GFX_INDEX; i <= CAYMAN_RING_TYPE_CP2_INDEX; ++i) {
3336                 ring = &rdev->ring[i];
3337                 r = radeon_ring_lock(rdev, ring, 2);
3338
3339                 /* clear the compute context state */
3340                 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
3341                 radeon_ring_write(ring, 0);
3342
3343                 radeon_ring_unlock_commit(rdev, ring);
3344         }
3345
3346         return 0;
3347 }
3348
3349 static void si_cp_fini(struct radeon_device *rdev)
3350 {
3351         struct radeon_ring *ring;
3352         si_cp_enable(rdev, false);
3353
3354         ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3355         radeon_ring_fini(rdev, ring);
3356         radeon_scratch_free(rdev, ring->rptr_save_reg);
3357
3358         ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3359         radeon_ring_fini(rdev, ring);
3360         radeon_scratch_free(rdev, ring->rptr_save_reg);
3361
3362         ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3363         radeon_ring_fini(rdev, ring);
3364         radeon_scratch_free(rdev, ring->rptr_save_reg);
3365 }
3366
3367 static int si_cp_resume(struct radeon_device *rdev)
3368 {
3369         struct radeon_ring *ring;
3370         u32 tmp;
3371         u32 rb_bufsz;
3372         int r;
3373
3374         /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3375         WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3376                                  SOFT_RESET_PA |
3377                                  SOFT_RESET_VGT |
3378                                  SOFT_RESET_SPI |
3379                                  SOFT_RESET_SX));
3380         RREG32(GRBM_SOFT_RESET);
3381         mdelay(15);
3382         WREG32(GRBM_SOFT_RESET, 0);
3383         RREG32(GRBM_SOFT_RESET);
3384
3385         WREG32(CP_SEM_WAIT_TIMER, 0x0);
3386         WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3387
3388         /* Set the write pointer delay */
3389         WREG32(CP_RB_WPTR_DELAY, 0);
3390
3391         WREG32(CP_DEBUG, 0);
3392         WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3393
3394         /* ring 0 - compute and gfx */
3395         /* Set ring buffer size */
3396         ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3397         rb_bufsz = drm_order(ring->ring_size / 8);
3398         tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3399 #ifdef __BIG_ENDIAN
3400         tmp |= BUF_SWAP_32BIT;
3401 #endif
3402         WREG32(CP_RB0_CNTL, tmp);
3403
3404         /* Initialize the ring buffer's read and write pointers */
3405         WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
3406         ring->wptr = 0;
3407         WREG32(CP_RB0_WPTR, ring->wptr);
3408
3409         /* set the wb address whether it's enabled or not */
3410         WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
3411         WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3412
3413         if (rdev->wb.enabled)
3414                 WREG32(SCRATCH_UMSK, 0xff);
3415         else {
3416                 tmp |= RB_NO_UPDATE;
3417                 WREG32(SCRATCH_UMSK, 0);
3418         }
3419
3420         mdelay(1);
3421         WREG32(CP_RB0_CNTL, tmp);
3422
3423         WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
3424
3425         ring->rptr = RREG32(CP_RB0_RPTR);
3426
3427         /* ring1 - compute only */
3428         /* Set ring buffer size */
3429         ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3430         rb_bufsz = drm_order(ring->ring_size / 8);
3431         tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3432 #ifdef __BIG_ENDIAN
3433         tmp |= BUF_SWAP_32BIT;
3434 #endif
3435         WREG32(CP_RB1_CNTL, tmp);
3436
3437         /* Initialize the ring buffer's read and write pointers */
3438         WREG32(CP_RB1_CNTL, tmp | RB_RPTR_WR_ENA);
3439         ring->wptr = 0;
3440         WREG32(CP_RB1_WPTR, ring->wptr);
3441
3442         /* set the wb address whether it's enabled or not */
3443         WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);
3444         WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);
3445
3446         mdelay(1);
3447         WREG32(CP_RB1_CNTL, tmp);
3448
3449         WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);
3450
3451         ring->rptr = RREG32(CP_RB1_RPTR);
3452
3453         /* ring2 - compute only */
3454         /* Set ring buffer size */
3455         ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3456         rb_bufsz = drm_order(ring->ring_size / 8);
3457         tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3458 #ifdef __BIG_ENDIAN
3459         tmp |= BUF_SWAP_32BIT;
3460 #endif
3461         WREG32(CP_RB2_CNTL, tmp);
3462
3463         /* Initialize the ring buffer's read and write pointers */
3464         WREG32(CP_RB2_CNTL, tmp | RB_RPTR_WR_ENA);
3465         ring->wptr = 0;
3466         WREG32(CP_RB2_WPTR, ring->wptr);
3467
3468         /* set the wb address whether it's enabled or not */
3469         WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC);
3470         WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF);
3471
3472         mdelay(1);
3473         WREG32(CP_RB2_CNTL, tmp);
3474
3475         WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);
3476
3477         ring->rptr = RREG32(CP_RB2_RPTR);
3478
3479         /* start the rings */
3480         si_cp_start(rdev);
3481         rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
3482         rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
3483         rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
3484         r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
3485         if (r) {
3486                 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3487                 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3488                 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3489                 return r;
3490         }
3491         r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
3492         if (r) {
3493                 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3494         }
3495         r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
3496         if (r) {
3497                 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3498         }
3499
3500         return 0;
3501 }
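
/*
 * Illustrative sketch, not part of the driver: the three ring blocks above
 * differ only in their register offsets and write-back slots.  A
 * hypothetical helper capturing the shared sequence (parameter names are
 * assumptions; the big-endian BUF_SWAP_32BIT and the RB_NO_UPDATE /
 * SCRATCH_UMSK handling done inline for ring 0 are omitted):
 */
#if 0	/* example only, never compiled */
static void si_cp_program_ring(struct radeon_device *rdev,
			       struct radeon_ring *ring,
			       u32 cntl_reg, u32 wptr_reg,
			       u32 rptr_addr_reg, u32 rptr_addr_hi_reg,
			       u32 base_reg, u64 rptr_wb_offset)
{
	/* encode buffer size and fetch block size as powers of two */
	u32 rb_bufsz = drm_order(ring->ring_size / 8);
	u32 tmp = (drm_order(RADEON_GPU_PAGE_SIZE / 8) << 8) | rb_bufsz;

	WREG32(cntl_reg, tmp);

	/* reset read/write pointers while RB_RPTR_WR_ENA is set */
	WREG32(cntl_reg, tmp | RB_RPTR_WR_ENA);
	ring->wptr = 0;
	WREG32(wptr_reg, ring->wptr);

	/* point the read-pointer write-back at the shared wb buffer */
	WREG32(rptr_addr_reg, (rdev->wb.gpu_addr + rptr_wb_offset) & 0xFFFFFFFC);
	WREG32(rptr_addr_hi_reg,
	       upper_32_bits(rdev->wb.gpu_addr + rptr_wb_offset) & 0xFF);

	mdelay(1);
	WREG32(cntl_reg, tmp);

	WREG32(base_reg, ring->gpu_addr >> 8);
}
#endif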
3502
3503 u32 si_gpu_check_soft_reset(struct radeon_device *rdev)
3504 {
3505         u32 reset_mask = 0;
3506         u32 tmp;
3507
3508         /* GRBM_STATUS */
3509         tmp = RREG32(GRBM_STATUS);
3510         if (tmp & (PA_BUSY | SC_BUSY |
3511                    BCI_BUSY | SX_BUSY |
3512                    TA_BUSY | VGT_BUSY |
3513                    DB_BUSY | CB_BUSY |
3514                    GDS_BUSY | SPI_BUSY |
3515                    IA_BUSY | IA_BUSY_NO_DMA))
3516                 reset_mask |= RADEON_RESET_GFX;
3517
3518         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3519                    CP_BUSY | CP_COHERENCY_BUSY))
3520                 reset_mask |= RADEON_RESET_CP;
3521
3522         if (tmp & GRBM_EE_BUSY)
3523                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3524
3525         /* GRBM_STATUS2 */
3526         tmp = RREG32(GRBM_STATUS2);
3527         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3528                 reset_mask |= RADEON_RESET_RLC;
3529
3530         /* DMA_STATUS_REG 0 */
3531         tmp = RREG32(DMA_STATUS_REG + DMA0_REGISTER_OFFSET);
3532         if (!(tmp & DMA_IDLE))
3533                 reset_mask |= RADEON_RESET_DMA;
3534
3535         /* DMA_STATUS_REG 1 */
3536         tmp = RREG32(DMA_STATUS_REG + DMA1_REGISTER_OFFSET);
3537         if (!(tmp & DMA_IDLE))
3538                 reset_mask |= RADEON_RESET_DMA1;
3539
3540         /* SRBM_STATUS2 */
3541         tmp = RREG32(SRBM_STATUS2);
3542         if (tmp & DMA_BUSY)
3543                 reset_mask |= RADEON_RESET_DMA;
3544
3545         if (tmp & DMA1_BUSY)
3546                 reset_mask |= RADEON_RESET_DMA1;
3547
3548         /* SRBM_STATUS */
3549         tmp = RREG32(SRBM_STATUS);
3550
3551         if (tmp & IH_BUSY)
3552                 reset_mask |= RADEON_RESET_IH;
3553
3554         if (tmp & SEM_BUSY)
3555                 reset_mask |= RADEON_RESET_SEM;
3556
3557         if (tmp & GRBM_RQ_PENDING)
3558                 reset_mask |= RADEON_RESET_GRBM;
3559
3560         if (tmp & VMC_BUSY)
3561                 reset_mask |= RADEON_RESET_VMC;
3562
3563         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3564                    MCC_BUSY | MCD_BUSY))
3565                 reset_mask |= RADEON_RESET_MC;
3566
3567         if (evergreen_is_display_hung(rdev))
3568                 reset_mask |= RADEON_RESET_DISPLAY;
3569
3570         /* VM_L2_STATUS */
3571         tmp = RREG32(VM_L2_STATUS);
3572         if (tmp & L2_BUSY)
3573                 reset_mask |= RADEON_RESET_VMC;
3574
3575         /* Skip MC reset as it's most likely not hung, just busy */
3576         if (reset_mask & RADEON_RESET_MC) {
3577                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3578                 reset_mask &= ~RADEON_RESET_MC;
3579         }
3580
3581         return reset_mask;
3582 }
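
/*
 * Illustrative sketch, not part of the driver: callers usually only test
 * the returned mask for the engines they care about, e.g. a gfx lockup
 * check would do something like this (si_gfx_engine_hung is an assumed
 * name, not a driver function):
 */
#if 0	/* example only, never compiled */
static bool si_gfx_engine_hung(struct radeon_device *rdev)
{
	u32 reset_mask = si_gpu_check_soft_reset(rdev);

	return !!(reset_mask & (RADEON_RESET_GFX |
				RADEON_RESET_COMPUTE |
				RADEON_RESET_CP));
}
#endif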
3583
3584 static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3585 {
3586         struct evergreen_mc_save save;
3587         u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3588         u32 tmp;
3589
3590         if (reset_mask == 0)
3591                 return;
3592
3593         dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3594
3595         evergreen_print_gpu_status_regs(rdev);
3596         dev_info(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
3597                  RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
3598         dev_info(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3599                  RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
3600
3601         /* Disable CP parsing/prefetching */
3602         WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3603
3604         if (reset_mask & RADEON_RESET_DMA) {
3605                 /* dma0 */
3606                 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3607                 tmp &= ~DMA_RB_ENABLE;
3608                 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3609         }
3610         if (reset_mask & RADEON_RESET_DMA1) {
3611                 /* dma1 */
3612                 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3613                 tmp &= ~DMA_RB_ENABLE;
3614                 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3615         }
3616
3617         udelay(50);
3618
3619         evergreen_mc_stop(rdev, &save);
3620         if (evergreen_mc_wait_for_idle(rdev)) {
3621                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3622         }
3623
3624         if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE | RADEON_RESET_CP)) {
3625                 grbm_soft_reset = SOFT_RESET_CB |
3626                         SOFT_RESET_DB |
3627                         SOFT_RESET_GDS |
3628                         SOFT_RESET_PA |
3629                         SOFT_RESET_SC |
3630                         SOFT_RESET_BCI |
3631                         SOFT_RESET_SPI |
3632                         SOFT_RESET_SX |
3633                         SOFT_RESET_TC |
3634                         SOFT_RESET_TA |
3635                         SOFT_RESET_VGT |
3636                         SOFT_RESET_IA;
3637         }
3638
3639         if (reset_mask & RADEON_RESET_CP) {
3640                 grbm_soft_reset |= SOFT_RESET_CP | SOFT_RESET_VGT;
3641
3642                 srbm_soft_reset |= SOFT_RESET_GRBM;
3643         }
3644
3645         if (reset_mask & RADEON_RESET_DMA)
3646                 srbm_soft_reset |= SOFT_RESET_DMA;
3647
3648         if (reset_mask & RADEON_RESET_DMA1)
3649                 srbm_soft_reset |= SOFT_RESET_DMA1;
3650
3651         if (reset_mask & RADEON_RESET_DISPLAY)
3652                 srbm_soft_reset |= SOFT_RESET_DC;
3653
3654         if (reset_mask & RADEON_RESET_RLC)
3655                 grbm_soft_reset |= SOFT_RESET_RLC;
3656
3657         if (reset_mask & RADEON_RESET_SEM)
3658                 srbm_soft_reset |= SOFT_RESET_SEM;
3659
3660         if (reset_mask & RADEON_RESET_IH)
3661                 srbm_soft_reset |= SOFT_RESET_IH;
3662
3663         if (reset_mask & RADEON_RESET_GRBM)
3664                 srbm_soft_reset |= SOFT_RESET_GRBM;
3665
3666         if (reset_mask & RADEON_RESET_VMC)
3667                 srbm_soft_reset |= SOFT_RESET_VMC;
3668
3669         if (reset_mask & RADEON_RESET_MC)
3670                 srbm_soft_reset |= SOFT_RESET_MC;
3671
3672         if (grbm_soft_reset) {
3673                 tmp = RREG32(GRBM_SOFT_RESET);
3674                 tmp |= grbm_soft_reset;
3675                 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3676                 WREG32(GRBM_SOFT_RESET, tmp);
3677                 tmp = RREG32(GRBM_SOFT_RESET);
3678
3679                 udelay(50);
3680
3681                 tmp &= ~grbm_soft_reset;
3682                 WREG32(GRBM_SOFT_RESET, tmp);
3683                 tmp = RREG32(GRBM_SOFT_RESET);
3684         }
3685
3686         if (srbm_soft_reset) {
3687                 tmp = RREG32(SRBM_SOFT_RESET);
3688                 tmp |= srbm_soft_reset;
3689                 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3690                 WREG32(SRBM_SOFT_RESET, tmp);
3691                 tmp = RREG32(SRBM_SOFT_RESET);
3692
3693                 udelay(50);
3694
3695                 tmp &= ~srbm_soft_reset;
3696                 WREG32(SRBM_SOFT_RESET, tmp);
3697                 tmp = RREG32(SRBM_SOFT_RESET);
3698         }
3699
3700         /* Wait a little for things to settle down */
3701         udelay(50);
3702
3703         evergreen_mc_resume(rdev, &save);
3704         udelay(50);
3705
3706         evergreen_print_gpu_status_regs(rdev);
3707 }
3708
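/**
 * si_asic_reset - soft reset the GPU
 *
 * @rdev: radeon_device pointer
 *
 * Check which blocks are hung, soft reset them and re-check the
 * status, updating the BIOS scratch "engine hung" flag accordingly (SI).
 * Returns 0.
 */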
3709 int si_asic_reset(struct radeon_device *rdev)
3710 {
3711         u32 reset_mask;
3712
3713         reset_mask = si_gpu_check_soft_reset(rdev);
3714
3715         if (reset_mask)
3716                 r600_set_bios_scratch_engine_hung(rdev, true);
3717
3718         si_gpu_soft_reset(rdev, reset_mask);
3719
3720         reset_mask = si_gpu_check_soft_reset(rdev);
3721
3722         if (!reset_mask)
3723                 r600_set_bios_scratch_engine_hung(rdev, false);
3724
3725         return 0;
3726 }
3727
3728 /**
3729  * si_gfx_is_lockup - Check if the GFX engine is locked up
3730  *
3731  * @rdev: radeon_device pointer
3732  * @ring: radeon_ring structure holding ring information
3733  *
3734  * Check if the GFX engine is locked up.
3735  * Returns true if the engine appears to be locked up, false if not.
3736  */
3737 bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3738 {
3739         u32 reset_mask = si_gpu_check_soft_reset(rdev);
3740
3741         if (!(reset_mask & (RADEON_RESET_GFX |
3742                             RADEON_RESET_COMPUTE |
3743                             RADEON_RESET_CP))) {
3744                 radeon_ring_lockup_update(ring);
3745                 return false;
3746         }
3747         /* force CP activities */
3748         radeon_ring_force_activity(rdev, ring);
3749         return radeon_ring_test_lockup(rdev, ring);
3750 }
3751
3752 /* MC */
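/**
 * si_mc_program - program the memory controller
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the HDP registers, lock out VGA access and program
 * the system aperture and VRAM location in the GPU's address space (SI).
 */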
3753 static void si_mc_program(struct radeon_device *rdev)
3754 {
3755         struct evergreen_mc_save save;
3756         u32 tmp;
3757         int i, j;
3758
3759         /* Initialize HDP */
3760         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3761                 WREG32((0x2c14 + j), 0x00000000);
3762                 WREG32((0x2c18 + j), 0x00000000);
3763                 WREG32((0x2c1c + j), 0x00000000);
3764                 WREG32((0x2c20 + j), 0x00000000);
3765                 WREG32((0x2c24 + j), 0x00000000);
3766         }
3767         WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
3768
3769         evergreen_mc_stop(rdev, &save);
3770         if (radeon_mc_wait_for_idle(rdev)) {
3771                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3772         }
3773         if (!ASIC_IS_NODCE(rdev))
3774                 /* Lock out access through VGA aperture */
3775                 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
3776         /* Update configuration */
3777         WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
3778                rdev->mc.vram_start >> 12);
3779         WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
3780                rdev->mc.vram_end >> 12);
3781         WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR,
3782                rdev->vram_scratch.gpu_addr >> 12);
3783         tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3784         tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3785         WREG32(MC_VM_FB_LOCATION, tmp);
3786         /* XXX double check these! */
3787         WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3788         WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3789         WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3790         WREG32(MC_VM_AGP_BASE, 0);
3791         WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3792         WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3793         if (radeon_mc_wait_for_idle(rdev)) {
3794                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3795         }
3796         evergreen_mc_resume(rdev, &save);
3797         if (!ASIC_IS_NODCE(rdev)) {
3798                 /* we need to own VRAM, so turn off the VGA renderer here
3799                  * to stop it from overwriting our objects */
3800                 rv515_vga_render_disable(rdev);
3801         }
3802 }
3803
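/**
 * si_vram_gtt_location - place VRAM and GTT in the GPU address space
 *
 * @rdev: radeon_device pointer
 * @mc: memory controller structure holding the VRAM sizes
 *
 * Cap the usable VRAM size if needed to leave room for GTT, then
 * pick the VRAM and GTT locations (SI).
 */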
3804 void si_vram_gtt_location(struct radeon_device *rdev,
3805                           struct radeon_mc *mc)
3806 {
3807         if (mc->mc_vram_size > 0xFFC0000000ULL) {
3808                 /* leave room for at least 1024M GTT */
3809                 dev_warn(rdev->dev, "limiting VRAM\n");
3810                 mc->real_vram_size = 0xFFC0000000ULL;
3811                 mc->mc_vram_size = 0xFFC0000000ULL;
3812         }
3813         radeon_vram_location(rdev, &rdev->mc, 0);
3814         rdev->mc.gtt_base_align = 0;
3815         radeon_gtt_location(rdev, mc);
3816 }
3817
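/**
 * si_mc_init - initialize the memory controller parameters
 *
 * @rdev: radeon_device pointer
 *
 * Determine the memory channel width and count, read the VRAM size
 * from CONFIG_MEMSIZE and set up the VRAM/GTT layout (SI).
 * Returns 0 for success.
 */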
3818 static int si_mc_init(struct radeon_device *rdev)
3819 {
3820         u32 tmp;
3821         int chansize, numchan;
3822
3823         /* Get VRAM information */
3824         rdev->mc.vram_is_ddr = true;
3825         tmp = RREG32(MC_ARB_RAMCFG);
3826         if (tmp & CHANSIZE_OVERRIDE) {
3827                 chansize = 16;
3828         } else if (tmp & CHANSIZE_MASK) {
3829                 chansize = 64;
3830         } else {
3831                 chansize = 32;
3832         }
3833         tmp = RREG32(MC_SHARED_CHMAP);
3834         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3835         case 0:
3836         default:
3837                 numchan = 1;
3838                 break;
3839         case 1:
3840                 numchan = 2;
3841                 break;
3842         case 2:
3843                 numchan = 4;
3844                 break;
3845         case 3:
3846                 numchan = 8;
3847                 break;
3848         case 4:
3849                 numchan = 3;
3850                 break;
3851         case 5:
3852                 numchan = 6;
3853                 break;
3854         case 6:
3855                 numchan = 10;
3856                 break;
3857         case 7:
3858                 numchan = 12;
3859                 break;
3860         case 8:
3861                 numchan = 16;
3862                 break;
3863         }
3864         rdev->mc.vram_width = numchan * chansize;
3865         /* Could the aperture size report 0? */
3866         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3867         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3868         /* size in MB on SI */
3869         rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3870         rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3871         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3872         si_vram_gtt_location(rdev, &rdev->mc);
3873         radeon_update_bandwidth_info(rdev);
3874
3875         return 0;
3876 }
3877
3878 /*
3879  * GART
3880  */
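/**
 * si_pcie_gart_tlb_flush - flush the VM page table TLB
 *
 * @rdev: radeon_device pointer
 *
 * Flush the HDP cache and request a TLB invalidate for VM context 0
 * via MMIO (SI).
 */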
3881 void si_pcie_gart_tlb_flush(struct radeon_device *rdev)
3882 {
3883         /* flush hdp cache */
3884         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
3885
3886         /* bits 0-15 are the VM contexts 0-15 */
3887         WREG32(VM_INVALIDATE_REQUEST, 1);
3888 }
3889
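/**
 * si_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pin the GART page table in VRAM, program the TLB and L2 cache
 * controls and enable VM contexts 0-15 (SI).
 * Returns 0 for success, error for failure.
 */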
3890 static int si_pcie_gart_enable(struct radeon_device *rdev)
3891 {
3892         int r, i;
3893
3894         if (rdev->gart.robj == NULL) {
3895                 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
3896                 return -EINVAL;
3897         }
3898         r = radeon_gart_table_vram_pin(rdev);
3899         if (r)
3900                 return r;
3901         radeon_gart_restore(rdev);
3902         /* Setup TLB control */
3903         WREG32(MC_VM_MX_L1_TLB_CNTL,
3904                (0xA << 7) |
3905                ENABLE_L1_TLB |
3906                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
3907                ENABLE_ADVANCED_DRIVER_MODEL |
3908                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
3909         /* Setup L2 cache */
3910         WREG32(VM_L2_CNTL, ENABLE_L2_CACHE |
3911                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
3912                ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
3913                EFFECTIVE_L2_QUEUE_SIZE(7) |
3914                CONTEXT1_IDENTITY_ACCESS_MODE(1));
3915         WREG32(VM_L2_CNTL2, INVALIDATE_ALL_L1_TLBS | INVALIDATE_L2_CACHE);
3916         WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
3917                L2_CACHE_BIGK_FRAGMENT_SIZE(0));
3918         /* setup context0 */
3919         WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
3920         WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
3921         WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
3922         WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
3923                         (u32)(rdev->dummy_page.addr >> 12));
3924         WREG32(VM_CONTEXT0_CNTL2, 0);
3925         WREG32(VM_CONTEXT0_CNTL, (ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
3926                                   RANGE_PROTECTION_FAULT_ENABLE_DEFAULT));
3927
3928         WREG32(0x15D4, 0);
3929         WREG32(0x15D8, 0);
3930         WREG32(0x15DC, 0);
3931
3932         /* empty context1-15 */
3933         /* set vm size, must be a multiple of 4 */
3934         WREG32(VM_CONTEXT1_PAGE_TABLE_START_ADDR, 0);
3935         WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn);
3936         /* Assign the pt base to something valid for now; the pts used for
3937          * the VMs are determined by the application and are set up and assigned
3938          * on the fly in the vm part of radeon_gart.c
3939          */
3940         for (i = 1; i < 16; i++) {
3941                 if (i < 8)
3942                         WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (i << 2),
3943                                rdev->gart.table_addr >> 12);
3944                 else
3945                         WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2),
3946                                rdev->gart.table_addr >> 12);
3947         }
3948
3949         /* enable context1-15 */
3950         WREG32(VM_CONTEXT1_PROTECTION_FAULT_DEFAULT_ADDR,
3951                (u32)(rdev->dummy_page.addr >> 12));
3952         WREG32(VM_CONTEXT1_CNTL2, 4);
3953         WREG32(VM_CONTEXT1_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(1) |
3954                                 RANGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
3955                                 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT |
3956                                 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
3957                                 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_DEFAULT |
3958                                 PDE0_PROTECTION_FAULT_ENABLE_INTERRUPT |
3959                                 PDE0_PROTECTION_FAULT_ENABLE_DEFAULT |
3960                                 VALID_PROTECTION_FAULT_ENABLE_INTERRUPT |
3961                                 VALID_PROTECTION_FAULT_ENABLE_DEFAULT |
3962                                 READ_PROTECTION_FAULT_ENABLE_INTERRUPT |
3963                                 READ_PROTECTION_FAULT_ENABLE_DEFAULT |
3964                                 WRITE_PROTECTION_FAULT_ENABLE_INTERRUPT |
3965                                 WRITE_PROTECTION_FAULT_ENABLE_DEFAULT);
3966
3967         si_pcie_gart_tlb_flush(rdev);
3968         DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
3969                  (unsigned)(rdev->mc.gtt_size >> 20),
3970                  (unsigned long long)rdev->gart.table_addr);
3971         rdev->gart.ready = true;
3972         return 0;
3973 }
3974
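/**
 * si_pcie_gart_disable - disable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disable all VM contexts, turn off the TLB and L2 cache and
 * unpin the GART page table (SI).
 */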
3975 static void si_pcie_gart_disable(struct radeon_device *rdev)
3976 {
3977         /* Disable all tables */
3978         WREG32(VM_CONTEXT0_CNTL, 0);
3979         WREG32(VM_CONTEXT1_CNTL, 0);
3980         /* Setup TLB control */
3981         WREG32(MC_VM_MX_L1_TLB_CNTL, SYSTEM_ACCESS_MODE_NOT_IN_SYS |
3982                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
3983         /* Setup L2 cache */
3984         WREG32(VM_L2_CNTL, ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
3985                ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
3986                EFFECTIVE_L2_QUEUE_SIZE(7) |
3987                CONTEXT1_IDENTITY_ACCESS_MODE(1));
3988         WREG32(VM_L2_CNTL2, 0);
3989         WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
3990                L2_CACHE_BIGK_FRAGMENT_SIZE(0));
3991         radeon_gart_table_vram_unpin(rdev);
3992 }
3993
3994 static void si_pcie_gart_fini(struct radeon_device *rdev)
3995 {
3996         si_pcie_gart_disable(rdev);
3997         radeon_gart_table_vram_free(rdev);
3998         radeon_gart_fini(rdev);
3999 }
4000
4001 /* vm parser */
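/**
 * si_vm_reg_valid - check if a register may be written from a VM IB
 *
 * @reg: register offset to check
 *
 * Context registers are always allowed; config registers are checked
 * against a whitelist (SI).
 * Returns true if the register write is allowed, false otherwise.
 */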
4002 static bool si_vm_reg_valid(u32 reg)
4003 {
4004         /* context regs are fine */
4005         if (reg >= 0x28000)
4006                 return true;
4007
4008         /* check config regs */
4009         switch (reg) {
4010         case GRBM_GFX_INDEX:
4011         case CP_STRMOUT_CNTL:
4012         case VGT_VTX_VECT_EJECT_REG:
4013         case VGT_CACHE_INVALIDATION:
4014         case VGT_ESGS_RING_SIZE:
4015         case VGT_GSVS_RING_SIZE:
4016         case VGT_GS_VERTEX_REUSE:
4017         case VGT_PRIMITIVE_TYPE:
4018         case VGT_INDEX_TYPE:
4019         case VGT_NUM_INDICES:
4020         case VGT_NUM_INSTANCES:
4021         case VGT_TF_RING_SIZE:
4022         case VGT_HS_OFFCHIP_PARAM:
4023         case VGT_TF_MEMORY_BASE:
4024         case PA_CL_ENHANCE:
4025         case PA_SU_LINE_STIPPLE_VALUE:
4026         case PA_SC_LINE_STIPPLE_STATE:
4027         case PA_SC_ENHANCE:
4028         case SQC_CACHES:
4029         case SPI_STATIC_THREAD_MGMT_1:
4030         case SPI_STATIC_THREAD_MGMT_2:
4031         case SPI_STATIC_THREAD_MGMT_3:
4032         case SPI_PS_MAX_WAVE_ID:
4033         case SPI_CONFIG_CNTL:
4034         case SPI_CONFIG_CNTL_1:
4035         case TA_CNTL_AUX:
4036                 return true;
4037         default:
4038                 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
4039                 return false;
4040         }
4041 }
4042
4043 static int si_vm_packet3_ce_check(struct radeon_device *rdev,
4044                                   u32 *ib, struct radeon_cs_packet *pkt)
4045 {
4046         switch (pkt->opcode) {
4047         case PACKET3_NOP:
4048         case PACKET3_SET_BASE:
4049         case PACKET3_SET_CE_DE_COUNTERS:
4050         case PACKET3_LOAD_CONST_RAM:
4051         case PACKET3_WRITE_CONST_RAM:
4052         case PACKET3_WRITE_CONST_RAM_OFFSET:
4053         case PACKET3_DUMP_CONST_RAM:
4054         case PACKET3_INCREMENT_CE_COUNTER:
4055         case PACKET3_WAIT_ON_DE_COUNTER:
4056         case PACKET3_CE_WRITE:
4057                 break;
4058         default:
4059                 DRM_ERROR("Invalid CE packet3: 0x%x\n", pkt->opcode);
4060                 return -EINVAL;
4061         }
4062         return 0;
4063 }
4064
4065 static int si_vm_packet3_gfx_check(struct radeon_device *rdev,
4066                                    u32 *ib, struct radeon_cs_packet *pkt)
4067 {
4068         u32 idx = pkt->idx + 1;
4069         u32 idx_value = ib[idx];
4070         u32 start_reg, end_reg, reg, i;
4071         u32 command, info;
4072
4073         switch (pkt->opcode) {
4074         case PACKET3_NOP:
4075         case PACKET3_SET_BASE:
4076         case PACKET3_CLEAR_STATE:
4077         case PACKET3_INDEX_BUFFER_SIZE:
4078         case PACKET3_DISPATCH_DIRECT:
4079         case PACKET3_DISPATCH_INDIRECT:
4080         case PACKET3_ALLOC_GDS:
4081         case PACKET3_WRITE_GDS_RAM:
4082         case PACKET3_ATOMIC_GDS:
4083         case PACKET3_ATOMIC:
4084         case PACKET3_OCCLUSION_QUERY:
4085         case PACKET3_SET_PREDICATION:
4086         case PACKET3_COND_EXEC:
4087         case PACKET3_PRED_EXEC:
4088         case PACKET3_DRAW_INDIRECT:
4089         case PACKET3_DRAW_INDEX_INDIRECT:
4090         case PACKET3_INDEX_BASE:
4091         case PACKET3_DRAW_INDEX_2:
4092         case PACKET3_CONTEXT_CONTROL:
4093         case PACKET3_INDEX_TYPE:
4094         case PACKET3_DRAW_INDIRECT_MULTI:
4095         case PACKET3_DRAW_INDEX_AUTO:
4096         case PACKET3_DRAW_INDEX_IMMD:
4097         case PACKET3_NUM_INSTANCES:
4098         case PACKET3_DRAW_INDEX_MULTI_AUTO:
4099         case PACKET3_STRMOUT_BUFFER_UPDATE:
4100         case PACKET3_DRAW_INDEX_OFFSET_2:
4101         case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
4102         case PACKET3_DRAW_INDEX_INDIRECT_MULTI:
4103         case PACKET3_MPEG_INDEX:
4104         case PACKET3_WAIT_REG_MEM:
4105         case PACKET3_MEM_WRITE:
4106         case PACKET3_PFP_SYNC_ME:
4107         case PACKET3_SURFACE_SYNC:
4108         case PACKET3_EVENT_WRITE:
4109         case PACKET3_EVENT_WRITE_EOP:
4110         case PACKET3_EVENT_WRITE_EOS:
4111         case PACKET3_SET_CONTEXT_REG:
4112         case PACKET3_SET_CONTEXT_REG_INDIRECT:
4113         case PACKET3_SET_SH_REG:
4114         case PACKET3_SET_SH_REG_OFFSET:
4115         case PACKET3_INCREMENT_DE_COUNTER:
4116         case PACKET3_WAIT_ON_CE_COUNTER:
4117         case PACKET3_WAIT_ON_AVAIL_BUFFER:
4118         case PACKET3_ME_WRITE:
4119                 break;
4120         case PACKET3_COPY_DATA:
4121                 if ((idx_value & 0xf00) == 0) {
4122                         reg = ib[idx + 3] * 4;
4123                         if (!si_vm_reg_valid(reg))
4124                                 return -EINVAL;
4125                 }
4126                 break;
4127         case PACKET3_WRITE_DATA:
4128                 if ((idx_value & 0xf00) == 0) {
4129                         start_reg = ib[idx + 1] * 4;
4130                         if (idx_value & 0x10000) {
4131                                 if (!si_vm_reg_valid(start_reg))
4132                                         return -EINVAL;
4133                         } else {
4134                                 for (i = 0; i < (pkt->count - 2); i++) {
4135                                         reg = start_reg + (4 * i);
4136                                         if (!si_vm_reg_valid(reg))
4137                                                 return -EINVAL;
4138                                 }
4139                         }
4140                 }
4141                 break;
4142         case PACKET3_COND_WRITE:
4143                 if (idx_value & 0x100) {
4144                         reg = ib[idx + 5] * 4;
4145                         if (!si_vm_reg_valid(reg))
4146                                 return -EINVAL;
4147                 }
4148                 break;
4149         case PACKET3_COPY_DW:
4150                 if (idx_value & 0x2) {
4151                         reg = ib[idx + 3] * 4;
4152                         if (!si_vm_reg_valid(reg))
4153                                 return -EINVAL;
4154                 }
4155                 break;
4156         case PACKET3_SET_CONFIG_REG:
4157                 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
4158                 end_reg = 4 * pkt->count + start_reg - 4;
4159                 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
4160                     (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
4161                     (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
4162                         DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
4163                         return -EINVAL;
4164                 }
4165                 for (i = 0; i < pkt->count; i++) {
4166                         reg = start_reg + (4 * i);
4167                         if (!si_vm_reg_valid(reg))
4168                                 return -EINVAL;
4169                 }
4170                 break;
4171         case PACKET3_CP_DMA:
4172                 command = ib[idx + 4];
4173                 info = ib[idx + 1];
4174                 if (command & PACKET3_CP_DMA_CMD_SAS) {
4175                         /* src address space is register */
4176                         if (((info & 0x60000000) >> 29) == 0) {
4177                                 start_reg = idx_value << 2;
4178                                 if (command & PACKET3_CP_DMA_CMD_SAIC) {
4179                                         reg = start_reg;
4180                                         if (!si_vm_reg_valid(reg)) {
4181                                                 DRM_ERROR("CP DMA Bad SRC register\n");
4182                                                 return -EINVAL;
4183                                         }
4184                                 } else {
4185                                         for (i = 0; i < (command & 0x1fffff); i++) {
4186                                                 reg = start_reg + (4 * i);
4187                                                 if (!si_vm_reg_valid(reg)) {
4188                                                         DRM_ERROR("CP DMA Bad SRC register\n");
4189                                                         return -EINVAL;
4190                                                 }
4191                                         }
4192                                 }
4193                         }
4194                 }
4195                 if (command & PACKET3_CP_DMA_CMD_DAS) {
4196                         /* dst address space is register */
4197                         if (((info & 0x00300000) >> 20) == 0) {
4198                                 start_reg = ib[idx + 2];
4199                                 if (command & PACKET3_CP_DMA_CMD_DAIC) {
4200                                         reg = start_reg;
4201                                         if (!si_vm_reg_valid(reg)) {
4202                                                 DRM_ERROR("CP DMA Bad DST register\n");
4203                                                 return -EINVAL;
4204                                         }
4205                                 } else {
4206                                         for (i = 0; i < (command & 0x1fffff); i++) {
4207                                                 reg = start_reg + (4 * i);
4208                                                 if (!si_vm_reg_valid(reg)) {
4209                                                         DRM_ERROR("CP DMA Bad DST register\n");
4210                                                         return -EINVAL;
4211                                                 }
4212                                         }
4213                                 }
4214                         }
4215                 }
4216                 break;
4217         default:
4218                 DRM_ERROR("Invalid GFX packet3: 0x%x\n", pkt->opcode);
4219                 return -EINVAL;
4220         }
4221         return 0;
4222 }
4223
4224 static int si_vm_packet3_compute_check(struct radeon_device *rdev,
4225                                        u32 *ib, struct radeon_cs_packet *pkt)
4226 {
4227         u32 idx = pkt->idx + 1;
4228         u32 idx_value = ib[idx];
4229         u32 start_reg, reg, i;
4230
4231         switch (pkt->opcode) {
4232         case PACKET3_NOP:
4233         case PACKET3_SET_BASE:
4234         case PACKET3_CLEAR_STATE:
4235         case PACKET3_DISPATCH_DIRECT:
4236         case PACKET3_DISPATCH_INDIRECT:
4237         case PACKET3_ALLOC_GDS:
4238         case PACKET3_WRITE_GDS_RAM:
4239         case PACKET3_ATOMIC_GDS:
4240         case PACKET3_ATOMIC:
4241         case PACKET3_OCCLUSION_QUERY:
4242         case PACKET3_SET_PREDICATION:
4243         case PACKET3_COND_EXEC:
4244         case PACKET3_PRED_EXEC:
4245         case PACKET3_CONTEXT_CONTROL:
4246         case PACKET3_STRMOUT_BUFFER_UPDATE:
4247         case PACKET3_WAIT_REG_MEM:
4248         case PACKET3_MEM_WRITE:
4249         case PACKET3_PFP_SYNC_ME:
4250         case PACKET3_SURFACE_SYNC:
4251         case PACKET3_EVENT_WRITE:
4252         case PACKET3_EVENT_WRITE_EOP:
4253         case PACKET3_EVENT_WRITE_EOS:
4254         case PACKET3_SET_CONTEXT_REG:
4255         case PACKET3_SET_CONTEXT_REG_INDIRECT:
4256         case PACKET3_SET_SH_REG:
4257         case PACKET3_SET_SH_REG_OFFSET:
4258         case PACKET3_INCREMENT_DE_COUNTER:
4259         case PACKET3_WAIT_ON_CE_COUNTER:
4260         case PACKET3_WAIT_ON_AVAIL_BUFFER:
4261         case PACKET3_ME_WRITE:
4262                 break;
4263         case PACKET3_COPY_DATA:
4264                 if ((idx_value & 0xf00) == 0) {
4265                         reg = ib[idx + 3] * 4;
4266                         if (!si_vm_reg_valid(reg))
4267                                 return -EINVAL;
4268                 }
4269                 break;
4270         case PACKET3_WRITE_DATA:
4271                 if ((idx_value & 0xf00) == 0) {
4272                         start_reg = ib[idx + 1] * 4;
4273                         if (idx_value & 0x10000) {
4274                                 if (!si_vm_reg_valid(start_reg))
4275                                         return -EINVAL;
4276                         } else {
4277                                 for (i = 0; i < (pkt->count - 2); i++) {
4278                                         reg = start_reg + (4 * i);
4279                                         if (!si_vm_reg_valid(reg))
4280                                                 return -EINVAL;
4281                                 }
4282                         }
4283                 }
4284                 break;
4285         case PACKET3_COND_WRITE:
4286                 if (idx_value & 0x100) {
4287                         reg = ib[idx + 5] * 4;
4288                         if (!si_vm_reg_valid(reg))
4289                                 return -EINVAL;
4290                 }
4291                 break;
4292         case PACKET3_COPY_DW:
4293                 if (idx_value & 0x2) {
4294                         reg = ib[idx + 3] * 4;
4295                         if (!si_vm_reg_valid(reg))
4296                                 return -EINVAL;
4297                 }
4298                 break;
4299         default:
4300                 DRM_ERROR("Invalid Compute packet3: 0x%x\n", pkt->opcode);
4301                 return -EINVAL;
4302         }
4303         return 0;
4304 }
4305
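/**
 * si_ib_parse - validate an indirect buffer for a VM submission
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to validate
 *
 * Walk the IB and check every PM4 packet against the CE, GFX or
 * compute rules depending on the target ring (SI).
 * Returns 0 if the IB is valid, -EINVAL otherwise.
 */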
4306 int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
4307 {
4308         int ret = 0;
4309         u32 idx = 0;
4310         struct radeon_cs_packet pkt;
4311
4312         do {
4313                 pkt.idx = idx;
4314                 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
4315                 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
4316                 pkt.one_reg_wr = 0;
4317                 switch (pkt.type) {
4318                 case RADEON_PACKET_TYPE0:
4319                         dev_err(rdev->dev, "Packet0 not allowed!\n");
4320                         ret = -EINVAL;
4321                         break;
4322                 case RADEON_PACKET_TYPE2:
4323                         idx += 1;
4324                         break;
4325                 case RADEON_PACKET_TYPE3:
4326                         pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
4327                         if (ib->is_const_ib)
4328                                 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
4329                         else {
4330                                 switch (ib->ring) {
4331                                 case RADEON_RING_TYPE_GFX_INDEX:
4332                                         ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
4333                                         break;
4334                                 case CAYMAN_RING_TYPE_CP1_INDEX:
4335                                 case CAYMAN_RING_TYPE_CP2_INDEX:
4336                                         ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
4337                                         break;
4338                                 default:
4339                                         dev_err(rdev->dev, "Non-PM4 ring %d!\n", ib->ring);
4340                                         ret = -EINVAL;
4341                                         break;
4342                                 }
4343                         }
4344                         idx += pkt.count + 2;
4345                         break;
4346                 default:
4347                         dev_err(rdev->dev, "Unknown packet type %d!\n", pkt.type);
4348                         ret = -EINVAL;
4349                         break;
4350                 }
4351                 if (ret)
4352                         break;
4353         } while (idx < ib->length_dw);
4354
4355         return ret;
4356 }
4357
4358 /*
4359  * vm
4360  */
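/**
 * si_vm_init - initialize the VM manager
 *
 * @rdev: radeon_device pointer
 *
 * Set the number of VMs (16) and the VRAM base offset used by the
 * VM manager (SI).
 * Returns 0 for success.
 */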
4361 int si_vm_init(struct radeon_device *rdev)
4362 {
4363         /* number of VMs */
4364         rdev->vm_manager.nvm = 16;
4365         /* base offset of vram pages */
4366         rdev->vm_manager.vram_base_offset = 0;
4367
4368         return 0;
4369 }
4370
4371 void si_vm_fini(struct radeon_device *rdev)
4372 {
4373 }
4374
4375 /**
4376  * si_vm_decode_fault - print human readable fault info
4377  *
4378  * @rdev: radeon_device pointer
4379  * @status: VM_CONTEXT1_PROTECTION_FAULT_STATUS register value
4380  * @addr: VM_CONTEXT1_PROTECTION_FAULT_ADDR register value
4381  *
4382  * Print human readable fault information (SI).
4383  */
4384 static void si_vm_decode_fault(struct radeon_device *rdev,
4385                                u32 status, u32 addr)
4386 {
4387         u32 mc_id = (status & MEMORY_CLIENT_ID_MASK) >> MEMORY_CLIENT_ID_SHIFT;
4388         u32 vmid = (status & FAULT_VMID_MASK) >> FAULT_VMID_SHIFT;
4389         u32 protections = (status & PROTECTIONS_MASK) >> PROTECTIONS_SHIFT;
4390         char *block;
4391
4392         if (rdev->family == CHIP_TAHITI) {
4393                 switch (mc_id) {
4394                 case 160:
4395                 case 144:
4396                 case 96:
4397                 case 80:
4398                 case 224:
4399                 case 208:
4400                 case 32:
4401                 case 16:
4402                         block = "CB";
4403                         break;
4404                 case 161:
4405                 case 145:
4406                 case 97:
4407                 case 81:
4408                 case 225:
4409                 case 209:
4410                 case 33:
4411                 case 17:
4412                         block = "CB_FMASK";
4413                         break;
4414                 case 162:
4415                 case 146:
4416                 case 98:
4417                 case 82:
4418                 case 226:
4419                 case 210:
4420                 case 34:
4421                 case 18:
4422                         block = "CB_CMASK";
4423                         break;
4424                 case 163:
4425                 case 147:
4426                 case 99:
4427                 case 83:
4428                 case 227:
4429                 case 211:
4430                 case 35:
4431                 case 19:
4432                         block = "CB_IMMED";
4433                         break;
4434                 case 164:
4435                 case 148:
4436                 case 100:
4437                 case 84:
4438                 case 228:
4439                 case 212:
4440                 case 36:
4441                 case 20:
4442                         block = "DB";
4443                         break;
4444                 case 165:
4445                 case 149:
4446                 case 101:
4447                 case 85:
4448                 case 229:
4449                 case 213:
4450                 case 37:
4451                 case 21:
4452                         block = "DB_HTILE";
4453                         break;
4454                 case 167:
4455                 case 151:
4456                 case 103:
4457                 case 87:
4458                 case 231:
4459                 case 215:
4460                 case 39:
4461                 case 23:
4462                         block = "DB_STEN";
4463                         break;
4464                 case 72:
4465                 case 68:
4466                 case 64:
4467                 case 8:
4468                 case 4:
4469                 case 0:
4470                 case 136:
4471                 case 132:
4472                 case 128:
4473                 case 200:
4474                 case 196:
4475                 case 192:
4476                         block = "TC";
4477                         break;
4478                 case 112:
4479                 case 48:
4480                         block = "CP";
4481                         break;
4482                 case 49:
4483                 case 177:
4484                 case 50:
4485                 case 178:
4486                         block = "SH";
4487                         break;
4488                 case 53:
4489                 case 190:
4490                         block = "VGT";
4491                         break;
4492                 case 117:
4493                         block = "IH";
4494                         break;
4495                 case 51:
4496                 case 115:
4497                         block = "RLC";
4498                         break;
4499                 case 119:
4500                 case 183:
4501                         block = "DMA0";
4502                         break;
4503                 case 61:
4504                         block = "DMA1";
4505                         break;
4506                 case 248:
4507                 case 120:
4508                         block = "HDP";
4509                         break;
4510                 default:
4511                         block = "unknown";
4512                         break;
4513                 }
4514         } else {
4515                 switch (mc_id) {
4516                 case 32:
4517                 case 16:
4518                 case 96:
4519                 case 80:
4520                 case 160:
4521                 case 144:
4522                 case 224:
4523                 case 208:
4524                         block = "CB";
4525                         break;
4526                 case 33:
4527                 case 17:
4528                 case 97:
4529                 case 81:
4530                 case 161:
4531                 case 145:
4532                 case 225:
4533                 case 209:
4534                         block = "CB_FMASK";
4535                         break;
4536                 case 34:
4537                 case 18:
4538                 case 98:
4539                 case 82:
4540                 case 162:
4541                 case 146:
4542                 case 226:
4543                 case 210:
4544                         block = "CB_CMASK";
4545                         break;
4546                 case 35:
4547                 case 19:
4548                 case 99:
4549                 case 83:
4550                 case 163:
4551                 case 147:
4552                 case 227:
4553                 case 211:
4554                         block = "CB_IMMED";
4555                         break;
4556                 case 36:
4557                 case 20:
4558                 case 100:
4559                 case 84:
4560                 case 164:
4561                 case 148:
4562                 case 228:
4563                 case 212:
4564                         block = "DB";
4565                         break;
4566                 case 37:
4567                 case 21:
4568                 case 101:
4569                 case 85:
4570                 case 165:
4571                 case 149:
4572                 case 229:
4573                 case 213:
4574                         block = "DB_HTILE";
4575                         break;
4576                 case 39:
4577                 case 23:
4578                 case 103:
4579                 case 87:
4580                 case 167:
4581                 case 151:
4582                 case 231:
4583                 case 215:
4584                         block = "DB_STEN";
4585                         break;
4586                 case 72:
4587                 case 68:
4588                 case 8:
4589                 case 4:
4590                 case 136:
4591                 case 132:
4592                 case 200:
4593                 case 196:
4594                         block = "TC";
4595                         break;
4596                 case 112:
4597                 case 48:
4598                         block = "CP";
4599                         break;
4600                 case 49:
4601                 case 177:
4602                 case 50:
4603                 case 178:
4604                         block = "SH";
4605                         break;
4606                 case 53:
4607                         block = "VGT";
4608                         break;
4609                 case 117:
4610                         block = "IH";
4611                         break;
4612                 case 51:
4613                 case 115:
4614                         block = "RLC";
4615                         break;
4616                 case 119:
4617                 case 183:
4618                         block = "DMA0";
4619                         break;
4620                 case 61:
4621                         block = "DMA1";
4622                         break;
4623                 case 248:
4624                 case 120:
4625                         block = "HDP";
4626                         break;
4627                 default:
4628                         block = "unknown";
4629                         break;
4630                 }
4631         }
4632
4633         printk("VM fault (0x%02x, vmid %d) at page %u, %s from %s (%d)\n",
4634                protections, vmid, addr,
4635                (status & MEMORY_CLIENT_RW_MASK) ? "write" : "read",
4636                block, mc_id);
4637 }
4638
4639 /**
4640  * si_vm_set_page - update the page tables using the CP
4641  *
4642  * @rdev: radeon_device pointer
4643  * @ib: indirect buffer to fill with commands
4644  * @pe: addr of the page entry
4645  * @addr: dst addr to write into pe
4646  * @count: number of page entries to update
4647  * @incr: increase next addr by incr bytes
4648  * @flags: access flags
4649  *
4650  * Update the page tables using the CP (SI).
4651  */
4652 void si_vm_set_page(struct radeon_device *rdev,
4653                     struct radeon_ib *ib,
4654                     uint64_t pe,
4655                     uint64_t addr, unsigned count,
4656                     uint32_t incr, uint32_t flags)
4657 {
4658         uint32_t r600_flags = cayman_vm_page_flags(rdev, flags);
4659         uint64_t value;
4660         unsigned ndw;
4661
4662         if (rdev->asic->vm.pt_ring_index == RADEON_RING_TYPE_GFX_INDEX) {
4663                 while (count) {
4664                         ndw = 2 + count * 2;
4665                         if (ndw > 0x3FFE)
4666                                 ndw = 0x3FFE;
4667
4668                         ib->ptr[ib->length_dw++] = PACKET3(PACKET3_WRITE_DATA, ndw);
4669                         ib->ptr[ib->length_dw++] = (WRITE_DATA_ENGINE_SEL(0) |
4670                                         WRITE_DATA_DST_SEL(1));
4671                         ib->ptr[ib->length_dw++] = pe;
4672                         ib->ptr[ib->length_dw++] = upper_32_bits(pe);
4673                         for (; ndw > 2; ndw -= 2, --count, pe += 8) {
4674                                 if (flags & RADEON_VM_PAGE_SYSTEM) {
4675                                         value = radeon_vm_map_gart(rdev, addr);
4676                                         value &= 0xFFFFFFFFFFFFF000ULL;
4677                                 } else if (flags & RADEON_VM_PAGE_VALID) {
4678                                         value = addr;
4679                                 } else {
4680                                         value = 0;
4681                                 }
4682                                 addr += incr;
4683                                 value |= r600_flags;
4684                                 ib->ptr[ib->length_dw++] = value;
4685                                 ib->ptr[ib->length_dw++] = upper_32_bits(value);
4686                         }
4687                 }
4688         } else {
4689                 /* DMA */
4690                 si_dma_vm_set_page(rdev, ib, pe, addr, count, incr, flags);
4691         }
4692 }
4693
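/**
 * si_vm_flush - flush the TLB for a VM using the CP
 *
 * @rdev: radeon_device pointer
 * @ridx: index of the ring to emit the flush on
 * @vm: VM to flush
 *
 * Update the page table base address for the VM id, flush the HDP
 * cache and invalidate the TLB for that VM, then sync the PFP to
 * the ME (SI).
 */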
4694 void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
4695 {
4696         struct radeon_ring *ring = &rdev->ring[ridx];
4697
4698         if (vm == NULL)
4699                 return;
4700
4701         /* write new base address */
4702         radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4703         radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4704                                  WRITE_DATA_DST_SEL(0)));
4705
4706         if (vm->id < 8) {
4707                 radeon_ring_write(ring,
4708                                   (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2);
4709         } else {
4710                 radeon_ring_write(ring,
4711                                   (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm->id - 8) << 2)) >> 2);
4712         }
4713         radeon_ring_write(ring, 0);
4714         radeon_ring_write(ring, vm->pd_gpu_addr >> 12);
4715
4716         /* flush hdp cache */
4717         radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4718         radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4719                                  WRITE_DATA_DST_SEL(0)));
4720         radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
4721         radeon_ring_write(ring, 0);
4722         radeon_ring_write(ring, 0x1);
4723
4724         /* bits 0-15 are the VM contexts 0-15 */
4725         radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4726         radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4727                                  WRITE_DATA_DST_SEL(0)));
4728         radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
4729         radeon_ring_write(ring, 0);
4730         radeon_ring_write(ring, 1 << vm->id);
4731
4732         /* sync PFP to ME, otherwise we might get invalid PFP reads */
4733         radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
4734         radeon_ring_write(ring, 0x0);
4735 }
4736
4737 /*
4738  *  Power and clock gating
4739  */
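/**
 * si_wait_for_rlc_serdes - wait for the RLC serdes to go idle
 *
 * @rdev: radeon_device pointer
 *
 * Poll the RLC serdes master busy registers until they report idle
 * or the timeout expires (SI).
 */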
4740 static void si_wait_for_rlc_serdes(struct radeon_device *rdev)
4741 {
4742         int i;
4743
4744         for (i = 0; i < rdev->usec_timeout; i++) {
4745                 if (RREG32(RLC_SERDES_MASTER_BUSY_0) == 0)
4746                         break;
4747                 udelay(1);
4748         }
4749
4750         for (i = 0; i < rdev->usec_timeout; i++) {
4751                 if (RREG32(RLC_SERDES_MASTER_BUSY_1) == 0)
4752                         break;
4753                 udelay(1);
4754         }
4755 }
4756
4757 static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
4758                                          bool enable)
4759 {
4760         u32 tmp = RREG32(CP_INT_CNTL_RING0);
4761         u32 mask;
4762         int i;
4763
4764         if (enable)
4765                 tmp |= (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4766         else
4767                 tmp &= ~(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4768         WREG32(CP_INT_CNTL_RING0, tmp);
4769
4770         if (!enable) {
4771                 /* read a gfx register */
4772                 tmp = RREG32(DB_DEPTH_INFO);
4773
4774                 mask = RLC_BUSY_STATUS | GFX_POWER_STATUS | GFX_CLOCK_STATUS | GFX_LS_STATUS;
4775                 for (i = 0; i < rdev->usec_timeout; i++) {
4776                         if ((RREG32(RLC_STAT) & mask) == (GFX_CLOCK_STATUS | GFX_POWER_STATUS))
4777                                 break;
4778                         udelay(1);
4779                 }
4780         }
4781 }
4782
4783 static void si_set_uvd_dcm(struct radeon_device *rdev,
4784                            bool sw_mode)
4785 {
4786         u32 tmp, tmp2;
4787
4788         tmp = RREG32(UVD_CGC_CTRL);
4789         tmp &= ~(CLK_OD_MASK | CG_DT_MASK);
4790         tmp |= DCM | CG_DT(1) | CLK_OD(4);
4791
4792         if (sw_mode) {
4793                 tmp &= ~0x7ffff800;
4794                 tmp2 = DYN_OR_EN | DYN_RR_EN | G_DIV_ID(7);
4795         } else {
4796                 tmp |= 0x7ffff800;
4797                 tmp2 = 0;
4798         }
4799
4800         WREG32(UVD_CGC_CTRL, tmp);
4801         WREG32_UVD_CTX(UVD_CGC_CTRL2, tmp2);
4802 }
4803
4804 void si_init_uvd_internal_cg(struct radeon_device *rdev)
4805 {
4806         bool hw_mode = true;
4807
4808         if (hw_mode) {
4809                 si_set_uvd_dcm(rdev, false);
4810         } else {
4811                 u32 tmp = RREG32(UVD_CGC_CTRL);
4812                 tmp &= ~DCM;
4813                 WREG32(UVD_CGC_CTRL, tmp);
4814         }
4815 }
4816
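/**
 * si_halt_rlc - halt the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Disable the RLC if it is running and wait for the serdes to go
 * idle (SI).
 * Returns the original RLC_CNTL value so it can be restored later
 * with si_update_rlc().
 */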
4817 static u32 si_halt_rlc(struct radeon_device *rdev)
4818 {
4819         u32 data, orig;
4820
4821         orig = data = RREG32(RLC_CNTL);
4822
4823         if (data & RLC_ENABLE) {
4824                 data &= ~RLC_ENABLE;
4825                 WREG32(RLC_CNTL, data);
4826
4827                 si_wait_for_rlc_serdes(rdev);
4828         }
4829
4830         return orig;
4831 }
4832
4833 static void si_update_rlc(struct radeon_device *rdev, u32 rlc)
4834 {
4835         u32 tmp;
4836
4837         tmp = RREG32(RLC_CNTL);
4838         if (tmp != rlc)
4839                 WREG32(RLC_CNTL, rlc);
4840 }
4841
4842 static void si_enable_dma_pg(struct radeon_device *rdev, bool enable)
4843 {
4844         u32 data, orig;
4845
4846         orig = data = RREG32(DMA_PG);
4847         if (enable)
4848                 data |= PG_CNTL_ENABLE;
4849         else
4850                 data &= ~PG_CNTL_ENABLE;
4851         if (orig != data)
4852                 WREG32(DMA_PG, data);
4853 }
4854
4855 static void si_init_dma_pg(struct radeon_device *rdev)
4856 {
4857         u32 tmp;
4858
4859         WREG32(DMA_PGFSM_WRITE,  0x00002000);
4860         WREG32(DMA_PGFSM_CONFIG, 0x100010ff);
4861
4862         for (tmp = 0; tmp < 5; tmp++)
4863                 WREG32(DMA_PGFSM_WRITE, 0);
4864 }
4865
4866 static void si_enable_gfx_cgpg(struct radeon_device *rdev,
4867                                bool enable)
4868 {
4869         u32 tmp;
4870
4871         if (enable) {
4872                 tmp = RLC_PUD(0x10) | RLC_PDD(0x10) | RLC_TTPD(0x10) | RLC_MSD(0x10);
4873                 WREG32(RLC_TTOP_D, tmp);
4874
4875                 tmp = RREG32(RLC_PG_CNTL);
4876                 tmp |= GFX_PG_ENABLE;
4877                 WREG32(RLC_PG_CNTL, tmp);
4878
4879                 tmp = RREG32(RLC_AUTO_PG_CTRL);
4880                 tmp |= AUTO_PG_EN;
4881                 WREG32(RLC_AUTO_PG_CTRL, tmp);
4882         } else {
4883                 tmp = RREG32(RLC_AUTO_PG_CTRL);
4884                 tmp &= ~AUTO_PG_EN;
4885                 WREG32(RLC_AUTO_PG_CTRL, tmp);
4886
4887                 tmp = RREG32(DB_RENDER_CONTROL);
4888         }
4889 }
4890
4891 static void si_init_gfx_cgpg(struct radeon_device *rdev)
4892 {
4893         u32 tmp;
4894
4895         WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4896
4897         tmp = RREG32(RLC_PG_CNTL);
4898         tmp |= GFX_PG_SRC;
4899         WREG32(RLC_PG_CNTL, tmp);
4900
4901         WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4902
4903         tmp = RREG32(RLC_AUTO_PG_CTRL);
4904
4905         tmp &= ~GRBM_REG_SGIT_MASK;
4906         tmp |= GRBM_REG_SGIT(0x700);
4907         tmp &= ~PG_AFTER_GRBM_REG_ST_MASK;
4908         WREG32(RLC_AUTO_PG_CTRL, tmp);
4909 }
4910
4911 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
4912 {
4913         u32 mask = 0, tmp, tmp1;
4914         int i;
4915
4916         si_select_se_sh(rdev, se, sh);
4917         tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
4918         tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
4919         si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
4920
4921         tmp &= 0xffff0000;
4922
4923         tmp |= tmp1;
4924         tmp >>= 16;
4925
4926         for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) {
4927                 mask <<= 1;
4928                 mask |= 1;
4929         }
4930
4931         return (~tmp) & mask;
4932 }
4933
4934 static void si_init_ao_cu_mask(struct radeon_device *rdev)
4935 {
4936         u32 i, j, k, active_cu_number = 0;
4937         u32 mask, counter, cu_bitmap;
4938         u32 tmp = 0;
4939
4940         for (i = 0; i < rdev->config.si.max_shader_engines; i++) {
4941                 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) {
4942                         mask = 1;
4943                         cu_bitmap = 0;
4944                         counter  = 0;
4945                         for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) {
4946                                 if (si_get_cu_active_bitmap(rdev, i, j) & mask) {
4947                                         if (counter < 2)
4948                                                 cu_bitmap |= mask;
4949                                         counter++;
4950                                 }
4951                                 mask <<= 1;
4952                         }
4953
4954                         active_cu_number += counter;
4955                         tmp |= (cu_bitmap << (i * 16 + j * 8));
4956                 }
4957         }
4958
4959         WREG32(RLC_PG_AO_CU_MASK, tmp);
4960
4961         tmp = RREG32(RLC_MAX_PG_CU);
4962         tmp &= ~MAX_PU_CU_MASK;
4963         tmp |= MAX_PU_CU(active_cu_number);
4964         WREG32(RLC_MAX_PG_CU, tmp);
4965 }
4966
4967 static void si_enable_cgcg(struct radeon_device *rdev,
4968                            bool enable)
4969 {
4970         u32 data, orig, tmp;
4971
4972         orig = data = RREG32(RLC_CGCG_CGLS_CTRL);
4973
4974         si_enable_gui_idle_interrupt(rdev, enable);
4975
4976         if (enable) {
4977                 WREG32(RLC_GCPM_GENERAL_3, 0x00000080);
4978
4979                 tmp = si_halt_rlc(rdev);
4980
4981                 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
4982                 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
4983                 WREG32(RLC_SERDES_WR_CTRL, 0x00b000ff);
4984
4985                 si_wait_for_rlc_serdes(rdev);
4986
4987                 si_update_rlc(rdev, tmp);
4988
4989                 WREG32(RLC_SERDES_WR_CTRL, 0x007000ff);
4990
4991                 data |= CGCG_EN | CGLS_EN;
4992         } else {
4993                 RREG32(CB_CGTT_SCLK_CTRL);
4994                 RREG32(CB_CGTT_SCLK_CTRL);
4995                 RREG32(CB_CGTT_SCLK_CTRL);
4996                 RREG32(CB_CGTT_SCLK_CTRL);
4997
4998                 data &= ~(CGCG_EN | CGLS_EN);
4999         }
5000
5001         if (orig != data)
5002                 WREG32(RLC_CGCG_CGLS_CTRL, data);
5003 }
5004
5005 static void si_enable_mgcg(struct radeon_device *rdev,
5006                            bool enable)
5007 {
5008         u32 data, orig, tmp = 0;
5009
5010         if (enable) {
5011                 orig = data = RREG32(CGTS_SM_CTRL_REG);
5012                 data = 0x96940200;
5013                 if (orig != data)
5014                         WREG32(CGTS_SM_CTRL_REG, data);
5015
5016                 orig = data = RREG32(CP_MEM_SLP_CNTL);
5017                 data |= CP_MEM_LS_EN;
5018                 if (orig != data)
5019                         WREG32(CP_MEM_SLP_CNTL, data);
5020
5021                 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5022                 data &= 0xffffffc0;
5023                 if (orig != data)
5024                         WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5025
5026                 tmp = si_halt_rlc(rdev);
5027
5028                 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5029                 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5030                 WREG32(RLC_SERDES_WR_CTRL, 0x00d000ff);
5031
5032                 si_update_rlc(rdev, tmp);
5033         } else {
5034                 orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
5035                 data |= 0x00000003;
5036                 if (orig != data)
5037                         WREG32(RLC_CGTT_MGCG_OVERRIDE, data);
5038
5039                 data = RREG32(CP_MEM_SLP_CNTL);
5040                 if (data & CP_MEM_LS_EN) {
5041                         data &= ~CP_MEM_LS_EN;
5042                         WREG32(CP_MEM_SLP_CNTL, data);
5043                 }
5044                 orig = data = RREG32(CGTS_SM_CTRL_REG);
5045                 data |= LS_OVERRIDE | OVERRIDE;
5046                 if (orig != data)
5047                         WREG32(CGTS_SM_CTRL_REG, data);
5048
5049                 tmp = si_halt_rlc(rdev);
5050
5051                 WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
5052                 WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
5053                 WREG32(RLC_SERDES_WR_CTRL, 0x00e000ff);
5054
5055                 si_update_rlc(rdev, tmp);
5056         }
5057 }
5058
5059 static void si_enable_uvd_mgcg(struct radeon_device *rdev,
5060                                bool enable)
5061 {
5062         u32 orig, data, tmp;
5063
5064         if (enable) {
5065                 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5066                 tmp |= 0x3fff;
5067                 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5068
5069                 orig = data = RREG32(UVD_CGC_CTRL);
5070                 data |= DCM;
5071                 if (orig != data)
5072                         WREG32(UVD_CGC_CTRL, data);
5073
5074                 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0);
5075                 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0);
5076         } else {
5077                 tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
5078                 tmp &= ~0x3fff;
5079                 WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);
5080
5081                 orig = data = RREG32(UVD_CGC_CTRL);
5082                 data &= ~DCM;
5083                 if (orig != data)
5084                         WREG32(UVD_CGC_CTRL, data);
5085
5086                 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0xffffffff);
5087                 WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0xffffffff);
5088         }
5089 }
5090
5091 static const u32 mc_cg_registers[] =
5092 {
5093         MC_HUB_MISC_HUB_CG,
5094         MC_HUB_MISC_SIP_CG,
5095         MC_HUB_MISC_VM_CG,
5096         MC_XPB_CLK_GAT,
5097         ATC_MISC_CG,
5098         MC_CITF_MISC_WR_CG,
5099         MC_CITF_MISC_RD_CG,
5100         MC_CITF_MISC_VM_CG,
5101         VM_L2_CG,
5102 };
5103
5104 static void si_enable_mc_ls(struct radeon_device *rdev,
5105                             bool enable)
5106 {
5107         int i;
5108         u32 orig, data;
5109
5110         for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5111                 orig = data = RREG32(mc_cg_registers[i]);
5112                 if (enable)
5113                         data |= MC_LS_ENABLE;
5114                 else
5115                         data &= ~MC_LS_ENABLE;
5116                 if (data != orig)
5117                         WREG32(mc_cg_registers[i], data);
5118         }
5119 }
5120
5121
5122 static void si_init_cg(struct radeon_device *rdev)
5123 {
5124         if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)
5125                 si_enable_mgcg(rdev, true);
5126         if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)
5127                 si_enable_cgcg(rdev, false/*true*/);
5128         /* Disable MC LS on tahiti */
5129         if (!(rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
5130                 si_enable_mc_ls(rdev, false);
5131         if (rdev->has_uvd) {
5132                 if (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)
5133                         si_enable_uvd_mgcg(rdev, true);
5134                 si_init_uvd_internal_cg(rdev);
5135         }
5136 }
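/* Illustrative note (editorial addition, not from the original file): si_init_cg()
 * only touches features whose bits are set in rdev->cg_flags, which is assumed
 * to have been populated per-ASIC earlier during device init.  A hypothetical
 * Tahiti-like configuration might look like:
 *
 *   rdev->cg_flags = RADEON_CG_SUPPORT_GFX_MGCG |
 *                    RADEON_CG_SUPPORT_GFX_CGCG |
 *                    RADEON_CG_SUPPORT_UVD_MGCG;
 *
 * With RADEON_CG_SUPPORT_MC_LS absent from the mask, the code above explicitly
 * disables MC light sleep; note also that CGCG is currently forced off by the
 * hard-coded false in the si_enable_cgcg() call, even when its flag is set.
 */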
5137
5138 static void si_fini_cg(struct radeon_device *rdev)
5139 {
5140         if (rdev->has_uvd) {
5141                 if (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)
5142                         si_enable_uvd_mgcg(rdev, false);
5143         }
5144         if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)
5145                 si_enable_cgcg(rdev, false);
5146         if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)
5147                 si_enable_mgcg(rdev, false);
5148 }
5149
5150 static void si_init_pg(struct radeon_device *rdev)
5151 {
5152         if (rdev->pg_flags) {
5153                 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) {
5154                         si_init_dma_pg(rdev);
5155                         si_enable_dma_pg(rdev, true);
5156                 }
5157                 si_init_ao_cu_mask(rdev);
5158                 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_CG) {
5159                         si_init_gfx_cgpg(rdev);
5160                         si_enable_gfx_cgpg(rdev, true);
5161                 }
5162         } else {
5163                 WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
5164                 WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
5165         }
5166 }
5167
5168 static void si_fini_pg(struct radeon_device *rdev)
5169 {
5170         if (rdev->pg_flags) {
5171                 if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA)
5172                         si_enable_dma_pg(rdev, false);
5173                 if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_CG)
5174                         si_enable_gfx_cgpg(rdev, false);
5175         }
5176 }
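/* Illustrative sketch (assumption, not part of the original file): the PG paths
 * are likewise gated on rdev->pg_flags.  For example, a configuration enabling
 * only SDMA power gating could be expressed as:
 *
 *   rdev->pg_flags = RADEON_PG_SUPPORT_SDMA;
 *
 * in which case si_init_pg() initializes and enables DMA PG and programs the
 * always-on CU mask, while the GFX CG/PG setup is skipped.  When pg_flags is
 * zero, only the RLC save/restore and clear-state base addresses are programmed
 * (the else branch in si_init_pg()).
 */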
5177
5178 /*
5179  * RLC
5180  */
5181 void si_rlc_reset(struct radeon_device *rdev)
5182 {
5183         u32 tmp = RREG32(GRBM_SOFT_RESET);
5184
5185         tmp |= SOFT_RESET_RLC;
5186         WREG32(GRBM_SOFT_RESET, tmp);
5187         udelay(50);
5188         tmp &= ~SOFT_RESET_RLC;
5189         WREG32(GRBM_SOFT_RESET, tmp);
5190         udelay(50);
5191 }
5192
5193 static void si_rlc_stop(struct radeon_device *rdev)
5194 {
5195         WREG32(RLC_CNTL, 0);
5196
5197         si_enable_gui_idle_interrupt(rdev, false);
5198
5199         si_wait_for_rlc_serdes(rdev);
5200 }
5201
5202 static void si_rlc_start(struct radeon_device *rdev)
5203 {
5204         WREG32(RLC_CNTL, RLC_ENABLE);
5205
5206         si_enable_gui_idle_interrupt(rdev, true);
5207
5208         udelay(50);
5209 }
5210
5211 static bool si_lbpw_supported(struct radeon_device *rdev)
5212 {
5213         u32 tmp;
5214
5215         /* Enable LBPW only for DDR3 */
5216         tmp = RREG32(MC_SEQ_MISC0);
5217         if ((tmp & 0xF0000000) == 0xB0000000)
5218                 return true;
5219         return false;
5220 }
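/* Note (editorial, hedged): the check above assumes bits [31:28] of MC_SEQ_MISC0
 * encode the memory type and that the value 0xB identifies DDR3, so
 * load-balance-per-watt is presumably left disabled on GDDR5 boards.
 */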
5221
5222 static void si_enable_lbpw(struct radeon_device *rdev, bool enable)
5223 {
5224         u32 tmp;
5225
5226         tmp = RREG32(RLC_LB_CNTL);
5227         if (enable)
5228                 tmp |= LOAD_BALANCE_ENABLE;
5229         else
5230                 tmp &= ~LOAD_BALANCE_ENABLE;
5231         WREG32(RLC_LB_CNTL, tmp);
5232
5233         if (!enable) {
5234                 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5235                 WREG32(SPI_LB_CU_MASK, 0x00ff);
5236         }
5237 }
5238
5239 static int si_rlc_resume(struct radeon_device *rdev)
5240 {
5241         u32 i;
5242         const __be32 *fw_data;
5243
5244         if (!rdev->rlc_fw)
5245                 return -EINVAL;
5246
5247         si_rlc_stop(rdev);
5248
5249         si_rlc_reset(rdev);
5250
5251         si_init_pg(rdev);
5252
5253         si_init_cg(rdev);
5254
5255         WREG32(RLC_RL_BASE, 0);
5256         WREG32(RLC_RL_SIZE, 0);
5257         WREG32(RLC_LB_CNTL, 0);
5258         WREG32(RLC_LB_CNTR_MAX, 0xffffffff);
5259         WREG32(RLC_LB_CNTR_INIT, 0);
5260         WREG32(RLC_LB_INIT_CU_MASK, 0xffffffff);
5261
5262         WREG32(RLC_MC_CNTL, 0);
5263         WREG32(RLC_UCODE_CNTL, 0);
5264
5265         fw_data = (const __be32 *)rdev->rlc_fw->data;
5266         for (i = 0; i < SI_RLC_UCODE_SIZE; i++) {
5267                 WREG32(RLC_UCODE_ADDR, i);
5268                 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
5269         }
5270         WREG32(RLC_UCODE_ADDR, 0);
5271
5272         si_enable_lbpw(rdev, si_lbpw_supported(rdev));
5273
5274         si_rlc_start(rdev);
5275
5276         return 0;
5277 }
5278
5279 static void si_enable_interrupts(struct radeon_device *rdev)
5280 {
5281         u32 ih_cntl = RREG32(IH_CNTL);
5282         u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5283
5284         ih_cntl |= ENABLE_INTR;
5285         ih_rb_cntl |= IH_RB_ENABLE;
5286         WREG32(IH_CNTL, ih_cntl);
5287         WREG32(IH_RB_CNTL, ih_rb_cntl);
5288         rdev->ih.enabled = true;
5289 }
5290
5291 static void si_disable_interrupts(struct radeon_device *rdev)
5292 {
5293         u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5294         u32 ih_cntl = RREG32(IH_CNTL);
5295
5296         ih_rb_cntl &= ~IH_RB_ENABLE;
5297         ih_cntl &= ~ENABLE_INTR;
5298         WREG32(IH_RB_CNTL, ih_rb_cntl);
5299         WREG32(IH_CNTL, ih_cntl);
5300         /* set rptr, wptr to 0 */
5301         WREG32(IH_RB_RPTR, 0);
5302         WREG32(IH_RB_WPTR, 0);
5303         rdev->ih.enabled = false;
5304         rdev->ih.rptr = 0;
5305 }
5306
5307 static void si_disable_interrupt_state(struct radeon_device *rdev)
5308 {
5309         u32 tmp;
5310
5311         WREG32(CP_INT_CNTL_RING0, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
5312         WREG32(CP_INT_CNTL_RING1, 0);
5313         WREG32(CP_INT_CNTL_RING2, 0);
5314         tmp = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5315         WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, tmp);
5316         tmp = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5317         WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, tmp);
5318         WREG32(GRBM_INT_CNTL, 0);
5319         if (rdev->num_crtc >= 2) {
5320                 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5321                 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5322         }
5323         if (rdev->num_crtc >= 4) {
5324                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5325                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5326         }
5327         if (rdev->num_crtc >= 6) {
5328                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5329                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5330         }
5331
5332         if (rdev->num_crtc >= 2) {
5333                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
5334                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
5335         }
5336         if (rdev->num_crtc >= 4) {
5337                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
5338                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
5339         }
5340         if (rdev->num_crtc >= 6) {
5341                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
5342                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
5343         }
5344
5345         if (!ASIC_IS_NODCE(rdev)) {
5346                 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
5347
5348                 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5349                 WREG32(DC_HPD1_INT_CONTROL, tmp);
5350                 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5351                 WREG32(DC_HPD2_INT_CONTROL, tmp);
5352                 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5353                 WREG32(DC_HPD3_INT_CONTROL, tmp);
5354                 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5355                 WREG32(DC_HPD4_INT_CONTROL, tmp);
5356                 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5357                 WREG32(DC_HPD5_INT_CONTROL, tmp);
5358                 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
5359                 WREG32(DC_HPD6_INT_CONTROL, tmp);
5360         }
5361 }
5362
5363 static int si_irq_init(struct radeon_device *rdev)
5364 {
5365         int ret = 0;
5366         int rb_bufsz;
5367         u32 interrupt_cntl, ih_cntl, ih_rb_cntl;
5368
5369         /* allocate ring */
5370         ret = r600_ih_ring_alloc(rdev);
5371         if (ret)
5372                 return ret;
5373
5374         /* disable irqs */
5375         si_disable_interrupts(rdev);
5376
5377         /* init rlc */
5378         ret = si_rlc_resume(rdev);
5379         if (ret) {
5380                 r600_ih_ring_fini(rdev);
5381                 return ret;
5382         }
5383
5384         /* setup interrupt control */
5385         /* set dummy read address to ring address */
5386         WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
5387         interrupt_cntl = RREG32(INTERRUPT_CNTL);
5388         /* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
5389          * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
5390          */
5391         interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
5392         /* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
5393         interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
5394         WREG32(INTERRUPT_CNTL, interrupt_cntl);
5395
5396         WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
5397         rb_bufsz = drm_order(rdev->ih.ring_size / 4);
5398
5399         ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
5400                       IH_WPTR_OVERFLOW_CLEAR |
5401                       (rb_bufsz << 1));
5402
5403         if (rdev->wb.enabled)
5404                 ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;
5405
5406         /* set the writeback address whether it's enabled or not */
5407         WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
5408         WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);
5409
5410         WREG32(IH_RB_CNTL, ih_rb_cntl);
5411
5412         /* set rptr, wptr to 0 */
5413         WREG32(IH_RB_RPTR, 0);
5414         WREG32(IH_RB_WPTR, 0);
5415
5416         /* Default settings for IH_CNTL (disabled at first) */
5417         ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
5418         /* RPTR_REARM only works if msi's are enabled */
5419         if (rdev->msi_enabled)
5420                 ih_cntl |= RPTR_REARM;
5421         WREG32(IH_CNTL, ih_cntl);
5422
5423         /* force the active interrupt state to all disabled */
5424         si_disable_interrupt_state(rdev);
5425
5426         pci_set_master(rdev->pdev);
5427
5428         /* enable irqs */
5429         si_enable_interrupts(rdev);
5430
5431         return ret;
5432 }
5433
5434 int si_irq_set(struct radeon_device *rdev)
5435 {
5436         u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
5437         u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
5438         u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
5439         u32 hpd1 = 0, hpd2 = 0, hpd3 = 0, hpd4 = 0, hpd5 = 0, hpd6 = 0;
5440         u32 grbm_int_cntl = 0;
5441         u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
5442         u32 dma_cntl, dma_cntl1;
5443         u32 thermal_int = 0;
5444
5445         if (!rdev->irq.installed) {
5446                 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
5447                 return -EINVAL;
5448         }
5449         /* don't enable anything if the ih is disabled */
5450         if (!rdev->ih.enabled) {
5451                 si_disable_interrupts(rdev);
5452                 /* force the active interrupt state to all disabled */
5453                 si_disable_interrupt_state(rdev);
5454                 return 0;
5455         }
5456
5457         if (!ASIC_IS_NODCE(rdev)) {
5458                 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
5459                 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
5460                 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
5461                 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
5462                 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
5463                 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
5464         }
5465
5466         dma_cntl = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
5467         dma_cntl1 = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
5468
5469         thermal_int = RREG32(CG_THERMAL_INT) &
5470                 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
5471
5472         /* enable CP interrupts on all rings */
5473         if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
5474                 DRM_DEBUG("si_irq_set: sw int gfx\n");
5475                 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
5476         }
5477         if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
5478                 DRM_DEBUG("si_irq_set: sw int cp1\n");
5479                 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
5480         }
5481         if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
5482                 DRM_DEBUG("si_irq_set: sw int cp2\n");
5483                 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
5484         }
5485         if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
5486                 DRM_DEBUG("si_irq_set: sw int dma\n");
5487                 dma_cntl |= TRAP_ENABLE;
5488         }
5489
5490         if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
5491                 DRM_DEBUG("si_irq_set: sw int dma1\n");
5492                 dma_cntl1 |= TRAP_ENABLE;
5493         }
5494         if (rdev->irq.crtc_vblank_int[0] ||
5495             atomic_read(&rdev->irq.pflip[0])) {
5496                 DRM_DEBUG("si_irq_set: vblank 0\n");
5497                 crtc1 |= VBLANK_INT_MASK;
5498         }
5499         if (rdev->irq.crtc_vblank_int[1] ||
5500             atomic_read(&rdev->irq.pflip[1])) {
5501                 DRM_DEBUG("si_irq_set: vblank 1\n");
5502                 crtc2 |= VBLANK_INT_MASK;
5503         }
5504         if (rdev->irq.crtc_vblank_int[2] ||
5505             atomic_read(&rdev->irq.pflip[2])) {
5506                 DRM_DEBUG("si_irq_set: vblank 2\n");
5507                 crtc3 |= VBLANK_INT_MASK;
5508         }
5509         if (rdev->irq.crtc_vblank_int[3] ||
5510             atomic_read(&rdev->irq.pflip[3])) {
5511                 DRM_DEBUG("si_irq_set: vblank 3\n");
5512                 crtc4 |= VBLANK_INT_MASK;
5513         }
5514         if (rdev->irq.crtc_vblank_int[4] ||
5515             atomic_read(&rdev->irq.pflip[4])) {
5516                 DRM_DEBUG("si_irq_set: vblank 4\n");
5517                 crtc5 |= VBLANK_INT_MASK;
5518         }
5519         if (rdev->irq.crtc_vblank_int[5] ||
5520             atomic_read(&rdev->irq.pflip[5])) {
5521                 DRM_DEBUG("si_irq_set: vblank 5\n");
5522                 crtc6 |= VBLANK_INT_MASK;
5523         }
5524         if (rdev->irq.hpd[0]) {
5525                 DRM_DEBUG("si_irq_set: hpd 1\n");
5526                 hpd1 |= DC_HPDx_INT_EN;
5527         }
5528         if (rdev->irq.hpd[1]) {
5529                 DRM_DEBUG("si_irq_set: hpd 2\n");
5530                 hpd2 |= DC_HPDx_INT_EN;
5531         }
5532         if (rdev->irq.hpd[2]) {
5533                 DRM_DEBUG("si_irq_set: hpd 3\n");
5534                 hpd3 |= DC_HPDx_INT_EN;
5535         }
5536         if (rdev->irq.hpd[3]) {
5537                 DRM_DEBUG("si_irq_set: hpd 4\n");
5538                 hpd4 |= DC_HPDx_INT_EN;
5539         }
5540         if (rdev->irq.hpd[4]) {
5541                 DRM_DEBUG("si_irq_set: hpd 5\n");
5542                 hpd5 |= DC_HPDx_INT_EN;
5543         }
5544         if (rdev->irq.hpd[5]) {
5545                 DRM_DEBUG("si_irq_set: hpd 6\n");
5546                 hpd6 |= DC_HPDx_INT_EN;
5547         }
5548
5549         WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
5550         WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
5551         WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);
5552
5553         WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, dma_cntl);
5554         WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, dma_cntl1);
5555
5556         WREG32(GRBM_INT_CNTL, grbm_int_cntl);
5557
5558         if (rdev->irq.dpm_thermal) {
5559                 DRM_DEBUG("dpm thermal\n");
5560                 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
5561         }
5562
5563         if (rdev->num_crtc >= 2) {
5564                 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
5565                 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
5566         }
5567         if (rdev->num_crtc >= 4) {
5568                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
5569                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
5570         }
5571         if (rdev->num_crtc >= 6) {
5572                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
5573                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
5574         }
5575
5576         if (rdev->num_crtc >= 2) {
5577                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
5578                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
5579         }
5580         if (rdev->num_crtc >= 4) {
5581                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
5582                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
5583         }
5584         if (rdev->num_crtc >= 6) {
5585                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
5586                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
5587         }
5588
5589         if (!ASIC_IS_NODCE(rdev)) {
5590                 WREG32(DC_HPD1_INT_CONTROL, hpd1);
5591                 WREG32(DC_HPD2_INT_CONTROL, hpd2);
5592                 WREG32(DC_HPD3_INT_CONTROL, hpd3);
5593                 WREG32(DC_HPD4_INT_CONTROL, hpd4);
5594                 WREG32(DC_HPD5_INT_CONTROL, hpd5);
5595                 WREG32(DC_HPD6_INT_CONTROL, hpd6);
5596         }
5597
5598         WREG32(CG_THERMAL_INT, thermal_int);
5599
5600         return 0;
5601 }
5602
5603 static inline void si_irq_ack(struct radeon_device *rdev)
5604 {
5605         u32 tmp;
5606
5607         if (ASIC_IS_NODCE(rdev))
5608                 return;
5609
5610         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
5611         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
5612         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
5613         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
5614         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
5615         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
5616         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
5617         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
5618         if (rdev->num_crtc >= 4) {
5619                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
5620                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
5621         }
5622         if (rdev->num_crtc >= 6) {
5623                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
5624                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
5625         }
5626
5627         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
5628                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5629         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
5630                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5631         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
5632                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
5633         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
5634                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
5635         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
5636                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
5637         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
5638                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
5639
5640         if (rdev->num_crtc >= 4) {
5641                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
5642                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5643                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
5644                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5645                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
5646                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
5647                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
5648                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
5649                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
5650                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
5651                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
5652                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
5653         }
5654
5655         if (rdev->num_crtc >= 6) {
5656                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
5657                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5658                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
5659                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5660                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
5661                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
5662                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
5663                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
5664                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
5665                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
5666                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
5667                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
5668         }
5669
5670         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
5671                 tmp = RREG32(DC_HPD1_INT_CONTROL);
5672                 tmp |= DC_HPDx_INT_ACK;
5673                 WREG32(DC_HPD1_INT_CONTROL, tmp);
5674         }
5675         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
5676                 tmp = RREG32(DC_HPD2_INT_CONTROL);
5677                 tmp |= DC_HPDx_INT_ACK;
5678                 WREG32(DC_HPD2_INT_CONTROL, tmp);
5679         }
5680         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
5681                 tmp = RREG32(DC_HPD3_INT_CONTROL);
5682                 tmp |= DC_HPDx_INT_ACK;
5683                 WREG32(DC_HPD3_INT_CONTROL, tmp);
5684         }
5685         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
5686                 tmp = RREG32(DC_HPD4_INT_CONTROL);
5687                 tmp |= DC_HPDx_INT_ACK;
5688                 WREG32(DC_HPD4_INT_CONTROL, tmp);
5689         }
5690         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
5691                 tmp = RREG32(DC_HPD5_INT_CONTROL);
5692                 tmp |= DC_HPDx_INT_ACK;
5693                 WREG32(DC_HPD5_INT_CONTROL, tmp);
5694         }
5695         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5696                 tmp = RREG32(DC_HPD6_INT_CONTROL);
5697                 tmp |= DC_HPDx_INT_ACK;
5698                 WREG32(DC_HPD6_INT_CONTROL, tmp);
5699         }
5700 }
5701
5702 static void si_irq_disable(struct radeon_device *rdev)
5703 {
5704         si_disable_interrupts(rdev);
5705         /* Wait and acknowledge irq */
5706         mdelay(1);
5707         si_irq_ack(rdev);
5708         si_disable_interrupt_state(rdev);
5709 }
5710
5711 static void si_irq_suspend(struct radeon_device *rdev)
5712 {
5713         si_irq_disable(rdev);
5714         si_rlc_stop(rdev);
5715 }
5716
5717 static void si_irq_fini(struct radeon_device *rdev)
5718 {
5719         si_irq_suspend(rdev);
5720         r600_ih_ring_fini(rdev);
5721 }
5722
5723 static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
5724 {
5725         u32 wptr, tmp;
5726
5727         if (rdev->wb.enabled)
5728                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5729         else
5730                 wptr = RREG32(IH_RB_WPTR);
5731
5732         if (wptr & RB_OVERFLOW) {
5733                 /* When a ring buffer overflow happens, start parsing interrupts
5734                  * from the last vector not yet overwritten (wptr + 16). Hopefully
5735                  * this should allow us to catch up.
5736                  */
5737                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
5738                                 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5739                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5740                 tmp = RREG32(IH_RB_CNTL);
5741                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5742                 WREG32(IH_RB_CNTL, tmp);
5743         }
5744         return (wptr & rdev->ih.ptr_mask);
5745 }
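/* Worked example (hypothetical numbers, editorial addition): assuming ptr_mask
 * is ring_size - 1, a 64 KiB IH ring has ptr_mask = 0xFFFF.  If the write
 * pointer reports overflow while at byte offset 0x0120, the read pointer is
 * resynchronized to (0x0120 + 16) & 0xFFFF = 0x0130, i.e. one 16-byte vector
 * past the newest entry, which is the oldest entry not yet overwritten.
 */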
5746
5747 /*        SI IV Ring
5748  * Each IV ring entry is 128 bits:
5749  * [7:0]    - interrupt source id
5750  * [31:8]   - reserved
5751  * [59:32]  - interrupt source data
5752  * [63:60]  - reserved
5753  * [71:64]  - RINGID
5754  * [79:72]  - VMID
5755  * [127:80] - reserved
5756  */
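/* Decoding sketch (editorial addition, mirrors the loop below): each 16-byte
 * vector is consumed as four little-endian dwords, of which the first three
 * carry the fields described above:
 *
 *   src_id   = le32_to_cpu(ring[i + 0]) & 0xff;         bits [7:0]
 *   src_data = le32_to_cpu(ring[i + 1]) & 0xfffffff;    bits [59:32]
 *   ring_id  = le32_to_cpu(ring[i + 2]) & 0xff;         bits [71:64]
 *
 * where i = rptr / 4, since rptr/wptr are byte offsets into a u32 array.
 */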
5757 int si_irq_process(struct radeon_device *rdev)
5758 {
5759         u32 wptr;
5760         u32 rptr;
5761         u32 src_id, src_data, ring_id;
5762         u32 ring_index;
5763         bool queue_hotplug = false;
5764         bool queue_thermal = false;
5765         u32 status, addr;
5766
5767         if (!rdev->ih.enabled || rdev->shutdown)
5768                 return IRQ_NONE;
5769
5770         wptr = si_get_ih_wptr(rdev);
5771
5772 restart_ih:
5773         /* is somebody else already processing irqs? */
5774         if (atomic_xchg(&rdev->ih.lock, 1))
5775                 return IRQ_NONE;
5776
5777         rptr = rdev->ih.rptr;
5778         DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5779
5780         /* Order reading of wptr vs. reading of IH ring data */
5781         rmb();
5782
5783         /* display interrupts */
5784         si_irq_ack(rdev);
5785
5786         while (rptr != wptr) {
5787                 /* wptr/rptr are in bytes! */
5788                 ring_index = rptr / 4;
5789                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5790                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5791                 ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
5792
5793                 switch (src_id) {
5794                 case 1: /* D1 vblank/vline */
5795                         switch (src_data) {
5796                         case 0: /* D1 vblank */
5797                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
5798                                         if (rdev->irq.crtc_vblank_int[0]) {
5799                                                 drm_handle_vblank(rdev->ddev, 0);
5800                                                 rdev->pm.vblank_sync = true;
5801                                                 wake_up(&rdev->irq.vblank_queue);
5802                                         }
5803                                         if (atomic_read(&rdev->irq.pflip[0]))
5804                                                 radeon_crtc_handle_flip(rdev, 0);
5805                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5806                                         DRM_DEBUG("IH: D1 vblank\n");
5807                                 }
5808                                 break;
5809                         case 1: /* D1 vline */
5810                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
5811                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5812                                         DRM_DEBUG("IH: D1 vline\n");
5813                                 }
5814                                 break;
5815                         default:
5816                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5817                                 break;
5818                         }
5819                         break;
5820                 case 2: /* D2 vblank/vline */
5821                         switch (src_data) {
5822                         case 0: /* D2 vblank */
5823                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
5824                                         if (rdev->irq.crtc_vblank_int[1]) {
5825                                                 drm_handle_vblank(rdev->ddev, 1);
5826                                                 rdev->pm.vblank_sync = true;
5827                                                 wake_up(&rdev->irq.vblank_queue);
5828                                         }
5829                                         if (atomic_read(&rdev->irq.pflip[1]))
5830                                                 radeon_crtc_handle_flip(rdev, 1);
5831                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5832                                         DRM_DEBUG("IH: D2 vblank\n");
5833                                 }
5834                                 break;
5835                         case 1: /* D2 vline */
5836                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
5837                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5838                                         DRM_DEBUG("IH: D2 vline\n");
5839                                 }
5840                                 break;
5841                         default:
5842                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5843                                 break;
5844                         }
5845                         break;
5846                 case 3: /* D3 vblank/vline */
5847                         switch (src_data) {
5848                         case 0: /* D3 vblank */
5849                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
5850                                         if (rdev->irq.crtc_vblank_int[2]) {
5851                                                 drm_handle_vblank(rdev->ddev, 2);
5852                                                 rdev->pm.vblank_sync = true;
5853                                                 wake_up(&rdev->irq.vblank_queue);
5854                                         }
5855                                         if (atomic_read(&rdev->irq.pflip[2]))
5856                                                 radeon_crtc_handle_flip(rdev, 2);
5857                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5858                                         DRM_DEBUG("IH: D3 vblank\n");
5859                                 }
5860                                 break;
5861                         case 1: /* D3 vline */
5862                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
5863                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5864                                         DRM_DEBUG("IH: D3 vline\n");
5865                                 }
5866                                 break;
5867                         default:
5868                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5869                                 break;
5870                         }
5871                         break;
5872                 case 4: /* D4 vblank/vline */
5873                         switch (src_data) {
5874                         case 0: /* D4 vblank */
5875                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
5876                                         if (rdev->irq.crtc_vblank_int[3]) {
5877                                                 drm_handle_vblank(rdev->ddev, 3);
5878                                                 rdev->pm.vblank_sync = true;
5879                                                 wake_up(&rdev->irq.vblank_queue);
5880                                         }
5881                                         if (atomic_read(&rdev->irq.pflip[3]))
5882                                                 radeon_crtc_handle_flip(rdev, 3);
5883                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5884                                         DRM_DEBUG("IH: D4 vblank\n");
5885                                 }
5886                                 break;
5887                         case 1: /* D4 vline */
5888                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
5889                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5890                                         DRM_DEBUG("IH: D4 vline\n");
5891                                 }
5892                                 break;
5893                         default:
5894                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5895                                 break;
5896                         }
5897                         break;
5898                 case 5: /* D5 vblank/vline */
5899                         switch (src_data) {
5900                         case 0: /* D5 vblank */
5901                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
5902                                         if (rdev->irq.crtc_vblank_int[4]) {
5903                                                 drm_handle_vblank(rdev->ddev, 4);
5904                                                 rdev->pm.vblank_sync = true;
5905                                                 wake_up(&rdev->irq.vblank_queue);
5906                                         }
5907                                         if (atomic_read(&rdev->irq.pflip[4]))
5908                                                 radeon_crtc_handle_flip(rdev, 4);
5909                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5910                                         DRM_DEBUG("IH: D5 vblank\n");
5911                                 }
5912                                 break;
5913                         case 1: /* D5 vline */
5914                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
5915                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5916                                         DRM_DEBUG("IH: D5 vline\n");
5917                                 }
5918                                 break;
5919                         default:
5920                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5921                                 break;
5922                         }
5923                         break;
5924                 case 6: /* D6 vblank/vline */
5925                         switch (src_data) {
5926                         case 0: /* D6 vblank */
5927                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
5928                                         if (rdev->irq.crtc_vblank_int[5]) {
5929                                                 drm_handle_vblank(rdev->ddev, 5);
5930                                                 rdev->pm.vblank_sync = true;
5931                                                 wake_up(&rdev->irq.vblank_queue);
5932                                         }
5933                                         if (atomic_read(&rdev->irq.pflip[5]))
5934                                                 radeon_crtc_handle_flip(rdev, 5);
5935                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5936                                         DRM_DEBUG("IH: D6 vblank\n");
5937                                 }
5938                                 break;
5939                         case 1: /* D6 vline */
5940                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
5941                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5942                                         DRM_DEBUG("IH: D6 vline\n");
5943                                 }
5944                                 break;
5945                         default:
5946                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5947                                 break;
5948                         }
5949                         break;
5950                 case 42: /* HPD hotplug */
5951                         switch (src_data) {
5952                         case 0:
5953                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
5954                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5955                                         queue_hotplug = true;
5956                                         DRM_DEBUG("IH: HPD1\n");
5957                                 }
5958                                 break;
5959                         case 1:
5960                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
5961                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5962                                         queue_hotplug = true;
5963                                         DRM_DEBUG("IH: HPD2\n");
5964                                 }
5965                                 break;
5966                         case 2:
5967                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
5968                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5969                                         queue_hotplug = true;
5970                                         DRM_DEBUG("IH: HPD3\n");
5971                                 }
5972                                 break;
5973                         case 3:
5974                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
5975                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5976                                         queue_hotplug = true;
5977                                         DRM_DEBUG("IH: HPD4\n");
5978                                 }
5979                                 break;
5980                         case 4:
5981                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
5982                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5983                                         queue_hotplug = true;
5984                                         DRM_DEBUG("IH: HPD5\n");
5985                                 }
5986                                 break;
5987                         case 5:
5988                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5989                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5990                                         queue_hotplug = true;
5991                                         DRM_DEBUG("IH: HPD6\n");
5992                                 }
5993                                 break;
5994                         default:
5995                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5996                                 break;
5997                         }
5998                         break;
5999                 case 146:
6000                 case 147:
6001                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
6002                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
6003                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
6004                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
6005                                 addr);
6006                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
6007                                 status);
6008                         si_vm_decode_fault(rdev, status, addr);
6009                         /* reset addr and status */
6010                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
6011                         break;
6012                 case 176: /* RINGID0 CP_INT */
6013                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6014                         break;
6015                 case 177: /* RINGID1 CP_INT */
6016                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6017                         break;
6018                 case 178: /* RINGID2 CP_INT */
6019                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6020                         break;
6021                 case 181: /* CP EOP event */
6022                         DRM_DEBUG("IH: CP EOP\n");
6023                         switch (ring_id) {
6024                         case 0:
6025                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
6026                                 break;
6027                         case 1:
6028                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6029                                 break;
6030                         case 2:
6031                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6032                                 break;
6033                         }
6034                         break;
6035                 case 224: /* DMA trap event */
6036                         DRM_DEBUG("IH: DMA trap\n");
6037                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
6038                         break;
6039                 case 230: /* thermal low to high */
6040                         DRM_DEBUG("IH: thermal low to high\n");
6041                         rdev->pm.dpm.thermal.high_to_low = false;
6042                         queue_thermal = true;
6043                         break;
6044                 case 231: /* thermal high to low */
6045                         DRM_DEBUG("IH: thermal high to low\n");
6046                         rdev->pm.dpm.thermal.high_to_low = true;
6047                         queue_thermal = true;
6048                         break;
6049                 case 233: /* GUI IDLE */
6050                         DRM_DEBUG("IH: GUI idle\n");
6051                         break;
6052                 case 244: /* DMA trap event */
6053                         DRM_DEBUG("IH: DMA1 trap\n");
6054                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6055                         break;
6056                 default:
6057                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
6058                         break;
6059                 }
6060
6061                 /* wptr/rptr are in bytes! */
6062                 rptr += 16;
6063                 rptr &= rdev->ih.ptr_mask;
6064         }
6065         if (queue_hotplug)
6066                 schedule_work(&rdev->hotplug_work);
6067         if (queue_thermal && rdev->pm.dpm_enabled)
6068                 schedule_work(&rdev->pm.dpm.thermal.work);
6069         rdev->ih.rptr = rptr;
6070         WREG32(IH_RB_RPTR, rdev->ih.rptr);
6071         atomic_set(&rdev->ih.lock, 0);
6072
6073         /* make sure wptr hasn't changed while processing */
6074         wptr = si_get_ih_wptr(rdev);
6075         if (wptr != rptr)
6076                 goto restart_ih;
6077
6078         return IRQ_HANDLED;
6079 }
6080
6081 /*
6082  * startup/shutdown callbacks
6083  */
6084 static int si_startup(struct radeon_device *rdev)
6085 {
6086         struct radeon_ring *ring;
6087         int r;
6088
6089         /* enable pcie gen2/3 link */
6090         si_pcie_gen3_enable(rdev);
6091         /* enable aspm */
6092         si_program_aspm(rdev);
6093
6094         si_mc_program(rdev);
6095
6096         if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
6097             !rdev->rlc_fw || !rdev->mc_fw) {
6098                 r = si_init_microcode(rdev);
6099                 if (r) {
6100                         DRM_ERROR("Failed to load firmware!\n");
6101                         return r;
6102                 }
6103         }
6104
6105         r = si_mc_load_microcode(rdev);
6106         if (r) {
6107                 DRM_ERROR("Failed to load MC firmware!\n");
6108                 return r;
6109         }
6110
6111         r = r600_vram_scratch_init(rdev);
6112         if (r)
6113                 return r;
6114
6115         r = si_pcie_gart_enable(rdev);
6116         if (r)
6117                 return r;
6118         si_gpu_init(rdev);
6119
6120         /* allocate rlc buffers */
6121         if (rdev->family == CHIP_VERDE) {
6122                 rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
6123                 rdev->rlc.reg_list_size =
6124                         (u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
6125         }
6126         rdev->rlc.cs_data = si_cs_data;
6127         r = sumo_rlc_init(rdev);
6128         if (r) {
6129                 DRM_ERROR("Failed to init rlc BOs!\n");
6130                 return r;
6131         }
6132
6133         /* allocate wb buffer */
6134         r = radeon_wb_init(rdev);
6135         if (r)
6136                 return r;
6137
6138         r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
6139         if (r) {
6140                 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6141                 return r;
6142         }
6143
6144         r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
6145         if (r) {
6146                 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6147                 return r;
6148         }
6149
6150         r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
6151         if (r) {
6152                 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
6153                 return r;
6154         }
6155
6156         r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
6157         if (r) {
6158                 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6159                 return r;
6160         }
6161
6162         r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
6163         if (r) {
6164                 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
6165                 return r;
6166         }
6167
6168         if (rdev->has_uvd) {
6169                 r = uvd_v2_2_resume(rdev);
6170                 if (!r) {
6171                         r = radeon_fence_driver_start_ring(rdev,
6172                                                            R600_RING_TYPE_UVD_INDEX);
6173                         if (r)
6174                                 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
6175                 }
6176                 if (r)
6177                         rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
6178         }
6179
6180         /* Enable IRQ */
6181         if (!rdev->irq.installed) {
6182                 r = radeon_irq_kms_init(rdev);
6183                 if (r)
6184                         return r;
6185         }
6186
6187         r = si_irq_init(rdev);
6188         if (r) {
6189                 DRM_ERROR("radeon: IH init failed (%d).\n", r);
6190                 radeon_irq_kms_fini(rdev);
6191                 return r;
6192         }
6193         si_irq_set(rdev);
6194
6195         ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6196         r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
6197                              CP_RB0_RPTR, CP_RB0_WPTR,
6198                              RADEON_CP_PACKET2);
6199         if (r)
6200                 return r;
6201
6202         ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6203         r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
6204                              CP_RB1_RPTR, CP_RB1_WPTR,
6205                              RADEON_CP_PACKET2);
6206         if (r)
6207                 return r;
6208
6209         ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6210         r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
6211                              CP_RB2_RPTR, CP_RB2_WPTR,
6212                              RADEON_CP_PACKET2);
6213         if (r)
6214                 return r;
6215
6216         ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6217         r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
6218                              DMA_RB_RPTR + DMA0_REGISTER_OFFSET,
6219                              DMA_RB_WPTR + DMA0_REGISTER_OFFSET,
6220                              DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6221         if (r)
6222                 return r;
6223
6224         ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6225         r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
6226                              DMA_RB_RPTR + DMA1_REGISTER_OFFSET,
6227                              DMA_RB_WPTR + DMA1_REGISTER_OFFSET,
6228                              DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
6229         if (r)
6230                 return r;
6231
6232         r = si_cp_load_microcode(rdev);
6233         if (r)
6234                 return r;
6235         r = si_cp_resume(rdev);
6236         if (r)
6237                 return r;
6238
6239         r = cayman_dma_resume(rdev);
6240         if (r)
6241                 return r;
6242
6243         if (rdev->has_uvd) {
6244                 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6245                 if (ring->ring_size) {
6246                         r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
6247                                              UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
6248                                              RADEON_CP_PACKET2);
6249                         if (!r)
6250                                 r = uvd_v1_0_init(rdev);
6251                         if (r)
6252                                 DRM_ERROR("radeon: failed initializing UVD (%d).\n", r);
6253                 }
6254         }
6255
6256         r = radeon_ib_pool_init(rdev);
6257         if (r) {
6258                 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
6259                 return r;
6260         }
6261
6262         r = radeon_vm_manager_init(rdev);
6263         if (r) {
6264                 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
6265                 return r;
6266         }
6267
6268         r = dce6_audio_init(rdev);
6269         if (r)
6270                 return r;
6271
6272         return 0;
6273 }
6274
6275 int si_resume(struct radeon_device *rdev)
6276 {
6277         int r;
6278
6279         /* Do not reset the GPU before posting; on rv770 hardware, unlike on
6280          * r500 hardware, posting performs the tasks necessary to bring the
6281          * GPU back into good shape.
6282          */
6283         /* post card */
6284         atom_asic_init(rdev->mode_info.atom_context);
6285
6286         /* init golden registers */
6287         si_init_golden_registers(rdev);
6288
6289         rdev->accel_working = true;
6290         r = si_startup(rdev);
6291         if (r) {
6292                 DRM_ERROR("si startup failed on resume\n");
6293                 rdev->accel_working = false;
6294                 return r;
6295         }
6296
6297         return r;
6298
6299 }
6300
6301 int si_suspend(struct radeon_device *rdev)
6302 {
6303         dce6_audio_fini(rdev);
6304         radeon_vm_manager_fini(rdev);
6305         si_cp_enable(rdev, false);
6306         cayman_dma_stop(rdev);
6307         if (rdev->has_uvd) {
6308                 uvd_v1_0_fini(rdev);
6309                 radeon_uvd_suspend(rdev);
6310         }
6311         si_irq_suspend(rdev);
6312         radeon_wb_disable(rdev);
6313         si_pcie_gart_disable(rdev);
6314         return 0;
6315 }
6316
6317 /* The plan is to move initialization into this function and use
6318  * helper functions so that radeon_device_init does pretty much
6319  * nothing more than call ASIC-specific functions. This should
6320  * also allow us to remove a bunch of callback functions
6321  * like vram_info.
6322  */
6323 int si_init(struct radeon_device *rdev)
6324 {
6325         struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6326         int r;
6327
6328         /* Read BIOS */
6329         if (!radeon_get_bios(rdev)) {
6330                 if (ASIC_IS_AVIVO(rdev))
6331                         return -EINVAL;
6332         }
6333         /* Must be an ATOMBIOS */
6334         if (!rdev->is_atom_bios) {
6335                 dev_err(rdev->dev, "Expecting atombios for SI GPU\n");
6336                 return -EINVAL;
6337         }
6338         r = radeon_atombios_init(rdev);
6339         if (r)
6340                 return r;
6341
6342         /* Post card if necessary */
6343         if (!radeon_card_posted(rdev)) {
6344                 if (!rdev->bios) {
6345                         dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
6346                         return -EINVAL;
6347                 }
6348                 DRM_INFO("GPU not posted. posting now...\n");
6349                 atom_asic_init(rdev->mode_info.atom_context);
6350         }
6351         /* init golden registers */
6352         si_init_golden_registers(rdev);
6353         /* Initialize scratch registers */
6354         si_scratch_init(rdev);
6355         /* Initialize surface registers */
6356         radeon_surface_init(rdev);
6357         /* Initialize clocks */
6358         radeon_get_clock_info(rdev->ddev);
6359
6360         /* Fence driver */
6361         r = radeon_fence_driver_init(rdev);
6362         if (r)
6363                 return r;
6364
6365         /* initialize memory controller */
6366         r = si_mc_init(rdev);
6367         if (r)
6368                 return r;
6369         /* Memory manager */
6370         r = radeon_bo_init(rdev);
6371         if (r)
6372                 return r;
6373
6374         ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
6375         ring->ring_obj = NULL;
6376         r600_ring_init(rdev, ring, 1024 * 1024);
6377
6378         ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
6379         ring->ring_obj = NULL;
6380         r600_ring_init(rdev, ring, 1024 * 1024);
6381
6382         ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
6383         ring->ring_obj = NULL;
6384         r600_ring_init(rdev, ring, 1024 * 1024);
6385
6386         ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
6387         ring->ring_obj = NULL;
6388         r600_ring_init(rdev, ring, 64 * 1024);
6389
6390         ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
6391         ring->ring_obj = NULL;
6392         r600_ring_init(rdev, ring, 64 * 1024);
6393
6394         if (rdev->has_uvd) {
6395                 r = radeon_uvd_init(rdev);
6396                 if (!r) {
6397                         ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
6398                         ring->ring_obj = NULL;
6399                         r600_ring_init(rdev, ring, 4096);
6400                 }
6401         }
6402
6403         rdev->ih.ring_obj = NULL;
6404         r600_ih_ring_init(rdev, 64 * 1024);
6405
6406         r = r600_pcie_gart_init(rdev);
6407         if (r)
6408                 return r;
6409
6410         rdev->accel_working = true;
6411         r = si_startup(rdev);
6412         if (r) {
6413                 dev_err(rdev->dev, "disabling GPU acceleration\n");
6414                 si_cp_fini(rdev);
6415                 cayman_dma_fini(rdev);
6416                 si_irq_fini(rdev);
6417                 sumo_rlc_fini(rdev);
6418                 radeon_wb_fini(rdev);
6419                 radeon_ib_pool_fini(rdev);
6420                 radeon_vm_manager_fini(rdev);
6421                 radeon_irq_kms_fini(rdev);
6422                 si_pcie_gart_fini(rdev);
6423                 rdev->accel_working = false;
6424         }
6425
6426         /* Don't start up if the MC ucode is missing.
6427          * The default clocks and voltages before the MC ucode
6428          * is loaded are not sufficient for advanced operations.
6429          */
6430         if (!rdev->mc_fw) {
6431                 DRM_ERROR("radeon: MC ucode required for NI+.\n");
6432                 return -EINVAL;
6433         }
6434
6435         return 0;
6436 }
6437
6438 void si_fini(struct radeon_device *rdev)
6439 {
6440         si_cp_fini(rdev);
6441         cayman_dma_fini(rdev);
6442         si_irq_fini(rdev);
6443         sumo_rlc_fini(rdev);
6444         si_fini_cg(rdev);
6445         si_fini_pg(rdev);
6446         radeon_wb_fini(rdev);
6447         radeon_vm_manager_fini(rdev);
6448         radeon_ib_pool_fini(rdev);
6449         radeon_irq_kms_fini(rdev);
6450         if (rdev->has_uvd) {
6451                 uvd_v1_0_fini(rdev);
6452                 radeon_uvd_fini(rdev);
6453         }
6454         si_pcie_gart_fini(rdev);
6455         r600_vram_scratch_fini(rdev);
6456         radeon_gem_fini(rdev);
6457         radeon_fence_driver_fini(rdev);
6458         radeon_bo_fini(rdev);
6459         radeon_atombios_fini(rdev);
6460         kfree(rdev->bios);
6461         rdev->bios = NULL;
6462 }
6463
6464 /**
6465  * si_get_gpu_clock_counter - return GPU clock counter snapshot
6466  *
6467  * @rdev: radeon_device pointer
6468  *
6469  * Fetches a GPU clock counter snapshot (SI).
6470  * Returns the 64 bit clock counter snapshot.
6471  */
6472 uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
6473 {
6474         uint64_t clock;
6475
6476         mutex_lock(&rdev->gpu_clock_mutex);
6477         WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
6478         clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
6479                 ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
6480         mutex_unlock(&rdev->gpu_clock_mutex);
6481         return clock;
6482 }
6483
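/*
 * Minimal usage sketch (illustrative only, not part of the driver): two
 * snapshots of the counter give an elapsed tick count, which is how a
 * caller would typically consume this helper.
 */
#if 0
static u64 si_gpu_clock_delta_example(struct radeon_device *rdev)
{
	u64 start, end;

	start = si_get_gpu_clock_counter(rdev);
	/* ... the work being timed ... */
	end = si_get_gpu_clock_counter(rdev);

	return end - start;	/* elapsed GPU clock ticks */
}
#endif
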
6484 int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
6485 {
6486         unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
6487         int r;
6488
6489         /* bypass vclk and dclk with bclk */
6490         WREG32_P(CG_UPLL_FUNC_CNTL_2,
6491                 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
6492                 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
6493
6494         /* put PLL in bypass mode */
6495         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
6496
6497         if (!vclk || !dclk) {
6498                 /* keep the bypass mode and put the PLL to sleep */
6499                 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
6500                 return 0;
6501         }
6502
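	/* The divider search below keeps the UPLL VCO within 125000..250000
	 * (presumably the same 10 kHz units as vclk and dclk); the remaining
	 * arguments bound and mask the feedback and post dividers it may
	 * return.
	 */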
6503         r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
6504                                           16384, 0x03FFFFFF, 0, 128, 5,
6505                                           &fb_div, &vclk_div, &dclk_div);
6506         if (r)
6507                 return r;
6508
6509         /* set RESET_ANTI_MUX to 0 */
6510         WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);
6511
6512         /* set VCO_MODE to 1 */
6513         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
6514
6515         /* toggle UPLL_SLEEP to 1 then back to 0 */
6516         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
6517         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
6518
6519         /* deassert UPLL_RESET */
6520         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
6521
6522         mdelay(1);
6523
6524         r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6525         if (r)
6526                 return r;
6527
6528         /* assert UPLL_RESET again */
6529         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
6530
6531         /* disable spread spectrum */
6532         WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
6533
6534         /* set feedback divider */
6535         WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
6536
6537         /* set ref divider to 0 */
6538         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
6539
6540         if (fb_div < 307200)
6541                 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
6542         else
6543                 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
6544
6545         /* set PDIV_A and PDIV_B */
6546         WREG32_P(CG_UPLL_FUNC_CNTL_2,
6547                 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
6548                 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
6549
6550         /* give the PLL some time to settle */
6551         mdelay(15);
6552
6553         /* deassert PLL_RESET */
6554         /* deassert UPLL_RESET */
6555
6556         mdelay(15);
6557
6558         /* switch from bypass mode to normal mode */
6559         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
6560
6561         r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
6562         if (r)
6563                 return r;
6564
6565         /* switch VCLK and DCLK selection */
6566         WREG32_P(CG_UPLL_FUNC_CNTL_2,
6567                 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
6568                 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
6569
6570         mdelay(100);
6571
6572         return 0;
6573 }
6574
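/*
 * Usage sketch (illustrative only, not part of the driver): zero clocks keep
 * the UPLL bypassed and asleep, non-zero clocks re-program and re-lock it.
 */
#if 0
	/* shut the UVD clocks off while the block is idle */
	si_set_uvd_clocks(rdev, 0, 0);
	/* bring them back up (values in the driver's usual clock units) */
	si_set_uvd_clocks(rdev, vclk, dclk);
#endif
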
6575 static void si_pcie_gen3_enable(struct radeon_device *rdev)
6576 {
6577         struct pci_dev *root = rdev->pdev->bus->self;
6578         int bridge_pos, gpu_pos;
6579         u32 speed_cntl, mask, current_data_rate;
6580         int ret, i;
6581         u16 tmp16;
6582
6583         if (radeon_pcie_gen2 == 0)
6584                 return;
6585
6586         if (rdev->flags & RADEON_IS_IGP)
6587                 return;
6588
6589         if (!(rdev->flags & RADEON_IS_PCIE))
6590                 return;
6591
6592         ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
6593         if (ret != 0)
6594                 return;
6595
6596         if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
6597                 return;
6598
6599         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6600         current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
6601                 LC_CURRENT_DATA_RATE_SHIFT;
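	/* current_data_rate: 0 = gen1, 1 = gen2, 2 = gen3 */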
6602         if (mask & DRM_PCIE_SPEED_80) {
6603                 if (current_data_rate == 2) {
6604                         DRM_INFO("PCIE gen 3 link speeds already enabled\n");
6605                         return;
6606                 }
6607                 DRM_INFO("enabling PCIE gen 3 link speeds, disable with radeon.pcie_gen2=0\n");
6608         } else if (mask & DRM_PCIE_SPEED_50) {
6609                 if (current_data_rate == 1) {
6610                         DRM_INFO("PCIE gen 2 link speeds already enabled\n");
6611                         return;
6612                 }
6613                 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
6614         }
6615
6616         bridge_pos = pci_pcie_cap(root);
6617         if (!bridge_pos)
6618                 return;
6619
6620         gpu_pos = pci_pcie_cap(rdev->pdev);
6621         if (!gpu_pos)
6622                 return;
6623
6624         if (mask & DRM_PCIE_SPEED_80) {
6625                 /* re-try equalization if gen3 is not already enabled */
6626                 if (current_data_rate != 2) {
6627                         u16 bridge_cfg, gpu_cfg;
6628                         u16 bridge_cfg2, gpu_cfg2;
6629                         u32 max_lw, current_lw, tmp;
6630
6631                         pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
6632                         pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
6633
6634                         tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
6635                         pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
6636
6637                         tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
6638                         pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
6639
6640                         tmp = RREG32_PCIE(PCIE_LC_STATUS1);
6641                         max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
6642                         current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;
6643
6644                         if (current_lw < max_lw) {
6645                                 tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6646                                 if (tmp & LC_RENEGOTIATION_SUPPORT) {
6647                                         tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
6648                                         tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
6649                                         tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
6650                                         WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
6651                                 }
6652                         }
6653
6654                         for (i = 0; i < 10; i++) {
6655                                 /* check status */
6656                                 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16);
6657                                 if (tmp16 & PCI_EXP_DEVSTA_TRPND)
6658                                         break;
6659
6660                                 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
6661                                 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);
6662
6663                                 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &bridge_cfg2);
6664                                 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2);
6665
6666                                 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
6667                                 tmp |= LC_SET_QUIESCE;
6668                                 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
6669
6670                                 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
6671                                 tmp |= LC_REDO_EQ;
6672                                 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
6673
6674                                 mdelay(100);
6675
6676                                 /* linkctl */
6677                                 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &tmp16);
6678                                 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
6679                                 tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
6680                                 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);
6681
6682                                 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16);
6683                                 tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
6684                                 tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
6685                                 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);
6686
6687                                 /* linkctl2 */
6688                                 pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &tmp16);
6689                                 tmp16 &= ~((1 << 4) | (7 << 9));
6690                                 tmp16 |= (bridge_cfg2 & ((1 << 4) | (7 << 9)));
6691                                 pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, tmp16);
6692
6693                                 pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
6694                                 tmp16 &= ~((1 << 4) | (7 << 9));
6695                                 tmp16 |= (gpu_cfg2 & ((1 << 4) | (7 << 9)));
6696                                 pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
6697
6698                                 tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
6699                                 tmp &= ~LC_SET_QUIESCE;
6700                                 WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
6701                         }
6702                 }
6703         }
6704
6705         /* set the link speed */
6706         speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
6707         speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
6708         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
6709
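	/* program the target link speed field (bits 3:0) of PCI_EXP_LNKCTL2
	 * to match the requested gen
	 */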
6710         pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
6711         tmp16 &= ~0xf;
6712         if (mask & DRM_PCIE_SPEED_80)
6713                 tmp16 |= 3; /* gen3 */
6714         else if (mask & DRM_PCIE_SPEED_50)
6715                 tmp16 |= 2; /* gen2 */
6716         else
6717                 tmp16 |= 1; /* gen1 */
6718         pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);
6719
6720         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6721         speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
6722         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
6723
6724         for (i = 0; i < rdev->usec_timeout; i++) {
6725                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
6726                 if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
6727                         break;
6728                 udelay(1);
6729         }
6730 }
6731
6732 static void si_program_aspm(struct radeon_device *rdev)
6733 {
6734         u32 data, orig;
6735         bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
6736         bool disable_clkreq = false;
6737
6738         if (radeon_aspm == 0)
6739                 return;
6740
6741         if (!(rdev->flags & RADEON_IS_PCIE))
6742                 return;
6743
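	/* Each register below is read, modified, and written back only when
	 * the value actually changed (the orig/data pattern used throughout
	 * this function).
	 */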
6744         orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
6745         data &= ~LC_XMIT_N_FTS_MASK;
6746         data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
6747         if (orig != data)
6748                 WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);
6749
6750         orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
6751         data |= LC_GO_TO_RECOVERY;
6752         if (orig != data)
6753                 WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);
6754
6755         orig = data = RREG32_PCIE(PCIE_P_CNTL);
6756         data |= P_IGNORE_EDB_ERR;
6757         if (orig != data)
6758                 WREG32_PCIE(PCIE_P_CNTL, data);
6759
6760         orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6761         data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6762         data |= LC_PMI_TO_L1_DIS;
6763         if (!disable_l0s)
6764                 data |= LC_L0S_INACTIVITY(7);
6765
6766         if (!disable_l1) {
6767                 data |= LC_L1_INACTIVITY(7);
6768                 data &= ~LC_PMI_TO_L1_DIS;
6769                 if (orig != data)
6770                         WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
6771
6772                 if (!disable_plloff_in_l1) {
6773                         bool clk_req_support;
6774
6775                         orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6776                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6777                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6778                         if (orig != data)
6779                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6780
6781                         orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6782                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6783                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6784                         if (orig != data)
6785                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6786
6787                         orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6788                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6789                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6790                         if (orig != data)
6791                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6792
6793                         orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6794                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6795                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6796                         if (orig != data)
6797                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6798
6799                         if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
6800                                 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6801                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6802                                 if (orig != data)
6803                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6804
6805                                 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6806                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6807                                 if (orig != data)
6808                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6809
6810                                 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2);
6811                                 data &= ~PLL_RAMP_UP_TIME_2_MASK;
6812                                 if (orig != data)
6813                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2, data);
6814
6815                                 orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3);
6816                                 data &= ~PLL_RAMP_UP_TIME_3_MASK;
6817                                 if (orig != data)
6818                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3, data);
6819
6820                                 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6821                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
6822                                 if (orig != data)
6823                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6824
6825                                 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6826                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
6827                                 if (orig != data)
6828                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6829
6830                                 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2);
6831                                 data &= ~PLL_RAMP_UP_TIME_2_MASK;
6832                                 if (orig != data)
6833                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2, data);
6834
6835                                 orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3);
6836                                 data &= ~PLL_RAMP_UP_TIME_3_MASK;
6837                                 if (orig != data)
6838                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3, data);
6839                         }
6840                         orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6841                         data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6842                         data |= LC_DYN_LANES_PWR_STATE(3);
6843                         if (orig != data)
6844                                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6845
6846                         orig = data = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6847                         data &= ~LS2_EXIT_TIME_MASK;
6848                         if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
6849                                 data |= LS2_EXIT_TIME(5);
6850                         if (orig != data)
6851                                 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6852
6853                         orig = data = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6854                         data &= ~LS2_EXIT_TIME_MASK;
6855                         if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
6856                                 data |= LS2_EXIT_TIME(5);
6857                         if (orig != data)
6858                                 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6859
6860                         if (!disable_clkreq) {
6861                                 struct pci_dev *root = rdev->pdev->bus->self;
6862                                 u32 lnkcap;
6863
6864                                 clk_req_support = false;
6865                                 pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
6866                                 if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
6867                                         clk_req_support = true;
6868                         } else {
6869                                 clk_req_support = false;
6870                         }
6871
6872                         if (clk_req_support) {
6873                                 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
6874                                 data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
6875                                 if (orig != data)
6876                                         WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);
6877
6878                                 orig = data = RREG32(THM_CLK_CNTL);
6879                                 data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
6880                                 data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
6881                                 if (orig != data)
6882                                         WREG32(THM_CLK_CNTL, data);
6883
6884                                 orig = data = RREG32(MISC_CLK_CNTL);
6885                                 data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
6886                                 data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
6887                                 if (orig != data)
6888                                         WREG32(MISC_CLK_CNTL, data);
6889
6890                                 orig = data = RREG32(CG_CLKPIN_CNTL);
6891                                 data &= ~BCLK_AS_XCLK;
6892                                 if (orig != data)
6893                                         WREG32(CG_CLKPIN_CNTL, data);
6894
6895                                 orig = data = RREG32(CG_CLKPIN_CNTL_2);
6896                                 data &= ~FORCE_BIF_REFCLK_EN;
6897                                 if (orig != data)
6898                                         WREG32(CG_CLKPIN_CNTL_2, data);
6899
6900                                 orig = data = RREG32(MPLL_BYPASSCLK_SEL);
6901                                 data &= ~MPLL_CLKOUT_SEL_MASK;
6902                                 data |= MPLL_CLKOUT_SEL(4);
6903                                 if (orig != data)
6904                                         WREG32(MPLL_BYPASSCLK_SEL, data);
6905
6906                                 orig = data = RREG32(SPLL_CNTL_MODE);
6907                                 data &= ~SPLL_REFCLK_SEL_MASK;
6908                                 if (orig != data)
6909                                         WREG32(SPLL_CNTL_MODE, data);
6910                         }
6911                 }
6912         } else {
6913                 if (orig != data)
6914                         WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
6915         }
6916
6917         orig = data = RREG32_PCIE(PCIE_CNTL2);
6918         data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
6919         if (orig != data)
6920                 WREG32_PCIE(PCIE_CNTL2, data);
6921
6922         if (!disable_l0s) {
6923                 data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
6924                 if ((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
6925                         data = RREG32_PCIE(PCIE_LC_STATUS1);
6926                         if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
6927                                 orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6928                                 data &= ~LC_L0S_INACTIVITY_MASK;
6929                                 if (orig != data)
6930                                         WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
6931                         }
6932                 }
6933         }
6934 }