/*
 * drm/radeon/dpm: save some display parameters for DPM
 * drivers/gpu/drm/radeon/evergreen.c
 */
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
38 static const u32 crtc_offsets[6] =
39 {
40         EVERGREEN_CRTC0_REGISTER_OFFSET,
41         EVERGREEN_CRTC1_REGISTER_OFFSET,
42         EVERGREEN_CRTC2_REGISTER_OFFSET,
43         EVERGREEN_CRTC3_REGISTER_OFFSET,
44         EVERGREEN_CRTC4_REGISTER_OFFSET,
45         EVERGREEN_CRTC5_REGISTER_OFFSET
46 };
47
48 #include "clearstate_evergreen.h"
49
50 static u32 sumo_rlc_save_restore_register_list[] =
51 {
52         0x98fc,
53         0x9830,
54         0x9834,
55         0x9838,
56         0x9870,
57         0x9874,
58         0x8a14,
59         0x8b24,
60         0x8bcc,
61         0x8b10,
62         0x8d00,
63         0x8d04,
64         0x8c00,
65         0x8c04,
66         0x8c08,
67         0x8c0c,
68         0x8d8c,
69         0x8c20,
70         0x8c24,
71         0x8c28,
72         0x8c18,
73         0x8c1c,
74         0x8cf0,
75         0x8e2c,
76         0x8e38,
77         0x8c30,
78         0x9508,
79         0x9688,
80         0x9608,
81         0x960c,
82         0x9610,
83         0x9614,
84         0x88c4,
85         0x88d4,
86         0xa008,
87         0x900c,
88         0x9100,
89         0x913c,
90         0x98f8,
91         0x98f4,
92         0x9b7c,
93         0x3f8c,
94         0x8950,
95         0x8954,
96         0x8a18,
97         0x8b28,
98         0x9144,
99         0x9148,
100         0x914c,
101         0x3f90,
102         0x3f94,
103         0x915c,
104         0x9160,
105         0x9178,
106         0x917c,
107         0x9180,
108         0x918c,
109         0x9190,
110         0x9194,
111         0x9198,
112         0x919c,
113         0x91a8,
114         0x91ac,
115         0x91b0,
116         0x91b4,
117         0x91b8,
118         0x91c4,
119         0x91c8,
120         0x91cc,
121         0x91d0,
122         0x91d4,
123         0x91e0,
124         0x91e4,
125         0x91ec,
126         0x91f0,
127         0x91f4,
128         0x9200,
129         0x9204,
130         0x929c,
131         0x9150,
132         0x802c,
133 };
134 static u32 sumo_rlc_save_restore_register_list_size = ARRAY_SIZE(sumo_rlc_save_restore_register_list);
135
136 static void evergreen_gpu_init(struct radeon_device *rdev);
137 void evergreen_fini(struct radeon_device *rdev);
138 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
139 void evergreen_program_aspm(struct radeon_device *rdev);
140 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
141                                      int ring, u32 cp_int_cntl);
142
143 static const u32 evergreen_golden_registers[] =
144 {
145         0x3f90, 0xffff0000, 0xff000000,
146         0x9148, 0xffff0000, 0xff000000,
147         0x3f94, 0xffff0000, 0xff000000,
148         0x914c, 0xffff0000, 0xff000000,
149         0x9b7c, 0xffffffff, 0x00000000,
150         0x8a14, 0xffffffff, 0x00000007,
151         0x8b10, 0xffffffff, 0x00000000,
152         0x960c, 0xffffffff, 0x54763210,
153         0x88c4, 0xffffffff, 0x000000c2,
154         0x88d4, 0xffffffff, 0x00000010,
155         0x8974, 0xffffffff, 0x00000000,
156         0xc78, 0x00000080, 0x00000080,
157         0x5eb4, 0xffffffff, 0x00000002,
158         0x5e78, 0xffffffff, 0x001000f0,
159         0x6104, 0x01000300, 0x00000000,
160         0x5bc0, 0x00300000, 0x00000000,
161         0x7030, 0xffffffff, 0x00000011,
162         0x7c30, 0xffffffff, 0x00000011,
163         0x10830, 0xffffffff, 0x00000011,
164         0x11430, 0xffffffff, 0x00000011,
165         0x12030, 0xffffffff, 0x00000011,
166         0x12c30, 0xffffffff, 0x00000011,
167         0xd02c, 0xffffffff, 0x08421000,
168         0x240c, 0xffffffff, 0x00000380,
169         0x8b24, 0xffffffff, 0x00ff0fff,
170         0x28a4c, 0x06000000, 0x06000000,
171         0x10c, 0x00000001, 0x00000001,
172         0x8d00, 0xffffffff, 0x100e4848,
173         0x8d04, 0xffffffff, 0x00164745,
174         0x8c00, 0xffffffff, 0xe4000003,
175         0x8c04, 0xffffffff, 0x40600060,
176         0x8c08, 0xffffffff, 0x001c001c,
177         0x8cf0, 0xffffffff, 0x08e00620,
178         0x8c20, 0xffffffff, 0x00800080,
179         0x8c24, 0xffffffff, 0x00800080,
180         0x8c18, 0xffffffff, 0x20202078,
181         0x8c1c, 0xffffffff, 0x00001010,
182         0x28350, 0xffffffff, 0x00000000,
183         0xa008, 0xffffffff, 0x00010000,
184         0x5cc, 0xffffffff, 0x00000001,
185         0x9508, 0xffffffff, 0x00000002,
186         0x913c, 0x0000000f, 0x0000000a
187 };
188
189 static const u32 evergreen_golden_registers2[] =
190 {
191         0x2f4c, 0xffffffff, 0x00000000,
192         0x54f4, 0xffffffff, 0x00000000,
193         0x54f0, 0xffffffff, 0x00000000,
194         0x5498, 0xffffffff, 0x00000000,
195         0x549c, 0xffffffff, 0x00000000,
196         0x5494, 0xffffffff, 0x00000000,
197         0x53cc, 0xffffffff, 0x00000000,
198         0x53c8, 0xffffffff, 0x00000000,
199         0x53c4, 0xffffffff, 0x00000000,
200         0x53c0, 0xffffffff, 0x00000000,
201         0x53bc, 0xffffffff, 0x00000000,
202         0x53b8, 0xffffffff, 0x00000000,
203         0x53b4, 0xffffffff, 0x00000000,
204         0x53b0, 0xffffffff, 0x00000000
205 };
206
207 static const u32 cypress_mgcg_init[] =
208 {
209         0x802c, 0xffffffff, 0xc0000000,
210         0x5448, 0xffffffff, 0x00000100,
211         0x55e4, 0xffffffff, 0x00000100,
212         0x160c, 0xffffffff, 0x00000100,
213         0x5644, 0xffffffff, 0x00000100,
214         0xc164, 0xffffffff, 0x00000100,
215         0x8a18, 0xffffffff, 0x00000100,
216         0x897c, 0xffffffff, 0x06000100,
217         0x8b28, 0xffffffff, 0x00000100,
218         0x9144, 0xffffffff, 0x00000100,
219         0x9a60, 0xffffffff, 0x00000100,
220         0x9868, 0xffffffff, 0x00000100,
221         0x8d58, 0xffffffff, 0x00000100,
222         0x9510, 0xffffffff, 0x00000100,
223         0x949c, 0xffffffff, 0x00000100,
224         0x9654, 0xffffffff, 0x00000100,
225         0x9030, 0xffffffff, 0x00000100,
226         0x9034, 0xffffffff, 0x00000100,
227         0x9038, 0xffffffff, 0x00000100,
228         0x903c, 0xffffffff, 0x00000100,
229         0x9040, 0xffffffff, 0x00000100,
230         0xa200, 0xffffffff, 0x00000100,
231         0xa204, 0xffffffff, 0x00000100,
232         0xa208, 0xffffffff, 0x00000100,
233         0xa20c, 0xffffffff, 0x00000100,
234         0x971c, 0xffffffff, 0x00000100,
235         0x977c, 0xffffffff, 0x00000100,
236         0x3f80, 0xffffffff, 0x00000100,
237         0xa210, 0xffffffff, 0x00000100,
238         0xa214, 0xffffffff, 0x00000100,
239         0x4d8, 0xffffffff, 0x00000100,
240         0x9784, 0xffffffff, 0x00000100,
241         0x9698, 0xffffffff, 0x00000100,
242         0x4d4, 0xffffffff, 0x00000200,
243         0x30cc, 0xffffffff, 0x00000100,
244         0xd0c0, 0xffffffff, 0xff000100,
245         0x802c, 0xffffffff, 0x40000000,
246         0x915c, 0xffffffff, 0x00010000,
247         0x9160, 0xffffffff, 0x00030002,
248         0x9178, 0xffffffff, 0x00070000,
249         0x917c, 0xffffffff, 0x00030002,
250         0x9180, 0xffffffff, 0x00050004,
251         0x918c, 0xffffffff, 0x00010006,
252         0x9190, 0xffffffff, 0x00090008,
253         0x9194, 0xffffffff, 0x00070000,
254         0x9198, 0xffffffff, 0x00030002,
255         0x919c, 0xffffffff, 0x00050004,
256         0x91a8, 0xffffffff, 0x00010006,
257         0x91ac, 0xffffffff, 0x00090008,
258         0x91b0, 0xffffffff, 0x00070000,
259         0x91b4, 0xffffffff, 0x00030002,
260         0x91b8, 0xffffffff, 0x00050004,
261         0x91c4, 0xffffffff, 0x00010006,
262         0x91c8, 0xffffffff, 0x00090008,
263         0x91cc, 0xffffffff, 0x00070000,
264         0x91d0, 0xffffffff, 0x00030002,
265         0x91d4, 0xffffffff, 0x00050004,
266         0x91e0, 0xffffffff, 0x00010006,
267         0x91e4, 0xffffffff, 0x00090008,
268         0x91e8, 0xffffffff, 0x00000000,
269         0x91ec, 0xffffffff, 0x00070000,
270         0x91f0, 0xffffffff, 0x00030002,
271         0x91f4, 0xffffffff, 0x00050004,
272         0x9200, 0xffffffff, 0x00010006,
273         0x9204, 0xffffffff, 0x00090008,
274         0x9208, 0xffffffff, 0x00070000,
275         0x920c, 0xffffffff, 0x00030002,
276         0x9210, 0xffffffff, 0x00050004,
277         0x921c, 0xffffffff, 0x00010006,
278         0x9220, 0xffffffff, 0x00090008,
279         0x9224, 0xffffffff, 0x00070000,
280         0x9228, 0xffffffff, 0x00030002,
281         0x922c, 0xffffffff, 0x00050004,
282         0x9238, 0xffffffff, 0x00010006,
283         0x923c, 0xffffffff, 0x00090008,
284         0x9240, 0xffffffff, 0x00070000,
285         0x9244, 0xffffffff, 0x00030002,
286         0x9248, 0xffffffff, 0x00050004,
287         0x9254, 0xffffffff, 0x00010006,
288         0x9258, 0xffffffff, 0x00090008,
289         0x925c, 0xffffffff, 0x00070000,
290         0x9260, 0xffffffff, 0x00030002,
291         0x9264, 0xffffffff, 0x00050004,
292         0x9270, 0xffffffff, 0x00010006,
293         0x9274, 0xffffffff, 0x00090008,
294         0x9278, 0xffffffff, 0x00070000,
295         0x927c, 0xffffffff, 0x00030002,
296         0x9280, 0xffffffff, 0x00050004,
297         0x928c, 0xffffffff, 0x00010006,
298         0x9290, 0xffffffff, 0x00090008,
299         0x9294, 0xffffffff, 0x00000000,
300         0x929c, 0xffffffff, 0x00000001,
301         0x802c, 0xffffffff, 0x40010000,
302         0x915c, 0xffffffff, 0x00010000,
303         0x9160, 0xffffffff, 0x00030002,
304         0x9178, 0xffffffff, 0x00070000,
305         0x917c, 0xffffffff, 0x00030002,
306         0x9180, 0xffffffff, 0x00050004,
307         0x918c, 0xffffffff, 0x00010006,
308         0x9190, 0xffffffff, 0x00090008,
309         0x9194, 0xffffffff, 0x00070000,
310         0x9198, 0xffffffff, 0x00030002,
311         0x919c, 0xffffffff, 0x00050004,
312         0x91a8, 0xffffffff, 0x00010006,
313         0x91ac, 0xffffffff, 0x00090008,
314         0x91b0, 0xffffffff, 0x00070000,
315         0x91b4, 0xffffffff, 0x00030002,
316         0x91b8, 0xffffffff, 0x00050004,
317         0x91c4, 0xffffffff, 0x00010006,
318         0x91c8, 0xffffffff, 0x00090008,
319         0x91cc, 0xffffffff, 0x00070000,
320         0x91d0, 0xffffffff, 0x00030002,
321         0x91d4, 0xffffffff, 0x00050004,
322         0x91e0, 0xffffffff, 0x00010006,
323         0x91e4, 0xffffffff, 0x00090008,
324         0x91e8, 0xffffffff, 0x00000000,
325         0x91ec, 0xffffffff, 0x00070000,
326         0x91f0, 0xffffffff, 0x00030002,
327         0x91f4, 0xffffffff, 0x00050004,
328         0x9200, 0xffffffff, 0x00010006,
329         0x9204, 0xffffffff, 0x00090008,
330         0x9208, 0xffffffff, 0x00070000,
331         0x920c, 0xffffffff, 0x00030002,
332         0x9210, 0xffffffff, 0x00050004,
333         0x921c, 0xffffffff, 0x00010006,
334         0x9220, 0xffffffff, 0x00090008,
335         0x9224, 0xffffffff, 0x00070000,
336         0x9228, 0xffffffff, 0x00030002,
337         0x922c, 0xffffffff, 0x00050004,
338         0x9238, 0xffffffff, 0x00010006,
339         0x923c, 0xffffffff, 0x00090008,
340         0x9240, 0xffffffff, 0x00070000,
341         0x9244, 0xffffffff, 0x00030002,
342         0x9248, 0xffffffff, 0x00050004,
343         0x9254, 0xffffffff, 0x00010006,
344         0x9258, 0xffffffff, 0x00090008,
345         0x925c, 0xffffffff, 0x00070000,
346         0x9260, 0xffffffff, 0x00030002,
347         0x9264, 0xffffffff, 0x00050004,
348         0x9270, 0xffffffff, 0x00010006,
349         0x9274, 0xffffffff, 0x00090008,
350         0x9278, 0xffffffff, 0x00070000,
351         0x927c, 0xffffffff, 0x00030002,
352         0x9280, 0xffffffff, 0x00050004,
353         0x928c, 0xffffffff, 0x00010006,
354         0x9290, 0xffffffff, 0x00090008,
355         0x9294, 0xffffffff, 0x00000000,
356         0x929c, 0xffffffff, 0x00000001,
357         0x802c, 0xffffffff, 0xc0000000
358 };
359
360 static const u32 redwood_mgcg_init[] =
361 {
362         0x802c, 0xffffffff, 0xc0000000,
363         0x5448, 0xffffffff, 0x00000100,
364         0x55e4, 0xffffffff, 0x00000100,
365         0x160c, 0xffffffff, 0x00000100,
366         0x5644, 0xffffffff, 0x00000100,
367         0xc164, 0xffffffff, 0x00000100,
368         0x8a18, 0xffffffff, 0x00000100,
369         0x897c, 0xffffffff, 0x06000100,
370         0x8b28, 0xffffffff, 0x00000100,
371         0x9144, 0xffffffff, 0x00000100,
372         0x9a60, 0xffffffff, 0x00000100,
373         0x9868, 0xffffffff, 0x00000100,
374         0x8d58, 0xffffffff, 0x00000100,
375         0x9510, 0xffffffff, 0x00000100,
376         0x949c, 0xffffffff, 0x00000100,
377         0x9654, 0xffffffff, 0x00000100,
378         0x9030, 0xffffffff, 0x00000100,
379         0x9034, 0xffffffff, 0x00000100,
380         0x9038, 0xffffffff, 0x00000100,
381         0x903c, 0xffffffff, 0x00000100,
382         0x9040, 0xffffffff, 0x00000100,
383         0xa200, 0xffffffff, 0x00000100,
384         0xa204, 0xffffffff, 0x00000100,
385         0xa208, 0xffffffff, 0x00000100,
386         0xa20c, 0xffffffff, 0x00000100,
387         0x971c, 0xffffffff, 0x00000100,
388         0x977c, 0xffffffff, 0x00000100,
389         0x3f80, 0xffffffff, 0x00000100,
390         0xa210, 0xffffffff, 0x00000100,
391         0xa214, 0xffffffff, 0x00000100,
392         0x4d8, 0xffffffff, 0x00000100,
393         0x9784, 0xffffffff, 0x00000100,
394         0x9698, 0xffffffff, 0x00000100,
395         0x4d4, 0xffffffff, 0x00000200,
396         0x30cc, 0xffffffff, 0x00000100,
397         0xd0c0, 0xffffffff, 0xff000100,
398         0x802c, 0xffffffff, 0x40000000,
399         0x915c, 0xffffffff, 0x00010000,
400         0x9160, 0xffffffff, 0x00030002,
401         0x9178, 0xffffffff, 0x00070000,
402         0x917c, 0xffffffff, 0x00030002,
403         0x9180, 0xffffffff, 0x00050004,
404         0x918c, 0xffffffff, 0x00010006,
405         0x9190, 0xffffffff, 0x00090008,
406         0x9194, 0xffffffff, 0x00070000,
407         0x9198, 0xffffffff, 0x00030002,
408         0x919c, 0xffffffff, 0x00050004,
409         0x91a8, 0xffffffff, 0x00010006,
410         0x91ac, 0xffffffff, 0x00090008,
411         0x91b0, 0xffffffff, 0x00070000,
412         0x91b4, 0xffffffff, 0x00030002,
413         0x91b8, 0xffffffff, 0x00050004,
414         0x91c4, 0xffffffff, 0x00010006,
415         0x91c8, 0xffffffff, 0x00090008,
416         0x91cc, 0xffffffff, 0x00070000,
417         0x91d0, 0xffffffff, 0x00030002,
418         0x91d4, 0xffffffff, 0x00050004,
419         0x91e0, 0xffffffff, 0x00010006,
420         0x91e4, 0xffffffff, 0x00090008,
421         0x91e8, 0xffffffff, 0x00000000,
422         0x91ec, 0xffffffff, 0x00070000,
423         0x91f0, 0xffffffff, 0x00030002,
424         0x91f4, 0xffffffff, 0x00050004,
425         0x9200, 0xffffffff, 0x00010006,
426         0x9204, 0xffffffff, 0x00090008,
427         0x9294, 0xffffffff, 0x00000000,
428         0x929c, 0xffffffff, 0x00000001,
429         0x802c, 0xffffffff, 0xc0000000
430 };
431
432 static const u32 cedar_golden_registers[] =
433 {
434         0x3f90, 0xffff0000, 0xff000000,
435         0x9148, 0xffff0000, 0xff000000,
436         0x3f94, 0xffff0000, 0xff000000,
437         0x914c, 0xffff0000, 0xff000000,
438         0x9b7c, 0xffffffff, 0x00000000,
439         0x8a14, 0xffffffff, 0x00000007,
440         0x8b10, 0xffffffff, 0x00000000,
441         0x960c, 0xffffffff, 0x54763210,
442         0x88c4, 0xffffffff, 0x000000c2,
443         0x88d4, 0xffffffff, 0x00000000,
444         0x8974, 0xffffffff, 0x00000000,
445         0xc78, 0x00000080, 0x00000080,
446         0x5eb4, 0xffffffff, 0x00000002,
447         0x5e78, 0xffffffff, 0x001000f0,
448         0x6104, 0x01000300, 0x00000000,
449         0x5bc0, 0x00300000, 0x00000000,
450         0x7030, 0xffffffff, 0x00000011,
451         0x7c30, 0xffffffff, 0x00000011,
452         0x10830, 0xffffffff, 0x00000011,
453         0x11430, 0xffffffff, 0x00000011,
454         0xd02c, 0xffffffff, 0x08421000,
455         0x240c, 0xffffffff, 0x00000380,
456         0x8b24, 0xffffffff, 0x00ff0fff,
457         0x28a4c, 0x06000000, 0x06000000,
458         0x10c, 0x00000001, 0x00000001,
459         0x8d00, 0xffffffff, 0x100e4848,
460         0x8d04, 0xffffffff, 0x00164745,
461         0x8c00, 0xffffffff, 0xe4000003,
462         0x8c04, 0xffffffff, 0x40600060,
463         0x8c08, 0xffffffff, 0x001c001c,
464         0x8cf0, 0xffffffff, 0x08e00410,
465         0x8c20, 0xffffffff, 0x00800080,
466         0x8c24, 0xffffffff, 0x00800080,
467         0x8c18, 0xffffffff, 0x20202078,
468         0x8c1c, 0xffffffff, 0x00001010,
469         0x28350, 0xffffffff, 0x00000000,
470         0xa008, 0xffffffff, 0x00010000,
471         0x5cc, 0xffffffff, 0x00000001,
472         0x9508, 0xffffffff, 0x00000002
473 };
474
475 static const u32 cedar_mgcg_init[] =
476 {
477         0x802c, 0xffffffff, 0xc0000000,
478         0x5448, 0xffffffff, 0x00000100,
479         0x55e4, 0xffffffff, 0x00000100,
480         0x160c, 0xffffffff, 0x00000100,
481         0x5644, 0xffffffff, 0x00000100,
482         0xc164, 0xffffffff, 0x00000100,
483         0x8a18, 0xffffffff, 0x00000100,
484         0x897c, 0xffffffff, 0x06000100,
485         0x8b28, 0xffffffff, 0x00000100,
486         0x9144, 0xffffffff, 0x00000100,
487         0x9a60, 0xffffffff, 0x00000100,
488         0x9868, 0xffffffff, 0x00000100,
489         0x8d58, 0xffffffff, 0x00000100,
490         0x9510, 0xffffffff, 0x00000100,
491         0x949c, 0xffffffff, 0x00000100,
492         0x9654, 0xffffffff, 0x00000100,
493         0x9030, 0xffffffff, 0x00000100,
494         0x9034, 0xffffffff, 0x00000100,
495         0x9038, 0xffffffff, 0x00000100,
496         0x903c, 0xffffffff, 0x00000100,
497         0x9040, 0xffffffff, 0x00000100,
498         0xa200, 0xffffffff, 0x00000100,
499         0xa204, 0xffffffff, 0x00000100,
500         0xa208, 0xffffffff, 0x00000100,
501         0xa20c, 0xffffffff, 0x00000100,
502         0x971c, 0xffffffff, 0x00000100,
503         0x977c, 0xffffffff, 0x00000100,
504         0x3f80, 0xffffffff, 0x00000100,
505         0xa210, 0xffffffff, 0x00000100,
506         0xa214, 0xffffffff, 0x00000100,
507         0x4d8, 0xffffffff, 0x00000100,
508         0x9784, 0xffffffff, 0x00000100,
509         0x9698, 0xffffffff, 0x00000100,
510         0x4d4, 0xffffffff, 0x00000200,
511         0x30cc, 0xffffffff, 0x00000100,
512         0xd0c0, 0xffffffff, 0xff000100,
513         0x802c, 0xffffffff, 0x40000000,
514         0x915c, 0xffffffff, 0x00010000,
515         0x9178, 0xffffffff, 0x00050000,
516         0x917c, 0xffffffff, 0x00030002,
517         0x918c, 0xffffffff, 0x00010004,
518         0x9190, 0xffffffff, 0x00070006,
519         0x9194, 0xffffffff, 0x00050000,
520         0x9198, 0xffffffff, 0x00030002,
521         0x91a8, 0xffffffff, 0x00010004,
522         0x91ac, 0xffffffff, 0x00070006,
523         0x91e8, 0xffffffff, 0x00000000,
524         0x9294, 0xffffffff, 0x00000000,
525         0x929c, 0xffffffff, 0x00000001,
526         0x802c, 0xffffffff, 0xc0000000
527 };
528
529 static const u32 juniper_mgcg_init[] =
530 {
531         0x802c, 0xffffffff, 0xc0000000,
532         0x5448, 0xffffffff, 0x00000100,
533         0x55e4, 0xffffffff, 0x00000100,
534         0x160c, 0xffffffff, 0x00000100,
535         0x5644, 0xffffffff, 0x00000100,
536         0xc164, 0xffffffff, 0x00000100,
537         0x8a18, 0xffffffff, 0x00000100,
538         0x897c, 0xffffffff, 0x06000100,
539         0x8b28, 0xffffffff, 0x00000100,
540         0x9144, 0xffffffff, 0x00000100,
541         0x9a60, 0xffffffff, 0x00000100,
542         0x9868, 0xffffffff, 0x00000100,
543         0x8d58, 0xffffffff, 0x00000100,
544         0x9510, 0xffffffff, 0x00000100,
545         0x949c, 0xffffffff, 0x00000100,
546         0x9654, 0xffffffff, 0x00000100,
547         0x9030, 0xffffffff, 0x00000100,
548         0x9034, 0xffffffff, 0x00000100,
549         0x9038, 0xffffffff, 0x00000100,
550         0x903c, 0xffffffff, 0x00000100,
551         0x9040, 0xffffffff, 0x00000100,
552         0xa200, 0xffffffff, 0x00000100,
553         0xa204, 0xffffffff, 0x00000100,
554         0xa208, 0xffffffff, 0x00000100,
555         0xa20c, 0xffffffff, 0x00000100,
556         0x971c, 0xffffffff, 0x00000100,
557         0xd0c0, 0xffffffff, 0xff000100,
558         0x802c, 0xffffffff, 0x40000000,
559         0x915c, 0xffffffff, 0x00010000,
560         0x9160, 0xffffffff, 0x00030002,
561         0x9178, 0xffffffff, 0x00070000,
562         0x917c, 0xffffffff, 0x00030002,
563         0x9180, 0xffffffff, 0x00050004,
564         0x918c, 0xffffffff, 0x00010006,
565         0x9190, 0xffffffff, 0x00090008,
566         0x9194, 0xffffffff, 0x00070000,
567         0x9198, 0xffffffff, 0x00030002,
568         0x919c, 0xffffffff, 0x00050004,
569         0x91a8, 0xffffffff, 0x00010006,
570         0x91ac, 0xffffffff, 0x00090008,
571         0x91b0, 0xffffffff, 0x00070000,
572         0x91b4, 0xffffffff, 0x00030002,
573         0x91b8, 0xffffffff, 0x00050004,
574         0x91c4, 0xffffffff, 0x00010006,
575         0x91c8, 0xffffffff, 0x00090008,
576         0x91cc, 0xffffffff, 0x00070000,
577         0x91d0, 0xffffffff, 0x00030002,
578         0x91d4, 0xffffffff, 0x00050004,
579         0x91e0, 0xffffffff, 0x00010006,
580         0x91e4, 0xffffffff, 0x00090008,
581         0x91e8, 0xffffffff, 0x00000000,
582         0x91ec, 0xffffffff, 0x00070000,
583         0x91f0, 0xffffffff, 0x00030002,
584         0x91f4, 0xffffffff, 0x00050004,
585         0x9200, 0xffffffff, 0x00010006,
586         0x9204, 0xffffffff, 0x00090008,
587         0x9208, 0xffffffff, 0x00070000,
588         0x920c, 0xffffffff, 0x00030002,
589         0x9210, 0xffffffff, 0x00050004,
590         0x921c, 0xffffffff, 0x00010006,
591         0x9220, 0xffffffff, 0x00090008,
592         0x9224, 0xffffffff, 0x00070000,
593         0x9228, 0xffffffff, 0x00030002,
594         0x922c, 0xffffffff, 0x00050004,
595         0x9238, 0xffffffff, 0x00010006,
596         0x923c, 0xffffffff, 0x00090008,
597         0x9240, 0xffffffff, 0x00070000,
598         0x9244, 0xffffffff, 0x00030002,
599         0x9248, 0xffffffff, 0x00050004,
600         0x9254, 0xffffffff, 0x00010006,
601         0x9258, 0xffffffff, 0x00090008,
602         0x925c, 0xffffffff, 0x00070000,
603         0x9260, 0xffffffff, 0x00030002,
604         0x9264, 0xffffffff, 0x00050004,
605         0x9270, 0xffffffff, 0x00010006,
606         0x9274, 0xffffffff, 0x00090008,
607         0x9278, 0xffffffff, 0x00070000,
608         0x927c, 0xffffffff, 0x00030002,
609         0x9280, 0xffffffff, 0x00050004,
610         0x928c, 0xffffffff, 0x00010006,
611         0x9290, 0xffffffff, 0x00090008,
612         0x9294, 0xffffffff, 0x00000000,
613         0x929c, 0xffffffff, 0x00000001,
614         0x802c, 0xffffffff, 0xc0000000,
615         0x977c, 0xffffffff, 0x00000100,
616         0x3f80, 0xffffffff, 0x00000100,
617         0xa210, 0xffffffff, 0x00000100,
618         0xa214, 0xffffffff, 0x00000100,
619         0x4d8, 0xffffffff, 0x00000100,
620         0x9784, 0xffffffff, 0x00000100,
621         0x9698, 0xffffffff, 0x00000100,
622         0x4d4, 0xffffffff, 0x00000200,
623         0x30cc, 0xffffffff, 0x00000100,
624         0x802c, 0xffffffff, 0xc0000000
625 };
626
627 static const u32 supersumo_golden_registers[] =
628 {
629         0x5eb4, 0xffffffff, 0x00000002,
630         0x5cc, 0xffffffff, 0x00000001,
631         0x7030, 0xffffffff, 0x00000011,
632         0x7c30, 0xffffffff, 0x00000011,
633         0x6104, 0x01000300, 0x00000000,
634         0x5bc0, 0x00300000, 0x00000000,
635         0x8c04, 0xffffffff, 0x40600060,
636         0x8c08, 0xffffffff, 0x001c001c,
637         0x8c20, 0xffffffff, 0x00800080,
638         0x8c24, 0xffffffff, 0x00800080,
639         0x8c18, 0xffffffff, 0x20202078,
640         0x8c1c, 0xffffffff, 0x00001010,
641         0x918c, 0xffffffff, 0x00010006,
642         0x91a8, 0xffffffff, 0x00010006,
643         0x91c4, 0xffffffff, 0x00010006,
644         0x91e0, 0xffffffff, 0x00010006,
645         0x9200, 0xffffffff, 0x00010006,
646         0x9150, 0xffffffff, 0x6e944040,
647         0x917c, 0xffffffff, 0x00030002,
648         0x9180, 0xffffffff, 0x00050004,
649         0x9198, 0xffffffff, 0x00030002,
650         0x919c, 0xffffffff, 0x00050004,
651         0x91b4, 0xffffffff, 0x00030002,
652         0x91b8, 0xffffffff, 0x00050004,
653         0x91d0, 0xffffffff, 0x00030002,
654         0x91d4, 0xffffffff, 0x00050004,
655         0x91f0, 0xffffffff, 0x00030002,
656         0x91f4, 0xffffffff, 0x00050004,
657         0x915c, 0xffffffff, 0x00010000,
658         0x9160, 0xffffffff, 0x00030002,
659         0x3f90, 0xffff0000, 0xff000000,
660         0x9178, 0xffffffff, 0x00070000,
661         0x9194, 0xffffffff, 0x00070000,
662         0x91b0, 0xffffffff, 0x00070000,
663         0x91cc, 0xffffffff, 0x00070000,
664         0x91ec, 0xffffffff, 0x00070000,
665         0x9148, 0xffff0000, 0xff000000,
666         0x9190, 0xffffffff, 0x00090008,
667         0x91ac, 0xffffffff, 0x00090008,
668         0x91c8, 0xffffffff, 0x00090008,
669         0x91e4, 0xffffffff, 0x00090008,
670         0x9204, 0xffffffff, 0x00090008,
671         0x3f94, 0xffff0000, 0xff000000,
672         0x914c, 0xffff0000, 0xff000000,
673         0x929c, 0xffffffff, 0x00000001,
674         0x8a18, 0xffffffff, 0x00000100,
675         0x8b28, 0xffffffff, 0x00000100,
676         0x9144, 0xffffffff, 0x00000100,
677         0x5644, 0xffffffff, 0x00000100,
678         0x9b7c, 0xffffffff, 0x00000000,
679         0x8030, 0xffffffff, 0x0000100a,
680         0x8a14, 0xffffffff, 0x00000007,
681         0x8b24, 0xffffffff, 0x00ff0fff,
682         0x8b10, 0xffffffff, 0x00000000,
683         0x28a4c, 0x06000000, 0x06000000,
684         0x4d8, 0xffffffff, 0x00000100,
685         0x913c, 0xffff000f, 0x0100000a,
686         0x960c, 0xffffffff, 0x54763210,
687         0x88c4, 0xffffffff, 0x000000c2,
688         0x88d4, 0xffffffff, 0x00000010,
689         0x8974, 0xffffffff, 0x00000000,
690         0xc78, 0x00000080, 0x00000080,
691         0x5e78, 0xffffffff, 0x001000f0,
692         0xd02c, 0xffffffff, 0x08421000,
693         0xa008, 0xffffffff, 0x00010000,
694         0x8d00, 0xffffffff, 0x100e4848,
695         0x8d04, 0xffffffff, 0x00164745,
696         0x8c00, 0xffffffff, 0xe4000003,
697         0x8cf0, 0x1fffffff, 0x08e00620,
698         0x28350, 0xffffffff, 0x00000000,
699         0x9508, 0xffffffff, 0x00000002
700 };
701
702 static const u32 sumo_golden_registers[] =
703 {
704         0x900c, 0x00ffffff, 0x0017071f,
705         0x8c18, 0xffffffff, 0x10101060,
706         0x8c1c, 0xffffffff, 0x00001010,
707         0x8c30, 0x0000000f, 0x00000005,
708         0x9688, 0x0000000f, 0x00000007
709 };
710
711 static const u32 wrestler_golden_registers[] =
712 {
713         0x5eb4, 0xffffffff, 0x00000002,
714         0x5cc, 0xffffffff, 0x00000001,
715         0x7030, 0xffffffff, 0x00000011,
716         0x7c30, 0xffffffff, 0x00000011,
717         0x6104, 0x01000300, 0x00000000,
718         0x5bc0, 0x00300000, 0x00000000,
719         0x918c, 0xffffffff, 0x00010006,
720         0x91a8, 0xffffffff, 0x00010006,
721         0x9150, 0xffffffff, 0x6e944040,
722         0x917c, 0xffffffff, 0x00030002,
723         0x9198, 0xffffffff, 0x00030002,
724         0x915c, 0xffffffff, 0x00010000,
725         0x3f90, 0xffff0000, 0xff000000,
726         0x9178, 0xffffffff, 0x00070000,
727         0x9194, 0xffffffff, 0x00070000,
728         0x9148, 0xffff0000, 0xff000000,
729         0x9190, 0xffffffff, 0x00090008,
730         0x91ac, 0xffffffff, 0x00090008,
731         0x3f94, 0xffff0000, 0xff000000,
732         0x914c, 0xffff0000, 0xff000000,
733         0x929c, 0xffffffff, 0x00000001,
734         0x8a18, 0xffffffff, 0x00000100,
735         0x8b28, 0xffffffff, 0x00000100,
736         0x9144, 0xffffffff, 0x00000100,
737         0x9b7c, 0xffffffff, 0x00000000,
738         0x8030, 0xffffffff, 0x0000100a,
739         0x8a14, 0xffffffff, 0x00000001,
740         0x8b24, 0xffffffff, 0x00ff0fff,
741         0x8b10, 0xffffffff, 0x00000000,
742         0x28a4c, 0x06000000, 0x06000000,
743         0x4d8, 0xffffffff, 0x00000100,
744         0x913c, 0xffff000f, 0x0100000a,
745         0x960c, 0xffffffff, 0x54763210,
746         0x88c4, 0xffffffff, 0x000000c2,
747         0x88d4, 0xffffffff, 0x00000010,
748         0x8974, 0xffffffff, 0x00000000,
749         0xc78, 0x00000080, 0x00000080,
750         0x5e78, 0xffffffff, 0x001000f0,
751         0xd02c, 0xffffffff, 0x08421000,
752         0xa008, 0xffffffff, 0x00010000,
753         0x8d00, 0xffffffff, 0x100e4848,
754         0x8d04, 0xffffffff, 0x00164745,
755         0x8c00, 0xffffffff, 0xe4000003,
756         0x8cf0, 0x1fffffff, 0x08e00410,
757         0x28350, 0xffffffff, 0x00000000,
758         0x9508, 0xffffffff, 0x00000002,
759         0x900c, 0xffffffff, 0x0017071f,
760         0x8c18, 0xffffffff, 0x10101060,
761         0x8c1c, 0xffffffff, 0x00001010
762 };
763
/* Golden register fixups for Barts (Northern Islands) boards.
 * Flat list of { register offset, AND mask, OR value } triples consumed by
 * radeon_program_register_sequence() at asic init; values are
 * hardware-validated settings and must not be altered.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
812
/* Golden register fixups for Turks (Northern Islands) boards.
 * { register offset, AND mask, OR value } triples consumed by
 * radeon_program_register_sequence() at asic init; do not modify values.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
863
/* Golden register fixups for Caicos (Northern Islands) boards.
 * { register offset, AND mask, OR value } triples consumed by
 * radeon_program_register_sequence() at asic init; do not modify values.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
914
915 static void evergreen_init_golden_registers(struct radeon_device *rdev)
916 {
917         switch (rdev->family) {
918         case CHIP_CYPRESS:
919         case CHIP_HEMLOCK:
920                 radeon_program_register_sequence(rdev,
921                                                  evergreen_golden_registers,
922                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
923                 radeon_program_register_sequence(rdev,
924                                                  evergreen_golden_registers2,
925                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
926                 radeon_program_register_sequence(rdev,
927                                                  cypress_mgcg_init,
928                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
929                 break;
930         case CHIP_JUNIPER:
931                 radeon_program_register_sequence(rdev,
932                                                  evergreen_golden_registers,
933                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
934                 radeon_program_register_sequence(rdev,
935                                                  evergreen_golden_registers2,
936                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
937                 radeon_program_register_sequence(rdev,
938                                                  juniper_mgcg_init,
939                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
940                 break;
941         case CHIP_REDWOOD:
942                 radeon_program_register_sequence(rdev,
943                                                  evergreen_golden_registers,
944                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
945                 radeon_program_register_sequence(rdev,
946                                                  evergreen_golden_registers2,
947                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
948                 radeon_program_register_sequence(rdev,
949                                                  redwood_mgcg_init,
950                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
951                 break;
952         case CHIP_CEDAR:
953                 radeon_program_register_sequence(rdev,
954                                                  cedar_golden_registers,
955                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
956                 radeon_program_register_sequence(rdev,
957                                                  evergreen_golden_registers2,
958                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
959                 radeon_program_register_sequence(rdev,
960                                                  cedar_mgcg_init,
961                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
962                 break;
963         case CHIP_PALM:
964                 radeon_program_register_sequence(rdev,
965                                                  wrestler_golden_registers,
966                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
967                 break;
968         case CHIP_SUMO:
969                 radeon_program_register_sequence(rdev,
970                                                  supersumo_golden_registers,
971                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
972                 break;
973         case CHIP_SUMO2:
974                 radeon_program_register_sequence(rdev,
975                                                  supersumo_golden_registers,
976                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
977                 radeon_program_register_sequence(rdev,
978                                                  sumo_golden_registers,
979                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
980                 break;
981         case CHIP_BARTS:
982                 radeon_program_register_sequence(rdev,
983                                                  barts_golden_registers,
984                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
985                 break;
986         case CHIP_TURKS:
987                 radeon_program_register_sequence(rdev,
988                                                  turks_golden_registers,
989                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
990                 break;
991         case CHIP_CAICOS:
992                 radeon_program_register_sequence(rdev,
993                                                  caicos_golden_registers,
994                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
995                 break;
996         default:
997                 break;
998         }
999 }
1000
1001 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1002                              unsigned *bankh, unsigned *mtaspect,
1003                              unsigned *tile_split)
1004 {
1005         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1006         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1007         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1008         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1009         switch (*bankw) {
1010         default:
1011         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1012         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1013         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1014         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1015         }
1016         switch (*bankh) {
1017         default:
1018         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1019         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1020         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1021         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1022         }
1023         switch (*mtaspect) {
1024         default:
1025         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1026         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1027         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1028         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1029         }
1030 }
1031
1032 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1033                               u32 cntl_reg, u32 status_reg)
1034 {
1035         int r, i;
1036         struct atom_clock_dividers dividers;
1037
1038         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1039                                            clock, false, &dividers);
1040         if (r)
1041                 return r;
1042
1043         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1044
1045         for (i = 0; i < 100; i++) {
1046                 if (RREG32(status_reg) & DCLK_STATUS)
1047                         break;
1048                 mdelay(10);
1049         }
1050         if (i == 100)
1051                 return -ETIMEDOUT;
1052
1053         return 0;
1054 }
1055
1056 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1057 {
1058         int r = 0;
1059         u32 cg_scratch = RREG32(CG_SCRATCH1);
1060
1061         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1062         if (r)
1063                 goto done;
1064         cg_scratch &= 0xffff0000;
1065         cg_scratch |= vclk / 100; /* Mhz */
1066
1067         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1068         if (r)
1069                 goto done;
1070         cg_scratch &= 0x0000ffff;
1071         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1072
1073 done:
1074         WREG32(CG_SCRATCH1, cg_scratch);
1075
1076         return r;
1077 }
1078
/**
 * evergreen_set_uvd_clocks - program the UVD VCLK/DCLK via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (presumably 10 kHz units like
 *        sumo_set_uvd_clocks — TODO confirm against callers)
 * @dclk: requested UVD decoder clock (same units)
 *
 * Reprograms the UPLL for the requested clocks.  If either clock is 0,
 * the PLL is left in bypass mode and put to sleep.  The statement order
 * here follows the required hardware programming sequence; do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb and post dividers for both clocks from one VCO */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* handshake with the SMC before touching the dividers */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* ISPARE9 selection depends on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1167
1168 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1169 {
1170         u16 ctl, v;
1171         int err;
1172
1173         err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1174         if (err)
1175                 return;
1176
1177         v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1178
1179         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1180          * to avoid hangs or perfomance issues
1181          */
1182         if ((v == 0) || (v == 6) || (v == 7)) {
1183                 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1184                 ctl |= (2 << 12);
1185                 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1186         }
1187 }
1188
1189 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1190 {
1191         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1192                 return true;
1193         else
1194                 return false;
1195 }
1196
1197 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1198 {
1199         u32 pos1, pos2;
1200
1201         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1202         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1203
1204         if (pos1 != pos2)
1205                 return true;
1206         else
1207                 return false;
1208 }
1209
1210 /**
1211  * dce4_wait_for_vblank - vblank wait asic callback.
1212  *
1213  * @rdev: radeon_device pointer
1214  * @crtc: crtc to wait for vblank on
1215  *
1216  * Wait for vblank on the requested crtc (evergreen+).
1217  */
1218 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1219 {
1220         unsigned i = 0;
1221
1222         if (crtc >= rdev->num_crtc)
1223                 return;
1224
1225         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1226                 return;
1227
1228         /* depending on when we hit vblank, we may be close to active; if so,
1229          * wait for another frame.
1230          */
1231         while (dce4_is_in_vblank(rdev, crtc)) {
1232                 if (i++ % 100 == 0) {
1233                         if (!dce4_is_counter_moving(rdev, crtc))
1234                                 break;
1235                 }
1236         }
1237
1238         while (!dce4_is_in_vblank(rdev, crtc)) {
1239                 if (i++ % 100 == 0) {
1240                         if (!dce4_is_counter_moving(rdev, crtc))
1241                                 break;
1242                 }
1243         }
1244 }
1245
1246 /**
1247  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1248  *
1249  * @rdev: radeon_device pointer
1250  * @crtc: crtc to prepare for pageflip on
1251  *
1252  * Pre-pageflip callback (evergreen+).
1253  * Enables the pageflip irq (vblank irq).
1254  */
1255 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1256 {
1257         /* enable the pflip int */
1258         radeon_irq_kms_pflip_irq_get(rdev, crtc);
1259 }
1260
1261 /**
1262  * evergreen_post_page_flip - pos-pageflip callback.
1263  *
1264  * @rdev: radeon_device pointer
1265  * @crtc: crtc to cleanup pageflip on
1266  *
1267  * Post-pageflip callback (evergreen+).
1268  * Disables the pageflip irq (vblank irq).
1269  */
1270 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1271 {
1272         /* disable the pflip int */
1273         radeon_irq_kms_pflip_irq_put(rdev, crtc);
1274 }
1275
1276 /**
1277  * evergreen_page_flip - pageflip callback.
1278  *
1279  * @rdev: radeon_device pointer
1280  * @crtc_id: crtc to cleanup pageflip on
1281  * @crtc_base: new address of the crtc (GPU MC address)
1282  *
1283  * Does the actual pageflip (evergreen+).
1284  * During vblank we take the crtc lock and wait for the update_pending
1285  * bit to go high, when it does, we release the lock, and allow the
1286  * double buffered update to take place.
1287  * Returns the current update pending status.
1288  */
1289 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1290 {
1291         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1292         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1293         int i;
1294
1295         /* Lock the graphics update lock */
1296         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1297         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1298
1299         /* update the scanout addresses */
1300         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1301                upper_32_bits(crtc_base));
1302         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1303                (u32)crtc_base);
1304
1305         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1306                upper_32_bits(crtc_base));
1307         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1308                (u32)crtc_base);
1309
1310         /* Wait for update_pending to go high. */
1311         for (i = 0; i < rdev->usec_timeout; i++) {
1312                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1313                         break;
1314                 udelay(1);
1315         }
1316         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1317
1318         /* Unlock the lock, so double-buffering can take place inside vblank */
1319         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1320         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1321
1322         /* Return current update_pending status: */
1323         return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1324 }
1325
1326 /* get temperature in millidegrees */
1327 int evergreen_get_temp(struct radeon_device *rdev)
1328 {
1329         u32 temp, toffset;
1330         int actual_temp = 0;
1331
1332         if (rdev->family == CHIP_JUNIPER) {
1333                 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1334                         TOFFSET_SHIFT;
1335                 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1336                         TS0_ADC_DOUT_SHIFT;
1337
1338                 if (toffset & 0x100)
1339                         actual_temp = temp / 2 - (0x200 - toffset);
1340                 else
1341                         actual_temp = temp / 2 + toffset;
1342
1343                 actual_temp = actual_temp * 1000;
1344
1345         } else {
1346                 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1347                         ASIC_T_SHIFT;
1348
1349                 if (temp & 0x400)
1350                         actual_temp = -256;
1351                 else if (temp & 0x200)
1352                         actual_temp = 255;
1353                 else if (temp & 0x100) {
1354                         actual_temp = temp & 0x1ff;
1355                         actual_temp |= ~0x1ff;
1356                 } else
1357                         actual_temp = temp & 0xff;
1358
1359                 actual_temp = (actual_temp * 1000) / 2;
1360         }
1361
1362         return actual_temp;
1363 }
1364
1365 int sumo_get_temp(struct radeon_device *rdev)
1366 {
1367         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1368         int actual_temp = temp - 49;
1369
1370         return actual_temp * 1000;
1371 }
1372
1373 /**
1374  * sumo_pm_init_profile - Initialize power profiles callback.
1375  *
1376  * @rdev: radeon_device pointer
1377  *
1378  * Initialize the power states used in profile mode
1379  * (sumo, trinity, SI).
1380  * Used for profile mode only.
1381  */
1382 void sumo_pm_init_profile(struct radeon_device *rdev)
1383 {
1384         int idx;
1385
1386         /* default */
1387         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1388         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1389         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1390         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1391
1392         /* low,mid sh/mh */
1393         if (rdev->flags & RADEON_IS_MOBILITY)
1394                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1395         else
1396                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1397
1398         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1399         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1400         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1401         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1402
1403         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1404         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1405         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1406         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1407
1408         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1409         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1410         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1411         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1412
1413         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1414         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1415         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1416         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1417
1418         /* high sh/mh */
1419         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1420         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1421         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1422         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1423         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1424                 rdev->pm.power_state[idx].num_clock_modes - 1;
1425
1426         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1427         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1428         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1429         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1430                 rdev->pm.power_state[idx].num_clock_modes - 1;
1431 }
1432
1433 /**
1434  * btc_pm_init_profile - Initialize power profiles callback.
1435  *
1436  * @rdev: radeon_device pointer
1437  *
1438  * Initialize the power states used in profile mode
1439  * (BTC, cayman).
1440  * Used for profile mode only.
1441  */
1442 void btc_pm_init_profile(struct radeon_device *rdev)
1443 {
1444         int idx;
1445
1446         /* default */
1447         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1448         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1449         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1450         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1451         /* starting with BTC, there is one state that is used for both
1452          * MH and SH.  Difference is that we always use the high clock index for
1453          * mclk.
1454          */
1455         if (rdev->flags & RADEON_IS_MOBILITY)
1456                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1457         else
1458                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1459         /* low sh */
1460         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1461         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1462         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1463         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1464         /* mid sh */
1465         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1466         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1467         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1468         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1469         /* high sh */
1470         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1471         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1472         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1473         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1474         /* low mh */
1475         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1476         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1477         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1478         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1479         /* mid mh */
1480         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1481         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1482         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1483         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1484         /* high mh */
1485         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1486         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1487         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1488         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1489 }
1490
1491 /**
1492  * evergreen_pm_misc - set additional pm hw parameters callback.
1493  *
1494  * @rdev: radeon_device pointer
1495  *
1496  * Set non-clock parameters associated with a power state
1497  * (voltage, etc.) (evergreen+).
1498  */
1499 void evergreen_pm_misc(struct radeon_device *rdev)
1500 {
1501         int req_ps_idx = rdev->pm.requested_power_state_index;
1502         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1503         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1504         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1505
1506         if (voltage->type == VOLTAGE_SW) {
1507                 /* 0xff01 is a flag rather then an actual voltage */
1508                 if (voltage->voltage == 0xff01)
1509                         return;
1510                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1511                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1512                         rdev->pm.current_vddc = voltage->voltage;
1513                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1514                 }
1515
1516                 /* starting with BTC, there is one state that is used for both
1517                  * MH and SH.  Difference is that we always use the high clock index for
1518                  * mclk and vddci.
1519                  */
1520                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1521                     (rdev->family >= CHIP_BARTS) &&
1522                     rdev->pm.active_crtc_count &&
1523                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1524                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1525                         voltage = &rdev->pm.power_state[req_ps_idx].
1526                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1527
1528                 /* 0xff01 is a flag rather then an actual voltage */
1529                 if (voltage->vddci == 0xff01)
1530                         return;
1531                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1532                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1533                         rdev->pm.current_vddci = voltage->vddci;
1534                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1535                 }
1536         }
1537 }
1538
1539 /**
1540  * evergreen_pm_prepare - pre-power state change callback.
1541  *
1542  * @rdev: radeon_device pointer
1543  *
1544  * Prepare for a power state change (evergreen+).
1545  */
1546 void evergreen_pm_prepare(struct radeon_device *rdev)
1547 {
1548         struct drm_device *ddev = rdev->ddev;
1549         struct drm_crtc *crtc;
1550         struct radeon_crtc *radeon_crtc;
1551         u32 tmp;
1552
1553         /* disable any active CRTCs */
1554         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1555                 radeon_crtc = to_radeon_crtc(crtc);
1556                 if (radeon_crtc->enabled) {
1557                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1558                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1559                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1560                 }
1561         }
1562 }
1563
1564 /**
1565  * evergreen_pm_finish - post-power state change callback.
1566  *
1567  * @rdev: radeon_device pointer
1568  *
1569  * Clean up after a power state change (evergreen+).
1570  */
1571 void evergreen_pm_finish(struct radeon_device *rdev)
1572 {
1573         struct drm_device *ddev = rdev->ddev;
1574         struct drm_crtc *crtc;
1575         struct radeon_crtc *radeon_crtc;
1576         u32 tmp;
1577
1578         /* enable any active CRTCs */
1579         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1580                 radeon_crtc = to_radeon_crtc(crtc);
1581                 if (radeon_crtc->enabled) {
1582                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1583                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1584                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1585                 }
1586         }
1587 }
1588
1589 /**
1590  * evergreen_hpd_sense - hpd sense callback.
1591  *
1592  * @rdev: radeon_device pointer
1593  * @hpd: hpd (hotplug detect) pin
1594  *
1595  * Checks if a digital monitor is connected (evergreen+).
1596  * Returns true if connected, false if not connected.
1597  */
1598 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1599 {
1600         bool connected = false;
1601
1602         switch (hpd) {
1603         case RADEON_HPD_1:
1604                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1605                         connected = true;
1606                 break;
1607         case RADEON_HPD_2:
1608                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1609                         connected = true;
1610                 break;
1611         case RADEON_HPD_3:
1612                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1613                         connected = true;
1614                 break;
1615         case RADEON_HPD_4:
1616                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1617                         connected = true;
1618                 break;
1619         case RADEON_HPD_5:
1620                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1621                         connected = true;
1622                 break;
1623         case RADEON_HPD_6:
1624                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1625                         connected = true;
1626                         break;
1627         default:
1628                 break;
1629         }
1630
1631         return connected;
1632 }
1633
1634 /**
1635  * evergreen_hpd_set_polarity - hpd set polarity callback.
1636  *
1637  * @rdev: radeon_device pointer
1638  * @hpd: hpd (hotplug detect) pin
1639  *
1640  * Set the polarity of the hpd pin (evergreen+).
1641  */
1642 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1643                                 enum radeon_hpd_id hpd)
1644 {
1645         u32 tmp;
1646         bool connected = evergreen_hpd_sense(rdev, hpd);
1647
1648         switch (hpd) {
1649         case RADEON_HPD_1:
1650                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1651                 if (connected)
1652                         tmp &= ~DC_HPDx_INT_POLARITY;
1653                 else
1654                         tmp |= DC_HPDx_INT_POLARITY;
1655                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1656                 break;
1657         case RADEON_HPD_2:
1658                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1659                 if (connected)
1660                         tmp &= ~DC_HPDx_INT_POLARITY;
1661                 else
1662                         tmp |= DC_HPDx_INT_POLARITY;
1663                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1664                 break;
1665         case RADEON_HPD_3:
1666                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1667                 if (connected)
1668                         tmp &= ~DC_HPDx_INT_POLARITY;
1669                 else
1670                         tmp |= DC_HPDx_INT_POLARITY;
1671                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1672                 break;
1673         case RADEON_HPD_4:
1674                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1675                 if (connected)
1676                         tmp &= ~DC_HPDx_INT_POLARITY;
1677                 else
1678                         tmp |= DC_HPDx_INT_POLARITY;
1679                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1680                 break;
1681         case RADEON_HPD_5:
1682                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1683                 if (connected)
1684                         tmp &= ~DC_HPDx_INT_POLARITY;
1685                 else
1686                         tmp |= DC_HPDx_INT_POLARITY;
1687                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1688                         break;
1689         case RADEON_HPD_6:
1690                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1691                 if (connected)
1692                         tmp &= ~DC_HPDx_INT_POLARITY;
1693                 else
1694                         tmp |= DC_HPDx_INT_POLARITY;
1695                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1696                 break;
1697         default:
1698                 break;
1699         }
1700 }
1701
1702 /**
1703  * evergreen_hpd_init - hpd setup callback.
1704  *
1705  * @rdev: radeon_device pointer
1706  *
1707  * Setup the hpd pins used by the card (evergreen+).
1708  * Enable the pin, set the polarity, and enable the hpd interrupts.
1709  */
1710 void evergreen_hpd_init(struct radeon_device *rdev)
1711 {
1712         struct drm_device *dev = rdev->ddev;
1713         struct drm_connector *connector;
1714         unsigned enabled = 0;
1715         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1716                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1717
1718         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1719                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1720
1721                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1722                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1723                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1724                          * aux dp channel on imac and help (but not completely fix)
1725                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1726                          * also avoid interrupt storms during dpms.
1727                          */
1728                         continue;
1729                 }
1730                 switch (radeon_connector->hpd.hpd) {
1731                 case RADEON_HPD_1:
1732                         WREG32(DC_HPD1_CONTROL, tmp);
1733                         break;
1734                 case RADEON_HPD_2:
1735                         WREG32(DC_HPD2_CONTROL, tmp);
1736                         break;
1737                 case RADEON_HPD_3:
1738                         WREG32(DC_HPD3_CONTROL, tmp);
1739                         break;
1740                 case RADEON_HPD_4:
1741                         WREG32(DC_HPD4_CONTROL, tmp);
1742                         break;
1743                 case RADEON_HPD_5:
1744                         WREG32(DC_HPD5_CONTROL, tmp);
1745                         break;
1746                 case RADEON_HPD_6:
1747                         WREG32(DC_HPD6_CONTROL, tmp);
1748                         break;
1749                 default:
1750                         break;
1751                 }
1752                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1753                 enabled |= 1 << radeon_connector->hpd.hpd;
1754         }
1755         radeon_irq_kms_enable_hpd(rdev, enabled);
1756 }
1757
1758 /**
1759  * evergreen_hpd_fini - hpd tear down callback.
1760  *
1761  * @rdev: radeon_device pointer
1762  *
1763  * Tear down the hpd pins used by the card (evergreen+).
1764  * Disable the hpd interrupts.
1765  */
1766 void evergreen_hpd_fini(struct radeon_device *rdev)
1767 {
1768         struct drm_device *dev = rdev->ddev;
1769         struct drm_connector *connector;
1770         unsigned disabled = 0;
1771
1772         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1773                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1774                 switch (radeon_connector->hpd.hpd) {
1775                 case RADEON_HPD_1:
1776                         WREG32(DC_HPD1_CONTROL, 0);
1777                         break;
1778                 case RADEON_HPD_2:
1779                         WREG32(DC_HPD2_CONTROL, 0);
1780                         break;
1781                 case RADEON_HPD_3:
1782                         WREG32(DC_HPD3_CONTROL, 0);
1783                         break;
1784                 case RADEON_HPD_4:
1785                         WREG32(DC_HPD4_CONTROL, 0);
1786                         break;
1787                 case RADEON_HPD_5:
1788                         WREG32(DC_HPD5_CONTROL, 0);
1789                         break;
1790                 case RADEON_HPD_6:
1791                         WREG32(DC_HPD6_CONTROL, 0);
1792                         break;
1793                 default:
1794                         break;
1795                 }
1796                 disabled |= 1 << radeon_connector->hpd.hpd;
1797         }
1798         radeon_irq_kms_disable_hpd(rdev, disabled);
1799 }
1800
1801 /* watermark setup */
1802
1803 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1804                                         struct radeon_crtc *radeon_crtc,
1805                                         struct drm_display_mode *mode,
1806                                         struct drm_display_mode *other_mode)
1807 {
1808         u32 tmp;
1809         /*
1810          * Line Buffer Setup
1811          * There are 3 line buffers, each one shared by 2 display controllers.
1812          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1813          * the display controllers.  The paritioning is done via one of four
1814          * preset allocations specified in bits 2:0:
1815          * first display controller
1816          *  0 - first half of lb (3840 * 2)
1817          *  1 - first 3/4 of lb (5760 * 2)
1818          *  2 - whole lb (7680 * 2), other crtc must be disabled
1819          *  3 - first 1/4 of lb (1920 * 2)
1820          * second display controller
1821          *  4 - second half of lb (3840 * 2)
1822          *  5 - second 3/4 of lb (5760 * 2)
1823          *  6 - whole lb (7680 * 2), other crtc must be disabled
1824          *  7 - last 1/4 of lb (1920 * 2)
1825          */
1826         /* this can get tricky if we have two large displays on a paired group
1827          * of crtcs.  Ideally for multiple large displays we'd assign them to
1828          * non-linked crtcs for maximum line buffer allocation.
1829          */
1830         if (radeon_crtc->base.enabled && mode) {
1831                 if (other_mode)
1832                         tmp = 0; /* 1/2 */
1833                 else
1834                         tmp = 2; /* whole */
1835         } else
1836                 tmp = 0;
1837
1838         /* second controller of the pair uses second half of the lb */
1839         if (radeon_crtc->crtc_id % 2)
1840                 tmp += 4;
1841         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1842
1843         if (radeon_crtc->base.enabled && mode) {
1844                 switch (tmp) {
1845                 case 0:
1846                 case 4:
1847                 default:
1848                         if (ASIC_IS_DCE5(rdev))
1849                                 return 4096 * 2;
1850                         else
1851                                 return 3840 * 2;
1852                 case 1:
1853                 case 5:
1854                         if (ASIC_IS_DCE5(rdev))
1855                                 return 6144 * 2;
1856                         else
1857                                 return 5760 * 2;
1858                 case 2:
1859                 case 6:
1860                         if (ASIC_IS_DCE5(rdev))
1861                                 return 8192 * 2;
1862                         else
1863                                 return 7680 * 2;
1864                 case 3:
1865                 case 7:
1866                         if (ASIC_IS_DCE5(rdev))
1867                                 return 2048 * 2;
1868                         else
1869                                 return 1920 * 2;
1870                 }
1871         }
1872
1873         /* controller not enabled, so no lb used */
1874         return 0;
1875 }
1876
1877 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1878 {
1879         u32 tmp = RREG32(MC_SHARED_CHMAP);
1880
1881         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1882         case 0:
1883         default:
1884                 return 1;
1885         case 1:
1886                 return 2;
1887         case 2:
1888                 return 4;
1889         case 3:
1890                 return 8;
1891         }
1892 }
1893
/* Inputs for the display watermark calculations below; filled in by
 * evergreen_program_watermarks() from the current mode and clocks.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1909
1910 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1911 {
1912         /* Calculate DRAM Bandwidth and the part allocated to display. */
1913         fixed20_12 dram_efficiency; /* 0.7 */
1914         fixed20_12 yclk, dram_channels, bandwidth;
1915         fixed20_12 a;
1916
1917         a.full = dfixed_const(1000);
1918         yclk.full = dfixed_const(wm->yclk);
1919         yclk.full = dfixed_div(yclk, a);
1920         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1921         a.full = dfixed_const(10);
1922         dram_efficiency.full = dfixed_const(7);
1923         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1924         bandwidth.full = dfixed_mul(dram_channels, yclk);
1925         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1926
1927         return dfixed_trunc(bandwidth);
1928 }
1929
1930 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1931 {
1932         /* Calculate DRAM Bandwidth and the part allocated to display. */
1933         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1934         fixed20_12 yclk, dram_channels, bandwidth;
1935         fixed20_12 a;
1936
1937         a.full = dfixed_const(1000);
1938         yclk.full = dfixed_const(wm->yclk);
1939         yclk.full = dfixed_div(yclk, a);
1940         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1941         a.full = dfixed_const(10);
1942         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1943         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1944         bandwidth.full = dfixed_mul(dram_channels, yclk);
1945         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1946
1947         return dfixed_trunc(bandwidth);
1948 }
1949
1950 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1951 {
1952         /* Calculate the display Data return Bandwidth */
1953         fixed20_12 return_efficiency; /* 0.8 */
1954         fixed20_12 sclk, bandwidth;
1955         fixed20_12 a;
1956
1957         a.full = dfixed_const(1000);
1958         sclk.full = dfixed_const(wm->sclk);
1959         sclk.full = dfixed_div(sclk, a);
1960         a.full = dfixed_const(10);
1961         return_efficiency.full = dfixed_const(8);
1962         return_efficiency.full = dfixed_div(return_efficiency, a);
1963         a.full = dfixed_const(32);
1964         bandwidth.full = dfixed_mul(a, sclk);
1965         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1966
1967         return dfixed_trunc(bandwidth);
1968 }
1969
1970 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1971 {
1972         /* Calculate the DMIF Request Bandwidth */
1973         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1974         fixed20_12 disp_clk, bandwidth;
1975         fixed20_12 a;
1976
1977         a.full = dfixed_const(1000);
1978         disp_clk.full = dfixed_const(wm->disp_clk);
1979         disp_clk.full = dfixed_div(disp_clk, a);
1980         a.full = dfixed_const(10);
1981         disp_clk_request_efficiency.full = dfixed_const(8);
1982         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1983         a.full = dfixed_const(32);
1984         bandwidth.full = dfixed_mul(a, disp_clk);
1985         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1986
1987         return dfixed_trunc(bandwidth);
1988 }
1989
1990 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1991 {
1992         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1993         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1994         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1995         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1996
1997         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1998 }
1999
2000 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2001 {
2002         /* Calculate the display mode Average Bandwidth
2003          * DisplayMode should contain the source and destination dimensions,
2004          * timing, etc.
2005          */
2006         fixed20_12 bpp;
2007         fixed20_12 line_time;
2008         fixed20_12 src_width;
2009         fixed20_12 bandwidth;
2010         fixed20_12 a;
2011
2012         a.full = dfixed_const(1000);
2013         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2014         line_time.full = dfixed_div(line_time, a);
2015         bpp.full = dfixed_const(wm->bytes_per_pixel);
2016         src_width.full = dfixed_const(wm->src_width);
2017         bandwidth.full = dfixed_mul(src_width, bpp);
2018         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2019         bandwidth.full = dfixed_div(bandwidth, line_time);
2020
2021         return dfixed_trunc(bandwidth);
2022 }
2023
2024 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2025 {
2026         /* First calcualte the latency in ns */
2027         u32 mc_latency = 2000; /* 2000 ns. */
2028         u32 available_bandwidth = evergreen_available_bandwidth(wm);
2029         u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2030         u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2031         u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2032         u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2033                 (wm->num_heads * cursor_line_pair_return_time);
2034         u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2035         u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2036         fixed20_12 a, b, c;
2037
2038         if (wm->num_heads == 0)
2039                 return 0;
2040
2041         a.full = dfixed_const(2);
2042         b.full = dfixed_const(1);
2043         if ((wm->vsc.full > a.full) ||
2044             ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2045             (wm->vtaps >= 5) ||
2046             ((wm->vsc.full >= a.full) && wm->interlaced))
2047                 max_src_lines_per_dst_line = 4;
2048         else
2049                 max_src_lines_per_dst_line = 2;
2050
2051         a.full = dfixed_const(available_bandwidth);
2052         b.full = dfixed_const(wm->num_heads);
2053         a.full = dfixed_div(a, b);
2054
2055         b.full = dfixed_const(1000);
2056         c.full = dfixed_const(wm->disp_clk);
2057         b.full = dfixed_div(c, b);
2058         c.full = dfixed_const(wm->bytes_per_pixel);
2059         b.full = dfixed_mul(b, c);
2060
2061         lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2062
2063         a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2064         b.full = dfixed_const(1000);
2065         c.full = dfixed_const(lb_fill_bw);
2066         b.full = dfixed_div(c, b);
2067         a.full = dfixed_div(a, b);
2068         line_fill_time = dfixed_trunc(a);
2069
2070         if (line_fill_time < wm->active_time)
2071                 return latency;
2072         else
2073                 return latency + (line_fill_time - wm->active_time);
2074
2075 }
2076
2077 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2078 {
2079         if (evergreen_average_bandwidth(wm) <=
2080             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2081                 return true;
2082         else
2083                 return false;
2084 };
2085
2086 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088         if (evergreen_average_bandwidth(wm) <=
2089             (evergreen_available_bandwidth(wm) / wm->num_heads))
2090                 return true;
2091         else
2092                 return false;
2093 };
2094
2095 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2096 {
2097         u32 lb_partitions = wm->lb_size / wm->src_width;
2098         u32 line_time = wm->active_time + wm->blank_time;
2099         u32 latency_tolerant_lines;
2100         u32 latency_hiding;
2101         fixed20_12 a;
2102
2103         a.full = dfixed_const(1);
2104         if (wm->vsc.full > a.full)
2105                 latency_tolerant_lines = 1;
2106         else {
2107                 if (lb_partitions <= (wm->vtaps + 1))
2108                         latency_tolerant_lines = 1;
2109                 else
2110                         latency_tolerant_lines = 2;
2111         }
2112
2113         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2114
2115         if (evergreen_latency_watermark(wm) <= latency_hiding)
2116                 return true;
2117         else
2118                 return false;
2119 }
2120
/* Program the watermark registers for one crtc.
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the crtc to program
 * @lb_size: line buffer size allocated to this crtc (in pixels * 2)
 * @num_heads: total number of active crtcs
 *
 * Computes latency watermarks for the high and low clock cases,
 * possibly forces display priority high when bandwidth is tight,
 * programs the wm A/B latency registers and the priority marks,
 * and finally saves line_time/wm values on the crtc for DPM use.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line time clamped to the 16-bit register field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * disp_clk * hsc / (1000 * 1000 * 16) */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same computation with the low-clock watermark */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2275
2276 /**
2277  * evergreen_bandwidth_update - update display watermarks callback.
2278  *
2279  * @rdev: radeon_device pointer
2280  *
2281  * Update the display watermarks based on the requested mode(s)
2282  * (evergreen+).
2283  */
2284 void evergreen_bandwidth_update(struct radeon_device *rdev)
2285 {
2286         struct drm_display_mode *mode0 = NULL;
2287         struct drm_display_mode *mode1 = NULL;
2288         u32 num_heads = 0, lb_size;
2289         int i;
2290
2291         radeon_update_display_priority(rdev);
2292
2293         for (i = 0; i < rdev->num_crtc; i++) {
2294                 if (rdev->mode_info.crtcs[i]->base.enabled)
2295                         num_heads++;
2296         }
2297         for (i = 0; i < rdev->num_crtc; i += 2) {
2298                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2299                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2300                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2301                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2302                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2303                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2304         }
2305 }
2306
2307 /**
2308  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2309  *
2310  * @rdev: radeon_device pointer
2311  *
2312  * Wait for the MC (memory controller) to be idle.
2313  * (evergreen+).
2314  * Returns 0 if the MC is idle, -1 if not.
2315  */
2316 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2317 {
2318         unsigned i;
2319         u32 tmp;
2320
2321         for (i = 0; i < rdev->usec_timeout; i++) {
2322                 /* read MC_STATUS */
2323                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2324                 if (!tmp)
2325                         return 0;
2326                 udelay(1);
2327         }
2328         return -1;
2329 }
2330
/*
 * GART
 */
/* Flush the VM context0 TLB after the GART page table has changed.
 * Issues an HDP memory coherency flush first so pending table writes
 * are visible, then requests a TLB flush and polls for the response. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* make sure any outstanding table writes have landed */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* request the flush and poll the response field */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 = flush failed; message text is
			 * inherited from the r600 code this was based on */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush completed */
			return;
		}
		udelay(1);
	}
	/* falling out of the loop means the request timed out silently */
}
2356
/* Set up and enable the PCIE GART: pin the page table in VRAM,
 * program the L2 cache and L1 TLBs, and enable VM context 0 over the
 * GTT aperture.  Returns 0 on success or a negative error code. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-populate the table with the current mappings */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion/IGP parts use the FUS_ register aliases */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete families have a 4th MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the whole GTT range, backed by our table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults fall back to the dummy page rather than random memory */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2415
/* Disable the PCIE GART: turn off both VM contexts, drop the L1/L2
 * enables (keeping the queue sizing), and unpin the page table BO. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	/* note: no ENABLE_L1_TLB here — the TLBs are left disabled */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2440
/* Full GART teardown: disable the hardware, free the table BO,
 * then release the common GART bookkeeping. Order matters — the
 * table must be disabled before its backing memory is freed. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2447
2448
/* Configure the MC for AGP operation: enable the L1 TLBs/L2 cache
 * with system-aperture pass-through for unmapped accesses, and leave
 * both VM contexts disabled (no GPU page table in AGP mode). */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM contexts in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2474
/* Quiesce the display hardware before reprogramming the memory
 * controller: disable VGA, blank/disable each active crtc, blackout
 * the MC and lock the double-buffered display registers.  State that
 * evergreen_mc_resume() needs to restore is stashed in @save. */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		/* save VGA state for evergreen_mc_resume() */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the crtc data */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): crtc_enabled is deliberately cleared
			 * again here, so the double-buffer locking below and
			 * the re-enable paths in evergreen_mc_resume() skip
			 * this crtc — presumably part of the EFI workaround
			 * above; confirm before changing. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	/* enter MC blackout mode unless already in it */
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2560
/* Undo evergreen_mc_stop(): repoint the crtc scanout bases at the
 * (possibly relocated) VRAM start, unlock the double-buffered regs,
 * lift the MC blackout and unblank the crtcs that were active. */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* clear the master update mode bits */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to latch */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* NOTE(review): this sets BLANK_DATA_EN rather
				 * than clearing it — mirror of the DCE6 path in
				 * evergreen_mc_stop(); confirm intent before
				 * changing. */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2648
/* Program the memory controller aperture and FB location registers.
 * Display is quiesced around the update via evergreen_mc_stop() /
 * evergreen_mc_resume() so scanout never reads a half-programmed MC. */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must span both VRAM and the AGP range */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end in 16MB units packed into one register */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP effectively disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2725
/*
 * CP.
 */
/* Schedule an indirect buffer on the gfx ring.  Before the IB packet
 * itself, the predicted post-IB read pointer is written either to
 * rptr_save_reg or (with writeback enabled) to the next_rptr WB slot. */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG packet + 4 for the
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE packet + 4 for the
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* NOTE(review): (1 << 18) presumably selects the 32-bit
		 * data mode of MEM_WRITE — confirm against the PM4 spec */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	/* the actual IB dispatch: 64-bit GPU address + length in dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2762
2763
/* Load the PFP and ME command-processor microcode images into the CP
 * ucode RAMs.  Both images must already have been fetched by the
 * firmware loader.  Returns 0 on success, -EINVAL if either image is
 * missing. */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	/* halt the CP before touching its ucode RAM */
	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* firmware words are stored big-endian; swap while writing */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode RAM address pointers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2795
/* Initialize the micro engine and emit the initial clear-context
 * state on the gfx ring.  Returns 0 on success or the negative error
 * from radeon_ring_lock(). */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + the 6 payload writes below */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state table + the 19 fixed dwords emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2861
/* Bring the command processor back up: soft-reset the gfx blocks,
 * program the ring buffer (size, pointers, writeback addresses),
 * start the CP via evergreen_cp_start() and run a ring test.
 * Returns 0 on success or a negative error code. */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	/* ring size and block size are encoded as log2 values */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: stop the CP from updating rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* re-latch CP_RB_CNTL without RB_RPTR_WR_ENA */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2930
2931 /*
2932  * Core functions
2933  */
2934 static void evergreen_gpu_init(struct radeon_device *rdev)
2935 {
2936         u32 gb_addr_config;
2937         u32 mc_shared_chmap, mc_arb_ramcfg;
2938         u32 sx_debug_1;
2939         u32 smx_dc_ctl0;
2940         u32 sq_config;
2941         u32 sq_lds_resource_mgmt;
2942         u32 sq_gpr_resource_mgmt_1;
2943         u32 sq_gpr_resource_mgmt_2;
2944         u32 sq_gpr_resource_mgmt_3;
2945         u32 sq_thread_resource_mgmt;
2946         u32 sq_thread_resource_mgmt_2;
2947         u32 sq_stack_resource_mgmt_1;
2948         u32 sq_stack_resource_mgmt_2;
2949         u32 sq_stack_resource_mgmt_3;
2950         u32 vgt_cache_invalidation;
2951         u32 hdp_host_path_cntl, tmp;
2952         u32 disabled_rb_mask;
2953         int i, j, num_shader_engines, ps_thread_count;
2954
2955         switch (rdev->family) {
2956         case CHIP_CYPRESS:
2957         case CHIP_HEMLOCK:
2958                 rdev->config.evergreen.num_ses = 2;
2959                 rdev->config.evergreen.max_pipes = 4;
2960                 rdev->config.evergreen.max_tile_pipes = 8;
2961                 rdev->config.evergreen.max_simds = 10;
2962                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2963                 rdev->config.evergreen.max_gprs = 256;
2964                 rdev->config.evergreen.max_threads = 248;
2965                 rdev->config.evergreen.max_gs_threads = 32;
2966                 rdev->config.evergreen.max_stack_entries = 512;
2967                 rdev->config.evergreen.sx_num_of_sets = 4;
2968                 rdev->config.evergreen.sx_max_export_size = 256;
2969                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2970                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2971                 rdev->config.evergreen.max_hw_contexts = 8;
2972                 rdev->config.evergreen.sq_num_cf_insts = 2;
2973
2974                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2975                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2976                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2977                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2978                 break;
2979         case CHIP_JUNIPER:
2980                 rdev->config.evergreen.num_ses = 1;
2981                 rdev->config.evergreen.max_pipes = 4;
2982                 rdev->config.evergreen.max_tile_pipes = 4;
2983                 rdev->config.evergreen.max_simds = 10;
2984                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2985                 rdev->config.evergreen.max_gprs = 256;
2986                 rdev->config.evergreen.max_threads = 248;
2987                 rdev->config.evergreen.max_gs_threads = 32;
2988                 rdev->config.evergreen.max_stack_entries = 512;
2989                 rdev->config.evergreen.sx_num_of_sets = 4;
2990                 rdev->config.evergreen.sx_max_export_size = 256;
2991                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2992                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2993                 rdev->config.evergreen.max_hw_contexts = 8;
2994                 rdev->config.evergreen.sq_num_cf_insts = 2;
2995
2996                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2997                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2998                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2999                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3000                 break;
3001         case CHIP_REDWOOD:
3002                 rdev->config.evergreen.num_ses = 1;
3003                 rdev->config.evergreen.max_pipes = 4;
3004                 rdev->config.evergreen.max_tile_pipes = 4;
3005                 rdev->config.evergreen.max_simds = 5;
3006                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3007                 rdev->config.evergreen.max_gprs = 256;
3008                 rdev->config.evergreen.max_threads = 248;
3009                 rdev->config.evergreen.max_gs_threads = 32;
3010                 rdev->config.evergreen.max_stack_entries = 256;
3011                 rdev->config.evergreen.sx_num_of_sets = 4;
3012                 rdev->config.evergreen.sx_max_export_size = 256;
3013                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3014                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3015                 rdev->config.evergreen.max_hw_contexts = 8;
3016                 rdev->config.evergreen.sq_num_cf_insts = 2;
3017
3018                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3019                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3020                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3021                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3022                 break;
3023         case CHIP_CEDAR:
3024         default:
3025                 rdev->config.evergreen.num_ses = 1;
3026                 rdev->config.evergreen.max_pipes = 2;
3027                 rdev->config.evergreen.max_tile_pipes = 2;
3028                 rdev->config.evergreen.max_simds = 2;
3029                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3030                 rdev->config.evergreen.max_gprs = 256;
3031                 rdev->config.evergreen.max_threads = 192;
3032                 rdev->config.evergreen.max_gs_threads = 16;
3033                 rdev->config.evergreen.max_stack_entries = 256;
3034                 rdev->config.evergreen.sx_num_of_sets = 4;
3035                 rdev->config.evergreen.sx_max_export_size = 128;
3036                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3037                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3038                 rdev->config.evergreen.max_hw_contexts = 4;
3039                 rdev->config.evergreen.sq_num_cf_insts = 1;
3040
3041                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3042                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3043                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3044                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3045                 break;
3046         case CHIP_PALM:
3047                 rdev->config.evergreen.num_ses = 1;
3048                 rdev->config.evergreen.max_pipes = 2;
3049                 rdev->config.evergreen.max_tile_pipes = 2;
3050                 rdev->config.evergreen.max_simds = 2;
3051                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3052                 rdev->config.evergreen.max_gprs = 256;
3053                 rdev->config.evergreen.max_threads = 192;
3054                 rdev->config.evergreen.max_gs_threads = 16;
3055                 rdev->config.evergreen.max_stack_entries = 256;
3056                 rdev->config.evergreen.sx_num_of_sets = 4;
3057                 rdev->config.evergreen.sx_max_export_size = 128;
3058                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3059                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3060                 rdev->config.evergreen.max_hw_contexts = 4;
3061                 rdev->config.evergreen.sq_num_cf_insts = 1;
3062
3063                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3064                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3065                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3066                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3067                 break;
3068         case CHIP_SUMO:
3069                 rdev->config.evergreen.num_ses = 1;
3070                 rdev->config.evergreen.max_pipes = 4;
3071                 rdev->config.evergreen.max_tile_pipes = 4;
3072                 if (rdev->pdev->device == 0x9648)
3073                         rdev->config.evergreen.max_simds = 3;
3074                 else if ((rdev->pdev->device == 0x9647) ||
3075                          (rdev->pdev->device == 0x964a))
3076                         rdev->config.evergreen.max_simds = 4;
3077                 else
3078                         rdev->config.evergreen.max_simds = 5;
3079                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3080                 rdev->config.evergreen.max_gprs = 256;
3081                 rdev->config.evergreen.max_threads = 248;
3082                 rdev->config.evergreen.max_gs_threads = 32;
3083                 rdev->config.evergreen.max_stack_entries = 256;
3084                 rdev->config.evergreen.sx_num_of_sets = 4;
3085                 rdev->config.evergreen.sx_max_export_size = 256;
3086                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3087                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3088                 rdev->config.evergreen.max_hw_contexts = 8;
3089                 rdev->config.evergreen.sq_num_cf_insts = 2;
3090
3091                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3092                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3093                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3094                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3095                 break;
3096         case CHIP_SUMO2:
3097                 rdev->config.evergreen.num_ses = 1;
3098                 rdev->config.evergreen.max_pipes = 4;
3099                 rdev->config.evergreen.max_tile_pipes = 4;
3100                 rdev->config.evergreen.max_simds = 2;
3101                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3102                 rdev->config.evergreen.max_gprs = 256;
3103                 rdev->config.evergreen.max_threads = 248;
3104                 rdev->config.evergreen.max_gs_threads = 32;
3105                 rdev->config.evergreen.max_stack_entries = 512;
3106                 rdev->config.evergreen.sx_num_of_sets = 4;
3107                 rdev->config.evergreen.sx_max_export_size = 256;
3108                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3109                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3110                 rdev->config.evergreen.max_hw_contexts = 8;
3111                 rdev->config.evergreen.sq_num_cf_insts = 2;
3112
3113                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3114                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3115                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3116                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3117                 break;
3118         case CHIP_BARTS:
3119                 rdev->config.evergreen.num_ses = 2;
3120                 rdev->config.evergreen.max_pipes = 4;
3121                 rdev->config.evergreen.max_tile_pipes = 8;
3122                 rdev->config.evergreen.max_simds = 7;
3123                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3124                 rdev->config.evergreen.max_gprs = 256;
3125                 rdev->config.evergreen.max_threads = 248;
3126                 rdev->config.evergreen.max_gs_threads = 32;
3127                 rdev->config.evergreen.max_stack_entries = 512;
3128                 rdev->config.evergreen.sx_num_of_sets = 4;
3129                 rdev->config.evergreen.sx_max_export_size = 256;
3130                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3131                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3132                 rdev->config.evergreen.max_hw_contexts = 8;
3133                 rdev->config.evergreen.sq_num_cf_insts = 2;
3134
3135                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3136                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3137                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3138                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3139                 break;
3140         case CHIP_TURKS:
3141                 rdev->config.evergreen.num_ses = 1;
3142                 rdev->config.evergreen.max_pipes = 4;
3143                 rdev->config.evergreen.max_tile_pipes = 4;
3144                 rdev->config.evergreen.max_simds = 6;
3145                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3146                 rdev->config.evergreen.max_gprs = 256;
3147                 rdev->config.evergreen.max_threads = 248;
3148                 rdev->config.evergreen.max_gs_threads = 32;
3149                 rdev->config.evergreen.max_stack_entries = 256;
3150                 rdev->config.evergreen.sx_num_of_sets = 4;
3151                 rdev->config.evergreen.sx_max_export_size = 256;
3152                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3153                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3154                 rdev->config.evergreen.max_hw_contexts = 8;
3155                 rdev->config.evergreen.sq_num_cf_insts = 2;
3156
3157                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3158                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3159                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3160                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3161                 break;
3162         case CHIP_CAICOS:
3163                 rdev->config.evergreen.num_ses = 1;
3164                 rdev->config.evergreen.max_pipes = 2;
3165                 rdev->config.evergreen.max_tile_pipes = 2;
3166                 rdev->config.evergreen.max_simds = 2;
3167                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3168                 rdev->config.evergreen.max_gprs = 256;
3169                 rdev->config.evergreen.max_threads = 192;
3170                 rdev->config.evergreen.max_gs_threads = 16;
3171                 rdev->config.evergreen.max_stack_entries = 256;
3172                 rdev->config.evergreen.sx_num_of_sets = 4;
3173                 rdev->config.evergreen.sx_max_export_size = 128;
3174                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3175                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3176                 rdev->config.evergreen.max_hw_contexts = 4;
3177                 rdev->config.evergreen.sq_num_cf_insts = 1;
3178
3179                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3180                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3181                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3182                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3183                 break;
3184         }
3185
3186         /* Initialize HDP */
3187         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3188                 WREG32((0x2c14 + j), 0x00000000);
3189                 WREG32((0x2c18 + j), 0x00000000);
3190                 WREG32((0x2c1c + j), 0x00000000);
3191                 WREG32((0x2c20 + j), 0x00000000);
3192                 WREG32((0x2c24 + j), 0x00000000);
3193         }
3194
3195         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3196
3197         evergreen_fix_pci_max_read_req_size(rdev);
3198
3199         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3200         if ((rdev->family == CHIP_PALM) ||
3201             (rdev->family == CHIP_SUMO) ||
3202             (rdev->family == CHIP_SUMO2))
3203                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3204         else
3205                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3206
3207         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3208          * not have bank info, so create a custom tiling dword.
3209          * bits 3:0   num_pipes
3210          * bits 7:4   num_banks
3211          * bits 11:8  group_size
3212          * bits 15:12 row_size
3213          */
3214         rdev->config.evergreen.tile_config = 0;
3215         switch (rdev->config.evergreen.max_tile_pipes) {
3216         case 1:
3217         default:
3218                 rdev->config.evergreen.tile_config |= (0 << 0);
3219                 break;
3220         case 2:
3221                 rdev->config.evergreen.tile_config |= (1 << 0);
3222                 break;
3223         case 4:
3224                 rdev->config.evergreen.tile_config |= (2 << 0);
3225                 break;
3226         case 8:
3227                 rdev->config.evergreen.tile_config |= (3 << 0);
3228                 break;
3229         }
3230         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3231         if (rdev->flags & RADEON_IS_IGP)
3232                 rdev->config.evergreen.tile_config |= 1 << 4;
3233         else {
3234                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3235                 case 0: /* four banks */
3236                         rdev->config.evergreen.tile_config |= 0 << 4;
3237                         break;
3238                 case 1: /* eight banks */
3239                         rdev->config.evergreen.tile_config |= 1 << 4;
3240                         break;
3241                 case 2: /* sixteen banks */
3242                 default:
3243                         rdev->config.evergreen.tile_config |= 2 << 4;
3244                         break;
3245                 }
3246         }
3247         rdev->config.evergreen.tile_config |= 0 << 8;
3248         rdev->config.evergreen.tile_config |=
3249                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3250
3251         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3252
3253         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3254                 u32 efuse_straps_4;
3255                 u32 efuse_straps_3;
3256
3257                 efuse_straps_4 = RREG32_RCU(0x204);
3258                 efuse_straps_3 = RREG32_RCU(0x203);
3259                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3260                       ((efuse_straps_3 & 0xf0000000) >> 28));
3261         } else {
3262                 tmp = 0;
3263                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3264                         u32 rb_disable_bitmap;
3265
3266                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3267                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3268                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3269                         tmp <<= 4;
3270                         tmp |= rb_disable_bitmap;
3271                 }
3272         }
3273         /* enabled rb are just the one not disabled :) */
3274         disabled_rb_mask = tmp;
3275         tmp = 0;
3276         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3277                 tmp |= (1 << i);
3278         /* if all the backends are disabled, fix it up here */
3279         if ((disabled_rb_mask & tmp) == tmp) {
3280                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3281                         disabled_rb_mask &= ~(1 << i);
3282         }
3283
3284         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3285         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3286
3287         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3288         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3289         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3290         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3291         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3292         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3293         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3294
3295         if ((rdev->config.evergreen.max_backends == 1) &&
3296             (rdev->flags & RADEON_IS_IGP)) {
3297                 if ((disabled_rb_mask & 3) == 1) {
3298                         /* RB0 disabled, RB1 enabled */
3299                         tmp = 0x11111111;
3300                 } else {
3301                         /* RB1 disabled, RB0 enabled */
3302                         tmp = 0x00000000;
3303                 }
3304         } else {
3305                 tmp = gb_addr_config & NUM_PIPES_MASK;
3306                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3307                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3308         }
3309         WREG32(GB_BACKEND_MAP, tmp);
3310
3311         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3312         WREG32(CGTS_TCC_DISABLE, 0);
3313         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3314         WREG32(CGTS_USER_TCC_DISABLE, 0);
3315
3316         /* set HW defaults for 3D engine */
3317         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3318                                      ROQ_IB2_START(0x2b)));
3319
3320         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3321
3322         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3323                              SYNC_GRADIENT |
3324                              SYNC_WALKER |
3325                              SYNC_ALIGNER));
3326
3327         sx_debug_1 = RREG32(SX_DEBUG_1);
3328         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3329         WREG32(SX_DEBUG_1, sx_debug_1);
3330
3331
3332         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3333         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3334         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3335         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3336
3337         if (rdev->family <= CHIP_SUMO2)
3338                 WREG32(SMX_SAR_CTL0, 0x00010000);
3339
3340         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3341                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3342                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3343
3344         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3345                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3346                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3347
3348         WREG32(VGT_NUM_INSTANCES, 1);
3349         WREG32(SPI_CONFIG_CNTL, 0);
3350         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3351         WREG32(CP_PERFMON_CNTL, 0);
3352
3353         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3354                                   FETCH_FIFO_HIWATER(0x4) |
3355                                   DONE_FIFO_HIWATER(0xe0) |
3356                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3357
3358         sq_config = RREG32(SQ_CONFIG);
3359         sq_config &= ~(PS_PRIO(3) |
3360                        VS_PRIO(3) |
3361                        GS_PRIO(3) |
3362                        ES_PRIO(3));
3363         sq_config |= (VC_ENABLE |
3364                       EXPORT_SRC_C |
3365                       PS_PRIO(0) |
3366                       VS_PRIO(1) |
3367                       GS_PRIO(2) |
3368                       ES_PRIO(3));
3369
3370         switch (rdev->family) {
3371         case CHIP_CEDAR:
3372         case CHIP_PALM:
3373         case CHIP_SUMO:
3374         case CHIP_SUMO2:
3375         case CHIP_CAICOS:
3376                 /* no vertex cache */
3377                 sq_config &= ~VC_ENABLE;
3378                 break;
3379         default:
3380                 break;
3381         }
3382
3383         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3384
3385         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3386         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3387         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3388         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3389         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3390         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3391         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3392
3393         switch (rdev->family) {
3394         case CHIP_CEDAR:
3395         case CHIP_PALM:
3396         case CHIP_SUMO:
3397         case CHIP_SUMO2:
3398                 ps_thread_count = 96;
3399                 break;
3400         default:
3401                 ps_thread_count = 128;
3402                 break;
3403         }
3404
3405         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3406         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3407         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3408         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3409         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3410         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3411
3412         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3413         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3414         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3415         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3416         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3417         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3418
3419         WREG32(SQ_CONFIG, sq_config);
3420         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3421         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3422         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3423         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3424         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3425         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3426         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3427         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3428         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3429         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3430
3431         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3432                                           FORCE_EOV_MAX_REZ_CNT(255)));
3433
3434         switch (rdev->family) {
3435         case CHIP_CEDAR:
3436         case CHIP_PALM:
3437         case CHIP_SUMO:
3438         case CHIP_SUMO2:
3439         case CHIP_CAICOS:
3440                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3441                 break;
3442         default:
3443                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3444                 break;
3445         }
3446         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3447         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3448
3449         WREG32(VGT_GS_VERTEX_REUSE, 16);
3450         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3451         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3452
3453         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3454         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3455
3456         WREG32(CB_PERF_CTR0_SEL_0, 0);
3457         WREG32(CB_PERF_CTR0_SEL_1, 0);
3458         WREG32(CB_PERF_CTR1_SEL_0, 0);
3459         WREG32(CB_PERF_CTR1_SEL_1, 0);
3460         WREG32(CB_PERF_CTR2_SEL_0, 0);
3461         WREG32(CB_PERF_CTR2_SEL_1, 0);
3462         WREG32(CB_PERF_CTR3_SEL_0, 0);
3463         WREG32(CB_PERF_CTR3_SEL_1, 0);
3464
3465         /* clear render buffer base addresses */
3466         WREG32(CB_COLOR0_BASE, 0);
3467         WREG32(CB_COLOR1_BASE, 0);
3468         WREG32(CB_COLOR2_BASE, 0);
3469         WREG32(CB_COLOR3_BASE, 0);
3470         WREG32(CB_COLOR4_BASE, 0);
3471         WREG32(CB_COLOR5_BASE, 0);
3472         WREG32(CB_COLOR6_BASE, 0);
3473         WREG32(CB_COLOR7_BASE, 0);
3474         WREG32(CB_COLOR8_BASE, 0);
3475         WREG32(CB_COLOR9_BASE, 0);
3476         WREG32(CB_COLOR10_BASE, 0);
3477         WREG32(CB_COLOR11_BASE, 0);
3478
3479         /* set the shader const cache sizes to 0 */
3480         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3481                 WREG32(i, 0);
3482         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3483                 WREG32(i, 0);
3484
3485         tmp = RREG32(HDP_MISC_CNTL);
3486         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3487         WREG32(HDP_MISC_CNTL, tmp);
3488
3489         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3490         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3491
3492         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3493
3494         udelay(50);
3495
3496 }
3497
3498 int evergreen_mc_init(struct radeon_device *rdev)
3499 {
3500         u32 tmp;
3501         int chansize, numchan;
3502
3503         /* Get VRAM informations */
3504         rdev->mc.vram_is_ddr = true;
3505         if ((rdev->family == CHIP_PALM) ||
3506             (rdev->family == CHIP_SUMO) ||
3507             (rdev->family == CHIP_SUMO2))
3508                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3509         else
3510                 tmp = RREG32(MC_ARB_RAMCFG);
3511         if (tmp & CHANSIZE_OVERRIDE) {
3512                 chansize = 16;
3513         } else if (tmp & CHANSIZE_MASK) {
3514                 chansize = 64;
3515         } else {
3516                 chansize = 32;
3517         }
3518         tmp = RREG32(MC_SHARED_CHMAP);
3519         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3520         case 0:
3521         default:
3522                 numchan = 1;
3523                 break;
3524         case 1:
3525                 numchan = 2;
3526                 break;
3527         case 2:
3528                 numchan = 4;
3529                 break;
3530         case 3:
3531                 numchan = 8;
3532                 break;
3533         }
3534         rdev->mc.vram_width = numchan * chansize;
3535         /* Could aper size report 0 ? */
3536         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3537         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3538         /* Setup GPU memory space */
3539         if ((rdev->family == CHIP_PALM) ||
3540             (rdev->family == CHIP_SUMO) ||
3541             (rdev->family == CHIP_SUMO2)) {
3542                 /* size in bytes on fusion */
3543                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3544                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3545         } else {
3546                 /* size in MB on evergreen/cayman/tn */
3547                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3548                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3549         }
3550         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3551         r700_vram_gtt_location(rdev, &rdev->mc);
3552         radeon_update_bandwidth_info(rdev);
3553
3554         return 0;
3555 }
3556
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM engine status and the CP/DMA stall and busy
 * registers via dev_info; used to aid debugging of GPU lockups.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer also read the status register at +0x800
	 * (register name suggests a second DMA instance — confirm) */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3584
3585 bool evergreen_is_display_hung(struct radeon_device *rdev)
3586 {
3587         u32 crtc_hung = 0;
3588         u32 crtc_status[6];
3589         u32 i, j, tmp;
3590
3591         for (i = 0; i < rdev->num_crtc; i++) {
3592                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3593                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3594                         crtc_hung |= (1 << i);
3595                 }
3596         }
3597
3598         for (j = 0; j < 10; j++) {
3599                 for (i = 0; i < rdev->num_crtc; i++) {
3600                         if (crtc_hung & (1 << i)) {
3601                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3602                                 if (tmp != crtc_status[i])
3603                                         crtc_hung &= ~(1 << i);
3604                         }
3605                 }
3606                 if (crtc_hung == 0)
3607                         return false;
3608                 udelay(100);
3609         }
3610
3611         return true;
3612 }
3613
3614 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3615 {
3616         u32 reset_mask = 0;
3617         u32 tmp;
3618
3619         /* GRBM_STATUS */
3620         tmp = RREG32(GRBM_STATUS);
3621         if (tmp & (PA_BUSY | SC_BUSY |
3622                    SH_BUSY | SX_BUSY |
3623                    TA_BUSY | VGT_BUSY |
3624                    DB_BUSY | CB_BUSY |
3625                    SPI_BUSY | VGT_BUSY_NO_DMA))
3626                 reset_mask |= RADEON_RESET_GFX;
3627
3628         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3629                    CP_BUSY | CP_COHERENCY_BUSY))
3630                 reset_mask |= RADEON_RESET_CP;
3631
3632         if (tmp & GRBM_EE_BUSY)
3633                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3634
3635         /* DMA_STATUS_REG */
3636         tmp = RREG32(DMA_STATUS_REG);
3637         if (!(tmp & DMA_IDLE))
3638                 reset_mask |= RADEON_RESET_DMA;
3639
3640         /* SRBM_STATUS2 */
3641         tmp = RREG32(SRBM_STATUS2);
3642         if (tmp & DMA_BUSY)
3643                 reset_mask |= RADEON_RESET_DMA;
3644
3645         /* SRBM_STATUS */
3646         tmp = RREG32(SRBM_STATUS);
3647         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3648                 reset_mask |= RADEON_RESET_RLC;
3649
3650         if (tmp & IH_BUSY)
3651                 reset_mask |= RADEON_RESET_IH;
3652
3653         if (tmp & SEM_BUSY)
3654                 reset_mask |= RADEON_RESET_SEM;
3655
3656         if (tmp & GRBM_RQ_PENDING)
3657                 reset_mask |= RADEON_RESET_GRBM;
3658
3659         if (tmp & VMC_BUSY)
3660                 reset_mask |= RADEON_RESET_VMC;
3661
3662         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3663                    MCC_BUSY | MCD_BUSY))
3664                 reset_mask |= RADEON_RESET_MC;
3665
3666         if (evergreen_is_display_hung(rdev))
3667                 reset_mask |= RADEON_RESET_DISPLAY;
3668
3669         /* VM_L2_STATUS */
3670         tmp = RREG32(VM_L2_STATUS);
3671         if (tmp & L2_BUSY)
3672                 reset_mask |= RADEON_RESET_VMC;
3673
3674         /* Skip MC reset as it's mostly likely not hung, just busy */
3675         if (reset_mask & RADEON_RESET_MC) {
3676                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3677                 reset_mask &= ~RADEON_RESET_MC;
3678         }
3679
3680         return reset_mask;
3681 }
3682
/**
 * evergreen_gpu_soft_reset - soft reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halt the CP (and DMA if requested), stop the MC, translate @reset_mask
 * into GRBM/SRBM soft reset bits, pulse those bits, then restore the MC.
 * Does nothing when @reset_mask is zero.  The sequence of register
 * accesses is order-sensitive and must not be rearranged.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* quiesce the memory controller before pulsing reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* ... and into SRBM reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only applied on discrete parts; presumably the IGP
	 * MC is shared with the CPU and must not be touched — NOTE(review):
	 * rationale not visible here, confirm against ASIC docs
	 */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, wait, clear, with read-backs to
	 * post the writes
	 */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* same pulse sequence for the SRBM reset bits */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3796
3797 int evergreen_asic_reset(struct radeon_device *rdev)
3798 {
3799         u32 reset_mask;
3800
3801         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3802
3803         if (reset_mask)
3804                 r600_set_bios_scratch_engine_hung(rdev, true);
3805
3806         evergreen_gpu_soft_reset(rdev, reset_mask);
3807
3808         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3809
3810         if (!reset_mask)
3811                 r600_set_bios_scratch_engine_hung(rdev, false);
3812
3813         return 0;
3814 }
3815
3816 /**
3817  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3818  *
3819  * @rdev: radeon_device pointer
3820  * @ring: radeon_ring structure holding ring information
3821  *
3822  * Check if the GFX engine is locked up.
3823  * Returns true if the engine appears to be locked up, false if not.
3824  */
3825 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3826 {
3827         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3828
3829         if (!(reset_mask & (RADEON_RESET_GFX |
3830                             RADEON_RESET_COMPUTE |
3831                             RADEON_RESET_CP))) {
3832                 radeon_ring_lockup_update(ring);
3833                 return false;
3834         }
3835         /* force CP activities */
3836         radeon_ring_force_activity(rdev, ring);
3837         return radeon_ring_test_lockup(rdev, ring);
3838 }
3839
3840 /**
3841  * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3842  *
3843  * @rdev: radeon_device pointer
3844  * @ring: radeon_ring structure holding ring information
3845  *
3846  * Check if the async DMA engine is locked up.
3847  * Returns true if the engine appears to be locked up, false if not.
3848  */
3849 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3850 {
3851         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3852
3853         if (!(reset_mask & RADEON_RESET_DMA)) {
3854                 radeon_ring_lockup_update(ring);
3855                 return false;
3856         }
3857         /* force ring activities */
3858         radeon_ring_force_activity(rdev, ring);
3859         return radeon_ring_test_lockup(rdev, ring);
3860 }
3861
3862 /*
3863  * RLC
3864  */
3865 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3866 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3867
3868 void sumo_rlc_fini(struct radeon_device *rdev)
3869 {
3870         int r;
3871
3872         /* save restore block */
3873         if (rdev->rlc.save_restore_obj) {
3874                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3875                 if (unlikely(r != 0))
3876                         dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3877                 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3878                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3879
3880                 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3881                 rdev->rlc.save_restore_obj = NULL;
3882         }
3883
3884         /* clear state block */
3885         if (rdev->rlc.clear_state_obj) {
3886                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3887                 if (unlikely(r != 0))
3888                         dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3889                 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3890                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3891
3892                 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3893                 rdev->rlc.clear_state_obj = NULL;
3894         }
3895 }
3896
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Create, pin, and populate two VRAM buffer objects for the RLC: the
 * save/restore buffer (built from rdev->rlc.reg_list) and the clear
 * state buffer (built from rdev->rlc.cs_data).  Their GPU addresses
 * are stored in rdev->rlc for later programming (see
 * evergreen_rlc_resume()).
 * Returns 0 on success, negative error code on failure; any partially
 * created objects are torn down via sumo_rlc_fini().
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index;
	u64 reg_list_mc_addr;
	struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	cs_data = rdev->rlc.cs_data;

	/* save restore block */
	if (rdev->rlc.save_restore_obj == NULL) {
		r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
				     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
		if (r) {
			dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
	if (unlikely(r != 0)) {
		sumo_rlc_fini(rdev);
		return r;
	}
	r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->rlc.save_restore_gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
		dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
		sumo_rlc_fini(rdev);
		return r;
	}
	r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
	if (r) {
		dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
		sumo_rlc_fini(rdev);
		return r;
	}
	/* write the sr buffer */
	dst_ptr = rdev->rlc.sr_ptr;
	/* format:
	 * dw0: (reg2 << 16) | reg1
	 * dw1: reg1 save space
	 * dw2: reg2 save space
	 */
	/* pack two register offsets (in dword units, hence the >> 2) per
	 * header dword; j maps each pair to its slot leaving save space
	 * dwords in between per the format above
	 */
	for (i = 0; i < dws; i++) {
		data = src_ptr[i] >> 2;
		i++;
		if (i < dws)
			data |= (src_ptr[i] >> 2) << 16;
		j = (((i - 1) * 3) / 2);
		dst_ptr[j] = data;
	}
	/* terminate the list after the last pair's save space */
	j = ((i * 3) / 2);
	dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;

	radeon_bo_kunmap(rdev->rlc.save_restore_obj);
	radeon_bo_unreserve(rdev->rlc.save_restore_obj);

	/* clear state block */
	/* size the buffer: 3 header dwords per extent plus 2 leading
	 * dwords (address high dword + end marker slot), plus one data
	 * dword per register
	 */
	reg_list_num = 0;
	dws = 0;
	for (i = 0; cs_data[i].section != NULL; i++) {
		for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
			reg_list_num++;
			dws += cs_data[i].section[j].reg_count;
		}
	}
	reg_list_blk_index = (3 * reg_list_num + 2);
	dws += reg_list_blk_index;

	if (rdev->rlc.clear_state_obj == NULL) {
		r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
				     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
		if (r) {
			dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
	}
	r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
	if (unlikely(r != 0)) {
		sumo_rlc_fini(rdev);
		return r;
	}
	r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->rlc.clear_state_gpu_addr);
	if (r) {

		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
		dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
		sumo_rlc_fini(rdev);
		return r;
	}
	r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
	if (r) {
		dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
		sumo_rlc_fini(rdev);
		return r;
	}
	/* set up the cs buffer */
	dst_ptr = rdev->rlc.cs_ptr;
	reg_list_hdr_blk_index = 0;
	/* data block follows the header block in the same BO */
	reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
	data = upper_32_bits(reg_list_mc_addr);
	dst_ptr[reg_list_hdr_blk_index] = data;
	reg_list_hdr_blk_index++;
	for (i = 0; cs_data[i].section != NULL; i++) {
		for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
			reg_num = cs_data[i].section[j].reg_count;
			/* header dword 1: low 32 bits of the extent data's
			 * GPU address
			 */
			data = reg_list_mc_addr & 0xffffffff;
			dst_ptr[reg_list_hdr_blk_index] = data;
			reg_list_hdr_blk_index++;

			/* header dword 2: starting register offset in bytes */
			data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
			dst_ptr[reg_list_hdr_blk_index] = data;
			reg_list_hdr_blk_index++;

			/* header dword 3: extent length in bytes ORed with
			 * 0x08000000 — NOTE(review): flag meaning not
			 * visible here, presumably an RLC control bit
			 */
			data = 0x08000000 | (reg_num * 4);
			dst_ptr[reg_list_hdr_blk_index] = data;
			reg_list_hdr_blk_index++;

			/* copy the register values into the data block */
			for (k = 0; k < reg_num; k++) {
				data = cs_data[i].section[j].extent[k];
				dst_ptr[reg_list_blk_index + k] = data;
			}
			reg_list_mc_addr += reg_num * 4;
			reg_list_blk_index += reg_num;
		}
	}
	dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;

	radeon_bo_kunmap(rdev->rlc.clear_state_obj);
	radeon_bo_unreserve(rdev->rlc.clear_state_obj);

	return 0;
}
4039
4040 static void evergreen_rlc_start(struct radeon_device *rdev)
4041 {
4042         u32 mask = RLC_ENABLE;
4043
4044         if (rdev->flags & RADEON_IS_IGP) {
4045                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4046                 if (rdev->family == CHIP_ARUBA)
4047                         mask |= DYN_PER_SIMD_PG_ENABLE | LB_CNT_SPIM_ACTIVE | LOAD_BALANCE_ENABLE;
4048         }
4049
4050         WREG32(RLC_CNTL, mask);
4051 }
4052
4053 int evergreen_rlc_resume(struct radeon_device *rdev)
4054 {
4055         u32 i;
4056         const __be32 *fw_data;
4057
4058         if (!rdev->rlc_fw)
4059                 return -EINVAL;
4060
4061         r600_rlc_stop(rdev);
4062
4063         WREG32(RLC_HB_CNTL, 0);
4064
4065         if (rdev->flags & RADEON_IS_IGP) {
4066                 if (rdev->family == CHIP_ARUBA) {
4067                         u32 always_on_bitmap =
4068                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4069                         /* find out the number of active simds */
4070                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4071                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4072                         tmp = hweight32(~tmp);
4073                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4074                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4075                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4076                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4077                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4078                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4079                         }
4080                 } else {
4081                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4082                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4083                 }
4084                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4085                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4086         } else {
4087                 WREG32(RLC_HB_BASE, 0);
4088                 WREG32(RLC_HB_RPTR, 0);
4089                 WREG32(RLC_HB_WPTR, 0);
4090                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4091                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4092         }
4093         WREG32(RLC_MC_CNTL, 0);
4094         WREG32(RLC_UCODE_CNTL, 0);
4095
4096         fw_data = (const __be32 *)rdev->rlc_fw->data;
4097         if (rdev->family >= CHIP_ARUBA) {
4098                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4099                         WREG32(RLC_UCODE_ADDR, i);
4100                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4101                 }
4102         } else if (rdev->family >= CHIP_CAYMAN) {
4103                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4104                         WREG32(RLC_UCODE_ADDR, i);
4105                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4106                 }
4107         } else {
4108                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4109                         WREG32(RLC_UCODE_ADDR, i);
4110                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4111                 }
4112         }
4113         WREG32(RLC_UCODE_ADDR, 0);
4114
4115         evergreen_rlc_start(rdev);
4116
4117         return 0;
4118 }
4119
4120 /* Interrupts */
4121
4122 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4123 {
4124         if (crtc >= rdev->num_crtc)
4125                 return 0;
4126         else
4127                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4128 }
4129
4130 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4131 {
4132         u32 tmp;
4133
4134         if (rdev->family >= CHIP_CAYMAN) {
4135                 cayman_cp_int_cntl_setup(rdev, 0,
4136                                          CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4137                 cayman_cp_int_cntl_setup(rdev, 1, 0);
4138                 cayman_cp_int_cntl_setup(rdev, 2, 0);
4139                 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4140                 WREG32(CAYMAN_DMA1_CNTL, tmp);
4141         } else
4142                 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4143         tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4144         WREG32(DMA_CNTL, tmp);
4145         WREG32(GRBM_INT_CNTL, 0);
4146         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4147         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4148         if (rdev->num_crtc >= 4) {
4149                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4150                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4151         }
4152         if (rdev->num_crtc >= 6) {
4153                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4154                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4155         }
4156
4157         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4158         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4159         if (rdev->num_crtc >= 4) {
4160                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4161                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4162         }
4163         if (rdev->num_crtc >= 6) {
4164                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4165                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4166         }
4167
4168         /* only one DAC on DCE6 */
4169         if (!ASIC_IS_DCE6(rdev))
4170                 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4171         WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4172
4173         tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4174         WREG32(DC_HPD1_INT_CONTROL, tmp);
4175         tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4176         WREG32(DC_HPD2_INT_CONTROL, tmp);
4177         tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4178         WREG32(DC_HPD3_INT_CONTROL, tmp);
4179         tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4180         WREG32(DC_HPD4_INT_CONTROL, tmp);
4181         tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4182         WREG32(DC_HPD5_INT_CONTROL, tmp);
4183         tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4184         WREG32(DC_HPD6_INT_CONTROL, tmp);
4185
4186 }
4187
4188 int evergreen_irq_set(struct radeon_device *rdev)
4189 {
4190         u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4191         u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4192         u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4193         u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4194         u32 grbm_int_cntl = 0;
4195         u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
4196         u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4197         u32 dma_cntl, dma_cntl1 = 0;
4198         u32 thermal_int = 0;
4199
4200         if (!rdev->irq.installed) {
4201                 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4202                 return -EINVAL;
4203         }
4204         /* don't enable anything if the ih is disabled */
4205         if (!rdev->ih.enabled) {
4206                 r600_disable_interrupts(rdev);
4207                 /* force the active interrupt state to all disabled */
4208                 evergreen_disable_interrupt_state(rdev);
4209                 return 0;
4210         }
4211
4212         hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4213         hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4214         hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4215         hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4216         hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4217         hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4218         if (rdev->family == CHIP_ARUBA)
4219                 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4220                         ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4221         else
4222                 thermal_int = RREG32(CG_THERMAL_INT) &
4223                         ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4224
4225         afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4226         afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4227         afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4228         afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4229         afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4230         afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4231
4232         dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4233
4234         if (rdev->family >= CHIP_CAYMAN) {
4235                 /* enable CP interrupts on all rings */
4236                 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4237                         DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4238                         cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4239                 }
4240                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4241                         DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4242                         cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4243                 }
4244                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4245                         DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4246                         cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4247                 }
4248         } else {
4249                 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4250                         DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4251                         cp_int_cntl |= RB_INT_ENABLE;
4252                         cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4253                 }
4254         }
4255
4256         if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4257                 DRM_DEBUG("r600_irq_set: sw int dma\n");
4258                 dma_cntl |= TRAP_ENABLE;
4259         }
4260
4261         if (rdev->family >= CHIP_CAYMAN) {
4262                 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4263                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4264                         DRM_DEBUG("r600_irq_set: sw int dma1\n");
4265                         dma_cntl1 |= TRAP_ENABLE;
4266                 }
4267         }
4268
4269         if (rdev->irq.dpm_thermal) {
4270                 DRM_DEBUG("dpm thermal\n");
4271                 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4272         }
4273
4274         if (rdev->irq.crtc_vblank_int[0] ||
4275             atomic_read(&rdev->irq.pflip[0])) {
4276                 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4277                 crtc1 |= VBLANK_INT_MASK;
4278         }
4279         if (rdev->irq.crtc_vblank_int[1] ||
4280             atomic_read(&rdev->irq.pflip[1])) {
4281                 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4282                 crtc2 |= VBLANK_INT_MASK;
4283         }
4284         if (rdev->irq.crtc_vblank_int[2] ||
4285             atomic_read(&rdev->irq.pflip[2])) {
4286                 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4287                 crtc3 |= VBLANK_INT_MASK;
4288         }
4289         if (rdev->irq.crtc_vblank_int[3] ||
4290             atomic_read(&rdev->irq.pflip[3])) {
4291                 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4292                 crtc4 |= VBLANK_INT_MASK;
4293         }
4294         if (rdev->irq.crtc_vblank_int[4] ||
4295             atomic_read(&rdev->irq.pflip[4])) {
4296                 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4297                 crtc5 |= VBLANK_INT_MASK;
4298         }
4299         if (rdev->irq.crtc_vblank_int[5] ||
4300             atomic_read(&rdev->irq.pflip[5])) {
4301                 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4302                 crtc6 |= VBLANK_INT_MASK;
4303         }
4304         if (rdev->irq.hpd[0]) {
4305                 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4306                 hpd1 |= DC_HPDx_INT_EN;
4307         }
4308         if (rdev->irq.hpd[1]) {
4309                 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4310                 hpd2 |= DC_HPDx_INT_EN;
4311         }
4312         if (rdev->irq.hpd[2]) {
4313                 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4314                 hpd3 |= DC_HPDx_INT_EN;
4315         }
4316         if (rdev->irq.hpd[3]) {
4317                 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4318                 hpd4 |= DC_HPDx_INT_EN;
4319         }
4320         if (rdev->irq.hpd[4]) {
4321                 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4322                 hpd5 |= DC_HPDx_INT_EN;
4323         }
4324         if (rdev->irq.hpd[5]) {
4325                 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4326                 hpd6 |= DC_HPDx_INT_EN;
4327         }
4328         if (rdev->irq.afmt[0]) {
4329                 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4330                 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4331         }
4332         if (rdev->irq.afmt[1]) {
4333                 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4334                 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4335         }
4336         if (rdev->irq.afmt[2]) {
4337                 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4338                 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4339         }
4340         if (rdev->irq.afmt[3]) {
4341                 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4342                 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4343         }
4344         if (rdev->irq.afmt[4]) {
4345                 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4346                 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4347         }
4348         if (rdev->irq.afmt[5]) {
4349                 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4350                 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4351         }
4352
4353         if (rdev->family >= CHIP_CAYMAN) {
4354                 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4355                 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4356                 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4357         } else
4358                 WREG32(CP_INT_CNTL, cp_int_cntl);
4359
4360         WREG32(DMA_CNTL, dma_cntl);
4361
4362         if (rdev->family >= CHIP_CAYMAN)
4363                 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4364
4365         WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4366
4367         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4368         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4369         if (rdev->num_crtc >= 4) {
4370                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4371                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4372         }
4373         if (rdev->num_crtc >= 6) {
4374                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4375                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4376         }
4377
4378         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
4379         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
4380         if (rdev->num_crtc >= 4) {
4381                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
4382                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
4383         }
4384         if (rdev->num_crtc >= 6) {
4385                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
4386                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
4387         }
4388
4389         WREG32(DC_HPD1_INT_CONTROL, hpd1);
4390         WREG32(DC_HPD2_INT_CONTROL, hpd2);
4391         WREG32(DC_HPD3_INT_CONTROL, hpd3);
4392         WREG32(DC_HPD4_INT_CONTROL, hpd4);
4393         WREG32(DC_HPD5_INT_CONTROL, hpd5);
4394         WREG32(DC_HPD6_INT_CONTROL, hpd6);
4395         if (rdev->family == CHIP_ARUBA)
4396                 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4397         else
4398                 WREG32(CG_THERMAL_INT, thermal_int);
4399
4400         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4401         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4402         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4403         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4404         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4405         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4406
4407         return 0;
4408 }
4409
4410 static void evergreen_irq_ack(struct radeon_device *rdev)
4411 {
4412         u32 tmp;
4413
4414         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4415         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4416         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4417         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4418         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4419         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4420         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4421         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4422         if (rdev->num_crtc >= 4) {
4423                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4424                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4425         }
4426         if (rdev->num_crtc >= 6) {
4427                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4428                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4429         }
4430
4431         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4432         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4433         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4434         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4435         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4436         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4437
4438         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4439                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4440         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4441                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4442         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4443                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4444         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4445                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4446         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4447                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4448         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4449                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4450
4451         if (rdev->num_crtc >= 4) {
4452                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4453                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4454                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4455                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4456                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4457                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4458                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4459                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4460                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4461                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4462                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4463                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4464         }
4465
4466         if (rdev->num_crtc >= 6) {
4467                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4468                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4469                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4470                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4471                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4472                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4473                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4474                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4475                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4476                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4477                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4478                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4479         }
4480
4481         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4482                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4483                 tmp |= DC_HPDx_INT_ACK;
4484                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4485         }
4486         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4487                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4488                 tmp |= DC_HPDx_INT_ACK;
4489                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4490         }
4491         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4492                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4493                 tmp |= DC_HPDx_INT_ACK;
4494                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4495         }
4496         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4497                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4498                 tmp |= DC_HPDx_INT_ACK;
4499                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4500         }
4501         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4502                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4503                 tmp |= DC_HPDx_INT_ACK;
4504                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4505         }
4506         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4507                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4508                 tmp |= DC_HPDx_INT_ACK;
4509                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4510         }
4511         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4512                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4513                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4514                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4515         }
4516         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4517                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4518                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4519                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4520         }
4521         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4522                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4523                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4524                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4525         }
4526         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4527                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4528                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4529                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4530         }
4531         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4532                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4533                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4534                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4535         }
4536         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4537                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4538                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4539                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4540         }
4541 }
4542
/**
 * evergreen_irq_disable - quiesce interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Disables interrupt generation, waits for any in-flight interrupts to
 * settle, acks everything still pending, then masks all display interrupt
 * sources.  The statement order is deliberate: acking before masking
 * ensures no stale status bits survive into the next irq_set.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4551
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces all interrupt sources, then stops the RLC.  Interrupts are
 * disabled first so the RLC is not torn down with sources still live.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4557
4558 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4559 {
4560         u32 wptr, tmp;
4561
4562         if (rdev->wb.enabled)
4563                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4564         else
4565                 wptr = RREG32(IH_RB_WPTR);
4566
4567         if (wptr & RB_OVERFLOW) {
4568                 /* When a ring buffer overflow happen start parsing interrupt
4569                  * from the last not overwritten vector (wptr + 16). Hopefully
4570                  * this should allow us to catchup.
4571                  */
4572                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4573                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4574                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4575                 tmp = RREG32(IH_RB_CNTL);
4576                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4577                 WREG32(IH_RB_CNTL, tmp);
4578         }
4579         return (wptr & rdev->ih.ptr_mask);
4580 }
4581
4582 int evergreen_irq_process(struct radeon_device *rdev)
4583 {
4584         u32 wptr;
4585         u32 rptr;
4586         u32 src_id, src_data;
4587         u32 ring_index;
4588         bool queue_hotplug = false;
4589         bool queue_hdmi = false;
4590         bool queue_thermal = false;
4591
4592         if (!rdev->ih.enabled || rdev->shutdown)
4593                 return IRQ_NONE;
4594
4595         wptr = evergreen_get_ih_wptr(rdev);
4596
4597 restart_ih:
4598         /* is somebody else already processing irqs? */
4599         if (atomic_xchg(&rdev->ih.lock, 1))
4600                 return IRQ_NONE;
4601
4602         rptr = rdev->ih.rptr;
4603         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4604
4605         /* Order reading of wptr vs. reading of IH ring data */
4606         rmb();
4607
4608         /* display interrupts */
4609         evergreen_irq_ack(rdev);
4610
4611         while (rptr != wptr) {
4612                 /* wptr/rptr are in bytes! */
4613                 ring_index = rptr / 4;
4614                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4615                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4616
4617                 switch (src_id) {
4618                 case 1: /* D1 vblank/vline */
4619                         switch (src_data) {
4620                         case 0: /* D1 vblank */
4621                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4622                                         if (rdev->irq.crtc_vblank_int[0]) {
4623                                                 drm_handle_vblank(rdev->ddev, 0);
4624                                                 rdev->pm.vblank_sync = true;
4625                                                 wake_up(&rdev->irq.vblank_queue);
4626                                         }
4627                                         if (atomic_read(&rdev->irq.pflip[0]))
4628                                                 radeon_crtc_handle_flip(rdev, 0);
4629                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4630                                         DRM_DEBUG("IH: D1 vblank\n");
4631                                 }
4632                                 break;
4633                         case 1: /* D1 vline */
4634                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4635                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4636                                         DRM_DEBUG("IH: D1 vline\n");
4637                                 }
4638                                 break;
4639                         default:
4640                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4641                                 break;
4642                         }
4643                         break;
4644                 case 2: /* D2 vblank/vline */
4645                         switch (src_data) {
4646                         case 0: /* D2 vblank */
4647                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4648                                         if (rdev->irq.crtc_vblank_int[1]) {
4649                                                 drm_handle_vblank(rdev->ddev, 1);
4650                                                 rdev->pm.vblank_sync = true;
4651                                                 wake_up(&rdev->irq.vblank_queue);
4652                                         }
4653                                         if (atomic_read(&rdev->irq.pflip[1]))
4654                                                 radeon_crtc_handle_flip(rdev, 1);
4655                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4656                                         DRM_DEBUG("IH: D2 vblank\n");
4657                                 }
4658                                 break;
4659                         case 1: /* D2 vline */
4660                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4661                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4662                                         DRM_DEBUG("IH: D2 vline\n");
4663                                 }
4664                                 break;
4665                         default:
4666                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4667                                 break;
4668                         }
4669                         break;
4670                 case 3: /* D3 vblank/vline */
4671                         switch (src_data) {
4672                         case 0: /* D3 vblank */
4673                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4674                                         if (rdev->irq.crtc_vblank_int[2]) {
4675                                                 drm_handle_vblank(rdev->ddev, 2);
4676                                                 rdev->pm.vblank_sync = true;
4677                                                 wake_up(&rdev->irq.vblank_queue);
4678                                         }
4679                                         if (atomic_read(&rdev->irq.pflip[2]))
4680                                                 radeon_crtc_handle_flip(rdev, 2);
4681                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4682                                         DRM_DEBUG("IH: D3 vblank\n");
4683                                 }
4684                                 break;
4685                         case 1: /* D3 vline */
4686                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4687                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4688                                         DRM_DEBUG("IH: D3 vline\n");
4689                                 }
4690                                 break;
4691                         default:
4692                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4693                                 break;
4694                         }
4695                         break;
4696                 case 4: /* D4 vblank/vline */
4697                         switch (src_data) {
4698                         case 0: /* D4 vblank */
4699                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4700                                         if (rdev->irq.crtc_vblank_int[3]) {
4701                                                 drm_handle_vblank(rdev->ddev, 3);
4702                                                 rdev->pm.vblank_sync = true;
4703                                                 wake_up(&rdev->irq.vblank_queue);
4704                                         }
4705                                         if (atomic_read(&rdev->irq.pflip[3]))
4706                                                 radeon_crtc_handle_flip(rdev, 3);
4707                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4708                                         DRM_DEBUG("IH: D4 vblank\n");
4709                                 }
4710                                 break;
4711                         case 1: /* D4 vline */
4712                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4713                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4714                                         DRM_DEBUG("IH: D4 vline\n");
4715                                 }
4716                                 break;
4717                         default:
4718                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4719                                 break;
4720                         }
4721                         break;
4722                 case 5: /* D5 vblank/vline */
4723                         switch (src_data) {
4724                         case 0: /* D5 vblank */
4725                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4726                                         if (rdev->irq.crtc_vblank_int[4]) {
4727                                                 drm_handle_vblank(rdev->ddev, 4);
4728                                                 rdev->pm.vblank_sync = true;
4729                                                 wake_up(&rdev->irq.vblank_queue);
4730                                         }
4731                                         if (atomic_read(&rdev->irq.pflip[4]))
4732                                                 radeon_crtc_handle_flip(rdev, 4);
4733                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4734                                         DRM_DEBUG("IH: D5 vblank\n");
4735                                 }
4736                                 break;
4737                         case 1: /* D5 vline */
4738                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4739                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4740                                         DRM_DEBUG("IH: D5 vline\n");
4741                                 }
4742                                 break;
4743                         default:
4744                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4745                                 break;
4746                         }
4747                         break;
4748                 case 6: /* D6 vblank/vline */
4749                         switch (src_data) {
4750                         case 0: /* D6 vblank */
4751                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4752                                         if (rdev->irq.crtc_vblank_int[5]) {
4753                                                 drm_handle_vblank(rdev->ddev, 5);
4754                                                 rdev->pm.vblank_sync = true;
4755                                                 wake_up(&rdev->irq.vblank_queue);
4756                                         }
4757                                         if (atomic_read(&rdev->irq.pflip[5]))
4758                                                 radeon_crtc_handle_flip(rdev, 5);
4759                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4760                                         DRM_DEBUG("IH: D6 vblank\n");
4761                                 }
4762                                 break;
4763                         case 1: /* D6 vline */
4764                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4765                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4766                                         DRM_DEBUG("IH: D6 vline\n");
4767                                 }
4768                                 break;
4769                         default:
4770                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4771                                 break;
4772                         }
4773                         break;
4774                 case 42: /* HPD hotplug */
4775                         switch (src_data) {
4776                         case 0:
4777                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4778                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4779                                         queue_hotplug = true;
4780                                         DRM_DEBUG("IH: HPD1\n");
4781                                 }
4782                                 break;
4783                         case 1:
4784                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4785                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4786                                         queue_hotplug = true;
4787                                         DRM_DEBUG("IH: HPD2\n");
4788                                 }
4789                                 break;
4790                         case 2:
4791                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4792                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4793                                         queue_hotplug = true;
4794                                         DRM_DEBUG("IH: HPD3\n");
4795                                 }
4796                                 break;
4797                         case 3:
4798                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4799                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4800                                         queue_hotplug = true;
4801                                         DRM_DEBUG("IH: HPD4\n");
4802                                 }
4803                                 break;
4804                         case 4:
4805                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4806                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4807                                         queue_hotplug = true;
4808                                         DRM_DEBUG("IH: HPD5\n");
4809                                 }
4810                                 break;
4811                         case 5:
4812                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4813                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4814                                         queue_hotplug = true;
4815                                         DRM_DEBUG("IH: HPD6\n");
4816                                 }
4817                                 break;
4818                         default:
4819                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4820                                 break;
4821                         }
4822                         break;
4823                 case 44: /* hdmi */
4824                         switch (src_data) {
4825                         case 0:
4826                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4827                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4828                                         queue_hdmi = true;
4829                                         DRM_DEBUG("IH: HDMI0\n");
4830                                 }
4831                                 break;
4832                         case 1:
4833                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4834                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4835                                         queue_hdmi = true;
4836                                         DRM_DEBUG("IH: HDMI1\n");
4837                                 }
4838                                 break;
4839                         case 2:
4840                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4841                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4842                                         queue_hdmi = true;
4843                                         DRM_DEBUG("IH: HDMI2\n");
4844                                 }
4845                                 break;
4846                         case 3:
4847                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4848                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4849                                         queue_hdmi = true;
4850                                         DRM_DEBUG("IH: HDMI3\n");
4851                                 }
4852                                 break;
4853                         case 4:
4854                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4855                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4856                                         queue_hdmi = true;
4857                                         DRM_DEBUG("IH: HDMI4\n");
4858                                 }
4859                                 break;
4860                         case 5:
4861                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4862                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4863                                         queue_hdmi = true;
4864                                         DRM_DEBUG("IH: HDMI5\n");
4865                                 }
4866                                 break;
4867                         default:
4868                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4869                                 break;
4870                         }
4871                 case 124: /* UVD */
4872                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4873                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4874                         break;
4875                 case 146:
4876                 case 147:
4877                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4878                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4879                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4880                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4881                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4882                         /* reset addr and status */
4883                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4884                         break;
4885                 case 176: /* CP_INT in ring buffer */
4886                 case 177: /* CP_INT in IB1 */
4887                 case 178: /* CP_INT in IB2 */
4888                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4889                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4890                         break;
4891                 case 181: /* CP EOP event */
4892                         DRM_DEBUG("IH: CP EOP\n");
4893                         if (rdev->family >= CHIP_CAYMAN) {
4894                                 switch (src_data) {
4895                                 case 0:
4896                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4897                                         break;
4898                                 case 1:
4899                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4900                                         break;
4901                                 case 2:
4902                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4903                                         break;
4904                                 }
4905                         } else
4906                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4907                         break;
4908                 case 224: /* DMA trap event */
4909                         DRM_DEBUG("IH: DMA trap\n");
4910                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4911                         break;
4912                 case 230: /* thermal low to high */
4913                         DRM_DEBUG("IH: thermal low to high\n");
4914                         rdev->pm.dpm.thermal.high_to_low = false;
4915                         queue_thermal = true;
4916                         break;
4917                 case 231: /* thermal high to low */
4918                         DRM_DEBUG("IH: thermal high to low\n");
4919                         rdev->pm.dpm.thermal.high_to_low = true;
4920                         queue_thermal = true;
4921                         break;
4922                 case 233: /* GUI IDLE */
4923                         DRM_DEBUG("IH: GUI idle\n");
4924                         break;
4925                 case 244: /* DMA trap event */
4926                         if (rdev->family >= CHIP_CAYMAN) {
4927                                 DRM_DEBUG("IH: DMA1 trap\n");
4928                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4929                         }
4930                         break;
4931                 default:
4932                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4933                         break;
4934                 }
4935
4936                 /* wptr/rptr are in bytes! */
4937                 rptr += 16;
4938                 rptr &= rdev->ih.ptr_mask;
4939         }
4940         if (queue_hotplug)
4941                 schedule_work(&rdev->hotplug_work);
4942         if (queue_hdmi)
4943                 schedule_work(&rdev->audio_work);
4944         if (queue_thermal && rdev->pm.dpm_enabled)
4945                 schedule_work(&rdev->pm.dpm.thermal.work);
4946         rdev->ih.rptr = rptr;
4947         WREG32(IH_RB_RPTR, rdev->ih.rptr);
4948         atomic_set(&rdev->ih.lock, 0);
4949
4950         /* make sure wptr hasn't changed while processing */
4951         wptr = evergreen_get_ih_wptr(rdev);
4952         if (wptr != rptr)
4953                 goto restart_ih;
4954
4955         return IRQ_HANDLED;
4956 }
4957
/**
 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
 *
 * @rdev: radeon_device pointer
 * @fence: radeon fence object
 *
 * Add a DMA fence packet to the ring to write
 * the fence seq number and DMA trap packet to generate
 * an interrupt if needed (evergreen-SI).
 */
void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
                                   struct radeon_fence *fence)
{
        struct radeon_ring *ring = &rdev->ring[fence->ring];
        u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
        /* write the fence */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
        /* low 32 bits of the fence address, dword aligned */
        radeon_ring_write(ring, addr & 0xfffffffc);
        /* only 8 high bits are used: the DMA engine takes 40-bit addresses */
        radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
        radeon_ring_write(ring, fence->seq);
        /* generate an interrupt */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
        /* flush HDP */
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
        radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
        radeon_ring_write(ring, 1);
}
4985
/**
 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
 *
 * @rdev: radeon_device pointer
 * @ib: IB object to schedule
 *
 * Schedule an IB in the DMA ring (evergreen).
 */
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
                                   struct radeon_ib *ib)
{
        struct radeon_ring *ring = &rdev->ring[ib->ring];

        if (rdev->wb.enabled) {
                /* Predict where wptr will be once the (aligned) IB packet
                 * below has been emitted: walk forward to the same "& 7 == 5"
                 * alignment used for the padding loop, then add the 3 DW of
                 * the INDIRECT_BUFFER packet itself.  The DMA engine writes
                 * that value to the next_rptr writeback slot.
                 */
                u32 next_rptr = ring->wptr + 4;
                while ((next_rptr & 7) != 5)
                        next_rptr++;
                next_rptr += 3;
                radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
                radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
                radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
                radeon_ring_write(ring, next_rptr);
        }

        /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
         * Pad as necessary with NOPs.
         */
        while ((ring->wptr & 7) != 5)
                radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
        radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
        /* IB base address: low bits masked off, so it must be 32-byte aligned */
        radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
        radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));

}
5020
5021 /**
5022  * evergreen_copy_dma - copy pages using the DMA engine
5023  *
5024  * @rdev: radeon_device pointer
5025  * @src_offset: src GPU address
5026  * @dst_offset: dst GPU address
5027  * @num_gpu_pages: number of GPU pages to xfer
5028  * @fence: radeon fence object
5029  *
5030  * Copy GPU paging using the DMA engine (evergreen-cayman).
5031  * Used by the radeon ttm implementation to move pages if
5032  * registered as the asic copy callback.
5033  */
5034 int evergreen_copy_dma(struct radeon_device *rdev,
5035                        uint64_t src_offset, uint64_t dst_offset,
5036                        unsigned num_gpu_pages,
5037                        struct radeon_fence **fence)
5038 {
5039         struct radeon_semaphore *sem = NULL;
5040         int ring_index = rdev->asic->copy.dma_ring_index;
5041         struct radeon_ring *ring = &rdev->ring[ring_index];
5042         u32 size_in_dw, cur_size_in_dw;
5043         int i, num_loops;
5044         int r = 0;
5045
5046         r = radeon_semaphore_create(rdev, &sem);
5047         if (r) {
5048                 DRM_ERROR("radeon: moving bo (%d).\n", r);
5049                 return r;
5050         }
5051
5052         size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
5053         num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
5054         r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
5055         if (r) {
5056                 DRM_ERROR("radeon: moving bo (%d).\n", r);
5057                 radeon_semaphore_free(rdev, &sem, NULL);
5058                 return r;
5059         }
5060
5061         if (radeon_fence_need_sync(*fence, ring->idx)) {
5062                 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
5063                                             ring->idx);
5064                 radeon_fence_note_sync(*fence, ring->idx);
5065         } else {
5066                 radeon_semaphore_free(rdev, &sem, NULL);
5067         }
5068
5069         for (i = 0; i < num_loops; i++) {
5070                 cur_size_in_dw = size_in_dw;
5071                 if (cur_size_in_dw > 0xFFFFF)
5072                         cur_size_in_dw = 0xFFFFF;
5073                 size_in_dw -= cur_size_in_dw;
5074                 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
5075                 radeon_ring_write(ring, dst_offset & 0xfffffffc);
5076                 radeon_ring_write(ring, src_offset & 0xfffffffc);
5077                 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
5078                 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
5079                 src_offset += cur_size_in_dw * 4;
5080                 dst_offset += cur_size_in_dw * 4;
5081         }
5082
5083         r = radeon_fence_emit(rdev, fence, ring->idx);
5084         if (r) {
5085                 radeon_ring_unlock_undo(rdev, ring);
5086                 return r;
5087         }
5088
5089         radeon_ring_unlock_commit(rdev, ring);
5090         radeon_semaphore_free(rdev, &sem, *fence);
5091
5092         return r;
5093 }
5094
/**
 * evergreen_startup - program the hw and bring up the asic blocks
 *
 * @rdev: radeon_device pointer
 *
 * Loads microcode, programs MC/GART, initializes the RLC (IGP parts),
 * writeback, fences, IRQs and the GFX/DMA/UVD rings, then the IB pool
 * and audio.  Shared by init and resume paths.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
        struct radeon_ring *ring;
        int r;

        /* enable pcie gen2 link */
        evergreen_pcie_gen2_enable(rdev);
        /* enable aspm */
        evergreen_program_aspm(rdev);

        /* DCE5 (NI) parts additionally require MC microcode */
        if (ASIC_IS_DCE5(rdev)) {
                if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
                        r = ni_init_microcode(rdev);
                        if (r) {
                                DRM_ERROR("Failed to load firmware!\n");
                                return r;
                        }
                }
                r = ni_mc_load_microcode(rdev);
                if (r) {
                        DRM_ERROR("Failed to load MC firmware!\n");
                        return r;
                }
        } else {
                if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
                        r = r600_init_microcode(rdev);
                        if (r) {
                                DRM_ERROR("Failed to load firmware!\n");
                                return r;
                        }
                }
        }

        r = r600_vram_scratch_init(rdev);
        if (r)
                return r;

        evergreen_mc_program(rdev);
        if (rdev->flags & RADEON_IS_AGP) {
                evergreen_agp_enable(rdev);
        } else {
                r = evergreen_pcie_gart_enable(rdev);
                if (r)
                        return r;
        }
        evergreen_gpu_init(rdev);

        /* blit init failure is not fatal: clear the copy callback and
         * fall back to CPU memcpy for buffer moves
         */
        r = evergreen_blit_init(rdev);
        if (r) {
                r600_blit_fini(rdev);
                rdev->asic->copy.copy = NULL;
                dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
        }

        /* allocate rlc buffers */
        if (rdev->flags & RADEON_IS_IGP) {
                rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
                rdev->rlc.reg_list_size = sumo_rlc_save_restore_register_list_size;
                rdev->rlc.cs_data = evergreen_cs_data;
                r = sumo_rlc_init(rdev);
                if (r) {
                        DRM_ERROR("Failed to init rlc BOs!\n");
                        return r;
                }
        }

        /* allocate wb buffer */
        r = radeon_wb_init(rdev);
        if (r)
                return r;

        r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
        if (r) {
                dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
                return r;
        }

        r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
        if (r) {
                dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
                return r;
        }

        /* UVD is optional: on any failure just disable its ring below
         * rather than failing the whole startup
         */
        r = rv770_uvd_resume(rdev);
        if (!r) {
                r = radeon_fence_driver_start_ring(rdev,
                                                   R600_RING_TYPE_UVD_INDEX);
                if (r)
                        dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
        }

        if (r)
                rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

        /* Enable IRQ */
        if (!rdev->irq.installed) {
                r = radeon_irq_kms_init(rdev);
                if (r)
                        return r;
        }

        r = r600_irq_init(rdev);
        if (r) {
                DRM_ERROR("radeon: IH init failed (%d).\n", r);
                radeon_irq_kms_fini(rdev);
                return r;
        }
        evergreen_irq_set(rdev);

        ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
                             R600_CP_RB_RPTR, R600_CP_RB_WPTR,
                             0, 0xfffff, RADEON_CP_PACKET2);
        if (r)
                return r;

        ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
        r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
                             DMA_RB_RPTR, DMA_RB_WPTR,
                             2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
        if (r)
                return r;

        r = evergreen_cp_load_microcode(rdev);
        if (r)
                return r;
        r = evergreen_cp_resume(rdev);
        if (r)
                return r;
        r = r600_dma_resume(rdev);
        if (r)
                return r;

        /* ring_size == 0 means UVD was disabled above */
        ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
        if (ring->ring_size) {
                r = radeon_ring_init(rdev, ring, ring->ring_size,
                                     R600_WB_UVD_RPTR_OFFSET,
                                     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
                                     0, 0xfffff, RADEON_CP_PACKET2);
                if (!r)
                        r = r600_uvd_init(rdev);

                if (r)
                        DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
        }

        r = radeon_ib_pool_init(rdev);
        if (r) {
                dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
                return r;
        }

        r = r600_audio_init(rdev);
        if (r) {
                DRM_ERROR("radeon: audio init failed\n");
                return r;
        }

        return 0;
}
5255
/**
 * evergreen_resume - resume the asic after suspend
 *
 * @rdev: radeon_device pointer
 *
 * Reset and re-post the card, restore the golden registers and then
 * run the common startup path.  Returns 0 on success, negative error
 * code on failure (accel_working is cleared in that case).
 */
int evergreen_resume(struct radeon_device *rdev)
{
        int r;

        /* reset the asic, the gfx blocks are often in a bad state
         * after the driver is unloaded or after a resume
         */
        if (radeon_asic_reset(rdev))
                dev_warn(rdev->dev, "GPU reset failed !\n");
        /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
         * posting will perform necessary task to bring back GPU into good
         * shape.
         */
        /* post card */
        atom_asic_init(rdev->mode_info.atom_context);

        /* init golden registers */
        evergreen_init_golden_registers(rdev);

        rdev->accel_working = true;
        r = evergreen_startup(rdev);
        if (r) {
                DRM_ERROR("evergreen startup failed on resume\n");
                rdev->accel_working = false;
                return r;
        }

        return r;

}
5286
/**
 * evergreen_suspend - quiesce the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Tear down in dependency order: stop audio and the UVD/CP/DMA
 * engines first, then disable interrupts, writeback and finally
 * the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
        r600_audio_fini(rdev);
        radeon_uvd_suspend(rdev);
        r700_cp_stop(rdev);
        r600_dma_stop(rdev);
        r600_uvd_rbc_stop(rdev);
        evergreen_irq_suspend(rdev);
        radeon_wb_disable(rdev);
        evergreen_pcie_gart_disable(rdev);

        return 0;
}
5300
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/**
 * evergreen_init - one-time asic init at driver load
 *
 * @rdev: radeon_device pointer
 *
 * Validates the (ATOM) BIOS, posts the card if needed, sets up
 * clocks, fences, AGP, MC, the memory manager and the rings, then
 * runs the common startup path.  On startup failure acceleration is
 * disabled but init still returns 0 (unless the MC ucode check
 * below fails).  Returns a negative error code on hard failures.
 */
int evergreen_init(struct radeon_device *rdev)
{
        int r;

        /* Read BIOS */
        if (!radeon_get_bios(rdev)) {
                if (ASIC_IS_AVIVO(rdev))
                        return -EINVAL;
        }
        /* Must be an ATOMBIOS */
        if (!rdev->is_atom_bios) {
                dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
                return -EINVAL;
        }
        r = radeon_atombios_init(rdev);
        if (r)
                return r;
        /* reset the asic, the gfx blocks are often in a bad state
         * after the driver is unloaded or after a resume
         */
        if (radeon_asic_reset(rdev))
                dev_warn(rdev->dev, "GPU reset failed !\n");
        /* Post card if necessary */
        if (!radeon_card_posted(rdev)) {
                if (!rdev->bios) {
                        dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
                        return -EINVAL;
                }
                DRM_INFO("GPU not posted. posting now...\n");
                atom_asic_init(rdev->mode_info.atom_context);
        }
        /* init golden registers */
        evergreen_init_golden_registers(rdev);
        /* Initialize scratch registers */
        r600_scratch_init(rdev);
        /* Initialize surface registers */
        radeon_surface_init(rdev);
        /* Initialize clocks */
        radeon_get_clock_info(rdev->ddev);
        /* Fence driver */
        r = radeon_fence_driver_init(rdev);
        if (r)
                return r;
        /* initialize AGP */
        if (rdev->flags & RADEON_IS_AGP) {
                r = radeon_agp_init(rdev);
                if (r)
                        radeon_agp_disable(rdev);
        }
        /* initialize memory controller */
        r = evergreen_mc_init(rdev);
        if (r)
                return r;
        /* Memory manager */
        r = radeon_bo_init(rdev);
        if (r)
                return r;

        /* pre-size the GFX (1MB), DMA (64KB) and optional UVD (4KB) rings;
         * the ring objects themselves are allocated in evergreen_startup()
         */
        rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
        r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

        rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
        r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

        r = radeon_uvd_init(rdev);
        if (!r) {
                rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
                r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
                               4096);
        }

        rdev->ih.ring_obj = NULL;
        r600_ih_ring_init(rdev, 64 * 1024);

        r = r600_pcie_gart_init(rdev);
        if (r)
                return r;

        /* startup failure is non-fatal here: tear the accel blocks back
         * down and continue with acceleration disabled
         */
        rdev->accel_working = true;
        r = evergreen_startup(rdev);
        if (r) {
                dev_err(rdev->dev, "disabling GPU acceleration\n");
                r700_cp_fini(rdev);
                r600_dma_fini(rdev);
                r600_irq_fini(rdev);
                if (rdev->flags & RADEON_IS_IGP)
                        sumo_rlc_fini(rdev);
                radeon_wb_fini(rdev);
                radeon_ib_pool_fini(rdev);
                radeon_irq_kms_fini(rdev);
                evergreen_pcie_gart_fini(rdev);
                rdev->accel_working = false;
        }

        /* Don't start up if the MC ucode is missing on BTC parts.
         * The default clocks and voltages before the MC ucode
         * is loaded are not suffient for advanced operations.
         */
        if (ASIC_IS_DCE5(rdev)) {
                if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
                        DRM_ERROR("radeon: MC ucode required for NI+.\n");
                        return -EINVAL;
                }
        }

        return 0;
}
5414
/**
 * evergreen_fini - final teardown at driver unload
 *
 * @rdev: radeon_device pointer
 *
 * Unwinds everything set up by evergreen_init()/evergreen_startup():
 * engines and IRQs first, then GART, UVD, memory management, fences,
 * AGP, BOs, atombios state and finally the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
        r600_audio_fini(rdev);
        r600_blit_fini(rdev);
        r700_cp_fini(rdev);
        r600_dma_fini(rdev);
        r600_irq_fini(rdev);
        if (rdev->flags & RADEON_IS_IGP)
                sumo_rlc_fini(rdev);
        radeon_wb_fini(rdev);
        radeon_ib_pool_fini(rdev);
        radeon_irq_kms_fini(rdev);
        evergreen_pcie_gart_fini(rdev);
        radeon_uvd_fini(rdev);
        r600_vram_scratch_fini(rdev);
        radeon_gem_fini(rdev);
        radeon_fence_driver_fini(rdev);
        radeon_agp_fini(rdev);
        radeon_bo_fini(rdev);
        radeon_atombios_fini(rdev);
        kfree(rdev->bios);
        rdev->bios = NULL;
}
5438
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIE link to gen 2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Bails out early when gen2 is disabled via the radeon.pcie_gen2 module
 * parameter, on IGP/non-PCIE/X2 parts, when the upstream bus does not
 * support 5.0/8.0 GT/s, or when the link already runs at the higher rate.
 * Otherwise programs the LC speed/link-width registers to request the
 * gen2 transition.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
        u32 link_width_cntl, speed_cntl;

        if (radeon_pcie_gen2 == 0)
                return;

        if (rdev->flags & RADEON_IS_IGP)
                return;

        if (!(rdev->flags & RADEON_IS_PCIE))
                return;

        /* x2 cards have a special sequence */
        if (ASIC_IS_X2(rdev))
                return;

        if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
                (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
                return;

        speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
        if (speed_cntl & LC_CURRENT_DATA_RATE) {
                DRM_INFO("PCIE gen 2 link speeds already enabled\n");
                return;
        }

        DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

        /* the other side must have advertised (or once sent) gen2 support */
        if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
            (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

                link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
                link_width_cntl &= ~LC_UPCONFIGURE_DIS;
                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
                speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

                /* pulse the failed-speed-change counter clear bit */
                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
                speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
                speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

                speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
                speed_cntl |= LC_GEN2_EN_STRAP;
                WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

        } else {
                link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
                /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
                /* NOTE: the if (1) is intentional until the bridge-vendor
                 * check above is implemented, i.e. upconfigure is always
                 * disabled on the gen1 path for now.
                 */
                if (1)
                        link_width_cntl |= LC_UPCONFIGURE_DIS;
                else
                        link_width_cntl &= ~LC_UPCONFIGURE_DIS;
                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
        }
}
5501
/**
 * evergreen_program_aspm - program PCIE ASPM (active state power mgmt)
 *
 * @rdev: radeon_device pointer
 *
 * Configures the PIF PHY pairing, L0s/L1 inactivity timers and the
 * PLL power-down behaviour in L1.  L0s is disabled on a hard-coded
 * list of families; L1 and PLL-off-in-L1 are always enabled here
 * (disable_l1/disable_plloff_in_l1 are initialized false and never
 * changed).  No-op on non-PCIE parts.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
        u32 data, orig;
        u32 pcie_lc_cntl, pcie_lc_cntl_old;
        bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
        /* fusion_platform = true
         * if the system is a fusion system
         * (APU or DGPU in a fusion system).
         * todo: check if the system is a fusion platform.
         */
        bool fusion_platform = false;

        if (!(rdev->flags & RADEON_IS_PCIE))
                return;

        /* these families keep L0s disabled */
        switch (rdev->family) {
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
        case CHIP_JUNIPER:
        case CHIP_REDWOOD:
        case CHIP_CEDAR:
        case CHIP_SUMO:
        case CHIP_SUMO2:
        case CHIP_PALM:
        case CHIP_ARUBA:
                disable_l0s = true;
                break;
        default:
                disable_l0s = false;
                break;
        }

        if (rdev->flags & RADEON_IS_IGP)
                fusion_platform = true; /* XXX also dGPUs in a fusion system */

        /* PIF pairing: MULTI_PIF cleared on fusion platforms, set otherwise */
        data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
        if (fusion_platform)
                data &= ~MULTI_PIF;
        else
                data |= MULTI_PIF;
        if (data != orig)
                WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

        data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
        if (fusion_platform)
                data &= ~MULTI_PIF;
        else
                data |= MULTI_PIF;
        if (data != orig)
                WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

        pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
        pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
        /* L0s inactivity timer: NI+ (BARTS and up) use a longer value */
        if (!disable_l0s) {
                if (rdev->family >= CHIP_BARTS)
                        pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
                else
                        pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
        }

        if (!disable_l1) {
                /* L1 inactivity timer, family dependent */
                if (rdev->family >= CHIP_BARTS)
                        pcie_lc_cntl |= LC_L1_INACTIVITY(7);
                else
                        pcie_lc_cntl |= LC_L1_INACTIVITY(8);

                if (!disable_plloff_in_l1) {
                        /* allow the PLLs to power down in the OFF/TXS2 states
                         * on both PIF PHYs
                         */
                        data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
                        data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
                        if (data != orig)
                                WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

                        data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
                        data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
                        if (data != orig)
                                WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

                        data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
                        data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
                        if (data != orig)
                                WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

                        data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
                        data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
                        if (data != orig)
                                WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

                        /* NI+ parts also get a PLL ramp-up time of 4 */
                        if (rdev->family >= CHIP_BARTS) {
                                data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
                                data &= ~PLL_RAMP_UP_TIME_0_MASK;
                                data |= PLL_RAMP_UP_TIME_0(4);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

                                data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
                                data &= ~PLL_RAMP_UP_TIME_1_MASK;
                                data |= PLL_RAMP_UP_TIME_1(4);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
                                data &= ~PLL_RAMP_UP_TIME_0_MASK;
                                data |= PLL_RAMP_UP_TIME_0(4);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
                                data &= ~PLL_RAMP_UP_TIME_1_MASK;
                                data |= PLL_RAMP_UP_TIME_1(4);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
                        }

                        data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
                        data &= ~LC_DYN_LANES_PWR_STATE_MASK;
                        data |= LC_DYN_LANES_PWR_STATE(3);
                        if (data != orig)
                                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

                        /* NI+: shorten the LS2 exit time on both PIF PHYs */
                        if (rdev->family >= CHIP_BARTS) {
                                data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
                                data &= ~LS2_EXIT_TIME_MASK;
                                data |= LS2_EXIT_TIME(1);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
                                data &= ~LS2_EXIT_TIME_MASK;
                                data |= LS2_EXIT_TIME(1);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
                        }
                }
        }

        /* evergreen parts only */
        if (rdev->family < CHIP_BARTS)
                pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

        /* commit the LC_CNTL changes only if something actually changed */
        if (pcie_lc_cntl != pcie_lc_cntl_old)
                WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}