/* drivers/gpu/drm/radeon/evergreen.c */
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
5a0e3ad6 26#include <linux/slab.h>
760285e7 27#include <drm/drmP.h>
bcc1c2a1 28#include "radeon.h"
e6990375 29#include "radeon_asic.h"
760285e7 30#include <drm/radeon_drm.h>
0fcdb61e 31#include "evergreend.h"
bcc1c2a1
AD
32#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
2281a378 35#include "evergreen_blit_shaders.h"
138e4e16 36#include "radeon_ucode.h"
fe251e2f 37
4a15903d
AD
/* MMIO register-block offset for each of the 6 evergreen CRTCs, indexed by CRTC id. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
47
2948f5e6
AD
48#include "clearstate_evergreen.h"
49
/*
 * Register offsets for the Sumo RLC save/restore list (per the name;
 * the consumer lives elsewhere in the driver — TODO confirm against
 * the RLC setup code).  Flat list of MMIO offsets, one per entry.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
2948f5e6 134
bcc1c2a1
AD
135static void evergreen_gpu_init(struct radeon_device *rdev);
136void evergreen_fini(struct radeon_device *rdev);
b07759bf 137void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
f52382d7 138void evergreen_program_aspm(struct radeon_device *rdev);
1b37078b
AD
139extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140 int ring, u32 cp_int_cntl);
54e2e49c
AD
141extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142 u32 status, u32 addr);
22c775ce 143void cik_init_cp_pg_table(struct radeon_device *rdev);
bcc1c2a1 144
59a82d0e
AD
145extern u32 si_get_csb_size(struct radeon_device *rdev);
146extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
a0f38609
AD
147extern u32 cik_get_csb_size(struct radeon_device *rdev);
148extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
59a82d0e 149
d4788db3
AD
/*
 * Golden register table shared by Cypress/Hemlock/Juniper/Redwood.
 * Rows of {MMIO offset, mask, value}; applied by
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5cc, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
195
/*
 * Second golden table applied to all discrete evergreen parts
 * (Cypress/Hemlock/Juniper/Redwood/Cedar): rows of {MMIO offset, mask, value},
 * all clearing the full register to 0.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
213
/*
 * Cypress/Hemlock mgcg init table (mgcg = clock-gating setup, per the
 * name — consumer is radeon_program_register_sequence()).
 * Rows of {MMIO offset, mask, value}.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	/* 0x802c is written several times below, apparently selecting a
	 * bank (0x40000000 / 0x40010000) before the 0x915c..0x929c group,
	 * then restored to 0xc0000000 at the end. */
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
366
/*
 * Redwood mgcg init table (clock-gating setup, per the name).
 * Rows of {MMIO offset, mask, value}; same leading sequence as the
 * cypress table but with a single (shorter) 0x915c..0x929c group.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
438
/*
 * Cedar golden register table: rows of {MMIO offset, mask, value}.
 * Mirrors evergreen_golden_registers minus the extra CRTC-count entries
 * and with a few different values (0x88d4, 0x8cf0).
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5cc, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
481
/*
 * Cedar mgcg init table (clock-gating setup, per the name).
 * Rows of {MMIO offset, mask, value}.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
535
/*
 * Juniper mgcg init table (clock-gating setup, per the name).
 * Rows of {MMIO offset, mask, value}.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
633
/*
 * SuperSumo (Sumo APU) golden register table: rows of
 * {MMIO offset, mask, value}; applied for CHIP_SUMO and CHIP_SUMO2.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5cc, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
708
/*
 * Extra golden registers for CHIP_SUMO2, applied on top of
 * supersumo_golden_registers. Rows of {MMIO offset, mask, value}.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
717
/*
 * Wrestler (CHIP_PALM APU) golden register table:
 * rows of {MMIO offset, mask, value}.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5cc, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
770
/*
 * Barts (Northern Islands) golden register table:
 * rows of {MMIO offset, mask, value}.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
819
/*
 * Turks (Northern Islands) golden register table:
 * rows of {MMIO offset, mask, value}.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
870
/*
 * Caicos (Northern Islands) golden register table:
 * rows of {MMIO offset, mask, value}.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
921
922static void evergreen_init_golden_registers(struct radeon_device *rdev)
923{
924 switch (rdev->family) {
925 case CHIP_CYPRESS:
926 case CHIP_HEMLOCK:
927 radeon_program_register_sequence(rdev,
928 evergreen_golden_registers,
929 (const u32)ARRAY_SIZE(evergreen_golden_registers));
930 radeon_program_register_sequence(rdev,
931 evergreen_golden_registers2,
932 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
933 radeon_program_register_sequence(rdev,
934 cypress_mgcg_init,
935 (const u32)ARRAY_SIZE(cypress_mgcg_init));
936 break;
937 case CHIP_JUNIPER:
938 radeon_program_register_sequence(rdev,
939 evergreen_golden_registers,
940 (const u32)ARRAY_SIZE(evergreen_golden_registers));
941 radeon_program_register_sequence(rdev,
942 evergreen_golden_registers2,
943 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
944 radeon_program_register_sequence(rdev,
945 juniper_mgcg_init,
946 (const u32)ARRAY_SIZE(juniper_mgcg_init));
947 break;
948 case CHIP_REDWOOD:
949 radeon_program_register_sequence(rdev,
950 evergreen_golden_registers,
951 (const u32)ARRAY_SIZE(evergreen_golden_registers));
952 radeon_program_register_sequence(rdev,
953 evergreen_golden_registers2,
954 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
955 radeon_program_register_sequence(rdev,
956 redwood_mgcg_init,
957 (const u32)ARRAY_SIZE(redwood_mgcg_init));
958 break;
959 case CHIP_CEDAR:
960 radeon_program_register_sequence(rdev,
961 cedar_golden_registers,
962 (const u32)ARRAY_SIZE(cedar_golden_registers));
963 radeon_program_register_sequence(rdev,
964 evergreen_golden_registers2,
965 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
966 radeon_program_register_sequence(rdev,
967 cedar_mgcg_init,
968 (const u32)ARRAY_SIZE(cedar_mgcg_init));
969 break;
970 case CHIP_PALM:
971 radeon_program_register_sequence(rdev,
972 wrestler_golden_registers,
973 (const u32)ARRAY_SIZE(wrestler_golden_registers));
974 break;
975 case CHIP_SUMO:
976 radeon_program_register_sequence(rdev,
977 supersumo_golden_registers,
978 (const u32)ARRAY_SIZE(supersumo_golden_registers));
979 break;
980 case CHIP_SUMO2:
981 radeon_program_register_sequence(rdev,
982 supersumo_golden_registers,
983 (const u32)ARRAY_SIZE(supersumo_golden_registers));
984 radeon_program_register_sequence(rdev,
985 sumo_golden_registers,
986 (const u32)ARRAY_SIZE(sumo_golden_registers));
987 break;
988 case CHIP_BARTS:
989 radeon_program_register_sequence(rdev,
990 barts_golden_registers,
991 (const u32)ARRAY_SIZE(barts_golden_registers));
992 break;
993 case CHIP_TURKS:
994 radeon_program_register_sequence(rdev,
995 turks_golden_registers,
996 (const u32)ARRAY_SIZE(turks_golden_registers));
997 break;
998 case CHIP_CAICOS:
999 radeon_program_register_sequence(rdev,
1000 caicos_golden_registers,
1001 (const u32)ARRAY_SIZE(caicos_golden_registers));
1002 break;
1003 default:
1004 break;
1005 }
1006}
1007
285484e2
JG
1008void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009 unsigned *bankh, unsigned *mtaspect,
1010 unsigned *tile_split)
1011{
1012 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016 switch (*bankw) {
1017 default:
1018 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022 }
1023 switch (*bankh) {
1024 default:
1025 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029 }
1030 switch (*mtaspect) {
1031 default:
1032 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036 }
1037}
1038
23d33ba3
AD
1039static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040 u32 cntl_reg, u32 status_reg)
1041{
1042 int r, i;
1043 struct atom_clock_dividers dividers;
1044
1045 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046 clock, false, &dividers);
1047 if (r)
1048 return r;
1049
1050 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052 for (i = 0; i < 100; i++) {
1053 if (RREG32(status_reg) & DCLK_STATUS)
1054 break;
1055 mdelay(10);
1056 }
1057 if (i == 100)
1058 return -ETIMEDOUT;
1059
1060 return 0;
1061}
1062
1063int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064{
1065 int r = 0;
1066 u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069 if (r)
1070 goto done;
1071 cg_scratch &= 0xffff0000;
1072 cg_scratch |= vclk / 100; /* Mhz */
1073
1074 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075 if (r)
1076 goto done;
1077 cg_scratch &= 0x0000ffff;
1078 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079
1080done:
1081 WREG32(CG_SCRATCH1, cg_scratch);
1082
1083 return r;
1084}
1085
a8b4925c
AD
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock in kHz (0 to power the PLL down)
 * @dclk: requested UVD decode clock in kHz (0 to power the PLL down)
 *
 * Bypasses the PLL, computes the feedback/post dividers, then walks the
 * documented reset/settle sequence before switching vclk/dclk back onto
 * the PLL outputs.  The register writes below are strictly ordered; do
 * not reorder them.  Returns 0 on success or a negative error code.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb/post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects VCO range depending on feedback divider */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1174
d054ac16
AD
1175void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1176{
1177 u16 ctl, v;
32195aec 1178 int err;
d054ac16 1179
32195aec 1180 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
d054ac16
AD
1181 if (err)
1182 return;
1183
1184 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1185
1186 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1187 * to avoid hangs or perfomance issues
1188 */
1189 if ((v == 0) || (v == 6) || (v == 7)) {
1190 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1191 ctl |= (2 << 12);
32195aec 1192 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
d054ac16
AD
1193 }
1194}
1195
10257a6d
AD
1196static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1197{
1198 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1199 return true;
1200 else
1201 return false;
1202}
1203
1204static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1205{
1206 u32 pos1, pos2;
1207
1208 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1209 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1210
1211 if (pos1 != pos2)
1212 return true;
1213 else
1214 return false;
1215}
1216
377edc8b
AD
1217/**
1218 * dce4_wait_for_vblank - vblank wait asic callback.
1219 *
1220 * @rdev: radeon_device pointer
1221 * @crtc: crtc to wait for vblank on
1222 *
1223 * Wait for vblank on the requested crtc (evergreen+).
1224 */
3ae19b75
AD
1225void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1226{
10257a6d 1227 unsigned i = 0;
3ae19b75 1228
4a15903d
AD
1229 if (crtc >= rdev->num_crtc)
1230 return;
1231
10257a6d
AD
1232 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1233 return;
1234
1235 /* depending on when we hit vblank, we may be close to active; if so,
1236 * wait for another frame.
1237 */
1238 while (dce4_is_in_vblank(rdev, crtc)) {
1239 if (i++ % 100 == 0) {
1240 if (!dce4_is_counter_moving(rdev, crtc))
3ae19b75 1241 break;
3ae19b75 1242 }
10257a6d
AD
1243 }
1244
1245 while (!dce4_is_in_vblank(rdev, crtc)) {
1246 if (i++ % 100 == 0) {
1247 if (!dce4_is_counter_moving(rdev, crtc))
3ae19b75 1248 break;
3ae19b75
AD
1249 }
1250 }
1251}
1252
377edc8b
AD
/**
 * evergreen_pre_page_flip - pre-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to prepare for pageflip on
 *
 * Pre-pageflip callback (evergreen+).
 * Enables the pageflip irq (vblank irq) so the flip can be
 * completed on the next vblank.
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1267
377edc8b
AD
/**
 * evergreen_post_page_flip - post-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to cleanup pageflip on
 *
 * Post-pageflip callback (evergreen+).
 * Disables the pageflip irq (vblank irq) acquired by
 * evergreen_pre_page_flip().
 */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
1282
377edc8b
AD
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 * Returns the current update pending status.
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock so the new addresses latch
	 * atomically */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses (both primary and secondary
	 * surfaces point at the new buffer) */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1332
21a8122a 1333/* get temperature in millidegrees */
20d391d7 1334int evergreen_get_temp(struct radeon_device *rdev)
21a8122a 1335{
1c88d74f
AD
1336 u32 temp, toffset;
1337 int actual_temp = 0;
67b3f823
AD
1338
1339 if (rdev->family == CHIP_JUNIPER) {
1340 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1341 TOFFSET_SHIFT;
1342 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1343 TS0_ADC_DOUT_SHIFT;
1344
1345 if (toffset & 0x100)
1346 actual_temp = temp / 2 - (0x200 - toffset);
1347 else
1348 actual_temp = temp / 2 + toffset;
1349
1350 actual_temp = actual_temp * 1000;
1351
1352 } else {
1353 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1354 ASIC_T_SHIFT;
1355
1356 if (temp & 0x400)
1357 actual_temp = -256;
1358 else if (temp & 0x200)
1359 actual_temp = 255;
1360 else if (temp & 0x100) {
1361 actual_temp = temp & 0x1ff;
1362 actual_temp |= ~0x1ff;
1363 } else
1364 actual_temp = temp & 0xff;
1365
1366 actual_temp = (actual_temp * 1000) / 2;
1367 }
21a8122a 1368
67b3f823 1369 return actual_temp;
21a8122a
AD
1370}
1371
20d391d7 1372int sumo_get_temp(struct radeon_device *rdev)
e33df25f
AD
1373{
1374 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
20d391d7 1375 int actual_temp = temp - 49;
e33df25f
AD
1376
1377 return actual_temp * 1000;
1378}
1379
377edc8b
AD
1380/**
1381 * sumo_pm_init_profile - Initialize power profiles callback.
1382 *
1383 * @rdev: radeon_device pointer
1384 *
1385 * Initialize the power states used in profile mode
1386 * (sumo, trinity, SI).
1387 * Used for profile mode only.
1388 */
a4c9e2ee
AD
1389void sumo_pm_init_profile(struct radeon_device *rdev)
1390{
1391 int idx;
1392
1393 /* default */
1394 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1395 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1396 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1397 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1398
1399 /* low,mid sh/mh */
1400 if (rdev->flags & RADEON_IS_MOBILITY)
1401 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1402 else
1403 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1404
1405 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1406 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1407 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1408 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1409
1410 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1411 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1412 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1413 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1414
1415 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1416 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1417 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1418 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1419
1420 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1421 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1422 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1423 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1424
1425 /* high sh/mh */
1426 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1427 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1428 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1429 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1430 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1431 rdev->pm.power_state[idx].num_clock_modes - 1;
1432
1433 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1434 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1435 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1436 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1437 rdev->pm.power_state[idx].num_clock_modes - 1;
1438}
1439
27810fb2
AD
1440/**
1441 * btc_pm_init_profile - Initialize power profiles callback.
1442 *
1443 * @rdev: radeon_device pointer
1444 *
1445 * Initialize the power states used in profile mode
1446 * (BTC, cayman).
1447 * Used for profile mode only.
1448 */
1449void btc_pm_init_profile(struct radeon_device *rdev)
1450{
1451 int idx;
1452
1453 /* default */
1454 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1455 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1456 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1457 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1458 /* starting with BTC, there is one state that is used for both
1459 * MH and SH. Difference is that we always use the high clock index for
1460 * mclk.
1461 */
1462 if (rdev->flags & RADEON_IS_MOBILITY)
1463 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1464 else
1465 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1466 /* low sh */
1467 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1468 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1469 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1470 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1471 /* mid sh */
1472 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1473 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1474 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1475 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1476 /* high sh */
1477 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1478 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1479 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1480 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1481 /* low mh */
1482 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1483 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1484 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1485 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1486 /* mid mh */
1487 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1488 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1489 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1490 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1491 /* high mh */
1492 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1493 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1494 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1495 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1496}
1497
377edc8b
AD
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).  Only software-controlled (VOLTAGE_SW)
 * states are handled; vddc is applied first, then vddci.  Either step
 * bails out early if the table entry is a flag value rather than a
 * real voltage.
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only program vddc when it actually changes */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH. Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only program vddci when it actually changes */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1545
377edc8b
AD
1546/**
1547 * evergreen_pm_prepare - pre-power state change callback.
1548 *
1549 * @rdev: radeon_device pointer
1550 *
1551 * Prepare for a power state change (evergreen+).
1552 */
49e02b73
AD
1553void evergreen_pm_prepare(struct radeon_device *rdev)
1554{
1555 struct drm_device *ddev = rdev->ddev;
1556 struct drm_crtc *crtc;
1557 struct radeon_crtc *radeon_crtc;
1558 u32 tmp;
1559
1560 /* disable any active CRTCs */
1561 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1562 radeon_crtc = to_radeon_crtc(crtc);
1563 if (radeon_crtc->enabled) {
1564 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1565 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1566 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1567 }
1568 }
1569}
1570
377edc8b
AD
1571/**
1572 * evergreen_pm_finish - post-power state change callback.
1573 *
1574 * @rdev: radeon_device pointer
1575 *
1576 * Clean up after a power state change (evergreen+).
1577 */
49e02b73
AD
1578void evergreen_pm_finish(struct radeon_device *rdev)
1579{
1580 struct drm_device *ddev = rdev->ddev;
1581 struct drm_crtc *crtc;
1582 struct radeon_crtc *radeon_crtc;
1583 u32 tmp;
1584
1585 /* enable any active CRTCs */
1586 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1587 radeon_crtc = to_radeon_crtc(crtc);
1588 if (radeon_crtc->enabled) {
1589 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1590 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1591 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1592 }
1593 }
1594}
1595
377edc8b
AD
1596/**
1597 * evergreen_hpd_sense - hpd sense callback.
1598 *
1599 * @rdev: radeon_device pointer
1600 * @hpd: hpd (hotplug detect) pin
1601 *
1602 * Checks if a digital monitor is connected (evergreen+).
1603 * Returns true if connected, false if not connected.
1604 */
bcc1c2a1
AD
1605bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1606{
1607 bool connected = false;
0ca2ab52
AD
1608
1609 switch (hpd) {
1610 case RADEON_HPD_1:
1611 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1612 connected = true;
1613 break;
1614 case RADEON_HPD_2:
1615 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1616 connected = true;
1617 break;
1618 case RADEON_HPD_3:
1619 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1620 connected = true;
1621 break;
1622 case RADEON_HPD_4:
1623 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1624 connected = true;
1625 break;
1626 case RADEON_HPD_5:
1627 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1628 connected = true;
1629 break;
1630 case RADEON_HPD_6:
1631 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1632 connected = true;
1633 break;
1634 default:
1635 break;
1636 }
1637
bcc1c2a1
AD
1638 return connected;
1639}
1640
377edc8b
AD
1641/**
1642 * evergreen_hpd_set_polarity - hpd set polarity callback.
1643 *
1644 * @rdev: radeon_device pointer
1645 * @hpd: hpd (hotplug detect) pin
1646 *
1647 * Set the polarity of the hpd pin (evergreen+).
1648 */
bcc1c2a1
AD
1649void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1650 enum radeon_hpd_id hpd)
1651{
0ca2ab52
AD
1652 u32 tmp;
1653 bool connected = evergreen_hpd_sense(rdev, hpd);
1654
1655 switch (hpd) {
1656 case RADEON_HPD_1:
1657 tmp = RREG32(DC_HPD1_INT_CONTROL);
1658 if (connected)
1659 tmp &= ~DC_HPDx_INT_POLARITY;
1660 else
1661 tmp |= DC_HPDx_INT_POLARITY;
1662 WREG32(DC_HPD1_INT_CONTROL, tmp);
1663 break;
1664 case RADEON_HPD_2:
1665 tmp = RREG32(DC_HPD2_INT_CONTROL);
1666 if (connected)
1667 tmp &= ~DC_HPDx_INT_POLARITY;
1668 else
1669 tmp |= DC_HPDx_INT_POLARITY;
1670 WREG32(DC_HPD2_INT_CONTROL, tmp);
1671 break;
1672 case RADEON_HPD_3:
1673 tmp = RREG32(DC_HPD3_INT_CONTROL);
1674 if (connected)
1675 tmp &= ~DC_HPDx_INT_POLARITY;
1676 else
1677 tmp |= DC_HPDx_INT_POLARITY;
1678 WREG32(DC_HPD3_INT_CONTROL, tmp);
1679 break;
1680 case RADEON_HPD_4:
1681 tmp = RREG32(DC_HPD4_INT_CONTROL);
1682 if (connected)
1683 tmp &= ~DC_HPDx_INT_POLARITY;
1684 else
1685 tmp |= DC_HPDx_INT_POLARITY;
1686 WREG32(DC_HPD4_INT_CONTROL, tmp);
1687 break;
1688 case RADEON_HPD_5:
1689 tmp = RREG32(DC_HPD5_INT_CONTROL);
1690 if (connected)
1691 tmp &= ~DC_HPDx_INT_POLARITY;
1692 else
1693 tmp |= DC_HPDx_INT_POLARITY;
1694 WREG32(DC_HPD5_INT_CONTROL, tmp);
1695 break;
1696 case RADEON_HPD_6:
1697 tmp = RREG32(DC_HPD6_INT_CONTROL);
1698 if (connected)
1699 tmp &= ~DC_HPDx_INT_POLARITY;
1700 else
1701 tmp |= DC_HPDx_INT_POLARITY;
1702 WREG32(DC_HPD6_INT_CONTROL, tmp);
1703 break;
1704 default:
1705 break;
1706 }
bcc1c2a1
AD
1707}
1708
377edc8b
AD
1709/**
1710 * evergreen_hpd_init - hpd setup callback.
1711 *
1712 * @rdev: radeon_device pointer
1713 *
1714 * Setup the hpd pins used by the card (evergreen+).
1715 * Enable the pin, set the polarity, and enable the hpd interrupts.
1716 */
bcc1c2a1
AD
1717void evergreen_hpd_init(struct radeon_device *rdev)
1718{
0ca2ab52
AD
1719 struct drm_device *dev = rdev->ddev;
1720 struct drm_connector *connector;
fb98257a 1721 unsigned enabled = 0;
0ca2ab52
AD
1722 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1723 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
bcc1c2a1 1724
0ca2ab52
AD
1725 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1726 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
2e97be73
AD
1727
1728 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1729 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1730 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1731 * aux dp channel on imac and help (but not completely fix)
1732 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1733 * also avoid interrupt storms during dpms.
1734 */
1735 continue;
1736 }
0ca2ab52
AD
1737 switch (radeon_connector->hpd.hpd) {
1738 case RADEON_HPD_1:
1739 WREG32(DC_HPD1_CONTROL, tmp);
0ca2ab52
AD
1740 break;
1741 case RADEON_HPD_2:
1742 WREG32(DC_HPD2_CONTROL, tmp);
0ca2ab52
AD
1743 break;
1744 case RADEON_HPD_3:
1745 WREG32(DC_HPD3_CONTROL, tmp);
0ca2ab52
AD
1746 break;
1747 case RADEON_HPD_4:
1748 WREG32(DC_HPD4_CONTROL, tmp);
0ca2ab52
AD
1749 break;
1750 case RADEON_HPD_5:
1751 WREG32(DC_HPD5_CONTROL, tmp);
0ca2ab52
AD
1752 break;
1753 case RADEON_HPD_6:
1754 WREG32(DC_HPD6_CONTROL, tmp);
0ca2ab52
AD
1755 break;
1756 default:
1757 break;
1758 }
64912e99 1759 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
fb98257a 1760 enabled |= 1 << radeon_connector->hpd.hpd;
0ca2ab52 1761 }
fb98257a 1762 radeon_irq_kms_enable_hpd(rdev, enabled);
bcc1c2a1
AD
1763}
1764
377edc8b
AD
1765/**
1766 * evergreen_hpd_fini - hpd tear down callback.
1767 *
1768 * @rdev: radeon_device pointer
1769 *
1770 * Tear down the hpd pins used by the card (evergreen+).
1771 * Disable the hpd interrupts.
1772 */
0ca2ab52 1773void evergreen_hpd_fini(struct radeon_device *rdev)
bcc1c2a1 1774{
0ca2ab52
AD
1775 struct drm_device *dev = rdev->ddev;
1776 struct drm_connector *connector;
fb98257a 1777 unsigned disabled = 0;
0ca2ab52
AD
1778
1779 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1780 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1781 switch (radeon_connector->hpd.hpd) {
1782 case RADEON_HPD_1:
1783 WREG32(DC_HPD1_CONTROL, 0);
0ca2ab52
AD
1784 break;
1785 case RADEON_HPD_2:
1786 WREG32(DC_HPD2_CONTROL, 0);
0ca2ab52
AD
1787 break;
1788 case RADEON_HPD_3:
1789 WREG32(DC_HPD3_CONTROL, 0);
0ca2ab52
AD
1790 break;
1791 case RADEON_HPD_4:
1792 WREG32(DC_HPD4_CONTROL, 0);
0ca2ab52
AD
1793 break;
1794 case RADEON_HPD_5:
1795 WREG32(DC_HPD5_CONTROL, 0);
0ca2ab52
AD
1796 break;
1797 case RADEON_HPD_6:
1798 WREG32(DC_HPD6_CONTROL, 0);
0ca2ab52
AD
1799 break;
1800 default:
1801 break;
1802 }
fb98257a 1803 disabled |= 1 << radeon_connector->hpd.hpd;
0ca2ab52 1804 }
fb98257a 1805 radeon_irq_kms_disable_hpd(rdev, disabled);
bcc1c2a1
AD
1806}
1807
f9d9c362
AD
1808/* watermark setup */
1809
1810static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1811 struct radeon_crtc *radeon_crtc,
1812 struct drm_display_mode *mode,
1813 struct drm_display_mode *other_mode)
1814{
12dfc843 1815 u32 tmp;
f9d9c362
AD
1816 /*
1817 * Line Buffer Setup
1818 * There are 3 line buffers, each one shared by 2 display controllers.
1819 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1820 * the display controllers. The paritioning is done via one of four
1821 * preset allocations specified in bits 2:0:
1822 * first display controller
1823 * 0 - first half of lb (3840 * 2)
1824 * 1 - first 3/4 of lb (5760 * 2)
12dfc843 1825 * 2 - whole lb (7680 * 2), other crtc must be disabled
f9d9c362
AD
1826 * 3 - first 1/4 of lb (1920 * 2)
1827 * second display controller
1828 * 4 - second half of lb (3840 * 2)
1829 * 5 - second 3/4 of lb (5760 * 2)
12dfc843 1830 * 6 - whole lb (7680 * 2), other crtc must be disabled
f9d9c362
AD
1831 * 7 - last 1/4 of lb (1920 * 2)
1832 */
12dfc843
AD
1833 /* this can get tricky if we have two large displays on a paired group
1834 * of crtcs. Ideally for multiple large displays we'd assign them to
1835 * non-linked crtcs for maximum line buffer allocation.
1836 */
1837 if (radeon_crtc->base.enabled && mode) {
1838 if (other_mode)
f9d9c362 1839 tmp = 0; /* 1/2 */
12dfc843
AD
1840 else
1841 tmp = 2; /* whole */
1842 } else
1843 tmp = 0;
f9d9c362
AD
1844
1845 /* second controller of the pair uses second half of the lb */
1846 if (radeon_crtc->crtc_id % 2)
1847 tmp += 4;
1848 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1849
12dfc843
AD
1850 if (radeon_crtc->base.enabled && mode) {
1851 switch (tmp) {
1852 case 0:
1853 case 4:
1854 default:
1855 if (ASIC_IS_DCE5(rdev))
1856 return 4096 * 2;
1857 else
1858 return 3840 * 2;
1859 case 1:
1860 case 5:
1861 if (ASIC_IS_DCE5(rdev))
1862 return 6144 * 2;
1863 else
1864 return 5760 * 2;
1865 case 2:
1866 case 6:
1867 if (ASIC_IS_DCE5(rdev))
1868 return 8192 * 2;
1869 else
1870 return 7680 * 2;
1871 case 3:
1872 case 7:
1873 if (ASIC_IS_DCE5(rdev))
1874 return 2048 * 2;
1875 else
1876 return 1920 * 2;
1877 }
f9d9c362 1878 }
12dfc843
AD
1879
1880 /* controller not enabled, so no lb used */
1881 return 0;
f9d9c362
AD
1882}
1883
ca7db22b 1884u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
f9d9c362
AD
1885{
1886 u32 tmp = RREG32(MC_SHARED_CHMAP);
1887
1888 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1889 case 0:
1890 default:
1891 return 1;
1892 case 1:
1893 return 2;
1894 case 2:
1895 return 4;
1896 case 3:
1897 return 8;
1898 }
1899}
1900
/* Input parameters for the evergreen display watermark calculations.
 * One instance describes a single display head plus the clocks and
 * memory configuration in effect while that head is scanning out. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};
1916
1917static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1918{
1919 /* Calculate DRAM Bandwidth and the part allocated to display. */
1920 fixed20_12 dram_efficiency; /* 0.7 */
1921 fixed20_12 yclk, dram_channels, bandwidth;
1922 fixed20_12 a;
1923
1924 a.full = dfixed_const(1000);
1925 yclk.full = dfixed_const(wm->yclk);
1926 yclk.full = dfixed_div(yclk, a);
1927 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1928 a.full = dfixed_const(10);
1929 dram_efficiency.full = dfixed_const(7);
1930 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1931 bandwidth.full = dfixed_mul(dram_channels, yclk);
1932 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1933
1934 return dfixed_trunc(bandwidth);
1935}
1936
1937static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1938{
1939 /* Calculate DRAM Bandwidth and the part allocated to display. */
1940 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1941 fixed20_12 yclk, dram_channels, bandwidth;
1942 fixed20_12 a;
1943
1944 a.full = dfixed_const(1000);
1945 yclk.full = dfixed_const(wm->yclk);
1946 yclk.full = dfixed_div(yclk, a);
1947 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1948 a.full = dfixed_const(10);
1949 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1950 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1951 bandwidth.full = dfixed_mul(dram_channels, yclk);
1952 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1953
1954 return dfixed_trunc(bandwidth);
1955}
1956
1957static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1958{
1959 /* Calculate the display Data return Bandwidth */
1960 fixed20_12 return_efficiency; /* 0.8 */
1961 fixed20_12 sclk, bandwidth;
1962 fixed20_12 a;
1963
1964 a.full = dfixed_const(1000);
1965 sclk.full = dfixed_const(wm->sclk);
1966 sclk.full = dfixed_div(sclk, a);
1967 a.full = dfixed_const(10);
1968 return_efficiency.full = dfixed_const(8);
1969 return_efficiency.full = dfixed_div(return_efficiency, a);
1970 a.full = dfixed_const(32);
1971 bandwidth.full = dfixed_mul(a, sclk);
1972 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1973
1974 return dfixed_trunc(bandwidth);
1975}
1976
1977static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1978{
1979 /* Calculate the DMIF Request Bandwidth */
1980 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1981 fixed20_12 disp_clk, bandwidth;
1982 fixed20_12 a;
1983
1984 a.full = dfixed_const(1000);
1985 disp_clk.full = dfixed_const(wm->disp_clk);
1986 disp_clk.full = dfixed_div(disp_clk, a);
1987 a.full = dfixed_const(10);
1988 disp_clk_request_efficiency.full = dfixed_const(8);
1989 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1990 a.full = dfixed_const(32);
1991 bandwidth.full = dfixed_mul(a, disp_clk);
1992 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1993
1994 return dfixed_trunc(bandwidth);
1995}
1996
1997static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1998{
1999 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2000 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2001 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2002 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2003
2004 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2005}
2006
2007static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2008{
2009 /* Calculate the display mode Average Bandwidth
2010 * DisplayMode should contain the source and destination dimensions,
2011 * timing, etc.
2012 */
2013 fixed20_12 bpp;
2014 fixed20_12 line_time;
2015 fixed20_12 src_width;
2016 fixed20_12 bandwidth;
2017 fixed20_12 a;
2018
2019 a.full = dfixed_const(1000);
2020 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2021 line_time.full = dfixed_div(line_time, a);
2022 bpp.full = dfixed_const(wm->bytes_per_pixel);
2023 src_width.full = dfixed_const(wm->src_width);
2024 bandwidth.full = dfixed_mul(src_width, bpp);
2025 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2026 bandwidth.full = dfixed_div(bandwidth, line_time);
2027
2028 return dfixed_trunc(bandwidth);
2029}
2030
/* Compute the latency watermark for one head: the worst-case time (ns)
 * the display may have to wait for its data, including MC latency,
 * chunk returns for the other heads, DC pipe latency, and any extra
 * time needed to fill a line when the line buffer fill rate is slower
 * than the active scanout time. */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* worst case: one 512-byte*8 chunk at the available bandwidth */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time consumed by the other heads' data and cursor fetches */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	/* no active heads -> nothing to wait for */
	if (wm->num_heads == 0)
		return 0;

	/* downscaling / multi-tap / interlaced modes fetch up to 4 source
	 * lines per destination line, otherwise 2 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* line buffer fill rate: min of this head's bandwidth share and
	 * what the display clock can consume (bytes_per_pixel per cycle) */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time (ns) to fetch the source lines for one destination line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line can't be filled within the active time, the excess
	 * adds to the watermark */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2083
2084static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2085{
2086 if (evergreen_average_bandwidth(wm) <=
2087 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2088 return true;
2089 else
2090 return false;
2091};
2092
2093static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2094{
2095 if (evergreen_average_bandwidth(wm) <=
2096 (evergreen_available_bandwidth(wm) / wm->num_heads))
2097 return true;
2098 else
2099 return false;
2100};
2101
2102static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2103{
2104 u32 lb_partitions = wm->lb_size / wm->src_width;
2105 u32 line_time = wm->active_time + wm->blank_time;
2106 u32 latency_tolerant_lines;
2107 u32 latency_hiding;
2108 fixed20_12 a;
2109
2110 a.full = dfixed_const(1);
2111 if (wm->vsc.full > a.full)
2112 latency_tolerant_lines = 1;
2113 else {
2114 if (lb_partitions <= (wm->vtaps + 1))
2115 latency_tolerant_lines = 1;
2116 else
2117 latency_tolerant_lines = 2;
2118 }
2119
2120 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2121
2122 if (evergreen_latency_watermark(wm) <= latency_hiding)
2123 return true;
2124 else
2125 return false;
2126}
2127
/* Program the arbitration latency watermarks and priority marks for a
 * single crtc.  Two watermark sets are computed: "A" for high clocks
 * and "B" for low clocks (under DPM the low/high engine and memory
 * clocks may differ); both are written so the hardware can switch
 * between them with the power state. */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		/* line time in ns, clamped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark (ns) * pixel clock * hsc,
		 * converted to units of 16 pixels */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same computation with watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2282
377edc8b
AD
/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).  Crtcs come in pairs that share a line buffer, so the
 * line buffer split and watermarks are programmed two crtcs at a time.
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	/* count the active heads; the watermark math divides shared
	 * bandwidth between them */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* walk the paired crtcs; each pair shares a line buffer whose
	 * split depends on both members' modes
	 * (assumes num_crtc is even — true for all evergreen parts) */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2313
377edc8b
AD
2314/**
2315 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2316 *
2317 * @rdev: radeon_device pointer
2318 *
2319 * Wait for the MC (memory controller) to be idle.
2320 * (evergreen+).
2321 * Returns 0 if the MC is idle, -1 if not.
2322 */
b9952a8a 2323int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
bcc1c2a1
AD
2324{
2325 unsigned i;
2326 u32 tmp;
2327
2328 for (i = 0; i < rdev->usec_timeout; i++) {
2329 /* read MC_STATUS */
2330 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2331 if (!tmp)
2332 return 0;
2333 udelay(1);
2334 }
2335 return -1;
2336}
2337
2338/*
2339 * GART
2340 */
0fcdb61e
AD
/* Flush the GART TLB for VM context 0 and wait for the hardware to
 * acknowledge the request via the response field. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache first so the GPU sees up-to-date page
	 * table entries */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* request a context0 TLB flush, then poll the response type */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 indicates the flush failed */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response means done */
			return;
		}
		udelay(1);
	}
}
2363
/* Enable the PCIE GART: pin the page table in VRAM, configure the VM
 * L1/L2 caches and TLBs, point VM context 0 at the GTT range, and
 * flush the TLB.
 * Returns 0 on success, negative error code on failure. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use the FUS_ register block */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map VM context 0 over the GTT range, backed by the pinned
	 * page table; unmapped accesses fault to the dummy page */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2422
/* Disable the PCIE GART: turn off both VM contexts, drop the L2 cache
 * and TLBs back to minimal settings, and unpin the page table. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (caching disabled, fragment processing kept) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (L1 TLB disabled) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2447
/* Tear down the PCIE GART: disable the hardware, free the VRAM page
 * table, then release the GART bookkeeping.  Order matters — the
 * hardware must stop referencing the table before it is freed. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2454
2455
/* Configure the VM caches/TLBs for AGP operation: system accesses pass
 * through (no GART translation), and both VM contexts stay disabled. */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page-table translation in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2481
/* Quiesce the display hardware before reprogramming the memory
 * controller: disable VGA rendering, blank/disable every active crtc,
 * blackout the MC, and lock the double-buffered update registers.
 * State needed by evergreen_mc_resume() is stashed in @save. */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6 blanks via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6 disables display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): marking the crtc disabled here means the
			 * lock/unlock loops below and the re-enable path in
			 * evergreen_mc_resume() skip it — appears intentional as
			 * part of the EFI workaround; confirm before changing */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2567
/* Undo evergreen_mc_stop(): repoint the scanout surfaces at the (new)
 * VRAM base, unlock the double-buffered registers, lift the MC
 * blackout, re-enable the crtcs recorded in @save, and restore VGA. */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the surface address update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank the crtc */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2655
/* Program the memory controller aperture layout (VRAM / AGP / system
 * aperture ranges) with the displays stopped, then bring them back. */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* the MC must be idle while its apertures are moved */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and AGP, whichever
		 * order they sit in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
		       rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
		       rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end in 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2732
bcc1c2a1
AD
2733/*
2734 * CP.
2735 */
12920591
AD
2736void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2737{
876dc9f3 2738 struct radeon_ring *ring = &rdev->ring[ib->ring];
89d35807 2739 u32 next_rptr;
7b1f2485 2740
12920591 2741 /* set to DX10/11 mode */
e32eb50d
CK
2742 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2743 radeon_ring_write(ring, 1);
45df6803
CK
2744
2745 if (ring->rptr_save_reg) {
89d35807 2746 next_rptr = ring->wptr + 3 + 4;
45df6803
CK
2747 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2748 radeon_ring_write(ring, ((ring->rptr_save_reg -
2749 PACKET3_SET_CONFIG_REG_START) >> 2));
2750 radeon_ring_write(ring, next_rptr);
89d35807
AD
2751 } else if (rdev->wb.enabled) {
2752 next_rptr = ring->wptr + 5 + 4;
2753 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2754 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2755 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2756 radeon_ring_write(ring, next_rptr);
2757 radeon_ring_write(ring, 0);
45df6803
CK
2758 }
2759
e32eb50d
CK
2760 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2761 radeon_ring_write(ring,
0f234f5f
AD
2762#ifdef __BIG_ENDIAN
2763 (2 << 0) |
2764#endif
2765 (ib->gpu_addr & 0xFFFFFFFC));
e32eb50d
CK
2766 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2767 radeon_ring_write(ring, ib->length_dw);
12920591
AD
2768}
2769
bcc1c2a1
AD
2770
/**
 * evergreen_cp_load_microcode - upload PFP and ME firmware to the CP
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then streams the big-endian firmware images into the
 * PFP ucode RAM and the ME RAM word by word.  Both write-address
 * registers are reset to 0 before and after the upload so execution
 * starts at the beginning of the ucode.
 *
 * Returns 0 on success, -EINVAL if the firmware blobs were not loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	/* disable rptr writeback while the CP is stopped */
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP (prefetch parser) ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME (micro engine) ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2802
7e7b41d2
AD
/**
 * evergreen_cp_start - initialize the CP and emit the default GPU state
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, un-halts the micro engine, and then
 * writes the clear-state preamble plus the evergreen_default_state
 * register dump so the 3D engine starts from a known configuration.
 * The exact dword sequence below is the PM4 protocol and must not be
 * reordered.
 *
 * Returns 0 on success, a negative error code if a ring lock fails.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state dump + 19 dwords of packets emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2868
1109ca09 2869static int evergreen_cp_resume(struct radeon_device *rdev)
fe251e2f 2870{
e32eb50d 2871 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
fe251e2f
AD
2872 u32 tmp;
2873 u32 rb_bufsz;
2874 int r;
2875
2876 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2877 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2878 SOFT_RESET_PA |
2879 SOFT_RESET_SH |
2880 SOFT_RESET_VGT |
a49a50da 2881 SOFT_RESET_SPI |
fe251e2f
AD
2882 SOFT_RESET_SX));
2883 RREG32(GRBM_SOFT_RESET);
2884 mdelay(15);
2885 WREG32(GRBM_SOFT_RESET, 0);
2886 RREG32(GRBM_SOFT_RESET);
2887
2888 /* Set ring buffer size */
e32eb50d 2889 rb_bufsz = drm_order(ring->ring_size / 8);
724c80e1 2890 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
fe251e2f
AD
2891#ifdef __BIG_ENDIAN
2892 tmp |= BUF_SWAP_32BIT;
32fcdbf4 2893#endif
fe251e2f 2894 WREG32(CP_RB_CNTL, tmp);
15d3332f 2895 WREG32(CP_SEM_WAIT_TIMER, 0x0);
11ef3f1f 2896 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
fe251e2f
AD
2897
2898 /* Set the write pointer delay */
2899 WREG32(CP_RB_WPTR_DELAY, 0);
2900
2901 /* Initialize the ring buffer's read and write pointers */
2902 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2903 WREG32(CP_RB_RPTR_WR, 0);
e32eb50d
CK
2904 ring->wptr = 0;
2905 WREG32(CP_RB_WPTR, ring->wptr);
724c80e1 2906
48fc7f7e 2907 /* set the wb address whether it's enabled or not */
0f234f5f 2908 WREG32(CP_RB_RPTR_ADDR,
0f234f5f 2909 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
724c80e1
AD
2910 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2911 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2912
2913 if (rdev->wb.enabled)
2914 WREG32(SCRATCH_UMSK, 0xff);
2915 else {
2916 tmp |= RB_NO_UPDATE;
2917 WREG32(SCRATCH_UMSK, 0);
2918 }
2919
fe251e2f
AD
2920 mdelay(1);
2921 WREG32(CP_RB_CNTL, tmp);
2922
e32eb50d 2923 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
fe251e2f
AD
2924 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2925
e32eb50d 2926 ring->rptr = RREG32(CP_RB_RPTR);
fe251e2f 2927
7e7b41d2 2928 evergreen_cp_start(rdev);
e32eb50d 2929 ring->ready = true;
f712812e 2930 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
fe251e2f 2931 if (r) {
e32eb50d 2932 ring->ready = false;
fe251e2f
AD
2933 return r;
2934 }
2935 return 0;
2936}
bcc1c2a1
AD
2937
2938/*
2939 * Core functions
2940 */
bcc1c2a1
AD
/**
 * evergreen_gpu_init - one-time setup of the evergreen/NI 3D engine
 * @rdev: radeon_device pointer
 *
 * Fills in rdev->config.evergreen from per-family constants, derives the
 * tiling configuration and the render-backend (RB) map from straps /
 * fuse registers, and programs the HW defaults for the 3D engine (SQ
 * GPR/thread/stack partitioning, FIFO sizes, caches, CB bases, etc.).
 * The register programming order below follows the hardware bring-up
 * sequence and must not be reordered.
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, num_shader_engines, ps_thread_count;

	/* per-family shader/raster capability constants */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SIMD count varies by PCI device id within the SUMO family */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	/* NOTE(review): mc_shared_chmap is read but never used below */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	/* fusion (IGP) parts keep the MC arbiter config in a FUS_ register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* NOTE(review): '>>' binds tighter than '&', so this evaluates as
	 * gb_addr_config & (NUM_SHADER_ENGINES(3) >> 12); the intent was
	 * probably (gb_addr_config & NUM_SHADER_ENGINES(3)) >> 12.  The
	 * result is never used below, so behavior is unaffected. */
	num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;

	/* determine the disabled render backends: from efuse straps on
	 * discrete evergreen, from CC_RB_BACKEND_DISABLE per SE otherwise */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* restore broadcast addressing after the per-SE reads above */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	/* evergreen-only register; not present on NI parts */
	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader-type arbitration priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* split the GPR file (minus 4*2 clause temps) across shader types:
	 * PS 12/32, VS 6/32, GS+ES 4/32 each, HS+LS 3/32 each */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads split evenly (in multiples of 8) over the
	 * five non-PS shader types */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);

}
3504
/**
 * evergreen_mc_init - probe VRAM configuration
 * @rdev: radeon_device pointer
 *
 * Reads memory-controller straps to determine channel size and channel
 * count (and thus the VRAM bus width), reads the VRAM size from
 * CONFIG_MEMSIZE (bytes on fusion/IGP parts, megabytes on discrete),
 * records the PCI aperture, and lets r700_vram_gtt_location() place
 * VRAM and GTT in the GPU address space.
 *
 * Returns 0 (this probe cannot fail).
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* fusion (IGP) parts keep the MC arbiter config in a FUS_ register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
d594e46a 3563
187e3593 3564void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
bcc1c2a1 3565{
64c56e8c 3566 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
747943ea 3567 RREG32(GRBM_STATUS));
64c56e8c 3568 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
747943ea 3569 RREG32(GRBM_STATUS_SE0));
64c56e8c 3570 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
747943ea 3571 RREG32(GRBM_STATUS_SE1));
64c56e8c 3572 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
747943ea 3573 RREG32(SRBM_STATUS));
a65a4369
AD
3574 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3575 RREG32(SRBM_STATUS2));
440a7cd8
JG
3576 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3577 RREG32(CP_STALLED_STAT1));
3578 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3579 RREG32(CP_STALLED_STAT2));
3580 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3581 RREG32(CP_BUSY_STAT));
3582 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3583 RREG32(CP_STAT));
eaaa6983
JG
3584 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3585 RREG32(DMA_STATUS_REG));
168757ea
AD
3586 if (rdev->family >= CHIP_CAYMAN) {
3587 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3588 RREG32(DMA_STATUS_REG + 0x800));
3589 }
0ecebb9e
AD
3590}
3591
168757ea 3592bool evergreen_is_display_hung(struct radeon_device *rdev)
0ecebb9e 3593{
a65a4369
AD
3594 u32 crtc_hung = 0;
3595 u32 crtc_status[6];
3596 u32 i, j, tmp;
3597
3598 for (i = 0; i < rdev->num_crtc; i++) {
3599 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3600 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3601 crtc_hung |= (1 << i);
3602 }
3603 }
3604
3605 for (j = 0; j < 10; j++) {
3606 for (i = 0; i < rdev->num_crtc; i++) {
3607 if (crtc_hung & (1 << i)) {
3608 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3609 if (tmp != crtc_status[i])
3610 crtc_hung &= ~(1 << i);
3611 }
3612 }
3613 if (crtc_hung == 0)
3614 return false;
3615 udelay(100);
3616 }
3617
3618 return true;
3619}
3620
2483b4ea 3621u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
a65a4369
AD
3622{
3623 u32 reset_mask = 0;
b7630473 3624 u32 tmp;
0ecebb9e 3625
a65a4369
AD
3626 /* GRBM_STATUS */
3627 tmp = RREG32(GRBM_STATUS);
3628 if (tmp & (PA_BUSY | SC_BUSY |
3629 SH_BUSY | SX_BUSY |
3630 TA_BUSY | VGT_BUSY |
3631 DB_BUSY | CB_BUSY |
3632 SPI_BUSY | VGT_BUSY_NO_DMA))
3633 reset_mask |= RADEON_RESET_GFX;
3634
3635 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3636 CP_BUSY | CP_COHERENCY_BUSY))
3637 reset_mask |= RADEON_RESET_CP;
3638
3639 if (tmp & GRBM_EE_BUSY)
3640 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
19fc42ed 3641
a65a4369
AD
3642 /* DMA_STATUS_REG */
3643 tmp = RREG32(DMA_STATUS_REG);
3644 if (!(tmp & DMA_IDLE))
3645 reset_mask |= RADEON_RESET_DMA;
3646
3647 /* SRBM_STATUS2 */
3648 tmp = RREG32(SRBM_STATUS2);
3649 if (tmp & DMA_BUSY)
3650 reset_mask |= RADEON_RESET_DMA;
3651
3652 /* SRBM_STATUS */
3653 tmp = RREG32(SRBM_STATUS);
3654 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3655 reset_mask |= RADEON_RESET_RLC;
3656
3657 if (tmp & IH_BUSY)
3658 reset_mask |= RADEON_RESET_IH;
3659
3660 if (tmp & SEM_BUSY)
3661 reset_mask |= RADEON_RESET_SEM;
3662
3663 if (tmp & GRBM_RQ_PENDING)
3664 reset_mask |= RADEON_RESET_GRBM;
3665
3666 if (tmp & VMC_BUSY)
3667 reset_mask |= RADEON_RESET_VMC;
3668
3669 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3670 MCC_BUSY | MCD_BUSY))
3671 reset_mask |= RADEON_RESET_MC;
3672
3673 if (evergreen_is_display_hung(rdev))
3674 reset_mask |= RADEON_RESET_DISPLAY;
3675
3676 /* VM_L2_STATUS */
3677 tmp = RREG32(VM_L2_STATUS);
3678 if (tmp & L2_BUSY)
3679 reset_mask |= RADEON_RESET_VMC;
3680
d808fc88
AD
3681 /* Skip MC reset as it's mostly likely not hung, just busy */
3682 if (reset_mask & RADEON_RESET_MC) {
3683 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3684 reset_mask &= ~RADEON_RESET_MC;
3685 }
3686
a65a4369
AD
3687 return reset_mask;
3688}
3689
/**
 * evergreen_gpu_soft_reset - soft reset the blocks selected by reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags (as produced by
 *              evergreen_gpu_check_soft_reset())
 *
 * Halts the CP (and, if requested, the DMA ring), quiesces the memory
 * controller, pulses the GRBM/SRBM soft-reset lines for the selected
 * blocks, and restores the MC.  The statement ordering and the udelay()
 * pauses between steps are part of the hardware reset protocol — do not
 * reorder.  No-op when @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the MC so VRAM traffic is quiescent across the reset */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGP parts never reset the MC (it is shared with the CPU) */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back presumably posts the write before the delay —
		 * NOTE(review): pattern kept as-is, confirm against HW docs */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		/* de-assert the reset lines */
		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3803
a2d07b74 3804int evergreen_asic_reset(struct radeon_device *rdev)
bcc1c2a1 3805{
a65a4369
AD
3806 u32 reset_mask;
3807
3808 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3809
3810 if (reset_mask)
3811 r600_set_bios_scratch_engine_hung(rdev, true);
3812
3813 evergreen_gpu_soft_reset(rdev, reset_mask);
3814
3815 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3816
3817 if (!reset_mask)
3818 r600_set_bios_scratch_engine_hung(rdev, false);
3819
3820 return 0;
747943ea
AD
3821}
3822
123bc183
AD
3823/**
3824 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3825 *
3826 * @rdev: radeon_device pointer
3827 * @ring: radeon_ring structure holding ring information
3828 *
3829 * Check if the GFX engine is locked up.
3830 * Returns true if the engine appears to be locked up, false if not.
3831 */
3832bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3833{
3834 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3835
3836 if (!(reset_mask & (RADEON_RESET_GFX |
3837 RADEON_RESET_COMPUTE |
3838 RADEON_RESET_CP))) {
3839 radeon_ring_lockup_update(ring);
3840 return false;
3841 }
3842 /* force CP activities */
3843 radeon_ring_force_activity(rdev, ring);
3844 return radeon_ring_test_lockup(rdev, ring);
3845}
3846
2948f5e6
AD
3847/*
3848 * RLC
3849 */
3850#define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
3851#define RLC_CLEAR_STATE_END_MARKER 0x00000001
3852
3853void sumo_rlc_fini(struct radeon_device *rdev)
3854{
3855 int r;
3856
3857 /* save restore block */
3858 if (rdev->rlc.save_restore_obj) {
3859 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3860 if (unlikely(r != 0))
3861 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3862 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3863 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3864
3865 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3866 rdev->rlc.save_restore_obj = NULL;
3867 }
3868
3869 /* clear state block */
3870 if (rdev->rlc.clear_state_obj) {
3871 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3872 if (unlikely(r != 0))
3873 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3874 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3875 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3876
3877 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3878 rdev->rlc.clear_state_obj = NULL;
3879 }
22c775ce
AD
3880
3881 /* clear state block */
3882 if (rdev->rlc.cp_table_obj) {
3883 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3884 if (unlikely(r != 0))
3885 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3886 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3887 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
3888
3889 radeon_bo_unref(&rdev->rlc.cp_table_obj);
3890 rdev->rlc.cp_table_obj = NULL;
3891 }
2948f5e6
AD
3892}
3893
22c775ce
AD
3894#define CP_ME_TABLE_SIZE 96
3895
2948f5e6
AD
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and populates up to three VRAM buffer objects used by
 * the RLC: the register save/restore list (when rdev->rlc.reg_list is
 * set), the clear state block (when rdev->rlc.cs_data is set) and the
 * CP power-gating table (when rdev->rlc.cp_table_size is non-zero).
 * The buffer layouts differ per asic generation (BONAIRE+/TAHITI+/older).
 * On any failure all partially created objects are torn down via
 * sumo_rlc_fini().
 * Returns 0 on success, negative error code on failure.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* extra dwords reserved on CIK-family parts —
		 * NOTE(review): constants taken on trust from the CIK code */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: the register list is copied verbatim */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = src_ptr[i];
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* pack two register offsets (in dword units, hence the
			 * >> 2) per header dword; each pair is followed by two
			 * save-space slots, giving the (i * 3) / 2 indexing */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = data;
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header (address + size) */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* older parts: size the buffer by walking cs_data;
			 * 3 header dwords per extent plus 2 global dwords
			 * (upper address + end marker), then the registers */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address of the CSB payload + its size */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
			dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
			dst_ptr[2] = rdev->rlc.clear_state_size;
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* older parts: emit the header block (one 3-dword
			 * entry per extent) followed by the register payload
			 * starting at reg_list_blk_index */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = data;
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* lower 32 bits of the payload address */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					/* register offset in bytes */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					/* 0x08000000 flag + extent size in bytes —
					 * NOTE(review): flag semantics not visible here */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = data;
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table, filled in by the CIK code */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4108
4109static void evergreen_rlc_start(struct radeon_device *rdev)
4110{
8ba10463
AD
4111 u32 mask = RLC_ENABLE;
4112
4113 if (rdev->flags & RADEON_IS_IGP) {
4114 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
8ba10463
AD
4115 }
4116
4117 WREG32(RLC_CNTL, mask);
2948f5e6
AD
4118}
4119
4120int evergreen_rlc_resume(struct radeon_device *rdev)
4121{
4122 u32 i;
4123 const __be32 *fw_data;
4124
4125 if (!rdev->rlc_fw)
4126 return -EINVAL;
4127
4128 r600_rlc_stop(rdev);
4129
4130 WREG32(RLC_HB_CNTL, 0);
4131
4132 if (rdev->flags & RADEON_IS_IGP) {
8ba10463
AD
4133 if (rdev->family == CHIP_ARUBA) {
4134 u32 always_on_bitmap =
4135 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4136 /* find out the number of active simds */
4137 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4138 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4139 tmp = hweight32(~tmp);
4140 if (tmp == rdev->config.cayman.max_simds_per_se) {
4141 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4142 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4143 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4144 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4145 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4146 }
4147 } else {
4148 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4149 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4150 }
2948f5e6
AD
4151 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4152 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4153 } else {
4154 WREG32(RLC_HB_BASE, 0);
4155 WREG32(RLC_HB_RPTR, 0);
4156 WREG32(RLC_HB_WPTR, 0);
8ba10463
AD
4157 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4158 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
2948f5e6 4159 }
2948f5e6
AD
4160 WREG32(RLC_MC_CNTL, 0);
4161 WREG32(RLC_UCODE_CNTL, 0);
4162
4163 fw_data = (const __be32 *)rdev->rlc_fw->data;
4164 if (rdev->family >= CHIP_ARUBA) {
4165 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4166 WREG32(RLC_UCODE_ADDR, i);
4167 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4168 }
4169 } else if (rdev->family >= CHIP_CAYMAN) {
4170 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4171 WREG32(RLC_UCODE_ADDR, i);
4172 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4173 }
4174 } else {
4175 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4176 WREG32(RLC_UCODE_ADDR, i);
4177 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4178 }
4179 }
4180 WREG32(RLC_UCODE_ADDR, 0);
4181
4182 evergreen_rlc_start(rdev);
4183
4184 return 0;
4185}
4186
45f9a39b
AD
4187/* Interrupts */
4188
4189u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4190{
46437057 4191 if (crtc >= rdev->num_crtc)
45f9a39b 4192 return 0;
46437057
AD
4193 else
4194 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
45f9a39b
AD
4195}
4196
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Clears the interrupt enable bits in the CP/DMA, GRBM, per-CRTC,
 * DAC auto-detect and HPD control registers so no source can raise an
 * interrupt.  The HPD polarity bits are preserved while the enables are
 * cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* the first two CRTCs always exist; 4 and 6 depend on the asic */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep the HPD polarity bits, drop all enable bits */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4254
/**
 * evergreen_irq_set - program the interrupt enables from the irq state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the enable masks for the CP/DMA rings, vblank/pageflip, HPD,
 * AFMT (HDMI audio) and thermal interrupt sources from rdev->irq and
 * writes them to the hardware in one pass.  When the IH ring is disabled
 * everything is forced off instead.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read-modify-write: start from the current enables with the
	 * controllable bits cleared, then OR bits back in below */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* the thermal interrupt lives in a different register on ARUBA */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman and newer */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank is enabled either for the drm vblank counter or for a
	 * pending page flip on the crtc */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks assembled — now write them out */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4476
cbdd4501 4477static void evergreen_irq_ack(struct radeon_device *rdev)
45f9a39b
AD
4478{
4479 u32 tmp;
4480
6f34be50
AD
4481 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4482 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4483 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4484 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4485 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4486 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4487 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4488 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
b7eff394
AD
4489 if (rdev->num_crtc >= 4) {
4490 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4491 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4492 }
4493 if (rdev->num_crtc >= 6) {
4494 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4495 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4496 }
6f34be50 4497
f122c610
AD
4498 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4499 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4500 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4501 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4502 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4503 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4504
6f34be50
AD
4505 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4506 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4507 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4508 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6f34be50 4509 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
45f9a39b 4510 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
6f34be50 4511 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
45f9a39b 4512 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
6f34be50 4513 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
45f9a39b 4514 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
6f34be50 4515 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
45f9a39b
AD
4516 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4517
b7eff394
AD
4518 if (rdev->num_crtc >= 4) {
4519 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4520 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4521 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4522 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4523 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4524 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4525 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4526 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4527 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4528 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4529 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4530 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4531 }
4532
4533 if (rdev->num_crtc >= 6) {
4534 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4535 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4536 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4537 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4538 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4539 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4540 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4541 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4542 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4543 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4544 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4545 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4546 }
45f9a39b 4547
6f34be50 4548 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
45f9a39b
AD
4549 tmp = RREG32(DC_HPD1_INT_CONTROL);
4550 tmp |= DC_HPDx_INT_ACK;
4551 WREG32(DC_HPD1_INT_CONTROL, tmp);
4552 }
6f34be50 4553 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
45f9a39b
AD
4554 tmp = RREG32(DC_HPD2_INT_CONTROL);
4555 tmp |= DC_HPDx_INT_ACK;
4556 WREG32(DC_HPD2_INT_CONTROL, tmp);
4557 }
6f34be50 4558 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
45f9a39b
AD
4559 tmp = RREG32(DC_HPD3_INT_CONTROL);
4560 tmp |= DC_HPDx_INT_ACK;
4561 WREG32(DC_HPD3_INT_CONTROL, tmp);
4562 }
6f34be50 4563 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
45f9a39b
AD
4564 tmp = RREG32(DC_HPD4_INT_CONTROL);
4565 tmp |= DC_HPDx_INT_ACK;
4566 WREG32(DC_HPD4_INT_CONTROL, tmp);
4567 }
6f34be50 4568 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
45f9a39b
AD
4569 tmp = RREG32(DC_HPD5_INT_CONTROL);
4570 tmp |= DC_HPDx_INT_ACK;
4571 WREG32(DC_HPD5_INT_CONTROL, tmp);
4572 }
6f34be50 4573 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
45f9a39b
AD
4574 tmp = RREG32(DC_HPD5_INT_CONTROL);
4575 tmp |= DC_HPDx_INT_ACK;
4576 WREG32(DC_HPD6_INT_CONTROL, tmp);
4577 }
f122c610
AD
4578 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4579 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4580 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4581 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4582 }
4583 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4584 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4585 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4586 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4587 }
4588 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4589 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4590 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4591 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4592 }
4593 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4594 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4595 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4596 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4597 }
4598 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4599 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4600 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4601 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4602 }
4603 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4604 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4605 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4606 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4607 }
45f9a39b
AD
4608}
4609
/**
 * evergreen_irq_disable - disable interrupts and drain pending ones
 * @rdev: radeon_device pointer
 *
 * Order matters: first mask interrupts at the controller, wait for any
 * in-flight interrupt to land, ack whatever is still pending, then
 * program the per-source disable state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4618
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 * @rdev: radeon_device pointer
 *
 * Disables all interrupt sources and stops the RLC, in that order,
 * so no interrupt can fire while the RLC is being halted.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4624
cbdd4501 4625static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
45f9a39b
AD
4626{
4627 u32 wptr, tmp;
4628
724c80e1 4629 if (rdev->wb.enabled)
204ae24d 4630 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
724c80e1
AD
4631 else
4632 wptr = RREG32(IH_RB_WPTR);
45f9a39b
AD
4633
4634 if (wptr & RB_OVERFLOW) {
4635 /* When a ring buffer overflow happen start parsing interrupt
4636 * from the last not overwritten vector (wptr + 16). Hopefully
4637 * this should allow us to catchup.
4638 */
4639 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4640 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4641 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4642 tmp = RREG32(IH_RB_CNTL);
4643 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4644 WREG32(IH_RB_CNTL, tmp);
4645 }
4646 return (wptr & rdev->ih.ptr_mask);
4647}
4648
4649int evergreen_irq_process(struct radeon_device *rdev)
4650{
682f1a54
DA
4651 u32 wptr;
4652 u32 rptr;
45f9a39b
AD
4653 u32 src_id, src_data;
4654 u32 ring_index;
45f9a39b 4655 bool queue_hotplug = false;
f122c610 4656 bool queue_hdmi = false;
dc50ba7f 4657 bool queue_thermal = false;
54e2e49c 4658 u32 status, addr;
45f9a39b 4659
682f1a54 4660 if (!rdev->ih.enabled || rdev->shutdown)
45f9a39b
AD
4661 return IRQ_NONE;
4662
682f1a54 4663 wptr = evergreen_get_ih_wptr(rdev);
c20dc369
CK
4664
4665restart_ih:
4666 /* is somebody else already processing irqs? */
4667 if (atomic_xchg(&rdev->ih.lock, 1))
4668 return IRQ_NONE;
4669
682f1a54
DA
4670 rptr = rdev->ih.rptr;
4671 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
45f9a39b 4672
964f6645
BH
4673 /* Order reading of wptr vs. reading of IH ring data */
4674 rmb();
4675
45f9a39b 4676 /* display interrupts */
6f34be50 4677 evergreen_irq_ack(rdev);
45f9a39b 4678
45f9a39b
AD
4679 while (rptr != wptr) {
4680 /* wptr/rptr are in bytes! */
4681 ring_index = rptr / 4;
0f234f5f
AD
4682 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4683 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
45f9a39b
AD
4684
4685 switch (src_id) {
4686 case 1: /* D1 vblank/vline */
4687 switch (src_data) {
4688 case 0: /* D1 vblank */
6f34be50 4689 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
6f34be50
AD
4690 if (rdev->irq.crtc_vblank_int[0]) {
4691 drm_handle_vblank(rdev->ddev, 0);
4692 rdev->pm.vblank_sync = true;
4693 wake_up(&rdev->irq.vblank_queue);
4694 }
736fc37f 4695 if (atomic_read(&rdev->irq.pflip[0]))
3e4ea742 4696 radeon_crtc_handle_flip(rdev, 0);
6f34be50 4697 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
45f9a39b
AD
4698 DRM_DEBUG("IH: D1 vblank\n");
4699 }
4700 break;
4701 case 1: /* D1 vline */
6f34be50
AD
4702 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4703 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
45f9a39b
AD
4704 DRM_DEBUG("IH: D1 vline\n");
4705 }
4706 break;
4707 default:
4708 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4709 break;
4710 }
4711 break;
4712 case 2: /* D2 vblank/vline */
4713 switch (src_data) {
4714 case 0: /* D2 vblank */
6f34be50 4715 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
6f34be50
AD
4716 if (rdev->irq.crtc_vblank_int[1]) {
4717 drm_handle_vblank(rdev->ddev, 1);
4718 rdev->pm.vblank_sync = true;
4719 wake_up(&rdev->irq.vblank_queue);
4720 }
736fc37f 4721 if (atomic_read(&rdev->irq.pflip[1]))
3e4ea742 4722 radeon_crtc_handle_flip(rdev, 1);
6f34be50 4723 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
45f9a39b
AD
4724 DRM_DEBUG("IH: D2 vblank\n");
4725 }
4726 break;
4727 case 1: /* D2 vline */
6f34be50
AD
4728 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4729 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
45f9a39b
AD
4730 DRM_DEBUG("IH: D2 vline\n");
4731 }
4732 break;
4733 default:
4734 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4735 break;
4736 }
4737 break;
4738 case 3: /* D3 vblank/vline */
4739 switch (src_data) {
4740 case 0: /* D3 vblank */
6f34be50
AD
4741 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4742 if (rdev->irq.crtc_vblank_int[2]) {
4743 drm_handle_vblank(rdev->ddev, 2);
4744 rdev->pm.vblank_sync = true;
4745 wake_up(&rdev->irq.vblank_queue);
4746 }
736fc37f 4747 if (atomic_read(&rdev->irq.pflip[2]))
6f34be50
AD
4748 radeon_crtc_handle_flip(rdev, 2);
4749 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
45f9a39b
AD
4750 DRM_DEBUG("IH: D3 vblank\n");
4751 }
4752 break;
4753 case 1: /* D3 vline */
6f34be50
AD
4754 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4755 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
45f9a39b
AD
4756 DRM_DEBUG("IH: D3 vline\n");
4757 }
4758 break;
4759 default:
4760 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4761 break;
4762 }
4763 break;
4764 case 4: /* D4 vblank/vline */
4765 switch (src_data) {
4766 case 0: /* D4 vblank */
6f34be50
AD
4767 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4768 if (rdev->irq.crtc_vblank_int[3]) {
4769 drm_handle_vblank(rdev->ddev, 3);
4770 rdev->pm.vblank_sync = true;
4771 wake_up(&rdev->irq.vblank_queue);
4772 }
736fc37f 4773 if (atomic_read(&rdev->irq.pflip[3]))
6f34be50
AD
4774 radeon_crtc_handle_flip(rdev, 3);
4775 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
45f9a39b
AD
4776 DRM_DEBUG("IH: D4 vblank\n");
4777 }
4778 break;
4779 case 1: /* D4 vline */
6f34be50
AD
4780 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4781 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
45f9a39b
AD
4782 DRM_DEBUG("IH: D4 vline\n");
4783 }
4784 break;
4785 default:
4786 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4787 break;
4788 }
4789 break;
4790 case 5: /* D5 vblank/vline */
4791 switch (src_data) {
4792 case 0: /* D5 vblank */
6f34be50
AD
4793 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4794 if (rdev->irq.crtc_vblank_int[4]) {
4795 drm_handle_vblank(rdev->ddev, 4);
4796 rdev->pm.vblank_sync = true;
4797 wake_up(&rdev->irq.vblank_queue);
4798 }
736fc37f 4799 if (atomic_read(&rdev->irq.pflip[4]))
6f34be50
AD
4800 radeon_crtc_handle_flip(rdev, 4);
4801 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
45f9a39b
AD
4802 DRM_DEBUG("IH: D5 vblank\n");
4803 }
4804 break;
4805 case 1: /* D5 vline */
6f34be50
AD
4806 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4807 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
45f9a39b
AD
4808 DRM_DEBUG("IH: D5 vline\n");
4809 }
4810 break;
4811 default:
4812 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4813 break;
4814 }
4815 break;
4816 case 6: /* D6 vblank/vline */
4817 switch (src_data) {
4818 case 0: /* D6 vblank */
6f34be50
AD
4819 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4820 if (rdev->irq.crtc_vblank_int[5]) {
4821 drm_handle_vblank(rdev->ddev, 5);
4822 rdev->pm.vblank_sync = true;
4823 wake_up(&rdev->irq.vblank_queue);
4824 }
736fc37f 4825 if (atomic_read(&rdev->irq.pflip[5]))
6f34be50
AD
4826 radeon_crtc_handle_flip(rdev, 5);
4827 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
45f9a39b
AD
4828 DRM_DEBUG("IH: D6 vblank\n");
4829 }
4830 break;
4831 case 1: /* D6 vline */
6f34be50
AD
4832 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4833 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
45f9a39b
AD
4834 DRM_DEBUG("IH: D6 vline\n");
4835 }
4836 break;
4837 default:
4838 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4839 break;
4840 }
4841 break;
4842 case 42: /* HPD hotplug */
4843 switch (src_data) {
4844 case 0:
6f34be50
AD
4845 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4846 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
45f9a39b
AD
4847 queue_hotplug = true;
4848 DRM_DEBUG("IH: HPD1\n");
4849 }
4850 break;
4851 case 1:
6f34be50
AD
4852 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4853 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
45f9a39b
AD
4854 queue_hotplug = true;
4855 DRM_DEBUG("IH: HPD2\n");
4856 }
4857 break;
4858 case 2:
6f34be50
AD
4859 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4860 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
45f9a39b
AD
4861 queue_hotplug = true;
4862 DRM_DEBUG("IH: HPD3\n");
4863 }
4864 break;
4865 case 3:
6f34be50
AD
4866 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4867 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
45f9a39b
AD
4868 queue_hotplug = true;
4869 DRM_DEBUG("IH: HPD4\n");
4870 }
4871 break;
4872 case 4:
6f34be50
AD
4873 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4874 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
45f9a39b
AD
4875 queue_hotplug = true;
4876 DRM_DEBUG("IH: HPD5\n");
4877 }
4878 break;
4879 case 5:
6f34be50
AD
4880 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4881 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
45f9a39b
AD
4882 queue_hotplug = true;
4883 DRM_DEBUG("IH: HPD6\n");
4884 }
4885 break;
4886 default:
4887 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4888 break;
4889 }
4890 break;
f122c610
AD
4891 case 44: /* hdmi */
4892 switch (src_data) {
4893 case 0:
4894 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4895 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4896 queue_hdmi = true;
4897 DRM_DEBUG("IH: HDMI0\n");
4898 }
4899 break;
4900 case 1:
4901 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4902 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4903 queue_hdmi = true;
4904 DRM_DEBUG("IH: HDMI1\n");
4905 }
4906 break;
4907 case 2:
4908 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4909 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4910 queue_hdmi = true;
4911 DRM_DEBUG("IH: HDMI2\n");
4912 }
4913 break;
4914 case 3:
4915 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4916 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4917 queue_hdmi = true;
4918 DRM_DEBUG("IH: HDMI3\n");
4919 }
4920 break;
4921 case 4:
4922 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4923 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4924 queue_hdmi = true;
4925 DRM_DEBUG("IH: HDMI4\n");
4926 }
4927 break;
4928 case 5:
4929 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4930 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4931 queue_hdmi = true;
4932 DRM_DEBUG("IH: HDMI5\n");
4933 }
4934 break;
4935 default:
4936 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4937 break;
4938 }
f2ba57b5
CK
4939 case 124: /* UVD */
4940 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4941 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
f122c610 4942 break;
ae133a11
CK
4943 case 146:
4944 case 147:
54e2e49c
AD
4945 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4946 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
ae133a11
CK
4947 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4948 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
54e2e49c 4949 addr);
ae133a11 4950 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
54e2e49c
AD
4951 status);
4952 cayman_vm_decode_fault(rdev, status, addr);
ae133a11
CK
4953 /* reset addr and status */
4954 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4955 break;
45f9a39b
AD
4956 case 176: /* CP_INT in ring buffer */
4957 case 177: /* CP_INT in IB1 */
4958 case 178: /* CP_INT in IB2 */
4959 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
7465280c 4960 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
45f9a39b
AD
4961 break;
4962 case 181: /* CP EOP event */
4963 DRM_DEBUG("IH: CP EOP\n");
1b37078b
AD
4964 if (rdev->family >= CHIP_CAYMAN) {
4965 switch (src_data) {
4966 case 0:
4967 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4968 break;
4969 case 1:
4970 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4971 break;
4972 case 2:
4973 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4974 break;
4975 }
4976 } else
4977 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
45f9a39b 4978 break;
233d1ad5
AD
4979 case 224: /* DMA trap event */
4980 DRM_DEBUG("IH: DMA trap\n");
4981 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4982 break;
dc50ba7f
AD
4983 case 230: /* thermal low to high */
4984 DRM_DEBUG("IH: thermal low to high\n");
4985 rdev->pm.dpm.thermal.high_to_low = false;
4986 queue_thermal = true;
4987 break;
4988 case 231: /* thermal high to low */
4989 DRM_DEBUG("IH: thermal high to low\n");
4990 rdev->pm.dpm.thermal.high_to_low = true;
4991 queue_thermal = true;
4992 break;
2031f77c 4993 case 233: /* GUI IDLE */
303c805c 4994 DRM_DEBUG("IH: GUI idle\n");
2031f77c 4995 break;
f60cbd11
AD
4996 case 244: /* DMA trap event */
4997 if (rdev->family >= CHIP_CAYMAN) {
4998 DRM_DEBUG("IH: DMA1 trap\n");
4999 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5000 }
5001 break;
45f9a39b
AD
5002 default:
5003 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5004 break;
5005 }
5006
5007 /* wptr/rptr are in bytes! */
5008 rptr += 16;
5009 rptr &= rdev->ih.ptr_mask;
5010 }
45f9a39b 5011 if (queue_hotplug)
32c87fca 5012 schedule_work(&rdev->hotplug_work);
f122c610
AD
5013 if (queue_hdmi)
5014 schedule_work(&rdev->audio_work);
dc50ba7f
AD
5015 if (queue_thermal && rdev->pm.dpm_enabled)
5016 schedule_work(&rdev->pm.dpm.thermal.work);
45f9a39b
AD
5017 rdev->ih.rptr = rptr;
5018 WREG32(IH_RB_RPTR, rdev->ih.rptr);
c20dc369
CK
5019 atomic_set(&rdev->ih.lock, 0);
5020
5021 /* make sure wptr hasn't changed while processing */
5022 wptr = evergreen_get_ih_wptr(rdev);
5023 if (wptr != rptr)
5024 goto restart_ih;
5025
45f9a39b
AD
5026 return IRQ_HANDLED;
5027}
5028
bcc1c2a1
AD
/**
 * evergreen_startup - bring the asic up to a working state
 * @rdev: radeon_device pointer
 *
 * Ordered hardware bring-up: PCIe link/ASPM, MC programming, microcode
 * load, GART/AGP, GPU init, RLC and writeback buffers, fence rings,
 * UVD resume, IRQs, CP/DMA/UVD ring init and resume, IB pool and audio.
 * The order is load-bearing; do not reorder the calls.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts additionally need MC microcode; load on demand */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: on failure the ring is simply disabled below */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* gfx ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	/* dma ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* uvd ring, only when UVD resume above succeeded */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5183
/**
 * evergreen_resume - resume the asic from suspend
 * @rdev: radeon_device pointer
 *
 * Resets the asic, re-posts the card via atombios, restores golden
 * registers and re-runs the full startup sequence.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
5214
/**
 * evergreen_suspend - suspend the asic
 * @rdev: radeon_device pointer
 *
 * Tears down in the reverse order of startup: audio, UVD, CP and DMA
 * engines, interrupts, writeback, then the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5228
bcc1c2a1
AD
5229/* Plan is to move initialization in that function and use
5230 * helper function so that radeon_device_init pretty much
5231 * do nothing more than calling asic specific function. This
5232 * should also allow to remove a bunch of callback function
5233 * like vram_info.
5234 */
5235int evergreen_init(struct radeon_device *rdev)
5236{
5237 int r;
5238
bcc1c2a1
AD
5239 /* Read BIOS */
5240 if (!radeon_get_bios(rdev)) {
5241 if (ASIC_IS_AVIVO(rdev))
5242 return -EINVAL;
5243 }
5244 /* Must be an ATOMBIOS */
5245 if (!rdev->is_atom_bios) {
755d819e 5246 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
bcc1c2a1
AD
5247 return -EINVAL;
5248 }
5249 r = radeon_atombios_init(rdev);
5250 if (r)
5251 return r;
86f5c9ed
AD
5252 /* reset the asic, the gfx blocks are often in a bad state
5253 * after the driver is unloaded or after a resume
5254 */
5255 if (radeon_asic_reset(rdev))
5256 dev_warn(rdev->dev, "GPU reset failed !\n");
bcc1c2a1 5257 /* Post card if necessary */
fd909c37 5258 if (!radeon_card_posted(rdev)) {
bcc1c2a1
AD
5259 if (!rdev->bios) {
5260 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5261 return -EINVAL;
5262 }
5263 DRM_INFO("GPU not posted. posting now...\n");
5264 atom_asic_init(rdev->mode_info.atom_context);
5265 }
d4788db3
AD
5266 /* init golden registers */
5267 evergreen_init_golden_registers(rdev);
bcc1c2a1
AD
5268 /* Initialize scratch registers */
5269 r600_scratch_init(rdev);
5270 /* Initialize surface registers */
5271 radeon_surface_init(rdev);
5272 /* Initialize clocks */
5273 radeon_get_clock_info(rdev->ddev);
bcc1c2a1
AD
5274 /* Fence driver */
5275 r = radeon_fence_driver_init(rdev);
5276 if (r)
5277 return r;
d594e46a
JG
5278 /* initialize AGP */
5279 if (rdev->flags & RADEON_IS_AGP) {
5280 r = radeon_agp_init(rdev);
5281 if (r)
5282 radeon_agp_disable(rdev);
5283 }
5284 /* initialize memory controller */
bcc1c2a1
AD
5285 r = evergreen_mc_init(rdev);
5286 if (r)
5287 return r;
5288 /* Memory manager */
5289 r = radeon_bo_init(rdev);
5290 if (r)
5291 return r;
45f9a39b 5292
e32eb50d
CK
5293 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5294 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
bcc1c2a1 5295
233d1ad5
AD
5296 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5297 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5298
f2ba57b5
CK
5299 r = radeon_uvd_init(rdev);
5300 if (!r) {
5301 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5302 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5303 4096);
5304 }
5305
bcc1c2a1
AD
5306 rdev->ih.ring_obj = NULL;
5307 r600_ih_ring_init(rdev, 64 * 1024);
5308
5309 r = r600_pcie_gart_init(rdev);
5310 if (r)
5311 return r;
0fcdb61e 5312
148a03bc 5313 rdev->accel_working = true;
bcc1c2a1
AD
5314 r = evergreen_startup(rdev);
5315 if (r) {
fe251e2f
AD
5316 dev_err(rdev->dev, "disabling GPU acceleration\n");
5317 r700_cp_fini(rdev);
233d1ad5 5318 r600_dma_fini(rdev);
fe251e2f 5319 r600_irq_fini(rdev);
2948f5e6
AD
5320 if (rdev->flags & RADEON_IS_IGP)
5321 sumo_rlc_fini(rdev);
724c80e1 5322 radeon_wb_fini(rdev);
2898c348 5323 radeon_ib_pool_fini(rdev);
fe251e2f 5324 radeon_irq_kms_fini(rdev);
0fcdb61e 5325 evergreen_pcie_gart_fini(rdev);
bcc1c2a1
AD
5326 rdev->accel_working = false;
5327 }
77e00f2e
AD
5328
5329 /* Don't start up if the MC ucode is missing on BTC parts.
5330 * The default clocks and voltages before the MC ucode
5331 * is loaded are not sufficient for advanced operations.
5332 */
5333 if (ASIC_IS_DCE5(rdev)) {
5334 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5335 DRM_ERROR("radeon: MC ucode required for NI+.\n");
5336 return -EINVAL;
5337 }
5338 }
5339
bcc1c2a1
AD
5340 return 0;
5341}
5342
/**
 * evergreen_fini - asic-specific driver and hardware teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tears down everything brought up by evergreen_init()/evergreen_startup(),
 * roughly in reverse order of initialization: audio, the GFX CP and async
 * DMA rings, interrupts, the RLC buffers (IGP parts only), writeback, the
 * IB pool, KMS IRQs, the PCIE GART, UVD, VRAM scratch, GEM, the fence
 * driver, AGP, buffer objects and atombios state, and finally the cached
 * BIOS image.  The call order matters; do not reorder.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);		/* gfx command processor ring */
	r600_dma_fini(rdev);		/* async DMA ring */
	r600_irq_fini(rdev);
	/* only fusion/IGP parts set up RLC save/restore buffers */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);		/* writeback buffer */
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;		/* guard against stale pointer use */
}
9e46a48d 5366
b07759bf 5367void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
9e46a48d 5368{
7e0e4196 5369 u32 link_width_cntl, speed_cntl;
9e46a48d 5370
d42dd579
AD
5371 if (radeon_pcie_gen2 == 0)
5372 return;
5373
9e46a48d
AD
5374 if (rdev->flags & RADEON_IS_IGP)
5375 return;
5376
5377 if (!(rdev->flags & RADEON_IS_PCIE))
5378 return;
5379
5380 /* x2 cards have a special sequence */
5381 if (ASIC_IS_X2(rdev))
5382 return;
5383
7e0e4196
KSS
5384 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5385 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
197bbb3d
DA
5386 return;
5387
492d2b61 5388 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3691feea
AD
5389 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5390 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5391 return;
5392 }
5393
197bbb3d
DA
5394 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5395
9e46a48d
AD
5396 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5397 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5398
492d2b61 5399 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
9e46a48d 5400 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
492d2b61 5401 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
9e46a48d 5402
492d2b61 5403 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
9e46a48d 5404 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
492d2b61 5405 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
9e46a48d 5406
492d2b61 5407 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
9e46a48d 5408 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
492d2b61 5409 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
9e46a48d 5410
492d2b61 5411 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
9e46a48d 5412 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
492d2b61 5413 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
9e46a48d 5414
492d2b61 5415 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
9e46a48d 5416 speed_cntl |= LC_GEN2_EN_STRAP;
492d2b61 5417 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
9e46a48d
AD
5418
5419 } else {
492d2b61 5420 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
9e46a48d
AD
5421 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5422 if (1)
5423 link_width_cntl |= LC_UPCONFIGURE_DIS;
5424 else
5425 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
492d2b61 5426 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
9e46a48d
AD
5427 }
5428}
f52382d7
AD
5429
/**
 * evergreen_program_aspm - program PCIE Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * Configures the L0s/L1 ASPM link power states and the PIF PHY pairing
 * and PLL power-down behavior.  Bails out when ASPM is disabled on the
 * command line (radeon.aspm=0) or on non-PCIE parts.  L0s is disabled
 * on a list of families known not to support it; L1 and PLL-off-in-L1
 * are enabled by default here.  The register sequence is order-sensitive
 * hardware programming; every write is skipped when the read-modify-write
 * produced no change.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families do not get the L0s inactivity timer enabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PHY pairing mode differs between fusion and discrete platforms */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* build up the new LC_CNTL value; written once at the end */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* NI+ (BARTS and later) use a longer L0s inactivity timeout */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* power the PLLs down in the L1 and TXS2 states
			 * on both PHY pads (PB0/PB1, lanes 0 and 1).
			 */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* NI+ also need the PLL ramp-up time programmed */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}