/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
38 static const u32 crtc_offsets[6] =
40 EVERGREEN_CRTC0_REGISTER_OFFSET,
41 EVERGREEN_CRTC1_REGISTER_OFFSET,
42 EVERGREEN_CRTC2_REGISTER_OFFSET,
43 EVERGREEN_CRTC3_REGISTER_OFFSET,
44 EVERGREEN_CRTC4_REGISTER_OFFSET,
45 EVERGREEN_CRTC5_REGISTER_OFFSET
48 #include "clearstate_evergreen.h"
50 static const u32 sumo_rlc_save_restore_register_list[] =
135 static void evergreen_gpu_init(struct radeon_device *rdev);
136 void evergreen_fini(struct radeon_device *rdev);
137 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
138 void evergreen_program_aspm(struct radeon_device *rdev);
139 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140 int ring, u32 cp_int_cntl);
141 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142 u32 status, u32 addr);
143 void cik_init_cp_pg_table(struct radeon_device *rdev);
145 extern u32 si_get_csb_size(struct radeon_device *rdev);
146 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
148 static const u32 evergreen_golden_registers[] =
150 0x3f90, 0xffff0000, 0xff000000,
151 0x9148, 0xffff0000, 0xff000000,
152 0x3f94, 0xffff0000, 0xff000000,
153 0x914c, 0xffff0000, 0xff000000,
154 0x9b7c, 0xffffffff, 0x00000000,
155 0x8a14, 0xffffffff, 0x00000007,
156 0x8b10, 0xffffffff, 0x00000000,
157 0x960c, 0xffffffff, 0x54763210,
158 0x88c4, 0xffffffff, 0x000000c2,
159 0x88d4, 0xffffffff, 0x00000010,
160 0x8974, 0xffffffff, 0x00000000,
161 0xc78, 0x00000080, 0x00000080,
162 0x5eb4, 0xffffffff, 0x00000002,
163 0x5e78, 0xffffffff, 0x001000f0,
164 0x6104, 0x01000300, 0x00000000,
165 0x5bc0, 0x00300000, 0x00000000,
166 0x7030, 0xffffffff, 0x00000011,
167 0x7c30, 0xffffffff, 0x00000011,
168 0x10830, 0xffffffff, 0x00000011,
169 0x11430, 0xffffffff, 0x00000011,
170 0x12030, 0xffffffff, 0x00000011,
171 0x12c30, 0xffffffff, 0x00000011,
172 0xd02c, 0xffffffff, 0x08421000,
173 0x240c, 0xffffffff, 0x00000380,
174 0x8b24, 0xffffffff, 0x00ff0fff,
175 0x28a4c, 0x06000000, 0x06000000,
176 0x10c, 0x00000001, 0x00000001,
177 0x8d00, 0xffffffff, 0x100e4848,
178 0x8d04, 0xffffffff, 0x00164745,
179 0x8c00, 0xffffffff, 0xe4000003,
180 0x8c04, 0xffffffff, 0x40600060,
181 0x8c08, 0xffffffff, 0x001c001c,
182 0x8cf0, 0xffffffff, 0x08e00620,
183 0x8c20, 0xffffffff, 0x00800080,
184 0x8c24, 0xffffffff, 0x00800080,
185 0x8c18, 0xffffffff, 0x20202078,
186 0x8c1c, 0xffffffff, 0x00001010,
187 0x28350, 0xffffffff, 0x00000000,
188 0xa008, 0xffffffff, 0x00010000,
189 0x5cc, 0xffffffff, 0x00000001,
190 0x9508, 0xffffffff, 0x00000002,
191 0x913c, 0x0000000f, 0x0000000a
194 static const u32 evergreen_golden_registers2[] =
196 0x2f4c, 0xffffffff, 0x00000000,
197 0x54f4, 0xffffffff, 0x00000000,
198 0x54f0, 0xffffffff, 0x00000000,
199 0x5498, 0xffffffff, 0x00000000,
200 0x549c, 0xffffffff, 0x00000000,
201 0x5494, 0xffffffff, 0x00000000,
202 0x53cc, 0xffffffff, 0x00000000,
203 0x53c8, 0xffffffff, 0x00000000,
204 0x53c4, 0xffffffff, 0x00000000,
205 0x53c0, 0xffffffff, 0x00000000,
206 0x53bc, 0xffffffff, 0x00000000,
207 0x53b8, 0xffffffff, 0x00000000,
208 0x53b4, 0xffffffff, 0x00000000,
209 0x53b0, 0xffffffff, 0x00000000
212 static const u32 cypress_mgcg_init[] =
214 0x802c, 0xffffffff, 0xc0000000,
215 0x5448, 0xffffffff, 0x00000100,
216 0x55e4, 0xffffffff, 0x00000100,
217 0x160c, 0xffffffff, 0x00000100,
218 0x5644, 0xffffffff, 0x00000100,
219 0xc164, 0xffffffff, 0x00000100,
220 0x8a18, 0xffffffff, 0x00000100,
221 0x897c, 0xffffffff, 0x06000100,
222 0x8b28, 0xffffffff, 0x00000100,
223 0x9144, 0xffffffff, 0x00000100,
224 0x9a60, 0xffffffff, 0x00000100,
225 0x9868, 0xffffffff, 0x00000100,
226 0x8d58, 0xffffffff, 0x00000100,
227 0x9510, 0xffffffff, 0x00000100,
228 0x949c, 0xffffffff, 0x00000100,
229 0x9654, 0xffffffff, 0x00000100,
230 0x9030, 0xffffffff, 0x00000100,
231 0x9034, 0xffffffff, 0x00000100,
232 0x9038, 0xffffffff, 0x00000100,
233 0x903c, 0xffffffff, 0x00000100,
234 0x9040, 0xffffffff, 0x00000100,
235 0xa200, 0xffffffff, 0x00000100,
236 0xa204, 0xffffffff, 0x00000100,
237 0xa208, 0xffffffff, 0x00000100,
238 0xa20c, 0xffffffff, 0x00000100,
239 0x971c, 0xffffffff, 0x00000100,
240 0x977c, 0xffffffff, 0x00000100,
241 0x3f80, 0xffffffff, 0x00000100,
242 0xa210, 0xffffffff, 0x00000100,
243 0xa214, 0xffffffff, 0x00000100,
244 0x4d8, 0xffffffff, 0x00000100,
245 0x9784, 0xffffffff, 0x00000100,
246 0x9698, 0xffffffff, 0x00000100,
247 0x4d4, 0xffffffff, 0x00000200,
248 0x30cc, 0xffffffff, 0x00000100,
249 0xd0c0, 0xffffffff, 0xff000100,
250 0x802c, 0xffffffff, 0x40000000,
251 0x915c, 0xffffffff, 0x00010000,
252 0x9160, 0xffffffff, 0x00030002,
253 0x9178, 0xffffffff, 0x00070000,
254 0x917c, 0xffffffff, 0x00030002,
255 0x9180, 0xffffffff, 0x00050004,
256 0x918c, 0xffffffff, 0x00010006,
257 0x9190, 0xffffffff, 0x00090008,
258 0x9194, 0xffffffff, 0x00070000,
259 0x9198, 0xffffffff, 0x00030002,
260 0x919c, 0xffffffff, 0x00050004,
261 0x91a8, 0xffffffff, 0x00010006,
262 0x91ac, 0xffffffff, 0x00090008,
263 0x91b0, 0xffffffff, 0x00070000,
264 0x91b4, 0xffffffff, 0x00030002,
265 0x91b8, 0xffffffff, 0x00050004,
266 0x91c4, 0xffffffff, 0x00010006,
267 0x91c8, 0xffffffff, 0x00090008,
268 0x91cc, 0xffffffff, 0x00070000,
269 0x91d0, 0xffffffff, 0x00030002,
270 0x91d4, 0xffffffff, 0x00050004,
271 0x91e0, 0xffffffff, 0x00010006,
272 0x91e4, 0xffffffff, 0x00090008,
273 0x91e8, 0xffffffff, 0x00000000,
274 0x91ec, 0xffffffff, 0x00070000,
275 0x91f0, 0xffffffff, 0x00030002,
276 0x91f4, 0xffffffff, 0x00050004,
277 0x9200, 0xffffffff, 0x00010006,
278 0x9204, 0xffffffff, 0x00090008,
279 0x9208, 0xffffffff, 0x00070000,
280 0x920c, 0xffffffff, 0x00030002,
281 0x9210, 0xffffffff, 0x00050004,
282 0x921c, 0xffffffff, 0x00010006,
283 0x9220, 0xffffffff, 0x00090008,
284 0x9224, 0xffffffff, 0x00070000,
285 0x9228, 0xffffffff, 0x00030002,
286 0x922c, 0xffffffff, 0x00050004,
287 0x9238, 0xffffffff, 0x00010006,
288 0x923c, 0xffffffff, 0x00090008,
289 0x9240, 0xffffffff, 0x00070000,
290 0x9244, 0xffffffff, 0x00030002,
291 0x9248, 0xffffffff, 0x00050004,
292 0x9254, 0xffffffff, 0x00010006,
293 0x9258, 0xffffffff, 0x00090008,
294 0x925c, 0xffffffff, 0x00070000,
295 0x9260, 0xffffffff, 0x00030002,
296 0x9264, 0xffffffff, 0x00050004,
297 0x9270, 0xffffffff, 0x00010006,
298 0x9274, 0xffffffff, 0x00090008,
299 0x9278, 0xffffffff, 0x00070000,
300 0x927c, 0xffffffff, 0x00030002,
301 0x9280, 0xffffffff, 0x00050004,
302 0x928c, 0xffffffff, 0x00010006,
303 0x9290, 0xffffffff, 0x00090008,
304 0x9294, 0xffffffff, 0x00000000,
305 0x929c, 0xffffffff, 0x00000001,
306 0x802c, 0xffffffff, 0x40010000,
307 0x915c, 0xffffffff, 0x00010000,
308 0x9160, 0xffffffff, 0x00030002,
309 0x9178, 0xffffffff, 0x00070000,
310 0x917c, 0xffffffff, 0x00030002,
311 0x9180, 0xffffffff, 0x00050004,
312 0x918c, 0xffffffff, 0x00010006,
313 0x9190, 0xffffffff, 0x00090008,
314 0x9194, 0xffffffff, 0x00070000,
315 0x9198, 0xffffffff, 0x00030002,
316 0x919c, 0xffffffff, 0x00050004,
317 0x91a8, 0xffffffff, 0x00010006,
318 0x91ac, 0xffffffff, 0x00090008,
319 0x91b0, 0xffffffff, 0x00070000,
320 0x91b4, 0xffffffff, 0x00030002,
321 0x91b8, 0xffffffff, 0x00050004,
322 0x91c4, 0xffffffff, 0x00010006,
323 0x91c8, 0xffffffff, 0x00090008,
324 0x91cc, 0xffffffff, 0x00070000,
325 0x91d0, 0xffffffff, 0x00030002,
326 0x91d4, 0xffffffff, 0x00050004,
327 0x91e0, 0xffffffff, 0x00010006,
328 0x91e4, 0xffffffff, 0x00090008,
329 0x91e8, 0xffffffff, 0x00000000,
330 0x91ec, 0xffffffff, 0x00070000,
331 0x91f0, 0xffffffff, 0x00030002,
332 0x91f4, 0xffffffff, 0x00050004,
333 0x9200, 0xffffffff, 0x00010006,
334 0x9204, 0xffffffff, 0x00090008,
335 0x9208, 0xffffffff, 0x00070000,
336 0x920c, 0xffffffff, 0x00030002,
337 0x9210, 0xffffffff, 0x00050004,
338 0x921c, 0xffffffff, 0x00010006,
339 0x9220, 0xffffffff, 0x00090008,
340 0x9224, 0xffffffff, 0x00070000,
341 0x9228, 0xffffffff, 0x00030002,
342 0x922c, 0xffffffff, 0x00050004,
343 0x9238, 0xffffffff, 0x00010006,
344 0x923c, 0xffffffff, 0x00090008,
345 0x9240, 0xffffffff, 0x00070000,
346 0x9244, 0xffffffff, 0x00030002,
347 0x9248, 0xffffffff, 0x00050004,
348 0x9254, 0xffffffff, 0x00010006,
349 0x9258, 0xffffffff, 0x00090008,
350 0x925c, 0xffffffff, 0x00070000,
351 0x9260, 0xffffffff, 0x00030002,
352 0x9264, 0xffffffff, 0x00050004,
353 0x9270, 0xffffffff, 0x00010006,
354 0x9274, 0xffffffff, 0x00090008,
355 0x9278, 0xffffffff, 0x00070000,
356 0x927c, 0xffffffff, 0x00030002,
357 0x9280, 0xffffffff, 0x00050004,
358 0x928c, 0xffffffff, 0x00010006,
359 0x9290, 0xffffffff, 0x00090008,
360 0x9294, 0xffffffff, 0x00000000,
361 0x929c, 0xffffffff, 0x00000001,
362 0x802c, 0xffffffff, 0xc0000000
365 static const u32 redwood_mgcg_init[] =
367 0x802c, 0xffffffff, 0xc0000000,
368 0x5448, 0xffffffff, 0x00000100,
369 0x55e4, 0xffffffff, 0x00000100,
370 0x160c, 0xffffffff, 0x00000100,
371 0x5644, 0xffffffff, 0x00000100,
372 0xc164, 0xffffffff, 0x00000100,
373 0x8a18, 0xffffffff, 0x00000100,
374 0x897c, 0xffffffff, 0x06000100,
375 0x8b28, 0xffffffff, 0x00000100,
376 0x9144, 0xffffffff, 0x00000100,
377 0x9a60, 0xffffffff, 0x00000100,
378 0x9868, 0xffffffff, 0x00000100,
379 0x8d58, 0xffffffff, 0x00000100,
380 0x9510, 0xffffffff, 0x00000100,
381 0x949c, 0xffffffff, 0x00000100,
382 0x9654, 0xffffffff, 0x00000100,
383 0x9030, 0xffffffff, 0x00000100,
384 0x9034, 0xffffffff, 0x00000100,
385 0x9038, 0xffffffff, 0x00000100,
386 0x903c, 0xffffffff, 0x00000100,
387 0x9040, 0xffffffff, 0x00000100,
388 0xa200, 0xffffffff, 0x00000100,
389 0xa204, 0xffffffff, 0x00000100,
390 0xa208, 0xffffffff, 0x00000100,
391 0xa20c, 0xffffffff, 0x00000100,
392 0x971c, 0xffffffff, 0x00000100,
393 0x977c, 0xffffffff, 0x00000100,
394 0x3f80, 0xffffffff, 0x00000100,
395 0xa210, 0xffffffff, 0x00000100,
396 0xa214, 0xffffffff, 0x00000100,
397 0x4d8, 0xffffffff, 0x00000100,
398 0x9784, 0xffffffff, 0x00000100,
399 0x9698, 0xffffffff, 0x00000100,
400 0x4d4, 0xffffffff, 0x00000200,
401 0x30cc, 0xffffffff, 0x00000100,
402 0xd0c0, 0xffffffff, 0xff000100,
403 0x802c, 0xffffffff, 0x40000000,
404 0x915c, 0xffffffff, 0x00010000,
405 0x9160, 0xffffffff, 0x00030002,
406 0x9178, 0xffffffff, 0x00070000,
407 0x917c, 0xffffffff, 0x00030002,
408 0x9180, 0xffffffff, 0x00050004,
409 0x918c, 0xffffffff, 0x00010006,
410 0x9190, 0xffffffff, 0x00090008,
411 0x9194, 0xffffffff, 0x00070000,
412 0x9198, 0xffffffff, 0x00030002,
413 0x919c, 0xffffffff, 0x00050004,
414 0x91a8, 0xffffffff, 0x00010006,
415 0x91ac, 0xffffffff, 0x00090008,
416 0x91b0, 0xffffffff, 0x00070000,
417 0x91b4, 0xffffffff, 0x00030002,
418 0x91b8, 0xffffffff, 0x00050004,
419 0x91c4, 0xffffffff, 0x00010006,
420 0x91c8, 0xffffffff, 0x00090008,
421 0x91cc, 0xffffffff, 0x00070000,
422 0x91d0, 0xffffffff, 0x00030002,
423 0x91d4, 0xffffffff, 0x00050004,
424 0x91e0, 0xffffffff, 0x00010006,
425 0x91e4, 0xffffffff, 0x00090008,
426 0x91e8, 0xffffffff, 0x00000000,
427 0x91ec, 0xffffffff, 0x00070000,
428 0x91f0, 0xffffffff, 0x00030002,
429 0x91f4, 0xffffffff, 0x00050004,
430 0x9200, 0xffffffff, 0x00010006,
431 0x9204, 0xffffffff, 0x00090008,
432 0x9294, 0xffffffff, 0x00000000,
433 0x929c, 0xffffffff, 0x00000001,
434 0x802c, 0xffffffff, 0xc0000000
437 static const u32 cedar_golden_registers[] =
439 0x3f90, 0xffff0000, 0xff000000,
440 0x9148, 0xffff0000, 0xff000000,
441 0x3f94, 0xffff0000, 0xff000000,
442 0x914c, 0xffff0000, 0xff000000,
443 0x9b7c, 0xffffffff, 0x00000000,
444 0x8a14, 0xffffffff, 0x00000007,
445 0x8b10, 0xffffffff, 0x00000000,
446 0x960c, 0xffffffff, 0x54763210,
447 0x88c4, 0xffffffff, 0x000000c2,
448 0x88d4, 0xffffffff, 0x00000000,
449 0x8974, 0xffffffff, 0x00000000,
450 0xc78, 0x00000080, 0x00000080,
451 0x5eb4, 0xffffffff, 0x00000002,
452 0x5e78, 0xffffffff, 0x001000f0,
453 0x6104, 0x01000300, 0x00000000,
454 0x5bc0, 0x00300000, 0x00000000,
455 0x7030, 0xffffffff, 0x00000011,
456 0x7c30, 0xffffffff, 0x00000011,
457 0x10830, 0xffffffff, 0x00000011,
458 0x11430, 0xffffffff, 0x00000011,
459 0xd02c, 0xffffffff, 0x08421000,
460 0x240c, 0xffffffff, 0x00000380,
461 0x8b24, 0xffffffff, 0x00ff0fff,
462 0x28a4c, 0x06000000, 0x06000000,
463 0x10c, 0x00000001, 0x00000001,
464 0x8d00, 0xffffffff, 0x100e4848,
465 0x8d04, 0xffffffff, 0x00164745,
466 0x8c00, 0xffffffff, 0xe4000003,
467 0x8c04, 0xffffffff, 0x40600060,
468 0x8c08, 0xffffffff, 0x001c001c,
469 0x8cf0, 0xffffffff, 0x08e00410,
470 0x8c20, 0xffffffff, 0x00800080,
471 0x8c24, 0xffffffff, 0x00800080,
472 0x8c18, 0xffffffff, 0x20202078,
473 0x8c1c, 0xffffffff, 0x00001010,
474 0x28350, 0xffffffff, 0x00000000,
475 0xa008, 0xffffffff, 0x00010000,
476 0x5cc, 0xffffffff, 0x00000001,
477 0x9508, 0xffffffff, 0x00000002
480 static const u32 cedar_mgcg_init[] =
482 0x802c, 0xffffffff, 0xc0000000,
483 0x5448, 0xffffffff, 0x00000100,
484 0x55e4, 0xffffffff, 0x00000100,
485 0x160c, 0xffffffff, 0x00000100,
486 0x5644, 0xffffffff, 0x00000100,
487 0xc164, 0xffffffff, 0x00000100,
488 0x8a18, 0xffffffff, 0x00000100,
489 0x897c, 0xffffffff, 0x06000100,
490 0x8b28, 0xffffffff, 0x00000100,
491 0x9144, 0xffffffff, 0x00000100,
492 0x9a60, 0xffffffff, 0x00000100,
493 0x9868, 0xffffffff, 0x00000100,
494 0x8d58, 0xffffffff, 0x00000100,
495 0x9510, 0xffffffff, 0x00000100,
496 0x949c, 0xffffffff, 0x00000100,
497 0x9654, 0xffffffff, 0x00000100,
498 0x9030, 0xffffffff, 0x00000100,
499 0x9034, 0xffffffff, 0x00000100,
500 0x9038, 0xffffffff, 0x00000100,
501 0x903c, 0xffffffff, 0x00000100,
502 0x9040, 0xffffffff, 0x00000100,
503 0xa200, 0xffffffff, 0x00000100,
504 0xa204, 0xffffffff, 0x00000100,
505 0xa208, 0xffffffff, 0x00000100,
506 0xa20c, 0xffffffff, 0x00000100,
507 0x971c, 0xffffffff, 0x00000100,
508 0x977c, 0xffffffff, 0x00000100,
509 0x3f80, 0xffffffff, 0x00000100,
510 0xa210, 0xffffffff, 0x00000100,
511 0xa214, 0xffffffff, 0x00000100,
512 0x4d8, 0xffffffff, 0x00000100,
513 0x9784, 0xffffffff, 0x00000100,
514 0x9698, 0xffffffff, 0x00000100,
515 0x4d4, 0xffffffff, 0x00000200,
516 0x30cc, 0xffffffff, 0x00000100,
517 0xd0c0, 0xffffffff, 0xff000100,
518 0x802c, 0xffffffff, 0x40000000,
519 0x915c, 0xffffffff, 0x00010000,
520 0x9178, 0xffffffff, 0x00050000,
521 0x917c, 0xffffffff, 0x00030002,
522 0x918c, 0xffffffff, 0x00010004,
523 0x9190, 0xffffffff, 0x00070006,
524 0x9194, 0xffffffff, 0x00050000,
525 0x9198, 0xffffffff, 0x00030002,
526 0x91a8, 0xffffffff, 0x00010004,
527 0x91ac, 0xffffffff, 0x00070006,
528 0x91e8, 0xffffffff, 0x00000000,
529 0x9294, 0xffffffff, 0x00000000,
530 0x929c, 0xffffffff, 0x00000001,
531 0x802c, 0xffffffff, 0xc0000000
534 static const u32 juniper_mgcg_init[] =
536 0x802c, 0xffffffff, 0xc0000000,
537 0x5448, 0xffffffff, 0x00000100,
538 0x55e4, 0xffffffff, 0x00000100,
539 0x160c, 0xffffffff, 0x00000100,
540 0x5644, 0xffffffff, 0x00000100,
541 0xc164, 0xffffffff, 0x00000100,
542 0x8a18, 0xffffffff, 0x00000100,
543 0x897c, 0xffffffff, 0x06000100,
544 0x8b28, 0xffffffff, 0x00000100,
545 0x9144, 0xffffffff, 0x00000100,
546 0x9a60, 0xffffffff, 0x00000100,
547 0x9868, 0xffffffff, 0x00000100,
548 0x8d58, 0xffffffff, 0x00000100,
549 0x9510, 0xffffffff, 0x00000100,
550 0x949c, 0xffffffff, 0x00000100,
551 0x9654, 0xffffffff, 0x00000100,
552 0x9030, 0xffffffff, 0x00000100,
553 0x9034, 0xffffffff, 0x00000100,
554 0x9038, 0xffffffff, 0x00000100,
555 0x903c, 0xffffffff, 0x00000100,
556 0x9040, 0xffffffff, 0x00000100,
557 0xa200, 0xffffffff, 0x00000100,
558 0xa204, 0xffffffff, 0x00000100,
559 0xa208, 0xffffffff, 0x00000100,
560 0xa20c, 0xffffffff, 0x00000100,
561 0x971c, 0xffffffff, 0x00000100,
562 0xd0c0, 0xffffffff, 0xff000100,
563 0x802c, 0xffffffff, 0x40000000,
564 0x915c, 0xffffffff, 0x00010000,
565 0x9160, 0xffffffff, 0x00030002,
566 0x9178, 0xffffffff, 0x00070000,
567 0x917c, 0xffffffff, 0x00030002,
568 0x9180, 0xffffffff, 0x00050004,
569 0x918c, 0xffffffff, 0x00010006,
570 0x9190, 0xffffffff, 0x00090008,
571 0x9194, 0xffffffff, 0x00070000,
572 0x9198, 0xffffffff, 0x00030002,
573 0x919c, 0xffffffff, 0x00050004,
574 0x91a8, 0xffffffff, 0x00010006,
575 0x91ac, 0xffffffff, 0x00090008,
576 0x91b0, 0xffffffff, 0x00070000,
577 0x91b4, 0xffffffff, 0x00030002,
578 0x91b8, 0xffffffff, 0x00050004,
579 0x91c4, 0xffffffff, 0x00010006,
580 0x91c8, 0xffffffff, 0x00090008,
581 0x91cc, 0xffffffff, 0x00070000,
582 0x91d0, 0xffffffff, 0x00030002,
583 0x91d4, 0xffffffff, 0x00050004,
584 0x91e0, 0xffffffff, 0x00010006,
585 0x91e4, 0xffffffff, 0x00090008,
586 0x91e8, 0xffffffff, 0x00000000,
587 0x91ec, 0xffffffff, 0x00070000,
588 0x91f0, 0xffffffff, 0x00030002,
589 0x91f4, 0xffffffff, 0x00050004,
590 0x9200, 0xffffffff, 0x00010006,
591 0x9204, 0xffffffff, 0x00090008,
592 0x9208, 0xffffffff, 0x00070000,
593 0x920c, 0xffffffff, 0x00030002,
594 0x9210, 0xffffffff, 0x00050004,
595 0x921c, 0xffffffff, 0x00010006,
596 0x9220, 0xffffffff, 0x00090008,
597 0x9224, 0xffffffff, 0x00070000,
598 0x9228, 0xffffffff, 0x00030002,
599 0x922c, 0xffffffff, 0x00050004,
600 0x9238, 0xffffffff, 0x00010006,
601 0x923c, 0xffffffff, 0x00090008,
602 0x9240, 0xffffffff, 0x00070000,
603 0x9244, 0xffffffff, 0x00030002,
604 0x9248, 0xffffffff, 0x00050004,
605 0x9254, 0xffffffff, 0x00010006,
606 0x9258, 0xffffffff, 0x00090008,
607 0x925c, 0xffffffff, 0x00070000,
608 0x9260, 0xffffffff, 0x00030002,
609 0x9264, 0xffffffff, 0x00050004,
610 0x9270, 0xffffffff, 0x00010006,
611 0x9274, 0xffffffff, 0x00090008,
612 0x9278, 0xffffffff, 0x00070000,
613 0x927c, 0xffffffff, 0x00030002,
614 0x9280, 0xffffffff, 0x00050004,
615 0x928c, 0xffffffff, 0x00010006,
616 0x9290, 0xffffffff, 0x00090008,
617 0x9294, 0xffffffff, 0x00000000,
618 0x929c, 0xffffffff, 0x00000001,
619 0x802c, 0xffffffff, 0xc0000000,
620 0x977c, 0xffffffff, 0x00000100,
621 0x3f80, 0xffffffff, 0x00000100,
622 0xa210, 0xffffffff, 0x00000100,
623 0xa214, 0xffffffff, 0x00000100,
624 0x4d8, 0xffffffff, 0x00000100,
625 0x9784, 0xffffffff, 0x00000100,
626 0x9698, 0xffffffff, 0x00000100,
627 0x4d4, 0xffffffff, 0x00000200,
628 0x30cc, 0xffffffff, 0x00000100,
629 0x802c, 0xffffffff, 0xc0000000
632 static const u32 supersumo_golden_registers[] =
634 0x5eb4, 0xffffffff, 0x00000002,
635 0x5cc, 0xffffffff, 0x00000001,
636 0x7030, 0xffffffff, 0x00000011,
637 0x7c30, 0xffffffff, 0x00000011,
638 0x6104, 0x01000300, 0x00000000,
639 0x5bc0, 0x00300000, 0x00000000,
640 0x8c04, 0xffffffff, 0x40600060,
641 0x8c08, 0xffffffff, 0x001c001c,
642 0x8c20, 0xffffffff, 0x00800080,
643 0x8c24, 0xffffffff, 0x00800080,
644 0x8c18, 0xffffffff, 0x20202078,
645 0x8c1c, 0xffffffff, 0x00001010,
646 0x918c, 0xffffffff, 0x00010006,
647 0x91a8, 0xffffffff, 0x00010006,
648 0x91c4, 0xffffffff, 0x00010006,
649 0x91e0, 0xffffffff, 0x00010006,
650 0x9200, 0xffffffff, 0x00010006,
651 0x9150, 0xffffffff, 0x6e944040,
652 0x917c, 0xffffffff, 0x00030002,
653 0x9180, 0xffffffff, 0x00050004,
654 0x9198, 0xffffffff, 0x00030002,
655 0x919c, 0xffffffff, 0x00050004,
656 0x91b4, 0xffffffff, 0x00030002,
657 0x91b8, 0xffffffff, 0x00050004,
658 0x91d0, 0xffffffff, 0x00030002,
659 0x91d4, 0xffffffff, 0x00050004,
660 0x91f0, 0xffffffff, 0x00030002,
661 0x91f4, 0xffffffff, 0x00050004,
662 0x915c, 0xffffffff, 0x00010000,
663 0x9160, 0xffffffff, 0x00030002,
664 0x3f90, 0xffff0000, 0xff000000,
665 0x9178, 0xffffffff, 0x00070000,
666 0x9194, 0xffffffff, 0x00070000,
667 0x91b0, 0xffffffff, 0x00070000,
668 0x91cc, 0xffffffff, 0x00070000,
669 0x91ec, 0xffffffff, 0x00070000,
670 0x9148, 0xffff0000, 0xff000000,
671 0x9190, 0xffffffff, 0x00090008,
672 0x91ac, 0xffffffff, 0x00090008,
673 0x91c8, 0xffffffff, 0x00090008,
674 0x91e4, 0xffffffff, 0x00090008,
675 0x9204, 0xffffffff, 0x00090008,
676 0x3f94, 0xffff0000, 0xff000000,
677 0x914c, 0xffff0000, 0xff000000,
678 0x929c, 0xffffffff, 0x00000001,
679 0x8a18, 0xffffffff, 0x00000100,
680 0x8b28, 0xffffffff, 0x00000100,
681 0x9144, 0xffffffff, 0x00000100,
682 0x5644, 0xffffffff, 0x00000100,
683 0x9b7c, 0xffffffff, 0x00000000,
684 0x8030, 0xffffffff, 0x0000100a,
685 0x8a14, 0xffffffff, 0x00000007,
686 0x8b24, 0xffffffff, 0x00ff0fff,
687 0x8b10, 0xffffffff, 0x00000000,
688 0x28a4c, 0x06000000, 0x06000000,
689 0x4d8, 0xffffffff, 0x00000100,
690 0x913c, 0xffff000f, 0x0100000a,
691 0x960c, 0xffffffff, 0x54763210,
692 0x88c4, 0xffffffff, 0x000000c2,
693 0x88d4, 0xffffffff, 0x00000010,
694 0x8974, 0xffffffff, 0x00000000,
695 0xc78, 0x00000080, 0x00000080,
696 0x5e78, 0xffffffff, 0x001000f0,
697 0xd02c, 0xffffffff, 0x08421000,
698 0xa008, 0xffffffff, 0x00010000,
699 0x8d00, 0xffffffff, 0x100e4848,
700 0x8d04, 0xffffffff, 0x00164745,
701 0x8c00, 0xffffffff, 0xe4000003,
702 0x8cf0, 0x1fffffff, 0x08e00620,
703 0x28350, 0xffffffff, 0x00000000,
704 0x9508, 0xffffffff, 0x00000002
707 static const u32 sumo_golden_registers[] =
709 0x900c, 0x00ffffff, 0x0017071f,
710 0x8c18, 0xffffffff, 0x10101060,
711 0x8c1c, 0xffffffff, 0x00001010,
712 0x8c30, 0x0000000f, 0x00000005,
713 0x9688, 0x0000000f, 0x00000007
716 static const u32 wrestler_golden_registers[] =
718 0x5eb4, 0xffffffff, 0x00000002,
719 0x5cc, 0xffffffff, 0x00000001,
720 0x7030, 0xffffffff, 0x00000011,
721 0x7c30, 0xffffffff, 0x00000011,
722 0x6104, 0x01000300, 0x00000000,
723 0x5bc0, 0x00300000, 0x00000000,
724 0x918c, 0xffffffff, 0x00010006,
725 0x91a8, 0xffffffff, 0x00010006,
726 0x9150, 0xffffffff, 0x6e944040,
727 0x917c, 0xffffffff, 0x00030002,
728 0x9198, 0xffffffff, 0x00030002,
729 0x915c, 0xffffffff, 0x00010000,
730 0x3f90, 0xffff0000, 0xff000000,
731 0x9178, 0xffffffff, 0x00070000,
732 0x9194, 0xffffffff, 0x00070000,
733 0x9148, 0xffff0000, 0xff000000,
734 0x9190, 0xffffffff, 0x00090008,
735 0x91ac, 0xffffffff, 0x00090008,
736 0x3f94, 0xffff0000, 0xff000000,
737 0x914c, 0xffff0000, 0xff000000,
738 0x929c, 0xffffffff, 0x00000001,
739 0x8a18, 0xffffffff, 0x00000100,
740 0x8b28, 0xffffffff, 0x00000100,
741 0x9144, 0xffffffff, 0x00000100,
742 0x9b7c, 0xffffffff, 0x00000000,
743 0x8030, 0xffffffff, 0x0000100a,
744 0x8a14, 0xffffffff, 0x00000001,
745 0x8b24, 0xffffffff, 0x00ff0fff,
746 0x8b10, 0xffffffff, 0x00000000,
747 0x28a4c, 0x06000000, 0x06000000,
748 0x4d8, 0xffffffff, 0x00000100,
749 0x913c, 0xffff000f, 0x0100000a,
750 0x960c, 0xffffffff, 0x54763210,
751 0x88c4, 0xffffffff, 0x000000c2,
752 0x88d4, 0xffffffff, 0x00000010,
753 0x8974, 0xffffffff, 0x00000000,
754 0xc78, 0x00000080, 0x00000080,
755 0x5e78, 0xffffffff, 0x001000f0,
756 0xd02c, 0xffffffff, 0x08421000,
757 0xa008, 0xffffffff, 0x00010000,
758 0x8d00, 0xffffffff, 0x100e4848,
759 0x8d04, 0xffffffff, 0x00164745,
760 0x8c00, 0xffffffff, 0xe4000003,
761 0x8cf0, 0x1fffffff, 0x08e00410,
762 0x28350, 0xffffffff, 0x00000000,
763 0x9508, 0xffffffff, 0x00000002,
764 0x900c, 0xffffffff, 0x0017071f,
765 0x8c18, 0xffffffff, 0x10101060,
766 0x8c1c, 0xffffffff, 0x00001010
769 static const u32 barts_golden_registers[] =
771 0x5eb4, 0xffffffff, 0x00000002,
772 0x5e78, 0x8f311ff1, 0x001000f0,
773 0x3f90, 0xffff0000, 0xff000000,
774 0x9148, 0xffff0000, 0xff000000,
775 0x3f94, 0xffff0000, 0xff000000,
776 0x914c, 0xffff0000, 0xff000000,
777 0xc78, 0x00000080, 0x00000080,
778 0xbd4, 0x70073777, 0x00010001,
779 0xd02c, 0xbfffff1f, 0x08421000,
780 0xd0b8, 0x03773777, 0x02011003,
781 0x5bc0, 0x00200000, 0x50100000,
782 0x98f8, 0x33773777, 0x02011003,
783 0x98fc, 0xffffffff, 0x76543210,
784 0x7030, 0x31000311, 0x00000011,
785 0x2f48, 0x00000007, 0x02011003,
786 0x6b28, 0x00000010, 0x00000012,
787 0x7728, 0x00000010, 0x00000012,
788 0x10328, 0x00000010, 0x00000012,
789 0x10f28, 0x00000010, 0x00000012,
790 0x11b28, 0x00000010, 0x00000012,
791 0x12728, 0x00000010, 0x00000012,
792 0x240c, 0x000007ff, 0x00000380,
793 0x8a14, 0xf000001f, 0x00000007,
794 0x8b24, 0x3fff3fff, 0x00ff0fff,
795 0x8b10, 0x0000ff0f, 0x00000000,
796 0x28a4c, 0x07ffffff, 0x06000000,
797 0x10c, 0x00000001, 0x00010003,
798 0xa02c, 0xffffffff, 0x0000009b,
799 0x913c, 0x0000000f, 0x0100000a,
800 0x8d00, 0xffff7f7f, 0x100e4848,
801 0x8d04, 0x00ffffff, 0x00164745,
802 0x8c00, 0xfffc0003, 0xe4000003,
803 0x8c04, 0xf8ff00ff, 0x40600060,
804 0x8c08, 0x00ff00ff, 0x001c001c,
805 0x8cf0, 0x1fff1fff, 0x08e00620,
806 0x8c20, 0x0fff0fff, 0x00800080,
807 0x8c24, 0x0fff0fff, 0x00800080,
808 0x8c18, 0xffffffff, 0x20202078,
809 0x8c1c, 0x0000ffff, 0x00001010,
810 0x28350, 0x00000f01, 0x00000000,
811 0x9508, 0x3700001f, 0x00000002,
812 0x960c, 0xffffffff, 0x54763210,
813 0x88c4, 0x001f3ae3, 0x000000c2,
814 0x88d4, 0x0000001f, 0x00000010,
815 0x8974, 0xffffffff, 0x00000000
818 static const u32 turks_golden_registers[] =
820 0x5eb4, 0xffffffff, 0x00000002,
821 0x5e78, 0x8f311ff1, 0x001000f0,
822 0x8c8, 0x00003000, 0x00001070,
823 0x8cc, 0x000fffff, 0x00040035,
824 0x3f90, 0xffff0000, 0xfff00000,
825 0x9148, 0xffff0000, 0xfff00000,
826 0x3f94, 0xffff0000, 0xfff00000,
827 0x914c, 0xffff0000, 0xfff00000,
828 0xc78, 0x00000080, 0x00000080,
829 0xbd4, 0x00073007, 0x00010002,
830 0xd02c, 0xbfffff1f, 0x08421000,
831 0xd0b8, 0x03773777, 0x02010002,
832 0x5bc0, 0x00200000, 0x50100000,
833 0x98f8, 0x33773777, 0x00010002,
834 0x98fc, 0xffffffff, 0x33221100,
835 0x7030, 0x31000311, 0x00000011,
836 0x2f48, 0x33773777, 0x00010002,
837 0x6b28, 0x00000010, 0x00000012,
838 0x7728, 0x00000010, 0x00000012,
839 0x10328, 0x00000010, 0x00000012,
840 0x10f28, 0x00000010, 0x00000012,
841 0x11b28, 0x00000010, 0x00000012,
842 0x12728, 0x00000010, 0x00000012,
843 0x240c, 0x000007ff, 0x00000380,
844 0x8a14, 0xf000001f, 0x00000007,
845 0x8b24, 0x3fff3fff, 0x00ff0fff,
846 0x8b10, 0x0000ff0f, 0x00000000,
847 0x28a4c, 0x07ffffff, 0x06000000,
848 0x10c, 0x00000001, 0x00010003,
849 0xa02c, 0xffffffff, 0x0000009b,
850 0x913c, 0x0000000f, 0x0100000a,
851 0x8d00, 0xffff7f7f, 0x100e4848,
852 0x8d04, 0x00ffffff, 0x00164745,
853 0x8c00, 0xfffc0003, 0xe4000003,
854 0x8c04, 0xf8ff00ff, 0x40600060,
855 0x8c08, 0x00ff00ff, 0x001c001c,
856 0x8cf0, 0x1fff1fff, 0x08e00410,
857 0x8c20, 0x0fff0fff, 0x00800080,
858 0x8c24, 0x0fff0fff, 0x00800080,
859 0x8c18, 0xffffffff, 0x20202078,
860 0x8c1c, 0x0000ffff, 0x00001010,
861 0x28350, 0x00000f01, 0x00000000,
862 0x9508, 0x3700001f, 0x00000002,
863 0x960c, 0xffffffff, 0x54763210,
864 0x88c4, 0x001f3ae3, 0x000000c2,
865 0x88d4, 0x0000001f, 0x00000010,
866 0x8974, 0xffffffff, 0x00000000
869 static const u32 caicos_golden_registers[] =
871 0x5eb4, 0xffffffff, 0x00000002,
872 0x5e78, 0x8f311ff1, 0x001000f0,
873 0x8c8, 0x00003420, 0x00001450,
874 0x8cc, 0x000fffff, 0x00040035,
875 0x3f90, 0xffff0000, 0xfffc0000,
876 0x9148, 0xffff0000, 0xfffc0000,
877 0x3f94, 0xffff0000, 0xfffc0000,
878 0x914c, 0xffff0000, 0xfffc0000,
879 0xc78, 0x00000080, 0x00000080,
880 0xbd4, 0x00073007, 0x00010001,
881 0xd02c, 0xbfffff1f, 0x08421000,
882 0xd0b8, 0x03773777, 0x02010001,
883 0x5bc0, 0x00200000, 0x50100000,
884 0x98f8, 0x33773777, 0x02010001,
885 0x98fc, 0xffffffff, 0x33221100,
886 0x7030, 0x31000311, 0x00000011,
887 0x2f48, 0x33773777, 0x02010001,
888 0x6b28, 0x00000010, 0x00000012,
889 0x7728, 0x00000010, 0x00000012,
890 0x10328, 0x00000010, 0x00000012,
891 0x10f28, 0x00000010, 0x00000012,
892 0x11b28, 0x00000010, 0x00000012,
893 0x12728, 0x00000010, 0x00000012,
894 0x240c, 0x000007ff, 0x00000380,
895 0x8a14, 0xf000001f, 0x00000001,
896 0x8b24, 0x3fff3fff, 0x00ff0fff,
897 0x8b10, 0x0000ff0f, 0x00000000,
898 0x28a4c, 0x07ffffff, 0x06000000,
899 0x10c, 0x00000001, 0x00010003,
900 0xa02c, 0xffffffff, 0x0000009b,
901 0x913c, 0x0000000f, 0x0100000a,
902 0x8d00, 0xffff7f7f, 0x100e4848,
903 0x8d04, 0x00ffffff, 0x00164745,
904 0x8c00, 0xfffc0003, 0xe4000003,
905 0x8c04, 0xf8ff00ff, 0x40600060,
906 0x8c08, 0x00ff00ff, 0x001c001c,
907 0x8cf0, 0x1fff1fff, 0x08e00410,
908 0x8c20, 0x0fff0fff, 0x00800080,
909 0x8c24, 0x0fff0fff, 0x00800080,
910 0x8c18, 0xffffffff, 0x20202078,
911 0x8c1c, 0x0000ffff, 0x00001010,
912 0x28350, 0x00000f01, 0x00000000,
913 0x9508, 0x3700001f, 0x00000002,
914 0x960c, 0xffffffff, 0x54763210,
915 0x88c4, 0x001f3ae3, 0x000000c2,
916 0x88d4, 0x0000001f, 0x00000010,
917 0x8974, 0xffffffff, 0x00000000
920 static void evergreen_init_golden_registers(struct radeon_device *rdev)
922 switch (rdev->family) {
925 radeon_program_register_sequence(rdev,
926 evergreen_golden_registers,
927 (const u32)ARRAY_SIZE(evergreen_golden_registers));
928 radeon_program_register_sequence(rdev,
929 evergreen_golden_registers2,
930 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
931 radeon_program_register_sequence(rdev,
933 (const u32)ARRAY_SIZE(cypress_mgcg_init));
936 radeon_program_register_sequence(rdev,
937 evergreen_golden_registers,
938 (const u32)ARRAY_SIZE(evergreen_golden_registers));
939 radeon_program_register_sequence(rdev,
940 evergreen_golden_registers2,
941 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
942 radeon_program_register_sequence(rdev,
944 (const u32)ARRAY_SIZE(juniper_mgcg_init));
947 radeon_program_register_sequence(rdev,
948 evergreen_golden_registers,
949 (const u32)ARRAY_SIZE(evergreen_golden_registers));
950 radeon_program_register_sequence(rdev,
951 evergreen_golden_registers2,
952 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
953 radeon_program_register_sequence(rdev,
955 (const u32)ARRAY_SIZE(redwood_mgcg_init));
958 radeon_program_register_sequence(rdev,
959 cedar_golden_registers,
960 (const u32)ARRAY_SIZE(cedar_golden_registers));
961 radeon_program_register_sequence(rdev,
962 evergreen_golden_registers2,
963 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
964 radeon_program_register_sequence(rdev,
966 (const u32)ARRAY_SIZE(cedar_mgcg_init));
969 radeon_program_register_sequence(rdev,
970 wrestler_golden_registers,
971 (const u32)ARRAY_SIZE(wrestler_golden_registers));
974 radeon_program_register_sequence(rdev,
975 supersumo_golden_registers,
976 (const u32)ARRAY_SIZE(supersumo_golden_registers));
979 radeon_program_register_sequence(rdev,
980 supersumo_golden_registers,
981 (const u32)ARRAY_SIZE(supersumo_golden_registers));
982 radeon_program_register_sequence(rdev,
983 sumo_golden_registers,
984 (const u32)ARRAY_SIZE(sumo_golden_registers));
987 radeon_program_register_sequence(rdev,
988 barts_golden_registers,
989 (const u32)ARRAY_SIZE(barts_golden_registers));
992 radeon_program_register_sequence(rdev,
993 turks_golden_registers,
994 (const u32)ARRAY_SIZE(turks_golden_registers));
997 radeon_program_register_sequence(rdev,
998 caicos_golden_registers,
999 (const u32)ARRAY_SIZE(caicos_golden_registers));
/**
 * evergreen_tiling_fields - decode packed tiling flags into hw surface encodings
 * @tiling_flags: packed RADEON_TILING_EG_* word
 * @bankw: out, bank width -> EVERGREEN_ADDR_SURF_BANK_WIDTH_* encoding
 * @bankh: out, bank height -> EVERGREEN_ADDR_SURF_BANK_HEIGHT_* encoding
 * @mtaspect: out, macro tile aspect -> EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_*
 * @tile_split: out, raw tile split field
 *
 * NOTE(review): the switch statements for *bankw/*bankh and the closing
 * braces appear to have been lost in this extract - verify against upstream.
 */
1006 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1007 unsigned *bankh, unsigned *mtaspect,
1008 unsigned *tile_split)
/* extract the raw counts from the packed flag word */
1010 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1011 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1012 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1013 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* map bank width count to register encoding */
1016 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1017 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1018 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1019 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
/* map bank height count to register encoding */
1023 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1024 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1025 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1026 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
/* map macro tile aspect ratio to register encoding */
1028 switch (*mtaspect) {
1030 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1031 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1032 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1033 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1037 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1038 u32 cntl_reg, u32 status_reg)
1041 struct atom_clock_dividers dividers;
1043 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1044 clock, false, ÷rs);
1048 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1050 for (i = 0; i < 100; i++) {
1051 if (RREG32(status_reg) & DCLK_STATUS)
/**
 * sumo_set_uvd_clocks - set UVD vclk and dclk (sumo family)
 * @rdev: radeon_device pointer
 * @vclk: requested video clock
 * @dclk: requested display clock
 *
 * Programs both UVD clocks via sumo_set_uvd_clock() and records the
 * resulting frequencies (in MHz) in the CG_SCRATCH1 register:
 * vclk in the low 16 bits, dclk in the high 16 bits.
 */
1061 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064 u32 cg_scratch = RREG32(CG_SCRATCH1);
1066 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* stash vclk (MHz) in the low half of the scratch register */
1069 cg_scratch &= 0xffff0000;
1070 cg_scratch |= vclk / 100; /* Mhz */
1072 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* stash dclk (MHz) in the high half of the scratch register */
1075 cg_scratch &= 0x0000ffff;
1076 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079 WREG32(CG_SCRATCH1, cg_scratch);
/**
 * evergreen_set_uvd_clocks - set UVD vclk and dclk via the UPLL
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (0 to power the PLL down)
 * @dclk: requested display clock (0 to power the PLL down)
 *
 * Bypasses the clocks, reprograms the UPLL feedback/post dividers,
 * then switches VCLK/DCLK back onto the PLL outputs.
 */
1084 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1086 /* start off with something large */
1087 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1090 /* bypass vclk and dclk with bclk */
1091 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1092 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1093 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK))
1095 /* put PLL in bypass mode */
1096 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
/* a zero clock request means "leave bypassed and sleep the PLL" */
1098 if (!vclk || !dclk) {
1099 /* keep the Bypass mode, put PLL to sleep */
1100 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback and post dividers for the requested clocks */
1104 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1105 16384, 0x03FFFFFF, 0, 128, 5,
1106 &fb_div, &vclk_div, &dclk_div);
1110 /* set VCO_MODE to 1 */
1111 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1113 /* toggle UPLL_SLEEP to 1 then back to 0 */
1114 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1115 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1117 /* deassert UPLL_RESET */
1118 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1122 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1126 /* assert UPLL_RESET again */
1127 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1129 /* disable spread spectrum. */
1130 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1132 /* set feedback divider */
1133 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1135 /* set ref divider to 0 */
1136 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* ISPARE9 selection depends on the feedback divider range */
1138 if (fb_div < 307200)
1139 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1141 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1143 /* set PDIV_A and PDIV_B */
1144 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1145 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1146 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1148 /* give the PLL some time to settle */
1151 /* deassert PLL_RESET */
1152 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1156 /* switch from bypass mode to normal mode */
1157 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1159 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1163 /* switch VCLK and DCLK selection */
1164 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1165 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1166 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/**
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIe max read request size
 * @rdev: radeon_device pointer
 *
 * Reads PCI_EXP_DEVCTL and, if the MAX_READ_REQUEST_SIZE field holds a
 * value the hw cannot handle (0, 6 or 7), rewrites it with a safe value.
 */
1173 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1178 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
/* READRQ field lives in bits 14:12 of DEVCTL */
1182 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1184 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1185 * to avoid hangs or performance issues
1187 if ((v == 0) || (v == 6) || (v == 7)) {
1188 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1190 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
/* Return true when the given crtc is currently inside the vblank period
 * (V_BLANK bit set in the crtc status register). */
1194 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1196 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/* Sample the crtc position register twice; if the two reads differ the
 * scanout counter is advancing (i.e. the crtc is actually running). */
1202 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1206 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1207 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1216 * dce4_wait_for_vblank - vblank wait asic callback.
1218 * @rdev: radeon_device pointer
1219 * @crtc: crtc to wait for vblank on
1221 * Wait for vblank on the requested crtc (evergreen+).
1223 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* bail out for invalid or disabled crtcs - nothing to wait on */
1227 if (crtc >= rdev->num_crtc)
1230 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1233 /* depending on when we hit vblank, we may be close to active; if so,
1234 * wait for another frame.
/* first drain any vblank we are already in, checking every 100
 * iterations that the counter is still moving (hw not hung) */
1236 while (dce4_is_in_vblank(rdev, crtc)) {
1237 if (i++ % 100 == 0) {
1238 if (!dce4_is_counter_moving(rdev, crtc))
/* now wait for the next vblank to begin, with the same hang check */
1243 while (!dce4_is_in_vblank(rdev, crtc)) {
1244 if (i++ % 100 == 0) {
1245 if (!dce4_is_counter_moving(rdev, crtc))
1252 * evergreen_pre_page_flip - pre-pageflip callback.
1254 * @rdev: radeon_device pointer
1255 * @crtc: crtc to prepare for pageflip on
1257 * Pre-pageflip callback (evergreen+).
1258 * Enables the pageflip irq (vblank irq).
1260 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1262 /* enable the pflip int */
1263 radeon_irq_kms_pflip_irq_get(rdev, crtc);
1267 * evergreen_post_page_flip - post-pageflip callback.
1269 * @rdev: radeon_device pointer
1270 * @crtc: crtc to cleanup pageflip on
1272 * Post-pageflip callback (evergreen+).
1273 * Disables the pageflip irq (vblank irq).
1275 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1277 /* disable the pflip int */
1278 radeon_irq_kms_pflip_irq_put(rdev, crtc);
1282 * evergreen_page_flip - pageflip callback.
1284 * @rdev: radeon_device pointer
1285 * @crtc_id: crtc to cleanup pageflip on
1286 * @crtc_base: new address of the crtc (GPU MC address)
1288 * Does the actual pageflip (evergreen+).
1289 * During vblank we take the crtc lock and wait for the update_pending
1290 * bit to go high, when it does, we release the lock, and allow the
1291 * double buffered update to take place.
1292 * Returns the current update pending status.
1294 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1296 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1297 u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1300 /* Lock the graphics update lock */
1301 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1302 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1304 /* update the scanout addresses */
/* program secondary then primary surface address (high dword first) */
1305 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1306 upper_32_bits(crtc_base));
1307 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1310 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1311 upper_32_bits(crtc_base))
1312 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1315 /* Wait for update_pending to go high. */
1316 for (i = 0; i < rdev->usec_timeout; i++) {
1317 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1321 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1323 /* Unlock the lock, so double-buffering can take place inside vblank */
1324 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1325 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1327 /* Return current update_pending status: */
1328 return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1331 /* get temperature in millidegrees */
1332 int evergreen_get_temp(struct radeon_device *rdev)
1335 int actual_temp = 0;
/* Juniper uses a separate thermal sensor with a signed 9-bit offset */
1337 if (rdev->family == CHIP_JUNIPER) {
1338 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1340 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* bit 8 of toffset is its sign bit (two's complement over 9 bits) */
1343 if (toffset & 0x100)
1344 actual_temp = temp / 2 - (0x200 - toffset);
1346 actual_temp = temp / 2 + toffset;
1348 actual_temp = actual_temp * 1000;
/* other evergreen parts report a signed value in ASIC_T */
1351 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1356 else if (temp & 0x200)
/* sign-extend the 9-bit reading when bit 8 is set */
1358 else if (temp & 0x100) {
1359 actual_temp = temp & 0x1ff;
1360 actual_temp |= ~0x1ff;
1362 actual_temp = temp & 0xff;
1364 actual_temp = (actual_temp * 1000) / 2;
1370 int sumo_get_temp(struct radeon_device *rdev)
1372 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1373 int actual_temp = temp - 49;
1375 return actual_temp * 1000;
1379 * sumo_pm_init_profile - Initialize power profiles callback.
1381 * @rdev: radeon_device pointer
1383 * Initialize the power states used in profile mode
1384 * (sumo, trinity, SI).
1385 * Used for profile mode only.
1387 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile: current default power state, clock mode 0 */
1392 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1393 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1394 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1395 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* low/mid profiles: battery state on mobility parts, else performance */
1398 if (rdev->flags & RADEON_IS_MOBILITY)
1399 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1401 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1403 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1404 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1405 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1406 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1408 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1409 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1410 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1411 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1413 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1414 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1415 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1416 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1418 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1419 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1420 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1421 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles: performance state, highest available clock mode */
1424 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1425 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1426 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1427 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1428 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1429 rdev->pm.power_state[idx].num_clock_modes - 1;
1431 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1432 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1433 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1434 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1435 rdev->pm.power_state[idx].num_clock_modes - 1;
1439 * btc_pm_init_profile - Initialize power profiles callback.
1441 * @rdev: radeon_device pointer
1443 * Initialize the power states used in profile mode
1445 * Used for profile mode only.
1447 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: default power state, clock mode 2 (high) */
1452 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1453 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1454 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1455 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1456 /* starting with BTC, there is one state that is used for both
1457 * MH and SH. Difference is that we always use the high clock index for
1460 if (rdev->flags & RADEON_IS_MOBILITY)
1461 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1463 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* single-head profiles: clock modes 0/1/2 for low/mid/high */
1465 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1466 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1467 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1468 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1470 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1471 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1472 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1473 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1475 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1476 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1477 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1478 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
/* multi-head profiles mirror the single-head setup */
1480 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1481 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1482 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1483 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1485 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1486 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1487 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1488 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1490 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1491 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1492 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1493 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1497 * evergreen_pm_misc - set additional pm hw parameters callback.
1499 * @rdev: radeon_device pointer
1501 * Set non-clock parameters associated with a power state
1502 * (voltage, etc.) (evergreen+).
1504 void evergreen_pm_misc(struct radeon_device *rdev)
1506 int req_ps_idx = rdev->pm.requested_power_state_index;
1507 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1508 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1509 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1511 if (voltage->type == VOLTAGE_SW) {
1512 /* 0xff0x are flags rather then an actual voltage */
1513 if ((voltage->voltage & 0xff00) == 0xff00)
/* only touch the regulator when the target vddc actually changes */
1515 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1516 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1517 rdev->pm.current_vddc = voltage->voltage;
1518 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1521 /* starting with BTC, there is one state that is used for both
1522 * MH and SH. Difference is that we always use the high clock index for
/* BTC+ multi-head: take vddci from the HIGH_MH clock mode instead */
1525 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1526 (rdev->family >= CHIP_BARTS) &&
1527 rdev->pm.active_crtc_count &&
1528 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1529 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1530 voltage = &rdev->pm.power_state[req_ps_idx].
1531 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1533 /* 0xff0x are flags rather then an actual voltage */
1534 if ((voltage->vddci & 0xff00) == 0xff00)
1536 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1537 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1538 rdev->pm.current_vddci = voltage->vddci;
1539 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1545 * evergreen_pm_prepare - pre-power state change callback.
1547 * @rdev: radeon_device pointer
1549 * Prepare for a power state change (evergreen+).
1551 void evergreen_pm_prepare(struct radeon_device *rdev)
1553 struct drm_device *ddev = rdev->ddev;
1554 struct drm_crtc *crtc;
1555 struct radeon_crtc *radeon_crtc;
1558 /* disable any active CRTCs */
/* setting DISP_READ_REQUEST_DISABLE quiesces display memory traffic
 * while clocks/voltage are reprogrammed */
1559 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1560 radeon_crtc = to_radeon_crtc(crtc);
1561 if (radeon_crtc->enabled) {
1562 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1563 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1564 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1570 * evergreen_pm_finish - post-power state change callback.
1572 * @rdev: radeon_device pointer
1574 * Clean up after a power state change (evergreen+).
1576 void evergreen_pm_finish(struct radeon_device *rdev)
1578 struct drm_device *ddev = rdev->ddev;
1579 struct drm_crtc *crtc;
1580 struct radeon_crtc *radeon_crtc;
1583 /* enable any active CRTCs */
/* clear DISP_READ_REQUEST_DISABLE again - mirror of evergreen_pm_prepare() */
1584 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1585 radeon_crtc = to_radeon_crtc(crtc);
1586 if (radeon_crtc->enabled) {
1587 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1588 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1589 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1595 * evergreen_hpd_sense - hpd sense callback.
1597 * @rdev: radeon_device pointer
1598 * @hpd: hpd (hotplug detect) pin
1600 * Checks if a digital monitor is connected (evergreen+).
1601 * Returns true if connected, false if not connected.
1603 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1605 bool connected = false;
/* each HPD pin has its own status register; check the SENSE bit */
1609 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1613 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1617 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1621 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1625 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1629 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1640 * evergreen_hpd_set_polarity - hpd set polarity callback.
1642 * @rdev: radeon_device pointer
1643 * @hpd: hpd (hotplug detect) pin
1645 * Set the polarity of the hpd pin (evergreen+).
1647 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1648 enum radeon_hpd_id hpd)
1651 bool connected = evergreen_hpd_sense(rdev, hpd);
/* when connected, interrupt on disconnect (polarity cleared);
 * when disconnected, interrupt on connect (polarity set) */
1655 tmp = RREG32(DC_HPD1_INT_CONTROL);
1657 tmp &= ~DC_HPDx_INT_POLARITY;
1659 tmp |= DC_HPDx_INT_POLARITY;
1660 WREG32(DC_HPD1_INT_CONTROL, tmp);
1663 tmp = RREG32(DC_HPD2_INT_CONTROL);
1665 tmp &= ~DC_HPDx_INT_POLARITY;
1667 tmp |= DC_HPDx_INT_POLARITY;
1668 WREG32(DC_HPD2_INT_CONTROL, tmp);
1671 tmp = RREG32(DC_HPD3_INT_CONTROL);
1673 tmp &= ~DC_HPDx_INT_POLARITY;
1675 tmp |= DC_HPDx_INT_POLARITY;
1676 WREG32(DC_HPD3_INT_CONTROL, tmp);
1679 tmp = RREG32(DC_HPD4_INT_CONTROL);
1681 tmp &= ~DC_HPDx_INT_POLARITY;
1683 tmp |= DC_HPDx_INT_POLARITY;
1684 WREG32(DC_HPD4_INT_CONTROL, tmp);
1687 tmp = RREG32(DC_HPD5_INT_CONTROL);
1689 tmp &= ~DC_HPDx_INT_POLARITY;
1691 tmp |= DC_HPDx_INT_POLARITY;
1692 WREG32(DC_HPD5_INT_CONTROL, tmp);
1695 tmp = RREG32(DC_HPD6_INT_CONTROL);
1697 tmp &= ~DC_HPDx_INT_POLARITY;
1699 tmp |= DC_HPDx_INT_POLARITY;
1700 WREG32(DC_HPD6_INT_CONTROL, tmp);
1708 * evergreen_hpd_init - hpd setup callback.
1710 * @rdev: radeon_device pointer
1712 * Setup the hpd pins used by the card (evergreen+).
1713 * Enable the pin, set the polarity, and enable the hpd interrupts.
1715 void evergreen_hpd_init(struct radeon_device *rdev)
1717 struct drm_device *dev = rdev->ddev;
1718 struct drm_connector *connector;
1719 unsigned enabled = 0;
/* enable bit plus connection/RX de-bounce timers for every pin */
1720 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1721 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1723 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1724 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1726 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1727 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1728 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1729 * aux dp channel on imac and help (but not completely fix)
1730 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1731 * also avoid interrupt storms during dpms.
1735 switch (radeon_connector->hpd.hpd) {
1737 WREG32(DC_HPD1_CONTROL, tmp);
1740 WREG32(DC_HPD2_CONTROL, tmp);
1743 WREG32(DC_HPD3_CONTROL, tmp);
1746 WREG32(DC_HPD4_CONTROL, tmp);
1749 WREG32(DC_HPD5_CONTROL, tmp);
1752 WREG32(DC_HPD6_CONTROL, tmp);
/* record the pin in the mask handed to the irq layer below */
1757 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1758 enabled |= 1 << radeon_connector->hpd.hpd;
1760 radeon_irq_kms_enable_hpd(rdev, enabled);
1764 * evergreen_hpd_fini - hpd tear down callback.
1766 * @rdev: radeon_device pointer
1768 * Tear down the hpd pins used by the card (evergreen+).
1769 * Disable the hpd interrupts.
1771 void evergreen_hpd_fini(struct radeon_device *rdev)
1773 struct drm_device *dev = rdev->ddev;
1774 struct drm_connector *connector;
1775 unsigned disabled = 0;
/* write 0 to each pin's control register to disable it, then tell
 * the irq layer which pins were turned off */
1777 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1778 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1779 switch (radeon_connector->hpd.hpd) {
1781 WREG32(DC_HPD1_CONTROL, 0);
1784 WREG32(DC_HPD2_CONTROL, 0);
1787 WREG32(DC_HPD3_CONTROL, 0);
1790 WREG32(DC_HPD4_CONTROL, 0);
1793 WREG32(DC_HPD5_CONTROL, 0);
1796 WREG32(DC_HPD6_CONTROL, 0);
1801 disabled |= 1 << radeon_connector->hpd.hpd;
1803 radeon_irq_kms_disable_hpd(rdev, disabled);
1806 /* watermark setup */
/* Pick a line buffer split for the crtc and return the lb size (in
 * pixels) allocated to it. Each lb is shared by a pair of crtcs. */
1808 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1809 struct radeon_crtc *radeon_crtc,
1810 struct drm_display_mode *mode,
1811 struct drm_display_mode *other_mode)
1816 * There are 3 line buffers, each one shared by 2 display controllers.
1817 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1818 * the display controllers. The partitioning is done via one of four
1819 * preset allocations specified in bits 2:0:
1820 * first display controller
1821 * 0 - first half of lb (3840 * 2)
1822 * 1 - first 3/4 of lb (5760 * 2)
1823 * 2 - whole lb (7680 * 2), other crtc must be disabled
1824 * 3 - first 1/4 of lb (1920 * 2)
1825 * second display controller
1826 * 4 - second half of lb (3840 * 2)
1827 * 5 - second 3/4 of lb (5760 * 2)
1828 * 6 - whole lb (7680 * 2), other crtc must be disabled
1829 * 7 - last 1/4 of lb (1920 * 2)
1831 /* this can get tricky if we have two large displays on a paired group
1832 * of crtcs. Ideally for multiple large displays we'd assign them to
1833 * non-linked crtcs for maximum line buffer allocation.
1835 if (radeon_crtc->base.enabled && mode) {
1839 tmp = 2; /* whole */
1843 /* second controller of the pair uses second half of the lb */
1844 if (radeon_crtc->crtc_id % 2)
1846 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* translate the chosen split into a buffer size; DCE5 parts have
 * different sizes than DCE4 */
1848 if (radeon_crtc->base.enabled && mode) {
1853 if (ASIC_IS_DCE5(rdev))
1859 if (ASIC_IS_DCE5(rdev))
1865 if (ASIC_IS_DCE5(rdev))
1871 if (ASIC_IS_DCE5(rdev))
1878 /* controller not enabled, so no lb used */
/* Read the number of dram channels from the NOOFCHAN field of
 * MC_SHARED_CHMAP. */
1882 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1884 u32 tmp = RREG32(MC_SHARED_CHMAP);
1886 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Input parameters for the display watermark calculations below. */
1899 struct evergreen_wm_params {
1900 u32 dram_channels; /* number of dram channels */
1901 u32 yclk; /* bandwidth per dram data pin in kHz */
1902 u32 sclk; /* engine clock in kHz */
1903 u32 disp_clk; /* display clock in kHz */
1904 u32 src_width; /* viewport width */
1905 u32 active_time; /* active display time in ns */
1906 u32 blank_time; /* blank time in ns */
1907 bool interlaced; /* mode is interlaced */
1908 fixed20_12 vsc; /* vertical scale ratio */
1909 u32 num_heads; /* number of active crtcs */
1910 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1911 u32 lb_size; /* line buffer allocated to pipe */
1912 u32 vtaps; /* vertical scaler taps */
/* Raw DRAM bandwidth in MB/s: yclk(MHz) * 4 bytes/channel * channels * 0.7
 * efficiency, computed in 20.12 fixed point. */
1915 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1917 /* Calculate DRAM Bandwidth and the part allocated to display. */
1918 fixed20_12 dram_efficiency; /* 0.7 */
1919 fixed20_12 yclk, dram_channels, bandwidth;
1922 a.full = dfixed_const(1000);
1923 yclk.full = dfixed_const(wm->yclk);
1924 yclk.full = dfixed_div(yclk, a);
1925 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1926 a.full = dfixed_const(10);
1927 dram_efficiency.full = dfixed_const(7);
1928 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1929 bandwidth.full = dfixed_mul(dram_channels, yclk);
1930 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1932 return dfixed_trunc(bandwidth);
/* DRAM bandwidth reserved for display: same computation as
 * evergreen_dram_bandwidth() but with a 0.3 (worst case) allocation
 * factor instead of 0.7. */
1935 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1937 /* Calculate DRAM Bandwidth and the part allocated to display. */
1938 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1939 fixed20_12 yclk, dram_channels, bandwidth;
1942 a.full = dfixed_const(1000);
1943 yclk.full = dfixed_const(wm->yclk);
1944 yclk.full = dfixed_div(yclk, a);
1945 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1946 a.full = dfixed_const(10);
1947 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1948 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1949 bandwidth.full = dfixed_mul(dram_channels, yclk);
1950 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1952 return dfixed_trunc(bandwidth);
/* Display data return bandwidth: sclk(MHz) * 32 bytes * 0.8 efficiency,
 * in 20.12 fixed point. */
1955 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1957 /* Calculate the display Data return Bandwidth */
1958 fixed20_12 return_efficiency; /* 0.8 */
1959 fixed20_12 sclk, bandwidth;
1962 a.full = dfixed_const(1000);
1963 sclk.full = dfixed_const(wm->sclk);
1964 sclk.full = dfixed_div(sclk, a);
1965 a.full = dfixed_const(10);
1966 return_efficiency.full = dfixed_const(8);
1967 return_efficiency.full = dfixed_div(return_efficiency, a);
1968 a.full = dfixed_const(32);
1969 bandwidth.full = dfixed_mul(a, sclk);
1970 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1972 return dfixed_trunc(bandwidth);
/* DMIF request bandwidth: disp_clk(MHz) * 32 bytes * 0.8 efficiency,
 * in 20.12 fixed point. */
1975 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1977 /* Calculate the DMIF Request Bandwidth */
1978 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1979 fixed20_12 disp_clk, bandwidth;
1982 a.full = dfixed_const(1000);
1983 disp_clk.full = dfixed_const(wm->disp_clk);
1984 disp_clk.full = dfixed_div(disp_clk, a);
1985 a.full = dfixed_const(10);
1986 disp_clk_request_efficiency.full = dfixed_const(8);
1987 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1988 a.full = dfixed_const(32);
1989 bandwidth.full = dfixed_mul(a, disp_clk);
1990 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1992 return dfixed_trunc(bandwidth);
/* The bandwidth actually available to display is bounded by the weakest
 * link: dram, data return path or DMIF requests. */
1995 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1997 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1998 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1999 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2000 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2002 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/* Average bandwidth the mode consumes:
 * src_width * bytes_per_pixel * vsc / line_time, in 20.12 fixed point. */
2005 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2007 /* Calculate the display mode Average Bandwidth
2008 * DisplayMode should contain the source and destination dimensions,
2012 fixed20_12 line_time;
2013 fixed20_12 src_width;
2014 fixed20_12 bandwidth;
2017 a.full = dfixed_const(1000);
2018 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2019 line_time.full = dfixed_div(line_time, a);
2020 bpp.full = dfixed_const(wm->bytes_per_pixel);
2021 src_width.full = dfixed_const(wm->src_width);
2022 bandwidth.full = dfixed_mul(src_width, bpp);
2023 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2024 bandwidth.full = dfixed_div(bandwidth, line_time);
2026 return dfixed_trunc(bandwidth);
/* Compute the latency watermark in ns: worst-case memory latency plus
 * the extra line fill time when the line buffer cannot be filled within
 * the active period. */
2029 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2031 /* First calculate the latency in ns */
2032 u32 mc_latency = 2000; /* 2000 ns. */
2033 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2034 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2035 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2036 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2037 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2038 (wm->num_heads * cursor_line_pair_return_time);
2039 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2040 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2043 if (wm->num_heads == 0)
/* downscaling or >=3-tap filtering needs up to 4 source lines per
 * destination line; otherwise 2 are enough */
2046 a.full = dfixed_const(2);
2047 b.full = dfixed_const(1);
2048 if ((wm->vsc.full > a.full) ||
2049 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2051 ((wm->vsc.full >= a.full) && wm->interlaced))
2052 max_src_lines_per_dst_line = 4;
2054 max_src_lines_per_dst_line = 2;
/* lb fill rate is limited by both the per-head share of the available
 * bandwidth and what the display clock can consume */
2056 a.full = dfixed_const(available_bandwidth);
2057 b.full = dfixed_const(wm->num_heads);
2058 a.full = dfixed_div(a, b);
2060 b.full = dfixed_const(1000);
2061 c.full = dfixed_const(wm->disp_clk);
2062 b.full = dfixed_div(c, b);
2063 c.full = dfixed_const(wm->bytes_per_pixel);
2064 b.full = dfixed_mul(b, c);
2066 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2068 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2069 b.full = dfixed_const(1000);
2070 c.full = dfixed_const(lb_fill_bw);
2071 b.full = dfixed_div(c, b);
2072 a.full = dfixed_div(a, b);
2073 line_fill_time = dfixed_trunc(a);
2075 if (line_fill_time < wm->active_time)
2078 return latency + (line_fill_time - wm->active_time);
/* True when this head's average bandwidth fits within its share of the
 * display-reserved dram bandwidth. */
2082 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2084 if (evergreen_average_bandwidth(wm) <=
2085 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* True when this head's average bandwidth fits within its share of the
 * total available bandwidth. */
2091 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2093 if (evergreen_average_bandwidth(wm) <=
2094 (evergreen_available_bandwidth(wm) / wm->num_heads))
/* True when the line buffer can hide the computed latency watermark:
 * i.e. the buffered lines plus the blank period cover the latency. */
2100 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2102 u32 lb_partitions = wm->lb_size / wm->src_width;
2103 u32 line_time = wm->active_time + wm->blank_time;
2104 u32 latency_tolerant_lines;
/* with vertical scaling (vsc > 1) or few lb partitions only one line
 * of slack is available, otherwise two */
2108 a.full = dfixed_const(1);
2109 if (wm->vsc.full > a.full)
2110 latency_tolerant_lines = 1;
2112 if (lb_partitions <= (wm->vtaps + 1))
2113 latency_tolerant_lines = 1;
2115 latency_tolerant_lines = 2;
2118 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2120 if (evergreen_latency_watermark(wm) <= latency_hiding)
/* Program the display watermarks for one CRTC.
 * Builds two watermark parameter sets (wm_high for high clocks, wm_low for
 * low clocks), decides whether display priority must be forced high, converts
 * the latency watermarks into PRIORITY_A/B marks with fixed-point math, and
 * writes the latency and priority registers for this pipe.
 * @rdev: radeon device
 * @radeon_crtc: the CRTC being programmed
 * @lb_size: line buffer space allotted to this CRTC
 * @num_heads: number of active display heads
 * NOTE(review): this excerpt is gappy — some declarations (pixel_period,
 * line_time, a/b/c, dram_channels), else-branch lines and closing braces
 * are not visible.
 */
2126 static void evergreen_program_watermarks(struct radeon_device *rdev,
2127 struct radeon_crtc *radeon_crtc,
2128 u32 lb_size, u32 num_heads)
2130 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2131 struct evergreen_wm_params wm_low, wm_high;
2135 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2136 u32 priority_a_mark = 0, priority_b_mark = 0;
/* default: priority forcing off; bits are OR-ed in below if needed */
2137 u32 priority_a_cnt = PRIORITY_OFF;
2138 u32 priority_b_cnt = PRIORITY_OFF;
/* per-pipe register stride for the PIPE0_* arbitration registers */
2139 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2140 u32 tmp, arb_control3;
2143 if (radeon_crtc->base.enabled && num_heads && mode) {
/* mode->clock is in kHz, so this yields the pixel period in ns */
2144 pixel_period = 1000000 / (u32)mode->clock;
/* line time in ns, clamped to the 16-bit register field */
2145 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2148 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2150 /* watermark for high clocks */
2151 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
/* DPM active: use the DPM-reported clocks (false = high state) */
2153 radeon_dpm_get_mclk(rdev, false) * 10;
2155 radeon_dpm_get_sclk(rdev, false) * 10;
/* otherwise fall back to the currently programmed clocks */
2157 wm_high.yclk = rdev->pm.current_mclk * 10;
2158 wm_high.sclk = rdev->pm.current_sclk * 10;
2161 wm_high.disp_clk = mode->clock;
2162 wm_high.src_width = mode->crtc_hdisplay;
2163 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2164 wm_high.blank_time = line_time - wm_high.active_time;
2165 wm_high.interlaced = false;
2166 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2167 wm_high.interlaced = true;
2168 wm_high.vsc = radeon_crtc->vsc;
/* scaler active: vertical tap count matters for latency hiding */
2170 if (radeon_crtc->rmx_type != RMX_OFF)
2172 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2173 wm_high.lb_size = lb_size;
2174 wm_high.dram_channels = dram_channels;
2175 wm_high.num_heads = num_heads;
2177 /* watermark for low clocks */
2178 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
/* DPM active: low-state clocks (true = low state) */
2180 radeon_dpm_get_mclk(rdev, true) * 10;
2182 radeon_dpm_get_sclk(rdev, true) * 10;
2184 wm_low.yclk = rdev->pm.current_mclk * 10;
2185 wm_low.sclk = rdev->pm.current_sclk * 10;
2188 wm_low.disp_clk = mode->clock;
2189 wm_low.src_width = mode->crtc_hdisplay;
2190 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2191 wm_low.blank_time = line_time - wm_low.active_time;
2192 wm_low.interlaced = false;
2193 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2194 wm_low.interlaced = true;
2195 wm_low.vsc = radeon_crtc->vsc;
2197 if (radeon_crtc->rmx_type != RMX_OFF)
2199 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2200 wm_low.lb_size = lb_size;
2201 wm_low.dram_channels = dram_channels;
2202 wm_low.num_heads = num_heads;
2204 /* set for high clocks */
2205 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2206 /* set for low clocks */
2207 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2209 /* possibly force display priority to high */
2210 /* should really do this at mode validation time... */
/* any bandwidth/latency check failing (or user override) forces priority */
2211 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2212 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2213 !evergreen_check_latency_hiding(&wm_high) ||
2214 (rdev->disp_priority == 2)) {
2215 DRM_DEBUG_KMS("force priority a to high\n");
2216 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2218 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2219 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2220 !evergreen_check_latency_hiding(&wm_low) ||
2221 (rdev->disp_priority == 2)) {
2222 DRM_DEBUG_KMS("force priority b to high\n");
2223 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority mark A = latency_a * (clock/1000) * hsc / 1000 / 16, in
 * fixed point; the result is truncated into the PRIORITY_MARK field */
2226 a.full = dfixed_const(1000);
2227 b.full = dfixed_const(mode->clock);
2228 b.full = dfixed_div(b, a);
2229 c.full = dfixed_const(latency_watermark_a);
2230 c.full = dfixed_mul(c, b);
2231 c.full = dfixed_mul(c, radeon_crtc->hsc);
2232 c.full = dfixed_div(c, a);
2233 a.full = dfixed_const(16);
2234 c.full = dfixed_div(c, a);
2235 priority_a_mark = dfixed_trunc(c);
2236 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* same computation for the B (low-clock) mark */
2238 a.full = dfixed_const(1000);
2239 b.full = dfixed_const(mode->clock);
2240 b.full = dfixed_div(b, a);
2241 c.full = dfixed_const(latency_watermark_b);
2242 c.full = dfixed_mul(c, b);
2243 c.full = dfixed_mul(c, radeon_crtc->hsc);
2244 c.full = dfixed_div(c, a);
2245 a.full = dfixed_const(16);
2246 c.full = dfixed_div(c, a);
2247 priority_b_mark = dfixed_trunc(c);
2248 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
/* select watermark set 1, program the A (high-clock) watermark */
2252 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2254 tmp &= ~LATENCY_WATERMARK_MASK(3);
2255 tmp |= LATENCY_WATERMARK_MASK(1);
2256 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2257 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2258 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2259 LATENCY_HIGH_WATERMARK(line_time)));
/* select watermark set 2, program the B (low-clock) watermark */
2261 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2262 tmp &= ~LATENCY_WATERMARK_MASK(3);
2263 tmp |= LATENCY_WATERMARK_MASK(2);
2264 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2265 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2266 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2267 LATENCY_HIGH_WATERMARK(line_time)));
2268 /* restore original selection */
2269 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2271 /* write the priority marks */
2272 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2273 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2275 /* save values for DPM */
2276 radeon_crtc->line_time = line_time;
2277 radeon_crtc->wm_high = latency_watermark_a;
2278 radeon_crtc->wm_low = latency_watermark_b;
2282 * evergreen_bandwidth_update - update display watermarks callback.
2284 * @rdev: radeon_device pointer
2286 * Update the display watermarks based on the requested mode(s)
/* Update the display watermarks for all CRTCs (asic callback).
 * Counts active heads, then walks the CRTCs in pairs — evergreen shares one
 * line buffer between each even/odd CRTC pair, so the LB split for crtc i
 * depends on the mode of crtc i+1 and vice versa.
 * NOTE(review): the loop body incrementing num_heads and the closing braces
 * are not visible in this excerpt.
 */
2289 void evergreen_bandwidth_update(struct radeon_device *rdev)
2291 struct drm_display_mode *mode0 = NULL;
2292 struct drm_display_mode *mode1 = NULL;
2293 u32 num_heads = 0, lb_size;
2296 radeon_update_display_priority(rdev);
/* count enabled CRTCs to size per-head bandwidth shares */
2298 for (i = 0; i < rdev->num_crtc; i++) {
2299 if (rdev->mode_info.crtcs[i]->base.enabled)
/* step by two: each LB is shared by a pair of CRTCs */
2302 for (i = 0; i < rdev->num_crtc; i += 2) {
2303 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2304 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2305 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2306 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2307 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2308 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2313 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2315 * @rdev: radeon_device pointer
2317 * Wait for the MC (memory controller) to be idle.
2319 * Returns 0 if the MC is idle, -1 if not.
/* Poll SRBM_STATUS until the memory-controller busy bits (mask 0x1F00)
 * clear, bounded by rdev->usec_timeout iterations.
 * NOTE(review): the success/timeout returns are outside this excerpt;
 * per the kerneldoc above, 0 = idle, -1 = timeout.
 */
2321 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2326 for (i = 0; i < rdev->usec_timeout; i++) {
2327 /* read MC_STATUS */
2328 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/* Flush the PCIE GART TLB: flush the HDP cache, issue a VM context 0
 * request, then poll the response register until the response type
 * indicates completion (timeout bounded by rdev->usec_timeout).
 * NOTE(review): the loop body comparing the response type, and the delay
 * between polls, are not visible in this excerpt.
 */
2339 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
/* flush the HDP write cache before invalidating the TLB */
2344 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2346 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2347 for (i = 0; i < rdev->usec_timeout; i++) {
2348 /* read MC_STATUS */
2349 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2350 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
/* reached only if the poll loop above times out */
2352 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
/* Enable the PCIE GART: pin the page table in VRAM, configure the L2
 * cache and L1 TLBs, point VM context 0 at the GART page table, and
 * flush the TLB.  Returns 0 on success or a negative error code.
 * NOTE(review): some error-return lines and closing braces are missing
 * from this excerpt.
 */
2362 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2367 if (rdev->gart.robj == NULL) {
2368 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2371 r = radeon_gart_table_vram_pin(rdev);
2374 radeon_gart_restore(rdev);
2375 /* Setup L2 cache */
2376 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2377 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2378 EFFECTIVE_L2_QUEUE_SIZE(7));
2379 WREG32(VM_L2_CNTL2, 0);
2380 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2381 /* Setup TLB control */
2382 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2383 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2384 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2385 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* IGP parts (Fusion) use the FUS_* register addresses for the MD TLBs */
2386 if (rdev->flags & RADEON_IS_IGP) {
2387 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2388 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2389 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2391 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2392 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2393 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* only these families have a fourth MD L1 TLB */
2394 if ((rdev->family == CHIP_JUNIPER) ||
2395 (rdev->family == CHIP_CYPRESS) ||
2396 (rdev->family == CHIP_HEMLOCK) ||
2397 (rdev->family == CHIP_BARTS))
2398 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2400 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2401 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2402 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2403 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* VM context 0 covers the GTT range and points at the GART table */
2404 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2405 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2406 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2407 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2408 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* faults are redirected to the dummy page */
2409 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2410 (u32)(rdev->dummy_page.addr >> 12));
2411 WREG32(VM_CONTEXT1_CNTL, 0);
2413 evergreen_pcie_gart_tlb_flush(rdev);
2414 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2415 (unsigned)(rdev->mc.gtt_size >> 20),
2416 (unsigned long long)rdev->gart.table_addr);
2417 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off both VM contexts, reprogram the L2
 * cache without ENABLE_L2_CACHE and the L1 TLBs without ENABLE_L1_TLB
 * (sizes only), then unpin the page table from VRAM.
 */
2421 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2425 /* Disable all tables */
2426 WREG32(VM_CONTEXT0_CNTL, 0);
2427 WREG32(VM_CONTEXT1_CNTL, 0);
2429 /* Setup L2 cache */
2430 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2431 EFFECTIVE_L2_QUEUE_SIZE(7));
2432 WREG32(VM_L2_CNTL2, 0);
2433 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2434 /* Setup TLB control */
/* no ENABLE_L1_TLB bit: TLBs stay disabled, only sizes programmed */
2435 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2436 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2437 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2438 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2439 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2440 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2441 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2442 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2443 radeon_gart_table_vram_unpin(rdev);
/* Tear down the PCIE GART: disable it, free the VRAM page table, and
 * release the generic GART state.  Order matters: hardware must be
 * disabled before the table memory is freed.
 */
2446 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2448 evergreen_pcie_gart_disable(rdev);
2449 radeon_gart_table_vram_free(rdev);
2450 radeon_gart_fini(rdev);
/* Configure the MC for AGP operation: same L2/L1 TLB setup as the GART
 * path, but both VM contexts are left disabled (no page table — AGP
 * aperture translation is handled by the bridge).
 */
2454 static void evergreen_agp_enable(struct radeon_device *rdev)
2458 /* Setup L2 cache */
2459 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2460 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2461 EFFECTIVE_L2_QUEUE_SIZE(7));
2462 WREG32(VM_L2_CNTL2, 0);
2463 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2464 /* Setup TLB control */
2465 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2466 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2467 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2468 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2469 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2470 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2471 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2472 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2473 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2474 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2475 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* no VM contexts: GPU addresses are not page-table translated */
2476 WREG32(VM_CONTEXT0_CNTL, 0);
2477 WREG32(VM_CONTEXT1_CNTL, 0);
/* Stop display access to the memory controller before MC reprogramming.
 * Saves VGA state, blanks/disables each active CRTC (recording which were
 * enabled in @save so evergreen_mc_resume() can restore them), waits a
 * frame per CRTC, blacks out the MC, and locks double-buffered registers.
 * NOTE(review): several else branches, delays and closing braces are
 * missing from this excerpt.
 */
2480 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2482 u32 crtc_enabled, tmp, frame_count, blackout;
2485 if (!ASIC_IS_NODCE(rdev)) {
/* save VGA state so resume can restore it */
2486 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2487 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2489 /* disable VGA render */
2490 WREG32(VGA_RENDER_CONTROL, 0);
2492 /* blank the display controllers */
2493 for (i = 0; i < rdev->num_crtc; i++) {
2494 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2496 save->crtc_enabled[i] = true;
/* DCE6+ blanks via BLANK_CONTROL; older parts disable read requests */
2497 if (ASIC_IS_DCE6(rdev)) {
2498 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2499 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2500 radeon_wait_for_vblank(rdev, i);
2501 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2502 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2503 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2506 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2507 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2508 radeon_wait_for_vblank(rdev, i);
2509 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2510 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2511 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2512 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2515 /* wait for the next frame */
2516 frame_count = radeon_get_vblank_counter(rdev, i);
2517 for (j = 0; j < rdev->usec_timeout; j++) {
2518 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2523 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2524 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2525 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2526 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2527 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2528 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2529 save->crtc_enabled[i] = false;
2532 save->crtc_enabled[i] = false;
2536 radeon_mc_wait_for_idle(rdev);
2538 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
/* only enter blackout if not already in it */
2539 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2540 /* Block CPU access */
2541 WREG32(BIF_FB_EN, 0);
2542 /* blackout the MC */
2543 blackout &= ~BLACKOUT_MODE_MASK;
2544 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2546 /* wait for the MC to settle */
2549 /* lock double buffered regs */
2550 for (i = 0; i < rdev->num_crtc; i++) {
2551 if (save->crtc_enabled[i]) {
2552 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2553 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2554 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2555 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2557 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2560 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Restore display access after MC reprogramming (pairs with
 * evergreen_mc_stop()).  Repoints all CRTC/VGA surfaces at the (possibly
 * relocated) VRAM base, unlocks double-buffered registers and waits for
 * pending surface updates, lifts the MC blackout, re-enables CPU FB
 * access, unblanks the CRTCs saved in @save, and restores VGA state.
 * NOTE(review): some delays and closing braces are missing from this
 * excerpt.
 */
2566 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2568 u32 tmp, frame_count;
2571 /* update crtc base addresses */
2572 for (i = 0; i < rdev->num_crtc; i++) {
2573 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2574 upper_32_bits(rdev->mc.vram_start));
2575 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2576 upper_32_bits(rdev->mc.vram_start));
2577 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2578 (u32)rdev->mc.vram_start);
2579 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2580 (u32)rdev->mc.vram_start);
2583 if (!ASIC_IS_NODCE(rdev)) {
2584 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2585 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2588 /* unlock regs and wait for update */
2589 for (i = 0; i < rdev->num_crtc; i++) {
2590 if (save->crtc_enabled[i]) {
2591 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2592 if ((tmp & 0x3) != 0) {
2594 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2596 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2597 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2598 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2599 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2601 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2604 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* wait for the surface flip to the new base to complete */
2606 for (j = 0; j < rdev->usec_timeout; j++) {
2607 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2608 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2615 /* unblackout the MC */
2616 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2617 tmp &= ~BLACKOUT_MODE_MASK;
2618 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2619 /* allow CPU access */
2620 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2622 for (i = 0; i < rdev->num_crtc; i++) {
2623 if (save->crtc_enabled[i]) {
/* DCE6+ unblanks via BLANK_CONTROL; older parts re-enable read requests */
2624 if (ASIC_IS_DCE6(rdev)) {
2625 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2626 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2627 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2628 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2629 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2631 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2632 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2633 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2634 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2635 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2637 /* wait for the next frame */
2638 frame_count = radeon_get_vblank_counter(rdev, i);
2639 for (j = 0; j < rdev->usec_timeout; j++) {
2640 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2646 if (!ASIC_IS_NODCE(rdev)) {
2647 /* Unlock vga access */
2648 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control)
2650 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
/* Program the memory controller aperture layout.
 * Stops the display (evergreen_mc_stop), waits for MC idle, then programs
 * the system aperture, FB location, HDP non-surface range and AGP
 * registers according to the VRAM/GTT layout in rdev->mc, and finally
 * resumes display and disables the VGA renderer.
 */
2654 void evergreen_mc_program(struct radeon_device *rdev)
2656 struct evergreen_mc_save save;
2660 /* Initialize HDP */
/* clear 32 HDP register tuples at stride 0x18 starting at 0x2c14 */
2661 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2662 WREG32((0x2c14 + j), 0x00000000);
2663 WREG32((0x2c18 + j), 0x00000000);
2664 WREG32((0x2c1c + j), 0x00000000);
2665 WREG32((0x2c20 + j), 0x00000000);
2666 WREG32((0x2c24 + j), 0x00000000);
2668 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2670 evergreen_mc_stop(rdev, &save);
2671 if (evergreen_mc_wait_for_idle(rdev)) {
2672 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2674 /* Lockout access through VGA aperture*/
2675 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2676 /* Update configuration */
/* system aperture must span both VRAM and AGP/GTT, whichever order */
2677 if (rdev->flags & RADEON_IS_AGP) {
2678 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2679 /* VRAM before AGP */
2680 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2681 rdev->mc.vram_start >> 12);
2682 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2683 rdev->mc.gtt_end >> 12);
2685 /* VRAM after AGP */
2686 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2687 rdev->mc.gtt_start >> 12);
2688 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2689 rdev->mc.vram_end >> 12);
2692 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2693 rdev->mc.vram_start >> 12);
2694 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2695 rdev->mc.vram_end >> 12);
2697 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2698 /* llano/ontario only */
2699 if ((rdev->family == CHIP_PALM) ||
2700 (rdev->family == CHIP_SUMO) ||
2701 (rdev->family == CHIP_SUMO2)) {
2702 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2703 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2704 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2705 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB location: end in bits 31:16, start in bits 15:0, 16MB units */
2707 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2708 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2709 WREG32(MC_VM_FB_LOCATION, tmp);
2710 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2711 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2712 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2713 if (rdev->flags & RADEON_IS_AGP) {
2714 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2715 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2716 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
/* non-AGP: program an empty/invalid AGP window (BOT > TOP) */
2718 WREG32(MC_VM_AGP_BASE, 0);
2719 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2720 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2722 if (evergreen_mc_wait_for_idle(rdev)) {
2723 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2725 evergreen_mc_resume(rdev, &save);
2726 /* we need to own VRAM, so turn off the VGA renderer here
2727 * to stop it overwriting our objects */
2728 rv515_vga_render_disable(rdev);
/* Schedule an indirect buffer (IB) on the GFX ring.
 * Switches the CP into DX10/11 mode, emits a next-rptr update (via a
 * config-register write if rptr_save_reg is set, else a MEM_WRITE when
 * writeback is enabled), then emits the INDIRECT_BUFFER packet with the
 * IB's GPU address and size.
 */
2734 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2736 struct radeon_ring *ring = &rdev->ring[ib->ring];
2739 /* set to DX10/11 mode */
2740 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2741 radeon_ring_write(ring, 1);
2743 if (ring->rptr_save_reg) {
/* +3 for this packet, +4 for the IB packet that follows */
2744 next_rptr = ring->wptr + 3 + 4;
2745 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2746 radeon_ring_write(ring, ((ring->rptr_save_reg -
2747 PACKET3_SET_CONFIG_REG_START) >> 2));
2748 radeon_ring_write(ring, next_rptr);
2749 } else if (rdev->wb.enabled) {
2750 next_rptr = ring->wptr + 5 + 4;
2751 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2752 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
/* (1 << 18) selects a 32-bit write per the MEM_WRITE packet format */
2753 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2754 radeon_ring_write(ring, next_rptr);
2755 radeon_ring_write(ring, 0);
2758 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2759 radeon_ring_write(ring,
/* IB base must be 4-byte aligned; low bits carry packet flags */
2763 (ib->gpu_addr & 0xFFFFFFFC));
2764 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2765 radeon_ring_write(ring, ib->length_dw);
/* Load the CP (command processor) PFP and ME microcode into the chip.
 * The firmware blobs are stored big-endian; each word is byte-swapped to
 * CPU order before being written to the ucode data ports.  Returns an
 * error if either firmware image has not been loaded.
 */
2769 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2771 const __be32 *fw_data;
2774 if (!rdev->me_fw || !rdev->pfp_fw)
/* halt the CP ring buffer while ucode is being replaced */
2782 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2784 fw_data = (const __be32 *)rdev->pfp_fw->data;
2785 WREG32(CP_PFP_UCODE_ADDR, 0);
2786 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2787 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2788 WREG32(CP_PFP_UCODE_ADDR, 0);
2790 fw_data = (const __be32 *)rdev->me_fw->data;
2791 WREG32(CP_ME_RAM_WADDR, 0);
2792 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2793 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
/* reset the address/read/write pointers after upload */
2795 WREG32(CP_PFP_UCODE_ADDR, 0);
2796 WREG32(CP_ME_RAM_WADDR, 0);
2797 WREG32(CP_ME_RAM_RADDR, 0);
/* Start the CP: emit the ME_INITIALIZE packet, un-halt the CP, then emit
 * the golden clear-state sequence (evergreen_default_state) plus a few
 * fixed register writes so the ring starts from a known GPU state.
 * Returns 0 on success or the ring-lock error code.
 */
2801 static int evergreen_cp_start(struct radeon_device *rdev)
2803 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2807 r = radeon_ring_lock(rdev, ring, 7);
2809 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2812 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2813 radeon_ring_write(ring, 0x1);
2814 radeon_ring_write(ring, 0x0);
2815 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2816 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2817 radeon_ring_write(ring, 0);
2818 radeon_ring_write(ring, 0);
2819 radeon_ring_unlock_commit(rdev, ring);
/* release the CP halt so it starts fetching */
2822 WREG32(CP_ME_CNTL, cp_me);
/* +19 dwords of packets wrapped around the default state below */
2824 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2826 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2830 /* setup clear context state */
2831 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2832 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2834 for (i = 0; i < evergreen_default_size; i++)
2835 radeon_ring_write(ring, evergreen_default_state[i]);
2837 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2838 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2840 /* set clear context state */
2841 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2842 radeon_ring_write(ring, 0);
2844 /* SQ_VTX_BASE_VTX_LOC */
2845 radeon_ring_write(ring, 0xc0026f00);
2846 radeon_ring_write(ring, 0x00000000);
2847 radeon_ring_write(ring, 0x00000000);
2848 radeon_ring_write(ring, 0x00000000);
/* raw PM4 packet; register offset 0xbc4 — see PM4 packet format docs */
2851 radeon_ring_write(ring, 0xc0036f00);
2852 radeon_ring_write(ring, 0x00000bc4);
2853 radeon_ring_write(ring, 0xffffffff);
2854 radeon_ring_write(ring, 0xffffffff);
2855 radeon_ring_write(ring, 0xffffffff);
2857 radeon_ring_write(ring, 0xc0026900);
2858 radeon_ring_write(ring, 0x00000316);
2859 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2860 radeon_ring_write(ring, 0x00000010); /* */
2862 radeon_ring_unlock_commit(rdev, ring);
/* Resume the CP after reset/suspend: soft-reset the CP and graphics
 * blocks, program the ring buffer size/pointers/writeback addresses,
 * start the CP (evergreen_cp_start) and run a ring test.
 * NOTE(review): some reset flags, delays, else branches and the final
 * return are missing from this excerpt.
 */
2867 static int evergreen_cp_resume(struct radeon_device *rdev)
2869 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2874 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2875 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* read back to make sure the reset write has posted */
2881 RREG32(GRBM_SOFT_RESET);
2883 WREG32(GRBM_SOFT_RESET, 0);
2884 RREG32(GRBM_SOFT_RESET);
2886 /* Set ring buffer size */
2887 rb_bufsz = drm_order(ring->ring_size / 8);
2888 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
/* big-endian hosts need the CP to byte-swap ring reads */
2890 tmp |= BUF_SWAP_32BIT;
2892 WREG32(CP_RB_CNTL, tmp);
2893 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2894 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2896 /* Set the write pointer delay */
2897 WREG32(CP_RB_WPTR_DELAY, 0);
2899 /* Initialize the ring buffer's read and write pointers */
2900 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2901 WREG32(CP_RB_RPTR_WR, 0);
2903 WREG32(CP_RB_WPTR, ring->wptr);
2905 /* set the wb address whether it's enabled or not */
2906 WREG32(CP_RB_RPTR_ADDR,
2907 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2908 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2909 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2911 if (rdev->wb.enabled)
2912 WREG32(SCRATCH_UMSK, 0xff);
/* no writeback: CP must not update rptr in memory */
2914 tmp |= RB_NO_UPDATE;
2915 WREG32(SCRATCH_UMSK, 0);
/* clear RB_RPTR_WR_ENA again with the final CNTL value */
2919 WREG32(CP_RB_CNTL, tmp);
2921 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2922 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2924 ring->rptr = RREG32(CP_RB_RPTR);
2926 evergreen_cp_start(rdev);
2928 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2930 ring->ready = false;
2939 static void evergreen_gpu_init(struct radeon_device *rdev)
2942 u32 mc_shared_chmap, mc_arb_ramcfg;
2946 u32 sq_lds_resource_mgmt;
2947 u32 sq_gpr_resource_mgmt_1;
2948 u32 sq_gpr_resource_mgmt_2;
2949 u32 sq_gpr_resource_mgmt_3;
2950 u32 sq_thread_resource_mgmt;
2951 u32 sq_thread_resource_mgmt_2;
2952 u32 sq_stack_resource_mgmt_1;
2953 u32 sq_stack_resource_mgmt_2;
2954 u32 sq_stack_resource_mgmt_3;
2955 u32 vgt_cache_invalidation;
2956 u32 hdp_host_path_cntl, tmp;
2957 u32 disabled_rb_mask;
2958 int i, j, num_shader_engines, ps_thread_count;
2960 switch (rdev->family) {
2963 rdev->config.evergreen.num_ses = 2;
2964 rdev->config.evergreen.max_pipes = 4;
2965 rdev->config.evergreen.max_tile_pipes = 8;
2966 rdev->config.evergreen.max_simds = 10;
2967 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2968 rdev->config.evergreen.max_gprs = 256;
2969 rdev->config.evergreen.max_threads = 248;
2970 rdev->config.evergreen.max_gs_threads = 32;
2971 rdev->config.evergreen.max_stack_entries = 512;
2972 rdev->config.evergreen.sx_num_of_sets = 4;
2973 rdev->config.evergreen.sx_max_export_size = 256;
2974 rdev->config.evergreen.sx_max_export_pos_size = 64;
2975 rdev->config.evergreen.sx_max_export_smx_size = 192;
2976 rdev->config.evergreen.max_hw_contexts = 8;
2977 rdev->config.evergreen.sq_num_cf_insts = 2;
2979 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2980 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2981 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2982 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2985 rdev->config.evergreen.num_ses = 1;
2986 rdev->config.evergreen.max_pipes = 4;
2987 rdev->config.evergreen.max_tile_pipes = 4;
2988 rdev->config.evergreen.max_simds = 10;
2989 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2990 rdev->config.evergreen.max_gprs = 256;
2991 rdev->config.evergreen.max_threads = 248;
2992 rdev->config.evergreen.max_gs_threads = 32;
2993 rdev->config.evergreen.max_stack_entries = 512;
2994 rdev->config.evergreen.sx_num_of_sets = 4;
2995 rdev->config.evergreen.sx_max_export_size = 256;
2996 rdev->config.evergreen.sx_max_export_pos_size = 64;
2997 rdev->config.evergreen.sx_max_export_smx_size = 192;
2998 rdev->config.evergreen.max_hw_contexts = 8;
2999 rdev->config.evergreen.sq_num_cf_insts = 2;
3001 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3002 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3003 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3004 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3007 rdev->config.evergreen.num_ses = 1;
3008 rdev->config.evergreen.max_pipes = 4;
3009 rdev->config.evergreen.max_tile_pipes = 4;
3010 rdev->config.evergreen.max_simds = 5;
3011 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3012 rdev->config.evergreen.max_gprs = 256;
3013 rdev->config.evergreen.max_threads = 248;
3014 rdev->config.evergreen.max_gs_threads = 32;
3015 rdev->config.evergreen.max_stack_entries = 256;
3016 rdev->config.evergreen.sx_num_of_sets = 4;
3017 rdev->config.evergreen.sx_max_export_size = 256;
3018 rdev->config.evergreen.sx_max_export_pos_size = 64;
3019 rdev->config.evergreen.sx_max_export_smx_size = 192;
3020 rdev->config.evergreen.max_hw_contexts = 8;
3021 rdev->config.evergreen.sq_num_cf_insts = 2;
3023 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3024 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3025 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3026 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3030 rdev->config.evergreen.num_ses = 1;
3031 rdev->config.evergreen.max_pipes = 2;
3032 rdev->config.evergreen.max_tile_pipes = 2;
3033 rdev->config.evergreen.max_simds = 2;
3034 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3035 rdev->config.evergreen.max_gprs = 256;
3036 rdev->config.evergreen.max_threads = 192;
3037 rdev->config.evergreen.max_gs_threads = 16;
3038 rdev->config.evergreen.max_stack_entries = 256;
3039 rdev->config.evergreen.sx_num_of_sets = 4;
3040 rdev->config.evergreen.sx_max_export_size = 128;
3041 rdev->config.evergreen.sx_max_export_pos_size = 32;
3042 rdev->config.evergreen.sx_max_export_smx_size = 96;
3043 rdev->config.evergreen.max_hw_contexts = 4;
3044 rdev->config.evergreen.sq_num_cf_insts = 1;
3046 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3047 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3048 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3049 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3052 rdev->config.evergreen.num_ses = 1;
3053 rdev->config.evergreen.max_pipes = 2;
3054 rdev->config.evergreen.max_tile_pipes = 2;
3055 rdev->config.evergreen.max_simds = 2;
3056 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3057 rdev->config.evergreen.max_gprs = 256;
3058 rdev->config.evergreen.max_threads = 192;
3059 rdev->config.evergreen.max_gs_threads = 16;
3060 rdev->config.evergreen.max_stack_entries = 256;
3061 rdev->config.evergreen.sx_num_of_sets = 4;
3062 rdev->config.evergreen.sx_max_export_size = 128;
3063 rdev->config.evergreen.sx_max_export_pos_size = 32;
3064 rdev->config.evergreen.sx_max_export_smx_size = 96;
3065 rdev->config.evergreen.max_hw_contexts = 4;
3066 rdev->config.evergreen.sq_num_cf_insts = 1;
3068 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3069 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3070 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3071 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3074 rdev->config.evergreen.num_ses = 1;
3075 rdev->config.evergreen.max_pipes = 4;
3076 rdev->config.evergreen.max_tile_pipes = 4;
3077 if (rdev->pdev->device == 0x9648)
3078 rdev->config.evergreen.max_simds = 3;
3079 else if ((rdev->pdev->device == 0x9647) ||
3080 (rdev->pdev->device == 0x964a))
3081 rdev->config.evergreen.max_simds = 4;
3083 rdev->config.evergreen.max_simds = 5;
3084 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3085 rdev->config.evergreen.max_gprs = 256;
3086 rdev->config.evergreen.max_threads = 248;
3087 rdev->config.evergreen.max_gs_threads = 32;
3088 rdev->config.evergreen.max_stack_entries = 256;
3089 rdev->config.evergreen.sx_num_of_sets = 4;
3090 rdev->config.evergreen.sx_max_export_size = 256;
3091 rdev->config.evergreen.sx_max_export_pos_size = 64;
3092 rdev->config.evergreen.sx_max_export_smx_size = 192;
3093 rdev->config.evergreen.max_hw_contexts = 8;
3094 rdev->config.evergreen.sq_num_cf_insts = 2;
3096 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3097 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3098 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3099 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3102 rdev->config.evergreen.num_ses = 1;
3103 rdev->config.evergreen.max_pipes = 4;
3104 rdev->config.evergreen.max_tile_pipes = 4;
3105 rdev->config.evergreen.max_simds = 2;
3106 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3107 rdev->config.evergreen.max_gprs = 256;
3108 rdev->config.evergreen.max_threads = 248;
3109 rdev->config.evergreen.max_gs_threads = 32;
3110 rdev->config.evergreen.max_stack_entries = 512;
3111 rdev->config.evergreen.sx_num_of_sets = 4;
3112 rdev->config.evergreen.sx_max_export_size = 256;
3113 rdev->config.evergreen.sx_max_export_pos_size = 64;
3114 rdev->config.evergreen.sx_max_export_smx_size = 192;
3115 rdev->config.evergreen.max_hw_contexts = 8;
3116 rdev->config.evergreen.sq_num_cf_insts = 2;
3118 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3119 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3120 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3121 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3124 rdev->config.evergreen.num_ses = 2;
3125 rdev->config.evergreen.max_pipes = 4;
3126 rdev->config.evergreen.max_tile_pipes = 8;
3127 rdev->config.evergreen.max_simds = 7;
3128 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3129 rdev->config.evergreen.max_gprs = 256;
3130 rdev->config.evergreen.max_threads = 248;
3131 rdev->config.evergreen.max_gs_threads = 32;
3132 rdev->config.evergreen.max_stack_entries = 512;
3133 rdev->config.evergreen.sx_num_of_sets = 4;
3134 rdev->config.evergreen.sx_max_export_size = 256;
3135 rdev->config.evergreen.sx_max_export_pos_size = 64;
3136 rdev->config.evergreen.sx_max_export_smx_size = 192;
3137 rdev->config.evergreen.max_hw_contexts = 8;
3138 rdev->config.evergreen.sq_num_cf_insts = 2;
3140 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3141 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3142 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3143 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3146 rdev->config.evergreen.num_ses = 1;
3147 rdev->config.evergreen.max_pipes = 4;
3148 rdev->config.evergreen.max_tile_pipes = 4;
3149 rdev->config.evergreen.max_simds = 6;
3150 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3151 rdev->config.evergreen.max_gprs = 256;
3152 rdev->config.evergreen.max_threads = 248;
3153 rdev->config.evergreen.max_gs_threads = 32;
3154 rdev->config.evergreen.max_stack_entries = 256;
3155 rdev->config.evergreen.sx_num_of_sets = 4;
3156 rdev->config.evergreen.sx_max_export_size = 256;
3157 rdev->config.evergreen.sx_max_export_pos_size = 64;
3158 rdev->config.evergreen.sx_max_export_smx_size = 192;
3159 rdev->config.evergreen.max_hw_contexts = 8;
3160 rdev->config.evergreen.sq_num_cf_insts = 2;
3162 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3163 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3164 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3165 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3168 rdev->config.evergreen.num_ses = 1;
3169 rdev->config.evergreen.max_pipes = 2;
3170 rdev->config.evergreen.max_tile_pipes = 2;
3171 rdev->config.evergreen.max_simds = 2;
3172 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3173 rdev->config.evergreen.max_gprs = 256;
3174 rdev->config.evergreen.max_threads = 192;
3175 rdev->config.evergreen.max_gs_threads = 16;
3176 rdev->config.evergreen.max_stack_entries = 256;
3177 rdev->config.evergreen.sx_num_of_sets = 4;
3178 rdev->config.evergreen.sx_max_export_size = 128;
3179 rdev->config.evergreen.sx_max_export_pos_size = 32;
3180 rdev->config.evergreen.sx_max_export_smx_size = 96;
3181 rdev->config.evergreen.max_hw_contexts = 4;
3182 rdev->config.evergreen.sq_num_cf_insts = 1;
3184 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3185 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3186 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3187 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3191 /* Initialize HDP */
3192 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3193 WREG32((0x2c14 + j), 0x00000000);
3194 WREG32((0x2c18 + j), 0x00000000);
3195 WREG32((0x2c1c + j), 0x00000000);
3196 WREG32((0x2c20 + j), 0x00000000);
3197 WREG32((0x2c24 + j), 0x00000000);
3200 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3202 evergreen_fix_pci_max_read_req_size(rdev);
3204 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3205 if ((rdev->family == CHIP_PALM) ||
3206 (rdev->family == CHIP_SUMO) ||
3207 (rdev->family == CHIP_SUMO2))
3208 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3210 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3212 /* setup tiling info dword. gb_addr_config is not adequate since it does
3213 * not have bank info, so create a custom tiling dword.
3214 * bits 3:0 num_pipes
3215 * bits 7:4 num_banks
3216 * bits 11:8 group_size
3217 * bits 15:12 row_size
3219 rdev->config.evergreen.tile_config = 0;
3220 switch (rdev->config.evergreen.max_tile_pipes) {
3223 rdev->config.evergreen.tile_config |= (0 << 0);
3226 rdev->config.evergreen.tile_config |= (1 << 0);
3229 rdev->config.evergreen.tile_config |= (2 << 0);
3232 rdev->config.evergreen.tile_config |= (3 << 0);
3235 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3236 if (rdev->flags & RADEON_IS_IGP)
3237 rdev->config.evergreen.tile_config |= 1 << 4;
3239 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3240 case 0: /* four banks */
3241 rdev->config.evergreen.tile_config |= 0 << 4;
3243 case 1: /* eight banks */
3244 rdev->config.evergreen.tile_config |= 1 << 4;
3246 case 2: /* sixteen banks */
3248 rdev->config.evergreen.tile_config |= 2 << 4;
3252 rdev->config.evergreen.tile_config |= 0 << 8;
3253 rdev->config.evergreen.tile_config |=
3254 ((gb_addr_config & 0x30000000) >> 28) << 12;
3256 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3258 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3262 efuse_straps_4 = RREG32_RCU(0x204);
3263 efuse_straps_3 = RREG32_RCU(0x203);
3264 tmp = (((efuse_straps_4 & 0xf) << 4) |
3265 ((efuse_straps_3 & 0xf0000000) >> 28));
3268 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3269 u32 rb_disable_bitmap;
3271 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3272 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3273 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3275 tmp |= rb_disable_bitmap;
3278 /* enabled rb are just the one not disabled :) */
3279 disabled_rb_mask = tmp;
3281 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3283 /* if all the backends are disabled, fix it up here */
3284 if ((disabled_rb_mask & tmp) == tmp) {
3285 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3286 disabled_rb_mask &= ~(1 << i);
3289 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3290 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3292 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3293 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3294 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3295 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3296 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3297 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3298 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3300 if ((rdev->config.evergreen.max_backends == 1) &&
3301 (rdev->flags & RADEON_IS_IGP)) {
3302 if ((disabled_rb_mask & 3) == 1) {
3303 /* RB0 disabled, RB1 enabled */
3306 /* RB1 disabled, RB0 enabled */
3310 tmp = gb_addr_config & NUM_PIPES_MASK;
3311 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3312 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3314 WREG32(GB_BACKEND_MAP, tmp);
3316 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3317 WREG32(CGTS_TCC_DISABLE, 0);
3318 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3319 WREG32(CGTS_USER_TCC_DISABLE, 0);
3321 /* set HW defaults for 3D engine */
3322 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3323 ROQ_IB2_START(0x2b)));
3325 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3327 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3332 sx_debug_1 = RREG32(SX_DEBUG_1);
3333 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3334 WREG32(SX_DEBUG_1, sx_debug_1);
3337 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3338 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3339 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3340 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3342 if (rdev->family <= CHIP_SUMO2)
3343 WREG32(SMX_SAR_CTL0, 0x00010000);
3345 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3346 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3347 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3349 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3350 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3351 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3353 WREG32(VGT_NUM_INSTANCES, 1);
3354 WREG32(SPI_CONFIG_CNTL, 0);
3355 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3356 WREG32(CP_PERFMON_CNTL, 0);
3358 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3359 FETCH_FIFO_HIWATER(0x4) |
3360 DONE_FIFO_HIWATER(0xe0) |
3361 ALU_UPDATE_FIFO_HIWATER(0x8)));
3363 sq_config = RREG32(SQ_CONFIG);
3364 sq_config &= ~(PS_PRIO(3) |
3368 sq_config |= (VC_ENABLE |
3375 switch (rdev->family) {
3381 /* no vertex cache */
3382 sq_config &= ~VC_ENABLE;
3388 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3390 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3391 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3392 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3393 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3394 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3395 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3396 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3398 switch (rdev->family) {
3403 ps_thread_count = 96;
3406 ps_thread_count = 128;
3410 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3411 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3412 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3413 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3414 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3415 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3417 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3418 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3419 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3420 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3421 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3422 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3424 WREG32(SQ_CONFIG, sq_config);
3425 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3426 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3427 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3428 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3429 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3430 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3431 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3432 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3433 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3434 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3436 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3437 FORCE_EOV_MAX_REZ_CNT(255)));
3439 switch (rdev->family) {
3445 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3448 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3451 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3452 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3454 WREG32(VGT_GS_VERTEX_REUSE, 16);
3455 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3456 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3458 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3459 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3461 WREG32(CB_PERF_CTR0_SEL_0, 0);
3462 WREG32(CB_PERF_CTR0_SEL_1, 0);
3463 WREG32(CB_PERF_CTR1_SEL_0, 0);
3464 WREG32(CB_PERF_CTR1_SEL_1, 0);
3465 WREG32(CB_PERF_CTR2_SEL_0, 0);
3466 WREG32(CB_PERF_CTR2_SEL_1, 0);
3467 WREG32(CB_PERF_CTR3_SEL_0, 0);
3468 WREG32(CB_PERF_CTR3_SEL_1, 0);
3470 /* clear render buffer base addresses */
3471 WREG32(CB_COLOR0_BASE, 0);
3472 WREG32(CB_COLOR1_BASE, 0);
3473 WREG32(CB_COLOR2_BASE, 0);
3474 WREG32(CB_COLOR3_BASE, 0);
3475 WREG32(CB_COLOR4_BASE, 0);
3476 WREG32(CB_COLOR5_BASE, 0);
3477 WREG32(CB_COLOR6_BASE, 0);
3478 WREG32(CB_COLOR7_BASE, 0);
3479 WREG32(CB_COLOR8_BASE, 0);
3480 WREG32(CB_COLOR9_BASE, 0);
3481 WREG32(CB_COLOR10_BASE, 0);
3482 WREG32(CB_COLOR11_BASE, 0);
3484 /* set the shader const cache sizes to 0 */
3485 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3487 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3490 tmp = RREG32(HDP_MISC_CNTL);
3491 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3492 WREG32(HDP_MISC_CNTL, tmp);
3494 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3495 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3497 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
/* evergreen_mc_init - probe VRAM properties and program the MC layout.
 *
 * Reads RAM configuration to derive channel size/count, computes
 * vram_width, records the PCI aperture, and reads CONFIG_MEMSIZE for the
 * VRAM size before handing off to r700_vram_gtt_location().
 * Fusion IGPs (PALM/SUMO/SUMO2) read FUS_MC_ARB_RAMCFG and CONFIG_MEMSIZE
 * reports bytes; discrete parts read MC_ARB_RAMCFG and CONFIG_MEMSIZE
 * reports megabytes (hence the * 1024ULL * 1024ULL below).
 *
 * NOTE(review): this extract has source lines elided (else branches, case
 * labels of the NOOFCHAN switch, closing braces) — code below is
 * fragmentary as shown; do not treat it as compilable.
 */
3503 int evergreen_mc_init(struct radeon_device *rdev)
3506 int chansize, numchan;
3508 /* Get VRAM informations */
3509 rdev->mc.vram_is_ddr = true;
/* IGP parts expose the arbiter RAM config at a different (FUS_) offset */
3510 if ((rdev->family == CHIP_PALM) ||
3511 (rdev->family == CHIP_SUMO) ||
3512 (rdev->family == CHIP_SUMO2))
3513 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3515 tmp = RREG32(MC_ARB_RAMCFG);
3516 if (tmp & CHANSIZE_OVERRIDE) {
3518 } else if (tmp & CHANSIZE_MASK) {
/* number of memory channels comes from MC_SHARED_CHMAP */
3523 tmp = RREG32(MC_SHARED_CHMAP);
3524 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3539 rdev->mc.vram_width = numchan * chansize;
3540 /* Could aper size report 0 ? */
3541 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3542 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3543 /* Setup GPU memory space */
3544 if ((rdev->family == CHIP_PALM) ||
3545 (rdev->family == CHIP_SUMO) ||
3546 (rdev->family == CHIP_SUMO2)) {
3547 /* size in bytes on fusion */
3548 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3549 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3551 /* size in MB on evergreen/cayman/tn */
3552 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3553 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
/* CPU-visible VRAM is bounded by the PCI BAR aperture */
3555 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3556 r700_vram_gtt_location(rdev, &rdev->mc);
3557 radeon_update_bandwidth_info(rdev);
/* evergreen_print_gpu_status_regs - dump GRBM/SRBM/CP/DMA status registers
 * to the kernel log for GPU-hang diagnosis.  Read-only; no side effects
 * beyond the register reads and dev_info() output.  Cayman and newer have
 * a second DMA engine, hence the extra DMA_STATUS_REG at +0x800.
 */
3562 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3564 dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3565 RREG32(GRBM_STATUS));
3566 dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3567 RREG32(GRBM_STATUS_SE0));
3568 dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3569 RREG32(GRBM_STATUS_SE1));
3570 dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3571 RREG32(SRBM_STATUS));
3572 dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3573 RREG32(SRBM_STATUS2));
3574 dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3575 RREG32(CP_STALLED_STAT1));
3576 dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3577 RREG32(CP_STALLED_STAT2));
3578 dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3579 RREG32(CP_BUSY_STAT));
3580 dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3582 dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3583 RREG32(DMA_STATUS_REG))
3584 if (rdev->family >= CHIP_CAYMAN) {
/* second DMA engine on cayman+ lives 0x800 bytes above the first */
3585 dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3586 RREG32(DMA_STATUS_REG + 0x800));
/* evergreen_is_display_hung - heuristic display-hang detector.
 *
 * Snapshots the HV counter of every CRTC whose MASTER_EN bit is set, then
 * re-samples up to 10 times; a CRTC whose counter changes is clearly still
 * scanning out and is removed from the hung set.
 *
 * NOTE(review): the tail of this function (delay between samples, the
 * final return based on crtc_hung, closing braces) is elided from this
 * extract.
 */
3590 bool evergreen_is_display_hung(struct radeon_device *rdev)
3596 for (i = 0; i < rdev->num_crtc; i++) {
3597 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3598 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3599 crtc_hung |= (1 << i);
3603 for (j = 0; j < 10; j++) {
3604 for (i = 0; i < rdev->num_crtc; i++) {
3605 if (crtc_hung & (1 << i)) {
/* a moving HV count means the CRTC is alive — clear its hung bit */
3606 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3607 if (tmp != crtc_status[i])
3608 crtc_hung &= ~(1 << i);
/* evergreen_gpu_check_soft_reset - build a RADEON_RESET_* bitmask of
 * engines that look hung, by inspecting GRBM/SRBM/DMA/VM status registers.
 * A busy MC is deliberately dropped from the mask at the end: it is most
 * likely just busy, and resetting it is risky.
 *
 * NOTE(review): some lines are elided in this extract (reset_mask init,
 * a few status-bit tests between the visible ones, the final return).
 */
3619 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3625 tmp = RREG32(GRBM_STATUS);
3626 if (tmp & (PA_BUSY | SC_BUSY |
3628 TA_BUSY | VGT_BUSY |
3630 SPI_BUSY | VGT_BUSY_NO_DMA))
3631 reset_mask |= RADEON_RESET_GFX;
3633 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3634 CP_BUSY | CP_COHERENCY_BUSY))
3635 reset_mask |= RADEON_RESET_CP;
3637 if (tmp & GRBM_EE_BUSY)
3638 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3640 /* DMA_STATUS_REG */
3641 tmp = RREG32(DMA_STATUS_REG);
3642 if (!(tmp & DMA_IDLE))
3643 reset_mask |= RADEON_RESET_DMA;
3646 tmp = RREG32(SRBM_STATUS2);
3648 reset_mask |= RADEON_RESET_DMA;
3651 tmp = RREG32(SRBM_STATUS);
3652 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3653 reset_mask |= RADEON_RESET_RLC;
3656 reset_mask |= RADEON_RESET_IH;
3659 reset_mask |= RADEON_RESET_SEM;
3661 if (tmp & GRBM_RQ_PENDING)
3662 reset_mask |= RADEON_RESET_GRBM;
3665 reset_mask |= RADEON_RESET_VMC;
3667 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3668 MCC_BUSY | MCD_BUSY))
3669 reset_mask |= RADEON_RESET_MC;
3671 if (evergreen_is_display_hung(rdev))
3672 reset_mask |= RADEON_RESET_DISPLAY;
3675 tmp = RREG32(VM_L2_STATUS);
3677 reset_mask |= RADEON_RESET_VMC;
3679 /* Skip MC reset as it's mostly likely not hung, just busy */
3680 if (reset_mask & RADEON_RESET_MC) {
3681 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3682 reset_mask &= ~RADEON_RESET_MC;
/* evergreen_gpu_soft_reset - soft-reset the engines selected by reset_mask.
 *
 * Sequence: halt CP parsing/prefetch and (if requested) the DMA ring,
 * stop the MC and wait for idle, translate reset_mask bits into
 * GRBM_SOFT_RESET / SRBM_SOFT_RESET bits, pulse each reset register
 * (write set, read back, clear, read back), then resume the MC and dump
 * status registers again.  MC reset is only allowed on discrete parts
 * (!RADEON_IS_IGP).
 *
 * NOTE(review): several lines are elided in this extract (early return,
 * udelay between set/clear, some SOFT_RESET_* OR-chains, closing braces).
 */
3688 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3690 struct evergreen_mc_save save;
3691 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
/* nothing to do if no engine was flagged */
3694 if (reset_mask == 0)
3697 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3699 evergreen_print_gpu_status_regs(rdev);
3701 /* Disable CP parsing/prefetching */
3702 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3704 if (reset_mask & RADEON_RESET_DMA) {
3706 tmp = RREG32(DMA_RB_CNTL);
3707 tmp &= ~DMA_RB_ENABLE;
3708 WREG32(DMA_RB_CNTL, tmp);
/* quiesce the memory controller before touching reset bits */
3713 evergreen_mc_stop(rdev, &save);
3714 if (evergreen_mc_wait_for_idle(rdev)) {
3715 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3718 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3719 grbm_soft_reset |= SOFT_RESET_DB |
3732 if (reset_mask & RADEON_RESET_CP) {
3733 grbm_soft_reset |= SOFT_RESET_CP |
3736 srbm_soft_reset |= SOFT_RESET_GRBM;
3739 if (reset_mask & RADEON_RESET_DMA)
3740 srbm_soft_reset |= SOFT_RESET_DMA;
3742 if (reset_mask & RADEON_RESET_DISPLAY)
3743 srbm_soft_reset |= SOFT_RESET_DC;
3745 if (reset_mask & RADEON_RESET_RLC)
3746 srbm_soft_reset |= SOFT_RESET_RLC;
3748 if (reset_mask & RADEON_RESET_SEM)
3749 srbm_soft_reset |= SOFT_RESET_SEM;
3751 if (reset_mask & RADEON_RESET_IH)
3752 srbm_soft_reset |= SOFT_RESET_IH;
3754 if (reset_mask & RADEON_RESET_GRBM)
3755 srbm_soft_reset |= SOFT_RESET_GRBM;
3757 if (reset_mask & RADEON_RESET_VMC)
3758 srbm_soft_reset |= SOFT_RESET_VMC;
/* MC may only be soft-reset on discrete asics */
3760 if (!(rdev->flags & RADEON_IS_IGP)) {
3761 if (reset_mask & RADEON_RESET_MC)
3762 srbm_soft_reset |= SOFT_RESET_MC;
/* pulse GRBM reset: set bits, post via readback, then clear */
3765 if (grbm_soft_reset) {
3766 tmp = RREG32(GRBM_SOFT_RESET);
3767 tmp |= grbm_soft_reset;
3768 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3769 WREG32(GRBM_SOFT_RESET, tmp);
3770 tmp = RREG32(GRBM_SOFT_RESET);
3774 tmp &= ~grbm_soft_reset;
3775 WREG32(GRBM_SOFT_RESET, tmp);
3776 tmp = RREG32(GRBM_SOFT_RESET);
/* same pulse sequence for the SRBM-side resets */
3779 if (srbm_soft_reset) {
3780 tmp = RREG32(SRBM_SOFT_RESET);
3781 tmp |= srbm_soft_reset;
3782 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3783 WREG32(SRBM_SOFT_RESET, tmp);
3784 tmp = RREG32(SRBM_SOFT_RESET);
3788 tmp &= ~srbm_soft_reset;
3789 WREG32(SRBM_SOFT_RESET, tmp);
3790 tmp = RREG32(SRBM_SOFT_RESET);
3793 /* Wait a little for things to settle down */
3796 evergreen_mc_resume(rdev, &save);
3799 evergreen_print_gpu_status_regs(rdev);
/* evergreen_asic_reset - top-level GPU reset entry point.
 *
 * Computes which engines look hung, marks the engine hung in the BIOS
 * scratch register, performs the soft reset, then re-checks and clears
 * the hung flag.  NOTE(review): conditional guards and the return
 * statement are elided from this extract.
 */
3802 int evergreen_asic_reset(struct radeon_device *rdev)
3806 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3809 r600_set_bios_scratch_engine_hung(rdev, true);
3811 evergreen_gpu_soft_reset(rdev, reset_mask);
/* re-evaluate: a successful reset should leave the mask empty */
3813 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3816 r600_set_bios_scratch_engine_hung(rdev, false);
3822 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3824 * @rdev: radeon_device pointer
3825 * @ring: radeon_ring structure holding ring information
3827 * Check if the GFX engine is locked up.
3828 * Returns true if the engine appears to be locked up, false if not.
3830 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3832 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* no GFX/compute/CP hang indication: refresh the lockup timestamp */
3834 if (!(reset_mask & (RADEON_RESET_GFX |
3835 RADEON_RESET_COMPUTE |
3836 RADEON_RESET_CP))) {
3837 radeon_ring_lockup_update(ring);
3840 /* force CP activities */
3841 radeon_ring_force_activity(rdev, ring);
3842 return radeon_ring_test_lockup(rdev, ring);
/* end-of-list sentinels written into the RLC save/restore and
 * clear-state buffers built by sumo_rlc_init() below */
3848 #define RLC_SAVE_RESTORE_LIST_END_MARKER  0x00000000
3849 #define RLC_CLEAR_STATE_END_MARKER        0x00000001
/* sumo_rlc_fini - tear down the RLC buffer objects created by
 * sumo_rlc_init(): the save/restore block, the clear-state block and the
 * CP table.  Each BO is reserved, unpinned, unreserved and unreferenced;
 * a failed reserve is only warned about, teardown still proceeds.
 * Safe to call on partially-initialized state (NULL BOs are skipped).
 */
3851 void sumo_rlc_fini(struct radeon_device *rdev)
3855 /* save restore block */
3856 if (rdev->rlc.save_restore_obj) {
3857 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3858 if (unlikely(r != 0))
3859 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3860 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3861 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3863 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3864 rdev->rlc.save_restore_obj = NULL;
3867 /* clear state block */
3868 if (rdev->rlc.clear_state_obj) {
3869 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3870 if (unlikely(r != 0))
3871 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3872 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3873 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3875 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3876 rdev->rlc.clear_state_obj = NULL;
3879 /* cp table block (comment fixed: was a copy-paste of "clear state block") */
3880 if (rdev->rlc.cp_table_obj) {
3881 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3882 if (unlikely(r != 0))
3883 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3884 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3885 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
3887 radeon_bo_unref(&rdev->rlc.cp_table_obj);
3888 rdev->rlc.cp_table_obj = NULL;
/* dword count of one CP micro-engine table entry set; used when sizing
 * the cp_table buffer (see sumo_rlc_init / cik_init_cp_pg_table) */
3892 #define CP_ME_TABLE_SIZE    96
/* sumo_rlc_init - allocate and populate the RLC buffer objects in VRAM:
 *
 *  1. save/restore block — on SI+ (CHIP_TAHITI and newer) the register
 *     list is copied verbatim; on older parts it is packed as
 *     (reg2 << 16) | reg1 header dwords plus per-register save slots,
 *     terminated with RLC_SAVE_RESTORE_LIST_END_MARKER.
 *  2. clear-state block — on SI+ sized/filled via si_get_csb_size()/
 *     si_get_csb_buffer(); otherwise built from rdev->rlc.cs_data
 *     section descriptors (3 header dwords per section: address,
 *     reg_index*4, 0x08000000 | byte length, followed by the register
 *     values), terminated with RLC_CLEAR_STATE_END_MARKER.
 *  3. optional CP table (cp_table_size != 0), filled by
 *     cik_init_cp_pg_table().
 *
 * Any failure path calls sumo_rlc_fini() to unwind.  Each BO is created,
 * reserved, pinned in VRAM, kmapped, written, then kunmapped/unreserved.
 *
 * NOTE(review): many lines are elided in this extract (error-return
 * statements after dev_warn, loop bodies, closing braces, guards such as
 * the cs_data NULL check) — code below is fragmentary as shown.
 */
3894 int sumo_rlc_init(struct radeon_device *rdev)
3897 volatile u32 *dst_ptr;
3898 u32 dws, data, i, j, k, reg_num;
3899 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
3900 u64 reg_list_mc_addr;
3901 const struct cs_section_def *cs_data;
3904 src_ptr = rdev->rlc.reg_list;
3905 dws = rdev->rlc.reg_list_size;
3906 cs_data = rdev->rlc.cs_data;
3909 /* save restore block */
3910 if (rdev->rlc.save_restore_obj == NULL) {
3911 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3912 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
3914 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
3919 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3920 if (unlikely(r != 0)) {
3921 sumo_rlc_fini(rdev);
3924 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
3925 &rdev->rlc.save_restore_gpu_addr);
3927 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3928 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
3929 sumo_rlc_fini(rdev);
3933 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
3935 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
3936 sumo_rlc_fini(rdev);
3939 /* write the sr buffer */
3940 dst_ptr = rdev->rlc.sr_ptr;
3941 if (rdev->family >= CHIP_TAHITI) {
/* SI+: register list is consumed as-is, straight copy */
3943 for (i = 0; i < rdev->rlc.reg_list_size; i++)
3944 dst_ptr[i] = src_ptr[i];
/* pre-SI packed layout: */
3948 * dw0: (reg2 << 16) | reg1
3949 * dw1: reg1 save space
3950 * dw2: reg2 save space
3952 for (i = 0; i < dws; i++) {
3953 data = src_ptr[i] >> 2;
3956 data |= (src_ptr[i] >> 2) << 16;
3957 j = (((i - 1) * 3) / 2);
3961 dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
3963 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
3964 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3968 /* clear state block */
3969 if (rdev->family >= CHIP_TAHITI) {
3970 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
/* +256 bytes for the indirect-buffer header written below */
3971 dws = rdev->rlc.clear_state_size + (256 / 4);
3975 for (i = 0; cs_data[i].section != NULL; i++) {
3976 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
3978 dws += cs_data[i].section[j].reg_count;
/* 3 header dwords per register block + 2 trailing dwords */
3981 reg_list_blk_index = (3 * reg_list_num + 2);
3982 dws += reg_list_blk_index;
3983 rdev->rlc.clear_state_size = dws;
3986 if (rdev->rlc.clear_state_obj == NULL) {
3987 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3988 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
3990 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
3991 sumo_rlc_fini(rdev);
3995 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3996 if (unlikely(r != 0)) {
3997 sumo_rlc_fini(rdev);
4000 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4001 &rdev->rlc.clear_state_gpu_addr);
4003 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4004 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4005 sumo_rlc_fini(rdev);
4009 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4011 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4012 sumo_rlc_fini(rdev);
4015 /* set up the cs buffer */
4016 dst_ptr = rdev->rlc.cs_ptr;
4017 if (rdev->family >= CHIP_TAHITI) {
/* SI+: header = hi/lo GPU address + size, payload at offset 256 */
4018 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4019 dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
4020 dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
4021 dst_ptr[2] = rdev->rlc.clear_state_size;
4022 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4024 reg_list_hdr_blk_index = 0;
4025 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4026 data = upper_32_bits(reg_list_mc_addr);
4027 dst_ptr[reg_list_hdr_blk_index] = data;
4028 reg_list_hdr_blk_index++;
4029 for (i = 0; cs_data[i].section != NULL; i++) {
4030 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4031 reg_num = cs_data[i].section[j].reg_count;
4032 data = reg_list_mc_addr & 0xffffffff;
4033 dst_ptr[reg_list_hdr_blk_index] = data;
4034 reg_list_hdr_blk_index++;
4036 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4037 dst_ptr[reg_list_hdr_blk_index] = data;
4038 reg_list_hdr_blk_index++;
/* 0x08000000 flag presumably marks a register-write block — TODO confirm */
4040 data = 0x08000000 | (reg_num * 4);
4041 dst_ptr[reg_list_hdr_blk_index] = data;
4042 reg_list_hdr_blk_index++;
4044 for (k = 0; k < reg_num; k++) {
4045 data = cs_data[i].section[j].extent[k];
4046 dst_ptr[reg_list_blk_index + k] = data;
4048 reg_list_mc_addr += reg_num * 4;
4049 reg_list_blk_index += reg_num;
4052 dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
4054 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4055 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
/* optional CP power-gating table */
4058 if (rdev->rlc.cp_table_size) {
4059 if (rdev->rlc.cp_table_obj == NULL) {
4060 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
4061 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
4063 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4064 sumo_rlc_fini(rdev);
4069 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4070 if (unlikely(r != 0)) {
4071 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4072 sumo_rlc_fini(rdev);
4075 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4076 &rdev->rlc.cp_table_gpu_addr);
4078 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4079 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4080 sumo_rlc_fini(rdev);
4083 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4085 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4086 sumo_rlc_fini(rdev);
4090 cik_init_cp_pg_table(rdev);
4092 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4093 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
/* evergreen_rlc_start - enable the RLC; on IGPs additionally enable
 * graphics power gating via the extra RLC_CNTL mask bits. */
4100 static void evergreen_rlc_start(struct radeon_device *rdev)
4102 u32 mask = RLC_ENABLE;
4104 if (rdev->flags & RADEON_IS_IGP) {
4105 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4108 WREG32(RLC_CNTL, mask);
/* evergreen_rlc_resume - stop the RLC, program its base/heartbeat
 * registers (IGP parts also get the save/restore and clear-state BO
 * addresses, plus load-balancing tuning on ARUBA when all SIMDs are
 * active), upload the RLC microcode word-by-word (size depends on
 * family: ARUBA / CAYMAN / EVERGREEN), then restart the RLC.
 *
 * NOTE(review): the rdev->rlc_fw NULL check, return statements and some
 * closing braces are elided from this extract.
 */
4111 int evergreen_rlc_resume(struct radeon_device *rdev)
4114 const __be32 *fw_data;
4119 r600_rlc_stop(rdev);
4121 WREG32(RLC_HB_CNTL, 0);
4123 if (rdev->flags & RADEON_IS_IGP) {
4124 if (rdev->family == CHIP_ARUBA) {
4125 u32 always_on_bitmap =
4126 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4127 /* find out the number of active simds */
4128 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4129 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4130 tmp = hweight32(~tmp);
/* only enable RLC load balancing when every SIMD is active */
4131 if (tmp == rdev->config.cayman.max_simds_per_se) {
4132 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4133 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4134 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4135 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4136 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4139 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4140 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
/* point the RLC at the BOs built by sumo_rlc_init() (256-byte units) */
4142 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4143 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4145 WREG32(RLC_HB_BASE, 0);
4146 WREG32(RLC_HB_RPTR, 0);
4147 WREG32(RLC_HB_WPTR, 0);
4148 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4149 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4151 WREG32(RLC_MC_CNTL, 0);
4152 WREG32(RLC_UCODE_CNTL, 0);
/* firmware words are big-endian; convert while uploading */
4154 fw_data = (const __be32 *)rdev->rlc_fw->data;
4155 if (rdev->family >= CHIP_ARUBA) {
4156 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4157 WREG32(RLC_UCODE_ADDR, i);
4158 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4160 } else if (rdev->family >= CHIP_CAYMAN) {
4161 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4162 WREG32(RLC_UCODE_ADDR, i);
4163 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4166 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4167 WREG32(RLC_UCODE_ADDR, i);
4168 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4171 WREG32(RLC_UCODE_ADDR, 0);
4173 evergreen_rlc_start(rdev);
/* evergreen_get_vblank_counter - read the hardware frame counter of the
 * given CRTC; the out-of-range guard's return value is elided here. */
4180 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4182 if (crtc >= rdev->num_crtc)
4185 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/* evergreen_disable_interrupt_state - force every interrupt source off:
 * CP ring interrupts (per-ring setup on cayman+, single CP_INT_CNTL on
 * evergreen), DMA traps, GRBM, per-CRTC vblank masks and pageflip
 * (GRPH) interrupts, DAC autodetect (DACA absent on DCE6), and the six
 * HPD pins (keeping only each pin's polarity bit).
 */
4188 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4192 if (rdev->family >= CHIP_CAYMAN) {
4193 cayman_cp_int_cntl_setup(rdev, 0,
4194 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4195 cayman_cp_int_cntl_setup(rdev, 1, 0);
4196 cayman_cp_int_cntl_setup(rdev, 2, 0);
4197 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4198 WREG32(CAYMAN_DMA1_CNTL, tmp);
4200 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4201 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4202 WREG32(DMA_CNTL, tmp);
4203 WREG32(GRBM_INT_CNTL, 0);
4204 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4205 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4206 if (rdev->num_crtc >= 4) {
4207 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4208 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4210 if (rdev->num_crtc >= 6) {
4211 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4212 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4215 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4216 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4217 if (rdev->num_crtc >= 4) {
4218 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4219 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4221 if (rdev->num_crtc >= 6) {
4222 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4223 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4226 /* only one DAC on DCE6 */
4227 if (!ASIC_IS_DCE6(rdev))
4228 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4229 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
/* preserve only the polarity bit of each hotplug-detect pin */
4231 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4232 WREG32(DC_HPD1_INT_CONTROL, tmp);
4233 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4234 WREG32(DC_HPD2_INT_CONTROL, tmp);
4235 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4236 WREG32(DC_HPD3_INT_CONTROL, tmp);
4237 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4238 WREG32(DC_HPD4_INT_CONTROL, tmp);
4239 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4240 WREG32(DC_HPD5_INT_CONTROL, tmp);
4241 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4242 WREG32(DC_HPD6_INT_CONTROL, tmp);
/* evergreen_irq_set() - program the interrupt enable state for the ASIC:
 * CP rings, DMA engine(s), per-CRTC vblank, page-flip (GRPH), hot-plug
 * detect (HPD), HDMI/AFMT audio triggers and the thermal interrupt.
 * Builds all enable masks first, then commits them to the registers.
 */
4246 int evergreen_irq_set(struct radeon_device *rdev)
4248 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4249 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4250 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4251 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4252 u32 grbm_int_cntl = 0;
4253 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
4254 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4255 u32 dma_cntl, dma_cntl1 = 0;
4256 u32 thermal_int = 0;
/* Refuse to touch interrupt state before an IRQ handler exists. */
4258 if (!rdev->irq.installed) {
4259 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4262 /* don't enable anything if the ih is disabled */
4263 if (!rdev->ih.enabled) {
4264 r600_disable_interrupts(rdev);
4265 /* force the active interrupt state to all disabled */
4266 evergreen_disable_interrupt_state(rdev);
/* Seed the HPD masks from the current register values with the enable
 * bit cleared, so polarity and other bits are preserved. */
4270 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4271 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4272 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4273 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4274 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4275 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
/* The thermal interrupt control register differs on ARUBA (TN). */
4276 if (rdev->family == CHIP_ARUBA)
4277 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4278 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4280 thermal_int = RREG32(CG_THERMAL_INT) &
4281 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4283 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4284 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4285 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4286 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4287 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4288 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4290 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
/* Cayman and newer have per-ring CP interrupt control (3 rings). */
4292 if (rdev->family >= CHIP_CAYMAN) {
4293 /* enable CP interrupts on all rings */
4294 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4295 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4296 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4298 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4299 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4300 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4302 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4303 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4304 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4307 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4308 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4309 cp_int_cntl |= RB_INT_ENABLE;
4310 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4314 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4315 DRM_DEBUG("r600_irq_set: sw int dma\n");
4316 dma_cntl |= TRAP_ENABLE;
/* Second DMA engine only exists on Cayman and newer. */
4319 if (rdev->family >= CHIP_CAYMAN) {
4320 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4321 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4322 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4323 dma_cntl1 |= TRAP_ENABLE;
4327 if (rdev->irq.dpm_thermal) {
4328 DRM_DEBUG("dpm thermal\n");
4329 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
/* Enable vblank per CRTC when either the drm vblank machinery or a
 * pending page-flip needs it. */
4332 if (rdev->irq.crtc_vblank_int[0] ||
4333 atomic_read(&rdev->irq.pflip[0])) {
4334 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4335 crtc1 |= VBLANK_INT_MASK;
4337 if (rdev->irq.crtc_vblank_int[1] ||
4338 atomic_read(&rdev->irq.pflip[1])) {
4339 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4340 crtc2 |= VBLANK_INT_MASK;
4342 if (rdev->irq.crtc_vblank_int[2] ||
4343 atomic_read(&rdev->irq.pflip[2])) {
4344 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4345 crtc3 |= VBLANK_INT_MASK;
4347 if (rdev->irq.crtc_vblank_int[3] ||
4348 atomic_read(&rdev->irq.pflip[3])) {
4349 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4350 crtc4 |= VBLANK_INT_MASK;
4352 if (rdev->irq.crtc_vblank_int[4] ||
4353 atomic_read(&rdev->irq.pflip[4])) {
4354 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4355 crtc5 |= VBLANK_INT_MASK;
4357 if (rdev->irq.crtc_vblank_int[5] ||
4358 atomic_read(&rdev->irq.pflip[5])) {
4359 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4360 crtc6 |= VBLANK_INT_MASK;
/* Hot-plug detect enables, one per connector pad. */
4362 if (rdev->irq.hpd[0]) {
4363 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4364 hpd1 |= DC_HPDx_INT_EN;
4366 if (rdev->irq.hpd[1]) {
4367 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4368 hpd2 |= DC_HPDx_INT_EN;
4370 if (rdev->irq.hpd[2]) {
4371 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4372 hpd3 |= DC_HPDx_INT_EN;
4374 if (rdev->irq.hpd[3]) {
4375 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4376 hpd4 |= DC_HPDx_INT_EN;
4378 if (rdev->irq.hpd[4]) {
4379 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4380 hpd5 |= DC_HPDx_INT_EN;
4382 if (rdev->irq.hpd[5]) {
4383 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4384 hpd6 |= DC_HPDx_INT_EN;
/* HDMI/AFMT audio format-change trigger enables, one per encoder. */
4386 if (rdev->irq.afmt[0]) {
4387 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4388 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4390 if (rdev->irq.afmt[1]) {
4391 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4392 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4394 if (rdev->irq.afmt[2]) {
4395 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4396 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4398 if (rdev->irq.afmt[3]) {
4399 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4400 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4402 if (rdev->irq.afmt[4]) {
4403 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4404 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4406 if (rdev->irq.afmt[5]) {
4407 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4408 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
/* All masks computed - now commit them to the hardware. */
4411 if (rdev->family >= CHIP_CAYMAN) {
4412 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4413 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4414 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4416 WREG32(CP_INT_CNTL, cp_int_cntl);
4418 WREG32(DMA_CNTL, dma_cntl);
4420 if (rdev->family >= CHIP_CAYMAN)
4421 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4423 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4425 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4426 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4427 if (rdev->num_crtc >= 4) {
4428 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4429 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4431 if (rdev->num_crtc >= 6) {
4432 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4433 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4436 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
4437 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
4438 if (rdev->num_crtc >= 4) {
4439 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
4440 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
4442 if (rdev->num_crtc >= 6) {
4443 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
4444 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
4447 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4448 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4449 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4450 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4451 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4452 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4453 if (rdev->family == CHIP_ARUBA)
4454 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4456 WREG32(CG_THERMAL_INT, thermal_int);
4458 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4459 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4460 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4461 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4462 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4463 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4468 static void evergreen_irq_ack(struct radeon_device *rdev)
4472 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4473 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4474 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4475 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4476 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4477 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4478 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4479 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4480 if (rdev->num_crtc >= 4) {
4481 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4482 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4484 if (rdev->num_crtc >= 6) {
4485 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4486 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4489 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4490 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4491 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4492 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4493 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4494 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4496 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4497 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4498 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4499 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4500 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4501 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4502 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4503 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4504 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4505 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4506 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4507 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4509 if (rdev->num_crtc >= 4) {
4510 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4511 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4512 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4513 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4514 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4515 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4516 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4517 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4518 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4519 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4520 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4521 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4524 if (rdev->num_crtc >= 6) {
4525 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4526 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4527 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4528 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4529 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4530 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4531 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4532 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4533 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4534 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4535 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4536 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4539 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4540 tmp = RREG32(DC_HPD1_INT_CONTROL);
4541 tmp |= DC_HPDx_INT_ACK;
4542 WREG32(DC_HPD1_INT_CONTROL, tmp);
4544 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4545 tmp = RREG32(DC_HPD2_INT_CONTROL);
4546 tmp |= DC_HPDx_INT_ACK;
4547 WREG32(DC_HPD2_INT_CONTROL, tmp);
4549 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4550 tmp = RREG32(DC_HPD3_INT_CONTROL);
4551 tmp |= DC_HPDx_INT_ACK;
4552 WREG32(DC_HPD3_INT_CONTROL, tmp);
4554 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4555 tmp = RREG32(DC_HPD4_INT_CONTROL);
4556 tmp |= DC_HPDx_INT_ACK;
4557 WREG32(DC_HPD4_INT_CONTROL, tmp);
4559 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4560 tmp = RREG32(DC_HPD5_INT_CONTROL);
4561 tmp |= DC_HPDx_INT_ACK;
4562 WREG32(DC_HPD5_INT_CONTROL, tmp);
4564 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4565 tmp = RREG32(DC_HPD5_INT_CONTROL);
4566 tmp |= DC_HPDx_INT_ACK;
4567 WREG32(DC_HPD6_INT_CONTROL, tmp);
4569 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4570 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4571 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4572 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4574 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4575 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4576 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4577 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4579 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4580 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4581 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4582 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4584 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4585 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4586 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4587 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4589 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4590 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4591 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4592 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4594 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4595 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4596 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4597 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
/* evergreen_irq_disable() - hard-disable interrupt delivery: turn the
 * controller off, ack anything still pending, then force all per-source
 * enable registers to their disabled state.
 */
4601 static void evergreen_irq_disable(struct radeon_device *rdev)
4603 r600_disable_interrupts(rdev);
4604 /* Wait and acknowledge irq */
/* NOTE(review): the "Wait" above has no visible delay in this copy -
 * confirm a short delay (e.g. mdelay(1)) wasn't lost before the ack. */
4606 evergreen_irq_ack(rdev);
4607 evergreen_disable_interrupt_state(rdev);
/* evergreen_irq_suspend() - suspend-time teardown: disable and ack all
 * interrupts, then stop the RLC so nothing fires while suspended.
 */
4610 void evergreen_irq_suspend(struct radeon_device *rdev)
4612 evergreen_irq_disable(rdev);
4613 r600_rlc_stop(rdev);
4616 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4620 if (rdev->wb.enabled)
4621 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4623 wptr = RREG32(IH_RB_WPTR);
4625 if (wptr & RB_OVERFLOW) {
4626 /* When a ring buffer overflow happen start parsing interrupt
4627 * from the last not overwritten vector (wptr + 16). Hopefully
4628 * this should allow us to catchup.
4630 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4631 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4632 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4633 tmp = RREG32(IH_RB_CNTL);
4634 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4635 WREG32(IH_RB_CNTL, tmp);
4637 return (wptr & rdev->ih.ptr_mask);
/* evergreen_irq_process() - top-half IRQ handler: drain the IH ring.
 * Acks the display interrupt sources, then walks every 16-byte vector
 * between rptr and wptr, dispatching on src_id/src_data (vblank/vline
 * per CRTC, HPD, HDMI audio, VM faults, CP/DMA fence events, thermal).
 * Slow work (hotplug, audio, thermal) is deferred to workqueues.
 */
4640 int evergreen_irq_process(struct radeon_device *rdev)
4644 u32 src_id, src_data;
4646 bool queue_hotplug = false;
4647 bool queue_hdmi = false;
4648 bool queue_thermal = false;
4651 if (!rdev->ih.enabled || rdev->shutdown)
4654 wptr = evergreen_get_ih_wptr(rdev);
4657 /* is somebody else already processing irqs? */
4658 if (atomic_xchg(&rdev->ih.lock, 1))
4661 rptr = rdev->ih.rptr;
4662 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4664 /* Order reading of wptr vs. reading of IH ring data */
4667 /* display interrupts */
4668 evergreen_irq_ack(rdev);
/* Walk the ring; each vector is 16 bytes (4 dwords). */
4670 while (rptr != wptr) {
4671 /* wptr/rptr are in bytes! */
4672 ring_index = rptr / 4;
4673 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4674 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4677 case 1: /* D1 vblank/vline */
4679 case 0: /* D1 vblank */
4680 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4681 if (rdev->irq.crtc_vblank_int[0]) {
4682 drm_handle_vblank(rdev->ddev, 0);
4683 rdev->pm.vblank_sync = true;
4684 wake_up(&rdev->irq.vblank_queue);
4686 if (atomic_read(&rdev->irq.pflip[0]))
4687 radeon_crtc_handle_flip(rdev, 0);
4688 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4689 DRM_DEBUG("IH: D1 vblank\n");
4692 case 1: /* D1 vline */
4693 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4694 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4695 DRM_DEBUG("IH: D1 vline\n");
4699 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4703 case 2: /* D2 vblank/vline */
4705 case 0: /* D2 vblank */
4706 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4707 if (rdev->irq.crtc_vblank_int[1]) {
4708 drm_handle_vblank(rdev->ddev, 1);
4709 rdev->pm.vblank_sync = true;
4710 wake_up(&rdev->irq.vblank_queue);
4712 if (atomic_read(&rdev->irq.pflip[1]))
4713 radeon_crtc_handle_flip(rdev, 1);
4714 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4715 DRM_DEBUG("IH: D2 vblank\n");
4718 case 1: /* D2 vline */
4719 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4720 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4721 DRM_DEBUG("IH: D2 vline\n");
4725 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4729 case 3: /* D3 vblank/vline */
4731 case 0: /* D3 vblank */
4732 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4733 if (rdev->irq.crtc_vblank_int[2]) {
4734 drm_handle_vblank(rdev->ddev, 2);
4735 rdev->pm.vblank_sync = true;
4736 wake_up(&rdev->irq.vblank_queue);
4738 if (atomic_read(&rdev->irq.pflip[2]))
4739 radeon_crtc_handle_flip(rdev, 2);
4740 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4741 DRM_DEBUG("IH: D3 vblank\n");
4744 case 1: /* D3 vline */
4745 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4746 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4747 DRM_DEBUG("IH: D3 vline\n");
4751 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4755 case 4: /* D4 vblank/vline */
4757 case 0: /* D4 vblank */
4758 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4759 if (rdev->irq.crtc_vblank_int[3]) {
4760 drm_handle_vblank(rdev->ddev, 3);
4761 rdev->pm.vblank_sync = true;
4762 wake_up(&rdev->irq.vblank_queue);
4764 if (atomic_read(&rdev->irq.pflip[3]))
4765 radeon_crtc_handle_flip(rdev, 3);
4766 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4767 DRM_DEBUG("IH: D4 vblank\n");
4770 case 1: /* D4 vline */
4771 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4772 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4773 DRM_DEBUG("IH: D4 vline\n");
4777 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4781 case 5: /* D5 vblank/vline */
4783 case 0: /* D5 vblank */
4784 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4785 if (rdev->irq.crtc_vblank_int[4]) {
4786 drm_handle_vblank(rdev->ddev, 4);
4787 rdev->pm.vblank_sync = true;
4788 wake_up(&rdev->irq.vblank_queue);
4790 if (atomic_read(&rdev->irq.pflip[4]))
4791 radeon_crtc_handle_flip(rdev, 4);
4792 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4793 DRM_DEBUG("IH: D5 vblank\n");
4796 case 1: /* D5 vline */
4797 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4798 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4799 DRM_DEBUG("IH: D5 vline\n");
4803 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4807 case 6: /* D6 vblank/vline */
4809 case 0: /* D6 vblank */
4810 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4811 if (rdev->irq.crtc_vblank_int[5]) {
4812 drm_handle_vblank(rdev->ddev, 5);
4813 rdev->pm.vblank_sync = true;
4814 wake_up(&rdev->irq.vblank_queue);
4816 if (atomic_read(&rdev->irq.pflip[5]))
4817 radeon_crtc_handle_flip(rdev, 5);
4818 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4819 DRM_DEBUG("IH: D6 vblank\n");
4822 case 1: /* D6 vline */
4823 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4824 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4825 DRM_DEBUG("IH: D6 vline\n");
4829 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* Hot-plug: set queue_hotplug so connector re-probe runs in a worker. */
4833 case 42: /* HPD hotplug */
4836 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4837 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4838 queue_hotplug = true;
4839 DRM_DEBUG("IH: HPD1\n");
4843 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4844 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4845 queue_hotplug = true;
4846 DRM_DEBUG("IH: HPD2\n");
4850 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4851 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4852 queue_hotplug = true;
4853 DRM_DEBUG("IH: HPD3\n");
4857 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4858 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4859 queue_hotplug = true;
4860 DRM_DEBUG("IH: HPD4\n");
4864 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4865 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4866 queue_hotplug = true;
4867 DRM_DEBUG("IH: HPD5\n");
4871 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4872 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4873 queue_hotplug = true;
4874 DRM_DEBUG("IH: HPD6\n");
4878 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* HDMI/AFMT audio format-change triggers -> defer to audio worker. */
4885 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4886 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4888 DRM_DEBUG("IH: HDMI0\n");
4892 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4893 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4895 DRM_DEBUG("IH: HDMI1\n");
4899 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4900 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4902 DRM_DEBUG("IH: HDMI2\n");
4906 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4907 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4909 DRM_DEBUG("IH: HDMI3\n");
4913 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4914 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4916 DRM_DEBUG("IH: HDMI4\n");
4920 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4921 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4923 DRM_DEBUG("IH: HDMI5\n");
4927 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4931 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4932 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
/* VM protection fault: dump fault address/status, then clear them. */
4936 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4937 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
4938 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4939 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4941 dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4943 cayman_vm_decode_fault(rdev, status, addr);
4944 /* reset addr and status */
4945 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4947 case 176: /* CP_INT in ring buffer */
4948 case 177: /* CP_INT in IB1 */
4949 case 178: /* CP_INT in IB2 */
4950 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4951 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4953 case 181: /* CP EOP event */
4954 DRM_DEBUG("IH: CP EOP\n");
4955 if (rdev->family >= CHIP_CAYMAN) {
4958 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4961 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4964 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4968 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4970 case 224: /* DMA trap event */
4971 DRM_DEBUG("IH: DMA trap\n");
4972 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4974 case 230: /* thermal low to high */
4975 DRM_DEBUG("IH: thermal low to high\n");
4976 rdev->pm.dpm.thermal.high_to_low = false;
4977 queue_thermal = true;
4979 case 231: /* thermal high to low */
4980 DRM_DEBUG("IH: thermal high to low\n");
4981 rdev->pm.dpm.thermal.high_to_low = true;
4982 queue_thermal = true;
4984 case 233: /* GUI IDLE */
4985 DRM_DEBUG("IH: GUI idle\n");
4987 case 244: /* DMA trap event */
4988 if (rdev->family >= CHIP_CAYMAN) {
4989 DRM_DEBUG("IH: DMA1 trap\n");
4990 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4994 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4998 /* wptr/rptr are in bytes! */
5000 rptr &= rdev->ih.ptr_mask;
/* Defer slow work to process context. */
5003 schedule_work(&rdev->hotplug_work);
5005 schedule_work(&rdev->audio_work);
5006 if (queue_thermal && rdev->pm.dpm_enabled)
5007 schedule_work(&rdev->pm.dpm.thermal.work);
5008 rdev->ih.rptr = rptr;
5009 WREG32(IH_RB_RPTR, rdev->ih.rptr);
5010 atomic_set(&rdev->ih.lock, 0);
5012 /* make sure wptr hasn't changed while processing */
5013 wptr = evergreen_get_ih_wptr(rdev);
/* evergreen_startup() - bring the ASIC fully up: load microcode, program
 * the MC, enable GART/AGP, init the GPU, allocate RLC/write-back buffers,
 * start the fence driver on each ring, set up interrupts, then bring up
 * the CP, DMA, UVD rings, the IB pool and audio.  Shared by init and
 * resume paths.  Returns 0 on success or a negative errno.
 */
5020 static int evergreen_startup(struct radeon_device *rdev)
5022 struct radeon_ring *ring;
5025 /* enable pcie gen2 link */
5026 evergreen_pcie_gen2_enable(rdev);
5028 evergreen_program_aspm(rdev);
5030 evergreen_mc_program(rdev);
/* DCE5 (NI) parts additionally need MC microcode; older parts use the
 * r600-style CP/RLC microcode loader. */
5032 if (ASIC_IS_DCE5(rdev)) {
5033 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5034 r = ni_init_microcode(rdev);
5036 DRM_ERROR("Failed to load firmware!\n");
5040 r = ni_mc_load_microcode(rdev);
5042 DRM_ERROR("Failed to load MC firmware!\n");
5046 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5047 r = r600_init_microcode(rdev);
5049 DRM_ERROR("Failed to load firmware!\n");
5055 r = r600_vram_scratch_init(rdev);
5059 if (rdev->flags & RADEON_IS_AGP) {
5060 evergreen_agp_enable(rdev);
5062 r = evergreen_pcie_gart_enable(rdev);
5066 evergreen_gpu_init(rdev);
5068 /* allocate rlc buffers */
5069 if (rdev->flags & RADEON_IS_IGP) {
5070 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5071 rdev->rlc.reg_list_size =
5072 (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5073 rdev->rlc.cs_data = evergreen_cs_data;
5074 r = sumo_rlc_init(rdev);
5076 DRM_ERROR("Failed to init rlc BOs!\n");
5081 /* allocate wb buffer */
5082 r = radeon_wb_init(rdev);
/* Start fence handling on each ring before the rings themselves run. */
5086 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5088 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5092 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5094 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
/* UVD is optional: on failure the ring is simply disabled (size 0). */
5098 r = uvd_v2_2_resume(rdev);
5100 r = radeon_fence_driver_start_ring(rdev,
5101 R600_RING_TYPE_UVD_INDEX);
5103 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5107 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5110 if (!rdev->irq.installed) {
5111 r = radeon_irq_kms_init(rdev);
5116 r = r600_irq_init(rdev);
5118 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5119 radeon_irq_kms_fini(rdev);
5122 evergreen_irq_set(rdev);
5124 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5125 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5126 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
5131 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5132 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5133 DMA_RB_RPTR, DMA_RB_WPTR,
5134 DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5138 r = evergreen_cp_load_microcode(rdev);
5141 r = evergreen_cp_resume(rdev);
5144 r = r600_dma_resume(rdev);
5148 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5149 if (ring->ring_size) {
5150 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5151 UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
5154 r = uvd_v1_0_init(rdev);
5157 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5160 r = radeon_ib_pool_init(rdev);
5162 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5166 r = r600_audio_init(rdev);
5168 DRM_ERROR("radeon: audio init failed\n");
/* evergreen_resume() - resume-from-suspend path: reset the ASIC, re-post
 * via the ATOM BIOS, restore golden registers, then re-run the common
 * startup sequence.  Clears accel_working on failure.
 */
5175 int evergreen_resume(struct radeon_device *rdev)
5179 /* reset the asic, the gfx blocks are often in a bad state
5180 * after the driver is unloaded or after a resume
5182 if (radeon_asic_reset(rdev))
5183 dev_warn(rdev->dev, "GPU reset failed !\n");
5184 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5185 * posting will perform necessary task to bring back GPU into good
5189 atom_asic_init(rdev->mode_info.atom_context);
5191 /* init golden registers */
5192 evergreen_init_golden_registers(rdev);
/* Optimistically mark accel working; startup failure clears it below. */
5194 rdev->accel_working = true;
5195 r = evergreen_startup(rdev);
5197 DRM_ERROR("evergreen startup failed on resume\n");
5198 rdev->accel_working = false;
/* evergreen_suspend() - quiesce the ASIC for suspend: stop audio, UVD,
 * the DMA engine, interrupts and write-back, then disable the GART.
 * Ordering matters: engines are stopped before their backing state
 * (write-back, GART) is torn down.
 */
5206 int evergreen_suspend(struct radeon_device *rdev)
5208 r600_audio_fini(rdev);
5209 uvd_v1_0_fini(rdev);
5210 radeon_uvd_suspend(rdev);
5212 r600_dma_stop(rdev);
5213 evergreen_irq_suspend(rdev);
5214 radeon_wb_disable(rdev);
5215 evergreen_pcie_gart_disable(rdev);
5220 /* Plan is to move initialization into this function and to use
5221 * helper functions so that radeon_device_init does pretty much
5222 * nothing more than call the asic-specific functions. This
5223 * should also allow us to remove a bunch of callback functions
/* evergreen_init() - one-time driver init for evergreen-class ASICs:
 * fetch and validate the (ATOM) BIOS, reset and post the card, then set
 * up scratch/surface registers, clocks, fences, AGP, the memory
 * controller, the memory manager, the rings, the IH ring and the GART,
 * and finally attempt full startup.  On startup failure, acceleration is
 * torn down and disabled but init still succeeds (modesetting only).
 */
5226 int evergreen_init(struct radeon_device *rdev)
5231 if (!radeon_get_bios(rdev)) {
5232 if (ASIC_IS_AVIVO(rdev))
5235 /* Must be an ATOMBIOS */
5236 if (!rdev->is_atom_bios) {
5237 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5240 r = radeon_atombios_init(rdev);
5243 /* reset the asic, the gfx blocks are often in a bad state
5244 * after the driver is unloaded or after a resume
5246 if (radeon_asic_reset(rdev))
5247 dev_warn(rdev->dev, "GPU reset failed !\n");
5248 /* Post card if necessary */
5249 if (!radeon_card_posted(rdev)) {
5251 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5254 DRM_INFO("GPU not posted. posting now...\n");
5255 atom_asic_init(rdev->mode_info.atom_context);
5257 /* init golden registers */
5258 evergreen_init_golden_registers(rdev);
5259 /* Initialize scratch registers */
5260 r600_scratch_init(rdev);
5261 /* Initialize surface registers */
5262 radeon_surface_init(rdev);
5263 /* Initialize clocks */
5264 radeon_get_clock_info(rdev->ddev);
5266 r = radeon_fence_driver_init(rdev);
5269 /* initialize AGP */
5270 if (rdev->flags & RADEON_IS_AGP) {
5271 r = radeon_agp_init(rdev);
5273 radeon_agp_disable(rdev);
5275 /* initialize memory controller */
5276 r = evergreen_mc_init(rdev);
5279 /* Memory manager */
5280 r = radeon_bo_init(rdev);
/* Pre-size the rings; the buffers are created later in startup. */
5284 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5285 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5287 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5288 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5290 r = radeon_uvd_init(rdev);
5292 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5293 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5297 rdev->ih.ring_obj = NULL;
5298 r600_ih_ring_init(rdev, 64 * 1024);
5300 r = r600_pcie_gart_init(rdev);
5304 rdev->accel_working = true;
5305 r = evergreen_startup(rdev);
/* Startup failure is non-fatal: tear down accel and run without it. */
5307 dev_err(rdev->dev, "disabling GPU acceleration\n");
5309 r600_dma_fini(rdev);
5310 r600_irq_fini(rdev);
5311 if (rdev->flags & RADEON_IS_IGP)
5312 sumo_rlc_fini(rdev);
5313 radeon_wb_fini(rdev);
5314 radeon_ib_pool_fini(rdev);
5315 radeon_irq_kms_fini(rdev);
5316 evergreen_pcie_gart_fini(rdev);
5317 rdev->accel_working = false;
5320 /* Don't start up if the MC ucode is missing on BTC parts.
5321 * The default clocks and voltages before the MC ucode
5322 * is loaded are not sufficient for advanced operations.
5324 if (ASIC_IS_DCE5(rdev)) {
5325 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5326 DRM_ERROR("radeon: MC ucode required for NI+.\n");
/* evergreen_fini - full driver teardown on unload.
 *
 * Releases resources roughly in the reverse order of evergreen_init:
 * acceleration blocks (DMA, IRQ, RLC on IGPs, writeback, IB pool, GART)
 * first, then UVD, VRAM scratch, GEM, fences, AGP, the buffer manager
 * and finally the atombios state.
 */
5334 void evergreen_fini(struct radeon_device *rdev)
5336 r600_audio_fini(rdev);
5338 r600_dma_fini(rdev);
5339 r600_irq_fini(rdev);
/* the RLC save/restore state only exists on fusion (IGP) parts */
5340 if (rdev->flags & RADEON_IS_IGP)
5341 sumo_rlc_fini(rdev);
5342 radeon_wb_fini(rdev);
5343 radeon_ib_pool_fini(rdev);
5344 radeon_irq_kms_fini(rdev);
5345 evergreen_pcie_gart_fini(rdev);
5346 uvd_v1_0_fini(rdev);
5347 radeon_uvd_fini(rdev);
5348 r600_vram_scratch_fini(rdev);
5349 radeon_gem_fini(rdev);
5350 radeon_fence_driver_fini(rdev);
5351 radeon_agp_fini(rdev);
5352 radeon_bo_fini(rdev);
5353 radeon_atombios_fini(rdev);
/* evergreen_pcie_gen2_enable - try to bring the PCIe link up to gen2 speed.
 *
 * Bails out early when: the radeon.pcie_gen2=0 module parameter disables
 * it, the part is an IGP (no external PCIe link), the device is not PCIe,
 * the board is a dual-GPU (X2) card with its own sequence, the upstream
 * bridge cannot do 5.0/8.0 GT/s, or gen2 is already active.
 */
5358 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5360 u32 link_width_cntl, speed_cntl;
5362 if (radeon_pcie_gen2 == 0)
5365 if (rdev->flags & RADEON_IS_IGP)
5368 if (!(rdev->flags & RADEON_IS_PCIE))
5371 /* x2 cards have a special sequence */
5372 if (ASIC_IS_X2(rdev))
/* need a bridge capable of at least 5.0 GT/s for gen2 to make sense */
5375 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5376 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5379 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5380 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5381 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5385 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* only retrain if the other end has advertised or used gen2 */
5387 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5388 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5390 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5391 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5392 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5394 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5395 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5396 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* pulse the failed-speed-change counter clear bit (set, then clear) */
5398 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5399 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5400 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5402 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5403 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5404 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5406 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5407 speed_cntl |= LC_GEN2_EN_STRAP;
5408 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* NOTE(review): listing omits the else-branch lines here; presumably this
 * path handles a non-gen2-capable peer - confirm against the full file. */
5411 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5412 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5414 link_width_cntl |= LC_UPCONFIGURE_DIS;
5416 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5417 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
/* evergreen_program_aspm - configure PCIe Active State Power Management.
 *
 * No-op when radeon.aspm=0 or the device is not PCIe.  Programs PIF
 * pairing (differently on fusion platforms), the L0s/L1 inactivity
 * timers (stricter values on BARTS and newer), optional PLL power-down
 * while in L1, and - on BARTS+ - PLL ramp-up and LS2 exit times.
 * Registers are only written back when the value actually changed.
 * NOTE(review): the family switch below has its case labels omitted in
 * this listing - confirm which families set disable_l0s etc.
 */
5421 void evergreen_program_aspm(struct radeon_device *rdev)
5424 u32 pcie_lc_cntl, pcie_lc_cntl_old;
5425 bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5426 /* fusion_platform = true
5427 * if the system is a fusion system
5428 * (APU or DGPU in a fusion system).
5429 * todo: check if the system is a fusion platform.
5431 bool fusion_platform = false;
5433 if (radeon_aspm == 0)
5436 if (!(rdev->flags & RADEON_IS_PCIE))
5439 switch (rdev->family) {
5452 disable_l0s = false;
5456 if (rdev->flags & RADEON_IS_IGP)
5457 fusion_platform = true; /* XXX also dGPUs in a fusion system */
/* PIF pairing is set up differently for fusion vs. discrete */
5459 data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5460 if (fusion_platform)
5465 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5467 data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5468 if (fusion_platform)
5473 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
/* rebuild the L0s/L1 inactivity timers from scratch */
5475 pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5476 pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
5478 if (rdev->family >= CHIP_BARTS)
5479 pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5481 pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5485 if (rdev->family >= CHIP_BARTS)
5486 pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5488 pcie_lc_cntl |= LC_L1_INACTIVITY(8);
/* allow the PLLs to power down while the link is in L1 */
5490 if (!disable_plloff_in_l1) {
5491 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5492 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5493 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5495 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5497 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5498 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5499 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5501 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5503 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5504 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5505 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5507 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5509 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5510 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5511 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5513 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
/* BARTS and newer: slower PLL ramp-up time on both PHYs */
5515 if (rdev->family >= CHIP_BARTS) {
5516 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5517 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5518 data |= PLL_RAMP_UP_TIME_0(4);
5520 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5522 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5523 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5524 data |= PLL_RAMP_UP_TIME_1(4);
5526 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5528 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5529 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5530 data |= PLL_RAMP_UP_TIME_0(4);
5532 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5534 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5535 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5536 data |= PLL_RAMP_UP_TIME_1(4);
5538 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
/* enable dynamic lane power state management */
5541 data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5542 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5543 data |= LC_DYN_LANES_PWR_STATE(3);
5545 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5547 if (rdev->family >= CHIP_BARTS) {
5548 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5549 data &= ~LS2_EXIT_TIME_MASK;
5550 data |= LS2_EXIT_TIME(1);
5552 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5554 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5555 data &= ~LS2_EXIT_TIME_MASK;
5556 data |= LS2_EXIT_TIME(1);
5558 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5563 /* evergreen parts only */
5564 if (rdev->family < CHIP_BARTS)
5565 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
/* avoid the register write if nothing changed */
5567 if (pcie_lc_cntl != pcie_lc_cntl_old)
5568 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);