drm/radeon/cik: enable/disable vce cg when encoding v2
[linux-2.6-block.git] / drivers / gpu / drm / radeon / si.c
CommitLineData
43b3cd99
AD
1/*
2 * Copyright 2011 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
0f0de06c 24#include <linux/firmware.h>
0f0de06c
AD
25#include <linux/slab.h>
26#include <linux/module.h>
760285e7 27#include <drm/drmP.h>
43b3cd99
AD
28#include "radeon.h"
29#include "radeon_asic.h"
760285e7 30#include <drm/radeon_drm.h>
43b3cd99
AD
31#include "sid.h"
32#include "atom.h"
48c0c902 33#include "si_blit_shaders.h"
bd8cd539 34#include "clearstate_si.h"
a0ceada6 35#include "radeon_ucode.h"
43b3cd99 36
0f0de06c
AD
37
/* Firmware images requested at driver load for each Southern Islands ASIC.
 * Per-chip blobs: pfp/me/ce = graphics command-processor microcode,
 * mc = memory controller, rlc = run-list controller, smc = system
 * management controller (power management) firmware.
 */
MODULE_FIRMWARE("radeon/TAHITI_pfp.bin");
MODULE_FIRMWARE("radeon/TAHITI_me.bin");
MODULE_FIRMWARE("radeon/TAHITI_ce.bin");
MODULE_FIRMWARE("radeon/TAHITI_mc.bin");
MODULE_FIRMWARE("radeon/TAHITI_rlc.bin");
MODULE_FIRMWARE("radeon/TAHITI_smc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_pfp.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_me.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_ce.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_mc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_rlc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_smc.bin");
MODULE_FIRMWARE("radeon/VERDE_pfp.bin");
MODULE_FIRMWARE("radeon/VERDE_me.bin");
MODULE_FIRMWARE("radeon/VERDE_ce.bin");
MODULE_FIRMWARE("radeon/VERDE_mc.bin");
MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
MODULE_FIRMWARE("radeon/VERDE_smc.bin");
MODULE_FIRMWARE("radeon/OLAND_pfp.bin");
MODULE_FIRMWARE("radeon/OLAND_me.bin");
MODULE_FIRMWARE("radeon/OLAND_ce.bin");
MODULE_FIRMWARE("radeon/OLAND_mc.bin");
MODULE_FIRMWARE("radeon/OLAND_rlc.bin");
MODULE_FIRMWARE("radeon/OLAND_smc.bin");
MODULE_FIRMWARE("radeon/HAINAN_pfp.bin");
MODULE_FIRMWARE("radeon/HAINAN_me.bin");
MODULE_FIRMWARE("radeon/HAINAN_ce.bin");
MODULE_FIRMWARE("radeon/HAINAN_mc.bin");
MODULE_FIRMWARE("radeon/HAINAN_rlc.bin");
MODULE_FIRMWARE("radeon/HAINAN_smc.bin");

/* Forward declarations for SI-local helpers defined later in this file. */
static void si_pcie_gen3_enable(struct radeon_device *rdev);
static void si_program_aspm(struct radeon_device *rdev);
/* Helpers shared with earlier-generation ASIC code (sumo/r600/evergreen),
 * defined in their respective files. */
extern void sumo_rlc_fini(struct radeon_device *rdev);
extern int sumo_rlc_init(struct radeon_device *rdev);
extern int r600_ih_ring_alloc(struct radeon_device *rdev);
extern void r600_ih_ring_fini(struct radeon_device *rdev);
extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
extern bool evergreen_is_display_hung(struct radeon_device *rdev);
/* Clockgating (cg) / powergating (pg) and RLC control, defined below. */
static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
					 bool enable);
static void si_init_pg(struct radeon_device *rdev);
static void si_init_cg(struct radeon_device *rdev);
static void si_fini_pg(struct radeon_device *rdev);
static void si_fini_cg(struct radeon_device *rdev);
static void si_rlc_stop(struct radeon_device *rdev);
0a96d72b 88
6d8cf000
AD
89static const u32 verde_rlc_save_restore_register_list[] =
90{
91 (0x8000 << 16) | (0x98f4 >> 2),
92 0x00000000,
93 (0x8040 << 16) | (0x98f4 >> 2),
94 0x00000000,
95 (0x8000 << 16) | (0xe80 >> 2),
96 0x00000000,
97 (0x8040 << 16) | (0xe80 >> 2),
98 0x00000000,
99 (0x8000 << 16) | (0x89bc >> 2),
100 0x00000000,
101 (0x8040 << 16) | (0x89bc >> 2),
102 0x00000000,
103 (0x8000 << 16) | (0x8c1c >> 2),
104 0x00000000,
105 (0x8040 << 16) | (0x8c1c >> 2),
106 0x00000000,
107 (0x9c00 << 16) | (0x98f0 >> 2),
108 0x00000000,
109 (0x9c00 << 16) | (0xe7c >> 2),
110 0x00000000,
111 (0x8000 << 16) | (0x9148 >> 2),
112 0x00000000,
113 (0x8040 << 16) | (0x9148 >> 2),
114 0x00000000,
115 (0x9c00 << 16) | (0x9150 >> 2),
116 0x00000000,
117 (0x9c00 << 16) | (0x897c >> 2),
118 0x00000000,
119 (0x9c00 << 16) | (0x8d8c >> 2),
120 0x00000000,
121 (0x9c00 << 16) | (0xac54 >> 2),
122 0X00000000,
123 0x3,
124 (0x9c00 << 16) | (0x98f8 >> 2),
125 0x00000000,
126 (0x9c00 << 16) | (0x9910 >> 2),
127 0x00000000,
128 (0x9c00 << 16) | (0x9914 >> 2),
129 0x00000000,
130 (0x9c00 << 16) | (0x9918 >> 2),
131 0x00000000,
132 (0x9c00 << 16) | (0x991c >> 2),
133 0x00000000,
134 (0x9c00 << 16) | (0x9920 >> 2),
135 0x00000000,
136 (0x9c00 << 16) | (0x9924 >> 2),
137 0x00000000,
138 (0x9c00 << 16) | (0x9928 >> 2),
139 0x00000000,
140 (0x9c00 << 16) | (0x992c >> 2),
141 0x00000000,
142 (0x9c00 << 16) | (0x9930 >> 2),
143 0x00000000,
144 (0x9c00 << 16) | (0x9934 >> 2),
145 0x00000000,
146 (0x9c00 << 16) | (0x9938 >> 2),
147 0x00000000,
148 (0x9c00 << 16) | (0x993c >> 2),
149 0x00000000,
150 (0x9c00 << 16) | (0x9940 >> 2),
151 0x00000000,
152 (0x9c00 << 16) | (0x9944 >> 2),
153 0x00000000,
154 (0x9c00 << 16) | (0x9948 >> 2),
155 0x00000000,
156 (0x9c00 << 16) | (0x994c >> 2),
157 0x00000000,
158 (0x9c00 << 16) | (0x9950 >> 2),
159 0x00000000,
160 (0x9c00 << 16) | (0x9954 >> 2),
161 0x00000000,
162 (0x9c00 << 16) | (0x9958 >> 2),
163 0x00000000,
164 (0x9c00 << 16) | (0x995c >> 2),
165 0x00000000,
166 (0x9c00 << 16) | (0x9960 >> 2),
167 0x00000000,
168 (0x9c00 << 16) | (0x9964 >> 2),
169 0x00000000,
170 (0x9c00 << 16) | (0x9968 >> 2),
171 0x00000000,
172 (0x9c00 << 16) | (0x996c >> 2),
173 0x00000000,
174 (0x9c00 << 16) | (0x9970 >> 2),
175 0x00000000,
176 (0x9c00 << 16) | (0x9974 >> 2),
177 0x00000000,
178 (0x9c00 << 16) | (0x9978 >> 2),
179 0x00000000,
180 (0x9c00 << 16) | (0x997c >> 2),
181 0x00000000,
182 (0x9c00 << 16) | (0x9980 >> 2),
183 0x00000000,
184 (0x9c00 << 16) | (0x9984 >> 2),
185 0x00000000,
186 (0x9c00 << 16) | (0x9988 >> 2),
187 0x00000000,
188 (0x9c00 << 16) | (0x998c >> 2),
189 0x00000000,
190 (0x9c00 << 16) | (0x8c00 >> 2),
191 0x00000000,
192 (0x9c00 << 16) | (0x8c14 >> 2),
193 0x00000000,
194 (0x9c00 << 16) | (0x8c04 >> 2),
195 0x00000000,
196 (0x9c00 << 16) | (0x8c08 >> 2),
197 0x00000000,
198 (0x8000 << 16) | (0x9b7c >> 2),
199 0x00000000,
200 (0x8040 << 16) | (0x9b7c >> 2),
201 0x00000000,
202 (0x8000 << 16) | (0xe84 >> 2),
203 0x00000000,
204 (0x8040 << 16) | (0xe84 >> 2),
205 0x00000000,
206 (0x8000 << 16) | (0x89c0 >> 2),
207 0x00000000,
208 (0x8040 << 16) | (0x89c0 >> 2),
209 0x00000000,
210 (0x8000 << 16) | (0x914c >> 2),
211 0x00000000,
212 (0x8040 << 16) | (0x914c >> 2),
213 0x00000000,
214 (0x8000 << 16) | (0x8c20 >> 2),
215 0x00000000,
216 (0x8040 << 16) | (0x8c20 >> 2),
217 0x00000000,
218 (0x8000 << 16) | (0x9354 >> 2),
219 0x00000000,
220 (0x8040 << 16) | (0x9354 >> 2),
221 0x00000000,
222 (0x9c00 << 16) | (0x9060 >> 2),
223 0x00000000,
224 (0x9c00 << 16) | (0x9364 >> 2),
225 0x00000000,
226 (0x9c00 << 16) | (0x9100 >> 2),
227 0x00000000,
228 (0x9c00 << 16) | (0x913c >> 2),
229 0x00000000,
230 (0x8000 << 16) | (0x90e0 >> 2),
231 0x00000000,
232 (0x8000 << 16) | (0x90e4 >> 2),
233 0x00000000,
234 (0x8000 << 16) | (0x90e8 >> 2),
235 0x00000000,
236 (0x8040 << 16) | (0x90e0 >> 2),
237 0x00000000,
238 (0x8040 << 16) | (0x90e4 >> 2),
239 0x00000000,
240 (0x8040 << 16) | (0x90e8 >> 2),
241 0x00000000,
242 (0x9c00 << 16) | (0x8bcc >> 2),
243 0x00000000,
244 (0x9c00 << 16) | (0x8b24 >> 2),
245 0x00000000,
246 (0x9c00 << 16) | (0x88c4 >> 2),
247 0x00000000,
248 (0x9c00 << 16) | (0x8e50 >> 2),
249 0x00000000,
250 (0x9c00 << 16) | (0x8c0c >> 2),
251 0x00000000,
252 (0x9c00 << 16) | (0x8e58 >> 2),
253 0x00000000,
254 (0x9c00 << 16) | (0x8e5c >> 2),
255 0x00000000,
256 (0x9c00 << 16) | (0x9508 >> 2),
257 0x00000000,
258 (0x9c00 << 16) | (0x950c >> 2),
259 0x00000000,
260 (0x9c00 << 16) | (0x9494 >> 2),
261 0x00000000,
262 (0x9c00 << 16) | (0xac0c >> 2),
263 0x00000000,
264 (0x9c00 << 16) | (0xac10 >> 2),
265 0x00000000,
266 (0x9c00 << 16) | (0xac14 >> 2),
267 0x00000000,
268 (0x9c00 << 16) | (0xae00 >> 2),
269 0x00000000,
270 (0x9c00 << 16) | (0xac08 >> 2),
271 0x00000000,
272 (0x9c00 << 16) | (0x88d4 >> 2),
273 0x00000000,
274 (0x9c00 << 16) | (0x88c8 >> 2),
275 0x00000000,
276 (0x9c00 << 16) | (0x88cc >> 2),
277 0x00000000,
278 (0x9c00 << 16) | (0x89b0 >> 2),
279 0x00000000,
280 (0x9c00 << 16) | (0x8b10 >> 2),
281 0x00000000,
282 (0x9c00 << 16) | (0x8a14 >> 2),
283 0x00000000,
284 (0x9c00 << 16) | (0x9830 >> 2),
285 0x00000000,
286 (0x9c00 << 16) | (0x9834 >> 2),
287 0x00000000,
288 (0x9c00 << 16) | (0x9838 >> 2),
289 0x00000000,
290 (0x9c00 << 16) | (0x9a10 >> 2),
291 0x00000000,
292 (0x8000 << 16) | (0x9870 >> 2),
293 0x00000000,
294 (0x8000 << 16) | (0x9874 >> 2),
295 0x00000000,
296 (0x8001 << 16) | (0x9870 >> 2),
297 0x00000000,
298 (0x8001 << 16) | (0x9874 >> 2),
299 0x00000000,
300 (0x8040 << 16) | (0x9870 >> 2),
301 0x00000000,
302 (0x8040 << 16) | (0x9874 >> 2),
303 0x00000000,
304 (0x8041 << 16) | (0x9870 >> 2),
305 0x00000000,
306 (0x8041 << 16) | (0x9874 >> 2),
307 0x00000000,
308 0x00000000
309};
310
205996c0
AD
/* Tahiti RLC golden settings: { reg byte offset, mask, value } triples.
 * Presumably applied as read-modify-write during ASIC init by a register
 * sequence helper not visible in this chunk — confirm against the caller.
 */
static const u32 tahiti_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601005,
	0xc47c, 0xffffffff, 0x10104040,
	0xc488, 0xffffffff, 0x0100000a,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000f4,
	0xf4a8, 0xffffffff, 0x00000000
};
320
/* Tahiti golden register settings: { reg byte offset, mask, value } triples
 * (hw-validated defaults applied at init). Do not reorder — entries are
 * consumed sequentially. */
static const u32 tahiti_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x277c, 0x00000003, 0x000007ff,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x2a00126a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x00000200, 0x000002fb,
	0xac10, 0xffffffff, 0x0000543b,
	0xac0c, 0xffffffff, 0xa9210876,
	0x88d0, 0xffffffff, 0x000fff40,
	0x88d4, 0x0000001f, 0x00000010,
	0x1410, 0x20000000, 0x20fffed8,
	0x15c0, 0x000c0fc0, 0x000c0400
};
355
/* Second Tahiti golden-register set, applied separately from the first
 * (split presumably reflects a distinct init phase — confirm at call site). */
static const u32 tahiti_golden_registers2[] =
{
	0xc64, 0x00000001, 0x00000001
};
360
/* Pitcairn RLC golden settings: { reg byte offset, mask, value } triples. */
static const u32 pitcairn_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601004,
	0xc47c, 0xffffffff, 0x10102020,
	0xc488, 0xffffffff, 0x01000020,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000a4
};
369
/* Pitcairn golden register settings: { reg byte offset, mask, value }. */
static const u32 pitcairn_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x2a00126a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f7,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x32761054,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
400
/* Verde RLC golden settings: { reg byte offset, mask, value } triples. */
static const u32 verde_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x033f1005,
	0xc47c, 0xffffffff, 0x10808020,
	0xc488, 0xffffffff, 0x00800008,
	0xc314, 0xffffffff, 0x00001000,
	0xc30c, 0xffffffff, 0x80010014
};
409
/* Verde golden register settings: { reg byte offset, mask, value }.
 * NOTE(review): several rows (0xd030, 0xd830, 0x2ae4, 0x240c, 0x8a14,
 * 0x28350, 0x9100, 0x8e88, 0x8e84, 0xac14, 0xac10, 0xac0c, 0x88d4) are
 * repeated verbatim — looks like copy/paste duplication; presumably
 * harmless if applied as idempotent masked writes, but confirm against
 * the consumer before deduplicating. */
static const u32 verde_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x240c, 0x000007ff, 0x00000000,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8a14, 0xf000001f, 0x00000007,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9100, 0x07ffffff, 0x03000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x00000003,
	0xac14, 0x000003ff, 0x00000003,
	0xac14, 0x000003ff, 0x00000003,
	0xac10, 0xffffffff, 0x00000000,
	0xac10, 0xffffffff, 0x00000000,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00001032,
	0xac0c, 0xffffffff, 0x00001032,
	0xac0c, 0xffffffff, 0x00001032,
	0x88d4, 0x0000001f, 0x00000010,
	0x88d4, 0x0000001f, 0x00000010,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
465
/* Oland RLC golden settings: { reg byte offset, mask, value } triples. */
static const u32 oland_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601005,
	0xc47c, 0xffffffff, 0x10104040,
	0xc488, 0xffffffff, 0x0100000a,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000f4
};
474
/* Oland golden register settings: { reg byte offset, mask, value }. */
static const u32 oland_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x00000082,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f3,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00003210,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
505
fffbdda4
AD
/* Hainan golden register settings: { reg byte offset, mask, value }. */
static const u32 hainan_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xd0c0, 0xff000fff, 0x00000100,
	0xd030, 0x000300c0, 0x00800040,
	0xd8c0, 0xff000fff, 0x00000100,
	0xd830, 0x000300c0, 0x00800040,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x00000000,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x03e00000, 0x03600000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f1,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00003210,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
534
/* Second Hainan golden-register set, applied separately from the first. */
static const u32 hainan_golden_registers2[] =
{
	0x98f8, 0xffffffff, 0x02010001
};
539
205996c0
AD
/* Tahiti medium-grain / coarse-grain clockgating (MGCG/CGCG) init table:
 * { reg byte offset, mask, value } triples applied sequentially.
 * Note 0x802c appears twice on purpose (second write after the 0x00000100
 * block) — entries are ordered writes, not a unique-keyed map. */
static const u32 tahiti_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x91d8, 0xffffffff, 0x00070006,
	0x91dc, 0xffffffff, 0x00090008,
	0x91e0, 0xffffffff, 0x0000000c,
	0x91e4, 0xffffffff, 0x000b000a,
	0x91e8, 0xffffffff, 0x000e000d,
	0x91ec, 0xffffffff, 0x00080007,
	0x91f0, 0xffffffff, 0x000a0009,
	0x91f4, 0xffffffff, 0x0000000d,
	0x91f8, 0xffffffff, 0x000c000b,
	0x91fc, 0xffffffff, 0x000f000e,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9264, 0xffffffff, 0x000e000d,
	0x9268, 0xffffffff, 0x0010000f,
	0x926c, 0xffffffff, 0x00000013,
	0x9270, 0xffffffff, 0x00120011,
	0x9274, 0xffffffff, 0x00150014,
	0x9278, 0xffffffff, 0x000f000e,
	0x927c, 0xffffffff, 0x00110010,
	0x9280, 0xffffffff, 0x00000014,
	0x9284, 0xffffffff, 0x00130012,
	0x9288, 0xffffffff, 0x00160015,
	0x928c, 0xffffffff, 0x0010000f,
	0x9290, 0xffffffff, 0x00120011,
	0x9294, 0xffffffff, 0x00000015,
	0x9298, 0xffffffff, 0x00140013,
	0x929c, 0xffffffff, 0x00170016,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
669
/* Pitcairn MGCG/CGCG init table: { reg byte offset, mask, value } triples.
 * Shorter than the Tahiti table (fewer shader engines → fewer 0x92xx rows,
 * and no 0x264c/0x2648 entries). */
static const u32 pitcairn_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
767
/* Verde MGCG/CGCG init table: { reg byte offset, mask, value } triples. */
static const u32 verde_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
867
/* Oland MGCG/CGCG init table: { reg byte offset, mask, value } triples. */
static const u32 oland_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
947
fffbdda4
AD
/* MGCG/CGCG (medium-/coarse-grain clock gating) init sequence for Hainan.
 * Consumed by radeon_program_register_sequence() from
 * si_init_golden_registers(); presumably laid out as
 * {register offset, and-mask, value} triples -- confirm against
 * radeon_program_register_sequence().  Do not edit values: they are
 * hardware golden settings.
 */
static const u32 hainan_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
1024
205996c0
AD
/* Power-gating init sequence for Verde, programmed via
 * radeon_program_register_sequence() from si_init_golden_registers().
 * Presumably {register offset, and-mask, value} triples -- TODO confirm
 * against radeon_program_register_sequence().  Values are hardware
 * golden settings; do not edit.
 */
static u32 verde_pg_init[] =
{
	0x353c, 0xffffffff, 0x40000,
	0x3538, 0xffffffff, 0x200010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x7007,
	0x3538, 0xffffffff, 0x300010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x400000,
	0x3538, 0xffffffff, 0x100010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x120200,
	0x3538, 0xffffffff, 0x500010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x1e1e16,
	0x3538, 0xffffffff, 0x600010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x171f1e,
	0x3538, 0xffffffff, 0x700010ff,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x353c, 0xffffffff, 0x0,
	0x3538, 0xffffffff, 0x9ff,
	0x3500, 0xffffffff, 0x0,
	0x3504, 0xffffffff, 0x10000800,
	0x3504, 0xffffffff, 0xf,
	0x3504, 0xffffffff, 0xf,
	0x3500, 0xffffffff, 0x4,
	0x3504, 0xffffffff, 0x1000051e,
	0x3504, 0xffffffff, 0xffff,
	0x3504, 0xffffffff, 0xffff,
	0x3500, 0xffffffff, 0x8,
	0x3504, 0xffffffff, 0x80500,
	0x3500, 0xffffffff, 0x12,
	0x3504, 0xffffffff, 0x9050c,
	0x3500, 0xffffffff, 0x1d,
	0x3504, 0xffffffff, 0xb052c,
	0x3500, 0xffffffff, 0x2a,
	0x3504, 0xffffffff, 0x1053e,
	0x3500, 0xffffffff, 0x2d,
	0x3504, 0xffffffff, 0x10546,
	0x3500, 0xffffffff, 0x30,
	0x3504, 0xffffffff, 0xa054e,
	0x3500, 0xffffffff, 0x3c,
	0x3504, 0xffffffff, 0x1055f,
	0x3500, 0xffffffff, 0x3f,
	0x3504, 0xffffffff, 0x10567,
	0x3500, 0xffffffff, 0x42,
	0x3504, 0xffffffff, 0x1056f,
	0x3500, 0xffffffff, 0x45,
	0x3504, 0xffffffff, 0x10572,
	0x3500, 0xffffffff, 0x48,
	0x3504, 0xffffffff, 0x20575,
	0x3500, 0xffffffff, 0x4c,
	0x3504, 0xffffffff, 0x190801,
	0x3500, 0xffffffff, 0x67,
	0x3504, 0xffffffff, 0x1082a,
	0x3500, 0xffffffff, 0x6a,
	0x3504, 0xffffffff, 0x1b082d,
	0x3500, 0xffffffff, 0x87,
	0x3504, 0xffffffff, 0x310851,
	0x3500, 0xffffffff, 0xba,
	0x3504, 0xffffffff, 0x891,
	0x3500, 0xffffffff, 0xbc,
	0x3504, 0xffffffff, 0x893,
	0x3500, 0xffffffff, 0xbe,
	0x3504, 0xffffffff, 0x20895,
	0x3500, 0xffffffff, 0xc2,
	0x3504, 0xffffffff, 0x20899,
	0x3500, 0xffffffff, 0xc6,
	0x3504, 0xffffffff, 0x2089d,
	0x3500, 0xffffffff, 0xca,
	0x3504, 0xffffffff, 0x8a1,
	0x3500, 0xffffffff, 0xcc,
	0x3504, 0xffffffff, 0x8a3,
	0x3500, 0xffffffff, 0xce,
	0x3504, 0xffffffff, 0x308a5,
	0x3500, 0xffffffff, 0xd3,
	0x3504, 0xffffffff, 0x6d08cd,
	0x3500, 0xffffffff, 0x142,
	0x3504, 0xffffffff, 0x2000095a,
	0x3504, 0xffffffff, 0x1,
	0x3500, 0xffffffff, 0x144,
	0x3504, 0xffffffff, 0x301f095b,
	0x3500, 0xffffffff, 0x165,
	0x3504, 0xffffffff, 0xc094d,
	0x3500, 0xffffffff, 0x173,
	0x3504, 0xffffffff, 0xf096d,
	0x3500, 0xffffffff, 0x184,
	0x3504, 0xffffffff, 0x15097f,
	0x3500, 0xffffffff, 0x19b,
	0x3504, 0xffffffff, 0xc0998,
	0x3500, 0xffffffff, 0x1a9,
	0x3504, 0xffffffff, 0x409a7,
	0x3500, 0xffffffff, 0x1af,
	0x3504, 0xffffffff, 0xcdc,
	0x3500, 0xffffffff, 0x1b1,
	0x3504, 0xffffffff, 0x800,
	0x3508, 0xffffffff, 0x6c9b2000,
	0x3510, 0xfc00, 0x2000,
	0x3544, 0xffffffff, 0xfc0,
	0x28d4, 0x00000100, 0x100
};
1151
1152static void si_init_golden_registers(struct radeon_device *rdev)
1153{
1154 switch (rdev->family) {
1155 case CHIP_TAHITI:
1156 radeon_program_register_sequence(rdev,
1157 tahiti_golden_registers,
1158 (const u32)ARRAY_SIZE(tahiti_golden_registers));
1159 radeon_program_register_sequence(rdev,
1160 tahiti_golden_rlc_registers,
1161 (const u32)ARRAY_SIZE(tahiti_golden_rlc_registers));
1162 radeon_program_register_sequence(rdev,
1163 tahiti_mgcg_cgcg_init,
1164 (const u32)ARRAY_SIZE(tahiti_mgcg_cgcg_init));
1165 radeon_program_register_sequence(rdev,
1166 tahiti_golden_registers2,
1167 (const u32)ARRAY_SIZE(tahiti_golden_registers2));
1168 break;
1169 case CHIP_PITCAIRN:
1170 radeon_program_register_sequence(rdev,
1171 pitcairn_golden_registers,
1172 (const u32)ARRAY_SIZE(pitcairn_golden_registers));
1173 radeon_program_register_sequence(rdev,
1174 pitcairn_golden_rlc_registers,
1175 (const u32)ARRAY_SIZE(pitcairn_golden_rlc_registers));
1176 radeon_program_register_sequence(rdev,
1177 pitcairn_mgcg_cgcg_init,
1178 (const u32)ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
1179 break;
1180 case CHIP_VERDE:
1181 radeon_program_register_sequence(rdev,
1182 verde_golden_registers,
1183 (const u32)ARRAY_SIZE(verde_golden_registers));
1184 radeon_program_register_sequence(rdev,
1185 verde_golden_rlc_registers,
1186 (const u32)ARRAY_SIZE(verde_golden_rlc_registers));
1187 radeon_program_register_sequence(rdev,
1188 verde_mgcg_cgcg_init,
1189 (const u32)ARRAY_SIZE(verde_mgcg_cgcg_init));
1190 radeon_program_register_sequence(rdev,
1191 verde_pg_init,
1192 (const u32)ARRAY_SIZE(verde_pg_init));
1193 break;
1194 case CHIP_OLAND:
1195 radeon_program_register_sequence(rdev,
1196 oland_golden_registers,
1197 (const u32)ARRAY_SIZE(oland_golden_registers));
1198 radeon_program_register_sequence(rdev,
1199 oland_golden_rlc_registers,
1200 (const u32)ARRAY_SIZE(oland_golden_rlc_registers));
1201 radeon_program_register_sequence(rdev,
1202 oland_mgcg_cgcg_init,
1203 (const u32)ARRAY_SIZE(oland_mgcg_cgcg_init));
1204 break;
fffbdda4
AD
1205 case CHIP_HAINAN:
1206 radeon_program_register_sequence(rdev,
1207 hainan_golden_registers,
1208 (const u32)ARRAY_SIZE(hainan_golden_registers));
1209 radeon_program_register_sequence(rdev,
1210 hainan_golden_registers2,
1211 (const u32)ARRAY_SIZE(hainan_golden_registers2));
1212 radeon_program_register_sequence(rdev,
1213 hainan_mgcg_cgcg_init,
1214 (const u32)ARRAY_SIZE(hainan_mgcg_cgcg_init));
1215 break;
205996c0
AD
1216 default:
1217 break;
1218 }
1219}
1220
454d2e2a
AD
1221#define PCIE_BUS_CLK 10000
1222#define TCLK (PCIE_BUS_CLK / 10)
1223
1224/**
1225 * si_get_xclk - get the xclk
1226 *
1227 * @rdev: radeon_device pointer
1228 *
1229 * Returns the reference clock used by the gfx engine
1230 * (SI).
1231 */
1232u32 si_get_xclk(struct radeon_device *rdev)
1233{
1234 u32 reference_clock = rdev->clock.spll.reference_freq;
1235 u32 tmp;
1236
1237 tmp = RREG32(CG_CLKPIN_CNTL_2);
1238 if (tmp & MUX_TCLK_TO_XCLK)
1239 return TCLK;
1240
1241 tmp = RREG32(CG_CLKPIN_CNTL);
1242 if (tmp & XTALIN_DIVIDE)
1243 return reference_clock / 4;
1244
1245 return reference_clock;
1246}
1247
1bd47d2e
AD
1248/* get temperature in millidegrees */
1249int si_get_temp(struct radeon_device *rdev)
1250{
1251 u32 temp;
1252 int actual_temp = 0;
1253
1254 temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >>
1255 CTF_TEMP_SHIFT;
1256
1257 if (temp & 0x200)
1258 actual_temp = 255;
1259 else
1260 actual_temp = temp & 0x1ff;
1261
1262 actual_temp = (actual_temp * 1000);
1263
1264 return actual_temp;
1265}
1266
8b074dd6
AD
/* number of {MC_SEQ_IO_DEBUG_INDEX, MC_SEQ_IO_DEBUG_DATA} pairs per table */
#define TAHITI_IO_MC_REGS_SIZE 36

/* MC IO debug {index, data} pairs written before loading the MC ucode
 * (see si_mc_load_microcode).  Hardware-provided values; do not edit.
 */
static const u32 tahiti_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a77400}
};
1307
/* Pitcairn MC IO debug {index, data} pairs (see si_mc_load_microcode).
 * Identical to the Tahiti table except for the final (0x9f) entry.
 */
static const u32 pitcairn_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a47400}
};
1346
/* Verde MC IO debug {index, data} pairs (see si_mc_load_microcode).
 * Identical to the Tahiti table except for the final (0x9f) entry.
 */
static const u32 verde_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a37400}
};
1385
bcc7f5d2
AD
/* Oland MC IO debug {index, data} pairs (see si_mc_load_microcode).
 * Identical to the Tahiti table except for the final (0x9f) entry.
 */
static const u32 oland_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a17730}
};
1424
c04c00b4
AD
/* Hainan MC IO debug {index, data} pairs (see si_mc_load_microcode).
 * Identical to the Tahiti table except for the final (0x9f) entry.
 */
static const u32 hainan_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a07730}
};
1463
8b074dd6 1464/* ucode loading */
6c7bccea 1465int si_mc_load_microcode(struct radeon_device *rdev)
8b074dd6
AD
1466{
1467 const __be32 *fw_data;
1468 u32 running, blackout = 0;
1469 u32 *io_mc_regs;
1470 int i, ucode_size, regs_size;
1471
1472 if (!rdev->mc_fw)
1473 return -EINVAL;
1474
1475 switch (rdev->family) {
1476 case CHIP_TAHITI:
1477 io_mc_regs = (u32 *)&tahiti_io_mc_regs;
1478 ucode_size = SI_MC_UCODE_SIZE;
1479 regs_size = TAHITI_IO_MC_REGS_SIZE;
1480 break;
1481 case CHIP_PITCAIRN:
1482 io_mc_regs = (u32 *)&pitcairn_io_mc_regs;
1483 ucode_size = SI_MC_UCODE_SIZE;
1484 regs_size = TAHITI_IO_MC_REGS_SIZE;
1485 break;
1486 case CHIP_VERDE:
1487 default:
1488 io_mc_regs = (u32 *)&verde_io_mc_regs;
1489 ucode_size = SI_MC_UCODE_SIZE;
1490 regs_size = TAHITI_IO_MC_REGS_SIZE;
1491 break;
bcc7f5d2
AD
1492 case CHIP_OLAND:
1493 io_mc_regs = (u32 *)&oland_io_mc_regs;
1494 ucode_size = OLAND_MC_UCODE_SIZE;
1495 regs_size = TAHITI_IO_MC_REGS_SIZE;
1496 break;
c04c00b4
AD
1497 case CHIP_HAINAN:
1498 io_mc_regs = (u32 *)&hainan_io_mc_regs;
1499 ucode_size = OLAND_MC_UCODE_SIZE;
1500 regs_size = TAHITI_IO_MC_REGS_SIZE;
1501 break;
8b074dd6
AD
1502 }
1503
1504 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK;
1505
1506 if (running == 0) {
1507 if (running) {
1508 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1509 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1510 }
1511
1512 /* reset the engine and set to writable */
1513 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1514 WREG32(MC_SEQ_SUP_CNTL, 0x00000010);
1515
1516 /* load mc io regs */
1517 for (i = 0; i < regs_size; i++) {
1518 WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
1519 WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
1520 }
1521 /* load the MC ucode */
1522 fw_data = (const __be32 *)rdev->mc_fw->data;
1523 for (i = 0; i < ucode_size; i++)
1524 WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));
1525
1526 /* put the engine back into the active state */
1527 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1528 WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
1529 WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
1530
1531 /* wait for training to complete */
1532 for (i = 0; i < rdev->usec_timeout; i++) {
1533 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
1534 break;
1535 udelay(1);
1536 }
1537 for (i = 0; i < rdev->usec_timeout; i++) {
1538 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1)
1539 break;
1540 udelay(1);
1541 }
1542
1543 if (running)
1544 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout);
1545 }
1546
1547 return 0;
1548}
1549
0f0de06c
AD
/**
 * si_init_microcode - fetch all SI firmware images from userspace
 *
 * @rdev: radeon_device pointer
 *
 * Requests the PFP, ME, CE, RLC, MC and SMC firmware blobs for the
 * detected family via request_firmware() and validates each blob's size
 * against the expected per-family value.  A missing or wrong-sized SMC
 * image is non-fatal (the pointer is cleared and err reset to 0, so dpm
 * is simply unavailable); any other failure releases everything.
 *
 * Returns 0 on success, a negative error code otherwise.
 *
 * NOTE(review): only the pfp size check does "goto out"; the me/ce/rlc/mc
 * checks set err = -EINVAL and fall through, so later images are still
 * requested before the common cleanup runs.  Presumably intentional
 * (all failures funnel into the err check at out:) -- confirm before
 * restructuring.
 */
static int si_init_microcode(struct radeon_device *rdev)
{
	const char *chip_name;
	const char *rlc_chip_name;
	size_t pfp_req_size, me_req_size, ce_req_size, rlc_req_size, mc_req_size;
	size_t smc_req_size;
	char fw_name[30];
	int err;

	DRM_DEBUG("\n");

	/* per-family firmware names and expected sizes (bytes) */
	switch (rdev->family) {
	case CHIP_TAHITI:
		chip_name = "TAHITI";
		rlc_chip_name = "TAHITI";
		pfp_req_size = SI_PFP_UCODE_SIZE * 4;
		me_req_size = SI_PM4_UCODE_SIZE * 4;
		ce_req_size = SI_CE_UCODE_SIZE * 4;
		rlc_req_size = SI_RLC_UCODE_SIZE * 4;
		mc_req_size = SI_MC_UCODE_SIZE * 4;
		smc_req_size = ALIGN(TAHITI_SMC_UCODE_SIZE, 4);
		break;
	case CHIP_PITCAIRN:
		chip_name = "PITCAIRN";
		rlc_chip_name = "PITCAIRN";
		pfp_req_size = SI_PFP_UCODE_SIZE * 4;
		me_req_size = SI_PM4_UCODE_SIZE * 4;
		ce_req_size = SI_CE_UCODE_SIZE * 4;
		rlc_req_size = SI_RLC_UCODE_SIZE * 4;
		mc_req_size = SI_MC_UCODE_SIZE * 4;
		smc_req_size = ALIGN(PITCAIRN_SMC_UCODE_SIZE, 4);
		break;
	case CHIP_VERDE:
		chip_name = "VERDE";
		rlc_chip_name = "VERDE";
		pfp_req_size = SI_PFP_UCODE_SIZE * 4;
		me_req_size = SI_PM4_UCODE_SIZE * 4;
		ce_req_size = SI_CE_UCODE_SIZE * 4;
		rlc_req_size = SI_RLC_UCODE_SIZE * 4;
		mc_req_size = SI_MC_UCODE_SIZE * 4;
		smc_req_size = ALIGN(VERDE_SMC_UCODE_SIZE, 4);
		break;
	case CHIP_OLAND:
		chip_name = "OLAND";
		rlc_chip_name = "OLAND";
		pfp_req_size = SI_PFP_UCODE_SIZE * 4;
		me_req_size = SI_PM4_UCODE_SIZE * 4;
		ce_req_size = SI_CE_UCODE_SIZE * 4;
		rlc_req_size = SI_RLC_UCODE_SIZE * 4;
		mc_req_size = OLAND_MC_UCODE_SIZE * 4;
		smc_req_size = ALIGN(OLAND_SMC_UCODE_SIZE, 4);
		break;
	case CHIP_HAINAN:
		chip_name = "HAINAN";
		rlc_chip_name = "HAINAN";
		pfp_req_size = SI_PFP_UCODE_SIZE * 4;
		me_req_size = SI_PM4_UCODE_SIZE * 4;
		ce_req_size = SI_CE_UCODE_SIZE * 4;
		rlc_req_size = SI_RLC_UCODE_SIZE * 4;
		mc_req_size = OLAND_MC_UCODE_SIZE * 4;
		smc_req_size = ALIGN(HAINAN_SMC_UCODE_SIZE, 4);
		break;
	default: BUG();
	}

	DRM_INFO("Loading %s Microcode\n", chip_name);

	/* PFP (pre-fetch parser) ucode -- a bad size here aborts immediately */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
	err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->pfp_fw->size != pfp_req_size) {
		printk(KERN_ERR
		       "si_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->pfp_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}

	/* ME (micro engine) ucode */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
	err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->me_fw->size != me_req_size) {
		printk(KERN_ERR
		       "si_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->me_fw->size, fw_name);
		err = -EINVAL;
	}

	/* CE (constant engine) ucode */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_ce.bin", chip_name);
	err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->ce_fw->size != ce_req_size) {
		printk(KERN_ERR
		       "si_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->ce_fw->size, fw_name);
		err = -EINVAL;
	}

	/* RLC ucode */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
	err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->rlc_fw->size != rlc_req_size) {
		printk(KERN_ERR
		       "si_rlc: Bogus length %zu in firmware \"%s\"\n",
		       rdev->rlc_fw->size, fw_name);
		err = -EINVAL;
	}

	/* MC (memory controller) ucode */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc.bin", chip_name);
	err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
	if (err)
		goto out;
	if (rdev->mc_fw->size != mc_req_size) {
		printk(KERN_ERR
		       "si_mc: Bogus length %zu in firmware \"%s\"\n",
		       rdev->mc_fw->size, fw_name);
		err = -EINVAL;
	}

	/* SMC ucode -- optional: failure only disables it, err is reset to 0 */
	snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", chip_name);
	err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
	if (err) {
		printk(KERN_ERR
		       "smc: error loading firmware \"%s\"\n",
		       fw_name);
		release_firmware(rdev->smc_fw);
		rdev->smc_fw = NULL;
		err = 0;
	} else if (rdev->smc_fw->size != smc_req_size) {
		printk(KERN_ERR
		       "si_smc: Bogus length %zu in firmware \"%s\"\n",
		       rdev->smc_fw->size, fw_name);
		err = -EINVAL;
	}

out:
	/* on any recorded failure drop everything that was fetched */
	if (err) {
		if (err != -EINVAL)
			printk(KERN_ERR
			       "si_cp: Failed to load firmware \"%s\"\n",
			       fw_name);
		release_firmware(rdev->pfp_fw);
		rdev->pfp_fw = NULL;
		release_firmware(rdev->me_fw);
		rdev->me_fw = NULL;
		release_firmware(rdev->ce_fw);
		rdev->ce_fw = NULL;
		release_firmware(rdev->rlc_fw);
		rdev->rlc_fw = NULL;
		release_firmware(rdev->mc_fw);
		rdev->mc_fw = NULL;
		release_firmware(rdev->smc_fw);
		rdev->smc_fw = NULL;
	}
	return err;
}
1710
43b3cd99
AD
1711/* watermark setup */
1712static u32 dce6_line_buffer_adjust(struct radeon_device *rdev,
1713 struct radeon_crtc *radeon_crtc,
1714 struct drm_display_mode *mode,
1715 struct drm_display_mode *other_mode)
1716{
290d2457
AD
1717 u32 tmp, buffer_alloc, i;
1718 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
43b3cd99
AD
1719 /*
1720 * Line Buffer Setup
1721 * There are 3 line buffers, each one shared by 2 display controllers.
1722 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1723 * the display controllers. The paritioning is done via one of four
1724 * preset allocations specified in bits 21:20:
1725 * 0 - half lb
1726 * 2 - whole lb, other crtc must be disabled
1727 */
1728 /* this can get tricky if we have two large displays on a paired group
1729 * of crtcs. Ideally for multiple large displays we'd assign them to
1730 * non-linked crtcs for maximum line buffer allocation.
1731 */
1732 if (radeon_crtc->base.enabled && mode) {
290d2457 1733 if (other_mode) {
43b3cd99 1734 tmp = 0; /* 1/2 */
290d2457
AD
1735 buffer_alloc = 1;
1736 } else {
43b3cd99 1737 tmp = 2; /* whole */
290d2457
AD
1738 buffer_alloc = 2;
1739 }
1740 } else {
43b3cd99 1741 tmp = 0;
290d2457
AD
1742 buffer_alloc = 0;
1743 }
43b3cd99
AD
1744
1745 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset,
1746 DC_LB_MEMORY_CONFIG(tmp));
1747
290d2457
AD
1748 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1749 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1750 for (i = 0; i < rdev->usec_timeout; i++) {
1751 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1752 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1753 break;
1754 udelay(1);
1755 }
1756
43b3cd99
AD
1757 if (radeon_crtc->base.enabled && mode) {
1758 switch (tmp) {
1759 case 0:
1760 default:
1761 return 4096 * 2;
1762 case 2:
1763 return 8192 * 2;
1764 }
1765 }
1766
1767 /* controller not enabled, so no lb used */
1768 return 0;
1769}
1770
ca7db22b 1771static u32 si_get_number_of_dram_channels(struct radeon_device *rdev)
43b3cd99
AD
1772{
1773 u32 tmp = RREG32(MC_SHARED_CHMAP);
1774
1775 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1776 case 0:
1777 default:
1778 return 1;
1779 case 1:
1780 return 2;
1781 case 2:
1782 return 4;
1783 case 3:
1784 return 8;
1785 case 4:
1786 return 3;
1787 case 5:
1788 return 6;
1789 case 6:
1790 return 10;
1791 case 7:
1792 return 12;
1793 case 8:
1794 return 16;
1795 }
1796}
1797
/* Inputs for the DCE6 display watermark calculations below
 * (dce6_dram_bandwidth() and friends).
 */
struct dce6_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1813
1814static u32 dce6_dram_bandwidth(struct dce6_wm_params *wm)
1815{
1816 /* Calculate raw DRAM Bandwidth */
1817 fixed20_12 dram_efficiency; /* 0.7 */
1818 fixed20_12 yclk, dram_channels, bandwidth;
1819 fixed20_12 a;
1820
1821 a.full = dfixed_const(1000);
1822 yclk.full = dfixed_const(wm->yclk);
1823 yclk.full = dfixed_div(yclk, a);
1824 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1825 a.full = dfixed_const(10);
1826 dram_efficiency.full = dfixed_const(7);
1827 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1828 bandwidth.full = dfixed_mul(dram_channels, yclk);
1829 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1830
1831 return dfixed_trunc(bandwidth);
1832}
1833
1834static u32 dce6_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1835{
1836 /* Calculate DRAM Bandwidth and the part allocated to display. */
1837 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1838 fixed20_12 yclk, dram_channels, bandwidth;
1839 fixed20_12 a;
1840
1841 a.full = dfixed_const(1000);
1842 yclk.full = dfixed_const(wm->yclk);
1843 yclk.full = dfixed_div(yclk, a);
1844 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1845 a.full = dfixed_const(10);
1846 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1847 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1848 bandwidth.full = dfixed_mul(dram_channels, yclk);
1849 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1850
1851 return dfixed_trunc(bandwidth);
1852}
1853
1854static u32 dce6_data_return_bandwidth(struct dce6_wm_params *wm)
1855{
1856 /* Calculate the display Data return Bandwidth */
1857 fixed20_12 return_efficiency; /* 0.8 */
1858 fixed20_12 sclk, bandwidth;
1859 fixed20_12 a;
1860
1861 a.full = dfixed_const(1000);
1862 sclk.full = dfixed_const(wm->sclk);
1863 sclk.full = dfixed_div(sclk, a);
1864 a.full = dfixed_const(10);
1865 return_efficiency.full = dfixed_const(8);
1866 return_efficiency.full = dfixed_div(return_efficiency, a);
1867 a.full = dfixed_const(32);
1868 bandwidth.full = dfixed_mul(a, sclk);
1869 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1870
1871 return dfixed_trunc(bandwidth);
1872}
1873
/* Bytes per DMIF request; fixed at 32 on DCE6. */
static u32 dce6_get_dmif_bytes_per_request(struct dce6_wm_params *wm)
{
	return 32;
}
1878
1879static u32 dce6_dmif_request_bandwidth(struct dce6_wm_params *wm)
1880{
1881 /* Calculate the DMIF Request Bandwidth */
1882 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1883 fixed20_12 disp_clk, sclk, bandwidth;
1884 fixed20_12 a, b1, b2;
1885 u32 min_bandwidth;
1886
1887 a.full = dfixed_const(1000);
1888 disp_clk.full = dfixed_const(wm->disp_clk);
1889 disp_clk.full = dfixed_div(disp_clk, a);
1890 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm) / 2);
1891 b1.full = dfixed_mul(a, disp_clk);
1892
1893 a.full = dfixed_const(1000);
1894 sclk.full = dfixed_const(wm->sclk);
1895 sclk.full = dfixed_div(sclk, a);
1896 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm));
1897 b2.full = dfixed_mul(a, sclk);
1898
1899 a.full = dfixed_const(10);
1900 disp_clk_request_efficiency.full = dfixed_const(8);
1901 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1902
1903 min_bandwidth = min(dfixed_trunc(b1), dfixed_trunc(b2));
1904
1905 a.full = dfixed_const(min_bandwidth);
1906 bandwidth.full = dfixed_mul(a, disp_clk_request_efficiency);
1907
1908 return dfixed_trunc(bandwidth);
1909}
1910
1911static u32 dce6_available_bandwidth(struct dce6_wm_params *wm)
1912{
1913 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1914 u32 dram_bandwidth = dce6_dram_bandwidth(wm);
1915 u32 data_return_bandwidth = dce6_data_return_bandwidth(wm);
1916 u32 dmif_req_bandwidth = dce6_dmif_request_bandwidth(wm);
1917
1918 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1919}
1920
1921static u32 dce6_average_bandwidth(struct dce6_wm_params *wm)
1922{
1923 /* Calculate the display mode Average Bandwidth
1924 * DisplayMode should contain the source and destination dimensions,
1925 * timing, etc.
1926 */
1927 fixed20_12 bpp;
1928 fixed20_12 line_time;
1929 fixed20_12 src_width;
1930 fixed20_12 bandwidth;
1931 fixed20_12 a;
1932
1933 a.full = dfixed_const(1000);
1934 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1935 line_time.full = dfixed_div(line_time, a);
1936 bpp.full = dfixed_const(wm->bytes_per_pixel);
1937 src_width.full = dfixed_const(wm->src_width);
1938 bandwidth.full = dfixed_mul(src_width, bpp);
1939 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1940 bandwidth.full = dfixed_div(bandwidth, line_time);
1941
1942 return dfixed_trunc(bandwidth);
1943}
1944
static u32 dce6_latency_watermark(struct dce6_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = dce6_available_bandwidth(wm);
	/* time for a 512-byte, 8-chunk request to return at the available rate */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	/* time for a cursor line pair (128 bytes * 4) to return */
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* data return time consumed by the other heads sharing the memory path */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	u32 tmp, dmif_size = 12288;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* with heavy downscaling or many vtaps, up to 4 source lines feed
	 * each destination line; otherwise 2 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* a = this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* b = dmif_size / time to fill it at disp_clk (mc_latency + 512 cycles) */
	b.full = dfixed_const(mc_latency + 512);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(b, c);

	c.full = dfixed_const(dmif_size);
	b.full = dfixed_div(c, b);

	tmp = min(dfixed_trunc(a), dfixed_trunc(b));

	/* b = disp_clk (kHz -> MHz) * bytes_per_pixel */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill bandwidth is the lesser of the two limits above */
	lb_fill_bw = min(tmp, dfixed_trunc(b));

	/* time to fill max_src_lines_per_dst_line worth of pixels at lb_fill_bw */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line fills slower than the active period, the extra time
	 * adds to the latency the watermark must cover */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2007
2008static bool dce6_average_bandwidth_vs_dram_bandwidth_for_display(struct dce6_wm_params *wm)
2009{
2010 if (dce6_average_bandwidth(wm) <=
2011 (dce6_dram_bandwidth_for_display(wm) / wm->num_heads))
2012 return true;
2013 else
2014 return false;
2015};
2016
2017static bool dce6_average_bandwidth_vs_available_bandwidth(struct dce6_wm_params *wm)
2018{
2019 if (dce6_average_bandwidth(wm) <=
2020 (dce6_available_bandwidth(wm) / wm->num_heads))
2021 return true;
2022 else
2023 return false;
2024};
2025
2026static bool dce6_check_latency_hiding(struct dce6_wm_params *wm)
2027{
2028 u32 lb_partitions = wm->lb_size / wm->src_width;
2029 u32 line_time = wm->active_time + wm->blank_time;
2030 u32 latency_tolerant_lines;
2031 u32 latency_hiding;
2032 fixed20_12 a;
2033
2034 a.full = dfixed_const(1);
2035 if (wm->vsc.full > a.full)
2036 latency_tolerant_lines = 1;
2037 else {
2038 if (lb_partitions <= (wm->vtaps + 1))
2039 latency_tolerant_lines = 1;
2040 else
2041 latency_tolerant_lines = 2;
2042 }
2043
2044 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2045
2046 if (dce6_latency_watermark(wm) <= latency_hiding)
2047 return true;
2048 else
2049 return false;
2050}
2051
/* Compute and program the display priority / latency watermarks for one
 * crtc. Fills two dce6_wm_params sets (high and low clocks), derives the
 * latency watermarks and priority marks from them, and writes the
 * DPG_PIPE_ARBITRATION_CONTROL3, DPG_PIPE_LATENCY_CONTROL and
 * PRIORITY_A/B_CNT registers for this crtc. The computed line time and
 * watermarks are also saved on the radeon_crtc for later use by DPM.
 */
static void dce6_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct dce6_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line time capped to the 16-bit register field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		/* ARUBA uses the evergreen DRAM channel query; other SI parts
		 * use the SI-specific one */
		if (rdev->family == CHIP_ARUBA)
			dram_channels = evergreen_get_number_of_dram_channels(rdev);
		else
			dram_channels = si_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(dce6_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(dce6_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !dce6_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !dce6_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !dce6_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !dce6_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark_a * pixel clock * hsc / 1000 / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same formula with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
	WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
	WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2211
2212void dce6_bandwidth_update(struct radeon_device *rdev)
2213{
2214 struct drm_display_mode *mode0 = NULL;
2215 struct drm_display_mode *mode1 = NULL;
2216 u32 num_heads = 0, lb_size;
2217 int i;
2218
2219 radeon_update_display_priority(rdev);
2220
2221 for (i = 0; i < rdev->num_crtc; i++) {
2222 if (rdev->mode_info.crtcs[i]->base.enabled)
2223 num_heads++;
2224 }
2225 for (i = 0; i < rdev->num_crtc; i += 2) {
2226 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2227 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2228 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2229 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2230 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2231 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2232 }
2233}
2234
0a96d72b
AD
2235/*
2236 * Core functions
2237 */
0a96d72b
AD
/* Program the 32-entry GB_TILE_MODE table with the per-family tiling
 * encodings. TAHITI/PITCAIRN use the P8_32x32_8x16 pipe config;
 * VERDE/OLAND/HAINAN use P4_8x16 (their PRT entries still use P8).
 * Each computed value is also cached in
 * rdev->config.si.tile_mode_array[] so it can be reported to userspace.
 * Entries 18-20 and 26-31 are intentionally left as 0 (unused modes).
 */
static void si_tiling_mode_table_init(struct radeon_device *rdev)
{
	const u32 num_tile_mode_states = 32;
	u32 reg_offset, gb_tile_moden, split_equal_to_row_size;

	/* map the memory row size to the matching tile-split encoding */
	switch (rdev->config.si.mem_row_size_in_kb) {
	case 1:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_1KB;
		break;
	case 2:
	default:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_2KB;
		break;
	case 4:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_4KB;
		break;
	}

	if ((rdev->family == CHIP_TAHITI) ||
	    (rdev->family == CHIP_PITCAIRN)) {
		for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
			switch (reg_offset) {
			case 0:  /* non-AA compressed depth or any compressed stencil */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 1:  /* 2xAA/4xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 2:  /* 8xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 3:  /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 4:  /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 5:  /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 6:  /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 7:  /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 8:  /* 1D and 1D Array Surfaces */
				gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 9:  /* Displayable maps. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 10:  /* Display 8bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 11:  /* Display 16bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 12:  /* Display 32bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 13:  /* Thin. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 14:  /* Thin 8 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 15:  /* Thin 16 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 16:  /* Thin 32 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 17:  /* Thin 64 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 21:  /* 8 bpp PRT. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 22:  /* 16 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 23:  /* 32 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 24:  /* 64 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 25:  /* 128 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
						 NUM_BANKS(ADDR_SURF_8_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			default:
				gb_tile_moden = 0;
				break;
			}
			/* cache for userspace queries, then program the hw */
			rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
			WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
		}
	} else if ((rdev->family == CHIP_VERDE) ||
		   (rdev->family == CHIP_OLAND) ||
		   (rdev->family == CHIP_HAINAN)) {
		for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
			switch (reg_offset) {
			case 0:  /* non-AA compressed depth or any compressed stencil */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 1:  /* 2xAA/4xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 2:  /* 8xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 3:  /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 4:  /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 5:  /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 6:  /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 7:  /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 8:  /* 1D and 1D Array Surfaces */
				gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 9:  /* Displayable maps. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 10:  /* Display 8bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 11:  /* Display 16bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 12:  /* Display 32bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 13:  /* Thin. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 14:  /* Thin 8 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 15:  /* Thin 16 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 16:  /* Thin 32 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 17:  /* Thin 64 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 21:  /* 8 bpp PRT. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 22:  /* 16 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 23:  /* 32 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 24:  /* 64 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 25:  /* 128 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
						 NUM_BANKS(ADDR_SURF_8_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			default:
				gb_tile_moden = 0;
				break;
			}
			/* cache for userspace queries, then program the hw */
			rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
			WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
		}
	} else
		DRM_ERROR("unknown asic: 0x%x\n", rdev->family);
}
2742
1a8ca750
AD
2743static void si_select_se_sh(struct radeon_device *rdev,
2744 u32 se_num, u32 sh_num)
2745{
2746 u32 data = INSTANCE_BROADCAST_WRITES;
2747
2748 if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
79b52d6a 2749 data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
1a8ca750
AD
2750 else if (se_num == 0xffffffff)
2751 data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
2752 else if (sh_num == 0xffffffff)
2753 data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
2754 else
2755 data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
2756 WREG32(GRBM_GFX_INDEX, data);
2757}
2758
2759static u32 si_create_bitmask(u32 bit_width)
2760{
2761 u32 i, mask = 0;
2762
2763 for (i = 0; i < bit_width; i++) {
2764 mask <<= 1;
2765 mask |= 1;
2766 }
2767 return mask;
2768}
2769
2770static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh)
2771{
2772 u32 data, mask;
2773
2774 data = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
2775 if (data & 1)
2776 data &= INACTIVE_CUS_MASK;
2777 else
2778 data = 0;
2779 data |= RREG32(GC_USER_SHADER_ARRAY_CONFIG);
2780
2781 data >>= INACTIVE_CUS_SHIFT;
2782
2783 mask = si_create_bitmask(cu_per_sh);
2784
2785 return ~data & mask;
2786}
2787
2788static void si_setup_spi(struct radeon_device *rdev,
2789 u32 se_num, u32 sh_per_se,
2790 u32 cu_per_sh)
2791{
2792 int i, j, k;
2793 u32 data, mask, active_cu;
2794
2795 for (i = 0; i < se_num; i++) {
2796 for (j = 0; j < sh_per_se; j++) {
2797 si_select_se_sh(rdev, i, j);
2798 data = RREG32(SPI_STATIC_THREAD_MGMT_3);
2799 active_cu = si_get_cu_enabled(rdev, cu_per_sh);
2800
2801 mask = 1;
2802 for (k = 0; k < 16; k++) {
2803 mask <<= k;
2804 if (active_cu & mask) {
2805 data &= ~mask;
2806 WREG32(SPI_STATIC_THREAD_MGMT_3, data);
2807 break;
2808 }
2809 }
2810 }
2811 }
2812 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2813}
2814
2815static u32 si_get_rb_disabled(struct radeon_device *rdev,
9fadb352 2816 u32 max_rb_num_per_se,
1a8ca750
AD
2817 u32 sh_per_se)
2818{
2819 u32 data, mask;
2820
2821 data = RREG32(CC_RB_BACKEND_DISABLE);
2822 if (data & 1)
2823 data &= BACKEND_DISABLE_MASK;
2824 else
2825 data = 0;
2826 data |= RREG32(GC_USER_RB_BACKEND_DISABLE);
2827
2828 data >>= BACKEND_DISABLE_SHIFT;
2829
9fadb352 2830 mask = si_create_bitmask(max_rb_num_per_se / sh_per_se);
1a8ca750
AD
2831
2832 return data & mask;
2833}
2834
2835static void si_setup_rb(struct radeon_device *rdev,
2836 u32 se_num, u32 sh_per_se,
9fadb352 2837 u32 max_rb_num_per_se)
1a8ca750
AD
2838{
2839 int i, j;
2840 u32 data, mask;
2841 u32 disabled_rbs = 0;
2842 u32 enabled_rbs = 0;
2843
2844 for (i = 0; i < se_num; i++) {
2845 for (j = 0; j < sh_per_se; j++) {
2846 si_select_se_sh(rdev, i, j);
9fadb352 2847 data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
1a8ca750
AD
2848 disabled_rbs |= data << ((i * sh_per_se + j) * TAHITI_RB_BITMAP_WIDTH_PER_SH);
2849 }
2850 }
2851 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2852
2853 mask = 1;
9fadb352 2854 for (i = 0; i < max_rb_num_per_se * se_num; i++) {
1a8ca750
AD
2855 if (!(disabled_rbs & mask))
2856 enabled_rbs |= mask;
2857 mask <<= 1;
2858 }
2859
439a1cff
MO
2860 rdev->config.si.backend_enable_mask = enabled_rbs;
2861
1a8ca750
AD
2862 for (i = 0; i < se_num; i++) {
2863 si_select_se_sh(rdev, i, 0xffffffff);
2864 data = 0;
2865 for (j = 0; j < sh_per_se; j++) {
2866 switch (enabled_rbs & 3) {
2867 case 1:
2868 data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
2869 break;
2870 case 2:
2871 data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
2872 break;
2873 case 3:
2874 default:
2875 data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
2876 break;
2877 }
2878 enabled_rbs >>= 2;
2879 }
2880 WREG32(PA_SC_RASTER_CONFIG, data);
2881 }
2882 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2883}
2884
0a96d72b
AD
2885static void si_gpu_init(struct radeon_device *rdev)
2886{
0a96d72b
AD
2887 u32 gb_addr_config = 0;
2888 u32 mc_shared_chmap, mc_arb_ramcfg;
0a96d72b 2889 u32 sx_debug_1;
0a96d72b
AD
2890 u32 hdp_host_path_cntl;
2891 u32 tmp;
2892 int i, j;
2893
2894 switch (rdev->family) {
2895 case CHIP_TAHITI:
2896 rdev->config.si.max_shader_engines = 2;
0a96d72b 2897 rdev->config.si.max_tile_pipes = 12;
1a8ca750
AD
2898 rdev->config.si.max_cu_per_sh = 8;
2899 rdev->config.si.max_sh_per_se = 2;
0a96d72b
AD
2900 rdev->config.si.max_backends_per_se = 4;
2901 rdev->config.si.max_texture_channel_caches = 12;
2902 rdev->config.si.max_gprs = 256;
2903 rdev->config.si.max_gs_threads = 32;
2904 rdev->config.si.max_hw_contexts = 8;
2905
2906 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2907 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2908 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2909 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
1a8ca750 2910 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
0a96d72b
AD
2911 break;
2912 case CHIP_PITCAIRN:
2913 rdev->config.si.max_shader_engines = 2;
0a96d72b 2914 rdev->config.si.max_tile_pipes = 8;
1a8ca750
AD
2915 rdev->config.si.max_cu_per_sh = 5;
2916 rdev->config.si.max_sh_per_se = 2;
0a96d72b
AD
2917 rdev->config.si.max_backends_per_se = 4;
2918 rdev->config.si.max_texture_channel_caches = 8;
2919 rdev->config.si.max_gprs = 256;
2920 rdev->config.si.max_gs_threads = 32;
2921 rdev->config.si.max_hw_contexts = 8;
2922
2923 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2924 rdev->config.si.sc_prim_fifo_size_backend = 0x100;
2925 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2926 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
1a8ca750 2927 gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
0a96d72b
AD
2928 break;
2929 case CHIP_VERDE:
2930 default:
2931 rdev->config.si.max_shader_engines = 1;
0a96d72b 2932 rdev->config.si.max_tile_pipes = 4;
468ef1a5 2933 rdev->config.si.max_cu_per_sh = 5;
1a8ca750 2934 rdev->config.si.max_sh_per_se = 2;
0a96d72b
AD
2935 rdev->config.si.max_backends_per_se = 4;
2936 rdev->config.si.max_texture_channel_caches = 4;
2937 rdev->config.si.max_gprs = 256;
2938 rdev->config.si.max_gs_threads = 32;
2939 rdev->config.si.max_hw_contexts = 8;
2940
d0ae7fcc
AD
2941 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2942 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2943 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2944 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2945 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
2946 break;
2947 case CHIP_OLAND:
2948 rdev->config.si.max_shader_engines = 1;
2949 rdev->config.si.max_tile_pipes = 4;
2950 rdev->config.si.max_cu_per_sh = 6;
2951 rdev->config.si.max_sh_per_se = 1;
2952 rdev->config.si.max_backends_per_se = 2;
2953 rdev->config.si.max_texture_channel_caches = 4;
2954 rdev->config.si.max_gprs = 256;
2955 rdev->config.si.max_gs_threads = 16;
2956 rdev->config.si.max_hw_contexts = 8;
2957
0a96d72b
AD
2958 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2959 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2960 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2961 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
1a8ca750 2962 gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
0a96d72b 2963 break;
8b02859d
AD
2964 case CHIP_HAINAN:
2965 rdev->config.si.max_shader_engines = 1;
2966 rdev->config.si.max_tile_pipes = 4;
2967 rdev->config.si.max_cu_per_sh = 5;
2968 rdev->config.si.max_sh_per_se = 1;
2969 rdev->config.si.max_backends_per_se = 1;
2970 rdev->config.si.max_texture_channel_caches = 2;
2971 rdev->config.si.max_gprs = 256;
2972 rdev->config.si.max_gs_threads = 16;
2973 rdev->config.si.max_hw_contexts = 8;
2974
2975 rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
2976 rdev->config.si.sc_prim_fifo_size_backend = 0x40;
2977 rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
2978 rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
2979 gb_addr_config = HAINAN_GB_ADDR_CONFIG_GOLDEN;
2980 break;
0a96d72b
AD
2981 }
2982
2983 /* Initialize HDP */
2984 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2985 WREG32((0x2c14 + j), 0x00000000);
2986 WREG32((0x2c18 + j), 0x00000000);
2987 WREG32((0x2c1c + j), 0x00000000);
2988 WREG32((0x2c20 + j), 0x00000000);
2989 WREG32((0x2c24 + j), 0x00000000);
2990 }
2991
2992 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
2993
2994 evergreen_fix_pci_max_read_req_size(rdev);
2995
2996 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2997
2998 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
2999 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3000
0a96d72b 3001 rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes;
0a96d72b
AD
3002 rdev->config.si.mem_max_burst_length_bytes = 256;
3003 tmp = (mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT;
3004 rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
3005 if (rdev->config.si.mem_row_size_in_kb > 4)
3006 rdev->config.si.mem_row_size_in_kb = 4;
3007 /* XXX use MC settings? */
3008 rdev->config.si.shader_engine_tile_size = 32;
3009 rdev->config.si.num_gpus = 1;
3010 rdev->config.si.multi_gpu_tile_size = 64;
3011
1a8ca750
AD
3012 /* fix up row size */
3013 gb_addr_config &= ~ROW_SIZE_MASK;
0a96d72b
AD
3014 switch (rdev->config.si.mem_row_size_in_kb) {
3015 case 1:
3016 default:
3017 gb_addr_config |= ROW_SIZE(0);
3018 break;
3019 case 2:
3020 gb_addr_config |= ROW_SIZE(1);
3021 break;
3022 case 4:
3023 gb_addr_config |= ROW_SIZE(2);
3024 break;
3025 }
3026
0a96d72b
AD
3027 /* setup tiling info dword. gb_addr_config is not adequate since it does
3028 * not have bank info, so create a custom tiling dword.
3029 * bits 3:0 num_pipes
3030 * bits 7:4 num_banks
3031 * bits 11:8 group_size
3032 * bits 15:12 row_size
3033 */
3034 rdev->config.si.tile_config = 0;
3035 switch (rdev->config.si.num_tile_pipes) {
3036 case 1:
3037 rdev->config.si.tile_config |= (0 << 0);
3038 break;
3039 case 2:
3040 rdev->config.si.tile_config |= (1 << 0);
3041 break;
3042 case 4:
3043 rdev->config.si.tile_config |= (2 << 0);
3044 break;
3045 case 8:
3046 default:
3047 /* XXX what about 12? */
3048 rdev->config.si.tile_config |= (3 << 0);
3049 break;
dca571a6
CK
3050 }
3051 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3052 case 0: /* four banks */
1a8ca750 3053 rdev->config.si.tile_config |= 0 << 4;
dca571a6
CK
3054 break;
3055 case 1: /* eight banks */
3056 rdev->config.si.tile_config |= 1 << 4;
3057 break;
3058 case 2: /* sixteen banks */
3059 default:
3060 rdev->config.si.tile_config |= 2 << 4;
3061 break;
3062 }
0a96d72b
AD
3063 rdev->config.si.tile_config |=
3064 ((gb_addr_config & PIPE_INTERLEAVE_SIZE_MASK) >> PIPE_INTERLEAVE_SIZE_SHIFT) << 8;
3065 rdev->config.si.tile_config |=
3066 ((gb_addr_config & ROW_SIZE_MASK) >> ROW_SIZE_SHIFT) << 12;
3067
0a96d72b
AD
3068 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3069 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
7c1c7c18 3070 WREG32(DMIF_ADDR_CALC, gb_addr_config);
0a96d72b 3071 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
8c5fd7ef
AD
3072 WREG32(DMA_TILING_CONFIG + DMA0_REGISTER_OFFSET, gb_addr_config);
3073 WREG32(DMA_TILING_CONFIG + DMA1_REGISTER_OFFSET, gb_addr_config);
1df0d523
AD
3074 if (rdev->has_uvd) {
3075 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3076 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3077 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3078 }
0a96d72b 3079
1a8ca750 3080 si_tiling_mode_table_init(rdev);
0a96d72b 3081
1a8ca750
AD
3082 si_setup_rb(rdev, rdev->config.si.max_shader_engines,
3083 rdev->config.si.max_sh_per_se,
3084 rdev->config.si.max_backends_per_se);
0a96d72b 3085
1a8ca750
AD
3086 si_setup_spi(rdev, rdev->config.si.max_shader_engines,
3087 rdev->config.si.max_sh_per_se,
3088 rdev->config.si.max_cu_per_sh);
0a96d72b 3089
0a96d72b
AD
3090
3091 /* set HW defaults for 3D engine */
3092 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3093 ROQ_IB2_START(0x2b)));
3094 WREG32(CP_MEQ_THRESHOLDS, MEQ1_START(0x30) | MEQ2_START(0x60));
3095
3096 sx_debug_1 = RREG32(SX_DEBUG_1);
3097 WREG32(SX_DEBUG_1, sx_debug_1);
3098
3099 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3100
3101 WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) |
3102 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) |
3103 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) |
3104 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size)));
3105
3106 WREG32(VGT_NUM_INSTANCES, 1);
3107
3108 WREG32(CP_PERFMON_CNTL, 0);
3109
3110 WREG32(SQ_CONFIG, 0);
3111
3112 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3113 FORCE_EOV_MAX_REZ_CNT(255)));
3114
3115 WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC) |
3116 AUTO_INVLD_EN(ES_AND_GS_AUTO));
3117
3118 WREG32(VGT_GS_VERTEX_REUSE, 16);
3119 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3120
3121 WREG32(CB_PERFCOUNTER0_SELECT0, 0);
3122 WREG32(CB_PERFCOUNTER0_SELECT1, 0);
3123 WREG32(CB_PERFCOUNTER1_SELECT0, 0);
3124 WREG32(CB_PERFCOUNTER1_SELECT1, 0);
3125 WREG32(CB_PERFCOUNTER2_SELECT0, 0);
3126 WREG32(CB_PERFCOUNTER2_SELECT1, 0);
3127 WREG32(CB_PERFCOUNTER3_SELECT0, 0);
3128 WREG32(CB_PERFCOUNTER3_SELECT1, 0);
3129
3130 tmp = RREG32(HDP_MISC_CNTL);
3131 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3132 WREG32(HDP_MISC_CNTL, tmp);
3133
3134 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3135 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3136
3137 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3138
3139 udelay(50);
3140}
c476dde2 3141
/*
 * GPU scratch register helper functions.
 */
3145static void si_scratch_init(struct radeon_device *rdev)
3146{
3147 int i;
3148
3149 rdev->scratch.num_reg = 7;
3150 rdev->scratch.reg_base = SCRATCH_REG0;
3151 for (i = 0; i < rdev->scratch.num_reg; i++) {
3152 rdev->scratch.free[i] = true;
3153 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3154 }
3155}
3156
3157void si_fence_ring_emit(struct radeon_device *rdev,
3158 struct radeon_fence *fence)
3159{
3160 struct radeon_ring *ring = &rdev->ring[fence->ring];
3161 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
3162
3163 /* flush read cache over gart */
3164 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3165 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
3166 radeon_ring_write(ring, 0);
3167 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3168 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3169 PACKET3_TC_ACTION_ENA |
3170 PACKET3_SH_KCACHE_ACTION_ENA |
3171 PACKET3_SH_ICACHE_ACTION_ENA);
3172 radeon_ring_write(ring, 0xFFFFFFFF);
3173 radeon_ring_write(ring, 0);
3174 radeon_ring_write(ring, 10); /* poll interval */
3175 /* EVENT_WRITE_EOP - flush caches, send int */
3176 radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
3177 radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
3178 radeon_ring_write(ring, addr & 0xffffffff);
3179 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
3180 radeon_ring_write(ring, fence->seq);
3181 radeon_ring_write(ring, 0);
3182}

/*
 * IB (indirect buffer) submission.
 */
3187void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3188{
876dc9f3 3189 struct radeon_ring *ring = &rdev->ring[ib->ring];
2ece2e8b
AD
3190 u32 header;
3191
a85a7da4
AD
3192 if (ib->is_const_ib) {
3193 /* set switch buffer packet before const IB */
3194 radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
3195 radeon_ring_write(ring, 0);
45df6803 3196
2ece2e8b 3197 header = PACKET3(PACKET3_INDIRECT_BUFFER_CONST, 2);
a85a7da4 3198 } else {
89d35807 3199 u32 next_rptr;
a85a7da4 3200 if (ring->rptr_save_reg) {
89d35807 3201 next_rptr = ring->wptr + 3 + 4 + 8;
a85a7da4
AD
3202 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3203 radeon_ring_write(ring, ((ring->rptr_save_reg -
3204 PACKET3_SET_CONFIG_REG_START) >> 2));
3205 radeon_ring_write(ring, next_rptr);
89d35807
AD
3206 } else if (rdev->wb.enabled) {
3207 next_rptr = ring->wptr + 5 + 4 + 8;
3208 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
3209 radeon_ring_write(ring, (1 << 8));
3210 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3211 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xffffffff);
3212 radeon_ring_write(ring, next_rptr);
a85a7da4
AD
3213 }
3214
2ece2e8b 3215 header = PACKET3(PACKET3_INDIRECT_BUFFER, 2);
a85a7da4 3216 }
2ece2e8b
AD
3217
3218 radeon_ring_write(ring, header);
3219 radeon_ring_write(ring,
3220#ifdef __BIG_ENDIAN
3221 (2 << 0) |
3222#endif
3223 (ib->gpu_addr & 0xFFFFFFFC));
3224 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
4bf3dd92
CK
3225 radeon_ring_write(ring, ib->length_dw |
3226 (ib->vm ? (ib->vm->id << 24) : 0));
2ece2e8b 3227
a85a7da4
AD
3228 if (!ib->is_const_ib) {
3229 /* flush read cache over gart for this vmid */
3230 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3231 radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
4bf3dd92 3232 radeon_ring_write(ring, ib->vm ? ib->vm->id : 0);
a85a7da4
AD
3233 radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
3234 radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
3235 PACKET3_TC_ACTION_ENA |
3236 PACKET3_SH_KCACHE_ACTION_ENA |
3237 PACKET3_SH_ICACHE_ACTION_ENA);
3238 radeon_ring_write(ring, 0xFFFFFFFF);
3239 radeon_ring_write(ring, 0);
3240 radeon_ring_write(ring, 10); /* poll interval */
3241 }
2ece2e8b
AD
3242}
3243
/*
 * CP (command processor) setup and control.
 */
3247static void si_cp_enable(struct radeon_device *rdev, bool enable)
3248{
3249 if (enable)
3250 WREG32(CP_ME_CNTL, 0);
3251 else {
50efa51a
AD
3252 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3253 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
48c0c902
AD
3254 WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT));
3255 WREG32(SCRATCH_UMSK, 0);
8c5fd7ef
AD
3256 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3257 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3258 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
48c0c902
AD
3259 }
3260 udelay(50);
3261}
3262
3263static int si_cp_load_microcode(struct radeon_device *rdev)
3264{
3265 const __be32 *fw_data;
3266 int i;
3267
3268 if (!rdev->me_fw || !rdev->pfp_fw)
3269 return -EINVAL;
3270
3271 si_cp_enable(rdev, false);
3272
3273 /* PFP */
3274 fw_data = (const __be32 *)rdev->pfp_fw->data;
3275 WREG32(CP_PFP_UCODE_ADDR, 0);
3276 for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
3277 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3278 WREG32(CP_PFP_UCODE_ADDR, 0);
3279
3280 /* CE */
3281 fw_data = (const __be32 *)rdev->ce_fw->data;
3282 WREG32(CP_CE_UCODE_ADDR, 0);
3283 for (i = 0; i < SI_CE_UCODE_SIZE; i++)
3284 WREG32(CP_CE_UCODE_DATA, be32_to_cpup(fw_data++));
3285 WREG32(CP_CE_UCODE_ADDR, 0);
3286
3287 /* ME */
3288 fw_data = (const __be32 *)rdev->me_fw->data;
3289 WREG32(CP_ME_RAM_WADDR, 0);
3290 for (i = 0; i < SI_PM4_UCODE_SIZE; i++)
3291 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3292 WREG32(CP_ME_RAM_WADDR, 0);
3293
3294 WREG32(CP_PFP_UCODE_ADDR, 0);
3295 WREG32(CP_CE_UCODE_ADDR, 0);
3296 WREG32(CP_ME_RAM_WADDR, 0);
3297 WREG32(CP_ME_RAM_RADDR, 0);
3298 return 0;
3299}
3300
3301static int si_cp_start(struct radeon_device *rdev)
3302{
3303 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3304 int r, i;
3305
3306 r = radeon_ring_lock(rdev, ring, 7 + 4);
3307 if (r) {
3308 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3309 return r;
3310 }
3311 /* init the CP */
3312 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3313 radeon_ring_write(ring, 0x1);
3314 radeon_ring_write(ring, 0x0);
3315 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
3316 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3317 radeon_ring_write(ring, 0);
3318 radeon_ring_write(ring, 0);
3319
3320 /* init the CE partitions */
3321 radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
3322 radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
3323 radeon_ring_write(ring, 0xc000);
3324 radeon_ring_write(ring, 0xe000);
3325 radeon_ring_unlock_commit(rdev, ring);
3326
3327 si_cp_enable(rdev, true);
3328
3329 r = radeon_ring_lock(rdev, ring, si_default_size + 10);
3330 if (r) {
3331 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3332 return r;
3333 }
3334
3335 /* setup clear context state */
3336 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3337 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3338
3339 for (i = 0; i < si_default_size; i++)
3340 radeon_ring_write(ring, si_default_state[i]);
3341
3342 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3343 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3344
3345 /* set clear context state */
3346 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3347 radeon_ring_write(ring, 0);
3348
3349 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
3350 radeon_ring_write(ring, 0x00000316);
3351 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3352 radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3353
3354 radeon_ring_unlock_commit(rdev, ring);
3355
3356 for (i = RADEON_RING_TYPE_GFX_INDEX; i <= CAYMAN_RING_TYPE_CP2_INDEX; ++i) {
3357 ring = &rdev->ring[i];
3358 r = radeon_ring_lock(rdev, ring, 2);
3359
3360 /* clear the compute context state */
3361 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
3362 radeon_ring_write(ring, 0);
3363
3364 radeon_ring_unlock_commit(rdev, ring);
3365 }
3366
3367 return 0;
3368}
3369
3370static void si_cp_fini(struct radeon_device *rdev)
3371{
45df6803 3372 struct radeon_ring *ring;
48c0c902 3373 si_cp_enable(rdev, false);
45df6803
CK
3374
3375 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3376 radeon_ring_fini(rdev, ring);
3377 radeon_scratch_free(rdev, ring->rptr_save_reg);
3378
3379 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3380 radeon_ring_fini(rdev, ring);
3381 radeon_scratch_free(rdev, ring->rptr_save_reg);
3382
3383 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3384 radeon_ring_fini(rdev, ring);
3385 radeon_scratch_free(rdev, ring->rptr_save_reg);
48c0c902
AD
3386}
3387
3388static int si_cp_resume(struct radeon_device *rdev)
3389{
3390 struct radeon_ring *ring;
3391 u32 tmp;
3392 u32 rb_bufsz;
3393 int r;
3394
811e4d58
AD
3395 si_enable_gui_idle_interrupt(rdev, false);
3396
48c0c902
AD
3397 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3398 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3399
3400 /* Set the write pointer delay */
3401 WREG32(CP_RB_WPTR_DELAY, 0);
3402
3403 WREG32(CP_DEBUG, 0);
3404 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3405
3406 /* ring 0 - compute and gfx */
3407 /* Set ring buffer size */
3408 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
b72a8925
DV
3409 rb_bufsz = order_base_2(ring->ring_size / 8);
3410 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
48c0c902
AD
3411#ifdef __BIG_ENDIAN
3412 tmp |= BUF_SWAP_32BIT;
3413#endif
3414 WREG32(CP_RB0_CNTL, tmp);
3415
3416 /* Initialize the ring buffer's read and write pointers */
3417 WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
3418 ring->wptr = 0;
3419 WREG32(CP_RB0_WPTR, ring->wptr);
3420
48fc7f7e 3421 /* set the wb address whether it's enabled or not */
48c0c902
AD
3422 WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
3423 WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3424
3425 if (rdev->wb.enabled)
3426 WREG32(SCRATCH_UMSK, 0xff);
3427 else {
3428 tmp |= RB_NO_UPDATE;
3429 WREG32(SCRATCH_UMSK, 0);
3430 }
3431
3432 mdelay(1);
3433 WREG32(CP_RB0_CNTL, tmp);
3434
3435 WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);
3436
3437 ring->rptr = RREG32(CP_RB0_RPTR);
3438
3439 /* ring1 - compute only */
3440 /* Set ring buffer size */
3441 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
b72a8925
DV
3442 rb_bufsz = order_base_2(ring->ring_size / 8);
3443 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
48c0c902
AD
3444#ifdef __BIG_ENDIAN
3445 tmp |= BUF_SWAP_32BIT;
3446#endif
3447 WREG32(CP_RB1_CNTL, tmp);
3448
3449 /* Initialize the ring buffer's read and write pointers */
3450 WREG32(CP_RB1_CNTL, tmp | RB_RPTR_WR_ENA);
3451 ring->wptr = 0;
3452 WREG32(CP_RB1_WPTR, ring->wptr);
3453
48fc7f7e 3454 /* set the wb address whether it's enabled or not */
48c0c902
AD
3455 WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);
3456 WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);
3457
3458 mdelay(1);
3459 WREG32(CP_RB1_CNTL, tmp);
3460
3461 WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);
3462
3463 ring->rptr = RREG32(CP_RB1_RPTR);
3464
3465 /* ring2 - compute only */
3466 /* Set ring buffer size */
3467 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
b72a8925
DV
3468 rb_bufsz = order_base_2(ring->ring_size / 8);
3469 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
48c0c902
AD
3470#ifdef __BIG_ENDIAN
3471 tmp |= BUF_SWAP_32BIT;
3472#endif
3473 WREG32(CP_RB2_CNTL, tmp);
3474
3475 /* Initialize the ring buffer's read and write pointers */
3476 WREG32(CP_RB2_CNTL, tmp | RB_RPTR_WR_ENA);
3477 ring->wptr = 0;
3478 WREG32(CP_RB2_WPTR, ring->wptr);
3479
48fc7f7e 3480 /* set the wb address whether it's enabled or not */
48c0c902
AD
3481 WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC);
3482 WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF);
3483
3484 mdelay(1);
3485 WREG32(CP_RB2_CNTL, tmp);
3486
3487 WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);
3488
3489 ring->rptr = RREG32(CP_RB2_RPTR);
3490
3491 /* start the rings */
3492 si_cp_start(rdev);
3493 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
3494 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
3495 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
3496 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
3497 if (r) {
3498 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3499 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3500 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3501 return r;
3502 }
3503 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
3504 if (r) {
3505 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3506 }
3507 r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
3508 if (r) {
3509 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3510 }
3511
811e4d58
AD
3512 si_enable_gui_idle_interrupt(rdev, true);
3513
50efa51a
AD
3514 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3515 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
3516
48c0c902
AD
3517 return 0;
3518}
3519
2483b4ea 3520u32 si_gpu_check_soft_reset(struct radeon_device *rdev)
06bc6df0 3521{
014bb209 3522 u32 reset_mask = 0;
1c534671 3523 u32 tmp;
06bc6df0 3524
014bb209
AD
3525 /* GRBM_STATUS */
3526 tmp = RREG32(GRBM_STATUS);
3527 if (tmp & (PA_BUSY | SC_BUSY |
3528 BCI_BUSY | SX_BUSY |
3529 TA_BUSY | VGT_BUSY |
3530 DB_BUSY | CB_BUSY |
3531 GDS_BUSY | SPI_BUSY |
3532 IA_BUSY | IA_BUSY_NO_DMA))
3533 reset_mask |= RADEON_RESET_GFX;
3534
3535 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3536 CP_BUSY | CP_COHERENCY_BUSY))
3537 reset_mask |= RADEON_RESET_CP;
3538
3539 if (tmp & GRBM_EE_BUSY)
3540 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3541
3542 /* GRBM_STATUS2 */
3543 tmp = RREG32(GRBM_STATUS2);
3544 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3545 reset_mask |= RADEON_RESET_RLC;
3546
3547 /* DMA_STATUS_REG 0 */
3548 tmp = RREG32(DMA_STATUS_REG + DMA0_REGISTER_OFFSET);
3549 if (!(tmp & DMA_IDLE))
3550 reset_mask |= RADEON_RESET_DMA;
3551
3552 /* DMA_STATUS_REG 1 */
3553 tmp = RREG32(DMA_STATUS_REG + DMA1_REGISTER_OFFSET);
3554 if (!(tmp & DMA_IDLE))
3555 reset_mask |= RADEON_RESET_DMA1;
3556
3557 /* SRBM_STATUS2 */
3558 tmp = RREG32(SRBM_STATUS2);
3559 if (tmp & DMA_BUSY)
3560 reset_mask |= RADEON_RESET_DMA;
3561
3562 if (tmp & DMA1_BUSY)
3563 reset_mask |= RADEON_RESET_DMA1;
3564
3565 /* SRBM_STATUS */
3566 tmp = RREG32(SRBM_STATUS);
3567
3568 if (tmp & IH_BUSY)
3569 reset_mask |= RADEON_RESET_IH;
3570
3571 if (tmp & SEM_BUSY)
3572 reset_mask |= RADEON_RESET_SEM;
3573
3574 if (tmp & GRBM_RQ_PENDING)
3575 reset_mask |= RADEON_RESET_GRBM;
3576
3577 if (tmp & VMC_BUSY)
3578 reset_mask |= RADEON_RESET_VMC;
19fc42ed 3579
014bb209
AD
3580 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3581 MCC_BUSY | MCD_BUSY))
3582 reset_mask |= RADEON_RESET_MC;
3583
3584 if (evergreen_is_display_hung(rdev))
3585 reset_mask |= RADEON_RESET_DISPLAY;
3586
3587 /* VM_L2_STATUS */
3588 tmp = RREG32(VM_L2_STATUS);
3589 if (tmp & L2_BUSY)
3590 reset_mask |= RADEON_RESET_VMC;
3591
d808fc88
AD
3592 /* Skip MC reset as it's mostly likely not hung, just busy */
3593 if (reset_mask & RADEON_RESET_MC) {
3594 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3595 reset_mask &= ~RADEON_RESET_MC;
3596 }
3597
014bb209
AD
3598 return reset_mask;
3599}
3600
3601static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3602{
3603 struct evergreen_mc_save save;
3604 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3605 u32 tmp;
19fc42ed 3606
06bc6df0 3607 if (reset_mask == 0)
014bb209 3608 return;
06bc6df0
AD
3609
3610 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3611
1c534671 3612 evergreen_print_gpu_status_regs(rdev);
06bc6df0
AD
3613 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
3614 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
3615 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
3616 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
3617
a6f4ae8d
AD
3618 /* disable PG/CG */
3619 si_fini_pg(rdev);
3620 si_fini_cg(rdev);
3621
3622 /* stop the rlc */
3623 si_rlc_stop(rdev);
3624
1c534671
AD
3625 /* Disable CP parsing/prefetching */
3626 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3627
3628 if (reset_mask & RADEON_RESET_DMA) {
3629 /* dma0 */
3630 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3631 tmp &= ~DMA_RB_ENABLE;
3632 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
014bb209
AD
3633 }
3634 if (reset_mask & RADEON_RESET_DMA1) {
1c534671
AD
3635 /* dma1 */
3636 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3637 tmp &= ~DMA_RB_ENABLE;
3638 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3639 }
3640
f770d78a
AD
3641 udelay(50);
3642
3643 evergreen_mc_stop(rdev, &save);
3644 if (evergreen_mc_wait_for_idle(rdev)) {
3645 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3646 }
3647
1c534671
AD
3648 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE | RADEON_RESET_CP)) {
3649 grbm_soft_reset = SOFT_RESET_CB |
3650 SOFT_RESET_DB |
3651 SOFT_RESET_GDS |
3652 SOFT_RESET_PA |
3653 SOFT_RESET_SC |
3654 SOFT_RESET_BCI |
3655 SOFT_RESET_SPI |
3656 SOFT_RESET_SX |
3657 SOFT_RESET_TC |
3658 SOFT_RESET_TA |
3659 SOFT_RESET_VGT |
3660 SOFT_RESET_IA;
3661 }
3662
3663 if (reset_mask & RADEON_RESET_CP) {
3664 grbm_soft_reset |= SOFT_RESET_CP | SOFT_RESET_VGT;
3665
3666 srbm_soft_reset |= SOFT_RESET_GRBM;
3667 }
06bc6df0
AD
3668
3669 if (reset_mask & RADEON_RESET_DMA)
014bb209
AD
3670 srbm_soft_reset |= SOFT_RESET_DMA;
3671
3672 if (reset_mask & RADEON_RESET_DMA1)
3673 srbm_soft_reset |= SOFT_RESET_DMA1;
3674
3675 if (reset_mask & RADEON_RESET_DISPLAY)
3676 srbm_soft_reset |= SOFT_RESET_DC;
3677
3678 if (reset_mask & RADEON_RESET_RLC)
3679 grbm_soft_reset |= SOFT_RESET_RLC;
3680
3681 if (reset_mask & RADEON_RESET_SEM)
3682 srbm_soft_reset |= SOFT_RESET_SEM;
3683
3684 if (reset_mask & RADEON_RESET_IH)
3685 srbm_soft_reset |= SOFT_RESET_IH;
3686
3687 if (reset_mask & RADEON_RESET_GRBM)
3688 srbm_soft_reset |= SOFT_RESET_GRBM;
3689
3690 if (reset_mask & RADEON_RESET_VMC)
3691 srbm_soft_reset |= SOFT_RESET_VMC;
3692
3693 if (reset_mask & RADEON_RESET_MC)
3694 srbm_soft_reset |= SOFT_RESET_MC;
1c534671
AD
3695
3696 if (grbm_soft_reset) {
3697 tmp = RREG32(GRBM_SOFT_RESET);
3698 tmp |= grbm_soft_reset;
3699 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3700 WREG32(GRBM_SOFT_RESET, tmp);
3701 tmp = RREG32(GRBM_SOFT_RESET);
3702
3703 udelay(50);
3704
3705 tmp &= ~grbm_soft_reset;
3706 WREG32(GRBM_SOFT_RESET, tmp);
3707 tmp = RREG32(GRBM_SOFT_RESET);
3708 }
3709
3710 if (srbm_soft_reset) {
3711 tmp = RREG32(SRBM_SOFT_RESET);
3712 tmp |= srbm_soft_reset;
3713 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3714 WREG32(SRBM_SOFT_RESET, tmp);
3715 tmp = RREG32(SRBM_SOFT_RESET);
3716
3717 udelay(50);
3718
3719 tmp &= ~srbm_soft_reset;
3720 WREG32(SRBM_SOFT_RESET, tmp);
3721 tmp = RREG32(SRBM_SOFT_RESET);
3722 }
06bc6df0
AD
3723
3724 /* Wait a little for things to settle down */
3725 udelay(50);
3726
c476dde2 3727 evergreen_mc_resume(rdev, &save);
1c534671
AD
3728 udelay(50);
3729
1c534671 3730 evergreen_print_gpu_status_regs(rdev);
c476dde2
AD
3731}
3732
4a5c8ea5
AD
3733static void si_set_clk_bypass_mode(struct radeon_device *rdev)
3734{
3735 u32 tmp, i;
3736
3737 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3738 tmp |= SPLL_BYPASS_EN;
3739 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3740
3741 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3742 tmp |= SPLL_CTLREQ_CHG;
3743 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
3744
3745 for (i = 0; i < rdev->usec_timeout; i++) {
3746 if (RREG32(SPLL_STATUS) & SPLL_CHG_STATUS)
3747 break;
3748 udelay(1);
3749 }
3750
3751 tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
3752 tmp &= ~(SPLL_CTLREQ_CHG | SCLK_MUX_UPDATE);
3753 WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
3754
3755 tmp = RREG32(MPLL_CNTL_MODE);
3756 tmp &= ~MPLL_MCLK_SEL;
3757 WREG32(MPLL_CNTL_MODE, tmp);
3758}
3759
3760static void si_spll_powerdown(struct radeon_device *rdev)
3761{
3762 u32 tmp;
3763
3764 tmp = RREG32(SPLL_CNTL_MODE);
3765 tmp |= SPLL_SW_DIR_CONTROL;
3766 WREG32(SPLL_CNTL_MODE, tmp);
3767
3768 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3769 tmp |= SPLL_RESET;
3770 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3771
3772 tmp = RREG32(CG_SPLL_FUNC_CNTL);
3773 tmp |= SPLL_SLEEP;
3774 WREG32(CG_SPLL_FUNC_CNTL, tmp);
3775
3776 tmp = RREG32(SPLL_CNTL_MODE);
3777 tmp &= ~SPLL_SW_DIR_CONTROL;
3778 WREG32(SPLL_CNTL_MODE, tmp);
3779}
3780
3781static void si_gpu_pci_config_reset(struct radeon_device *rdev)
3782{
3783 struct evergreen_mc_save save;
3784 u32 tmp, i;
3785
3786 dev_info(rdev->dev, "GPU pci config reset\n");
3787
3788 /* disable dpm? */
3789
3790 /* disable cg/pg */
3791 si_fini_pg(rdev);
3792 si_fini_cg(rdev);
3793
3794 /* Disable CP parsing/prefetching */
3795 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
3796 /* dma0 */
3797 tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
3798 tmp &= ~DMA_RB_ENABLE;
3799 WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
3800 /* dma1 */
3801 tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
3802 tmp &= ~DMA_RB_ENABLE;
3803 WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
3804 /* XXX other engines? */
3805
3806 /* halt the rlc, disable cp internal ints */
3807 si_rlc_stop(rdev);
3808
3809 udelay(50);
3810
3811 /* disable mem access */
3812 evergreen_mc_stop(rdev, &save);
3813 if (evergreen_mc_wait_for_idle(rdev)) {
3814 dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
3815 }
3816
3817 /* set mclk/sclk to bypass */
3818 si_set_clk_bypass_mode(rdev);
3819 /* powerdown spll */
3820 si_spll_powerdown(rdev);
3821 /* disable BM */
3822 pci_clear_master(rdev->pdev);
3823 /* reset */
3824 radeon_pci_config_reset(rdev);
3825 /* wait for asic to come out of reset */
3826 for (i = 0; i < rdev->usec_timeout; i++) {
3827 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
3828 break;
3829 udelay(1);
3830 }
3831}
3832
c476dde2
AD
3833int si_asic_reset(struct radeon_device *rdev)
3834{
014bb209
AD
3835 u32 reset_mask;
3836
3837 reset_mask = si_gpu_check_soft_reset(rdev);
3838
3839 if (reset_mask)
3840 r600_set_bios_scratch_engine_hung(rdev, true);
3841
4a5c8ea5 3842 /* try soft reset */
014bb209
AD
3843 si_gpu_soft_reset(rdev, reset_mask);
3844
3845 reset_mask = si_gpu_check_soft_reset(rdev);
3846
4a5c8ea5
AD
3847 /* try pci config reset */
3848 if (reset_mask && radeon_hard_reset)
3849 si_gpu_pci_config_reset(rdev);
3850
3851 reset_mask = si_gpu_check_soft_reset(rdev);
3852
014bb209
AD
3853 if (!reset_mask)
3854 r600_set_bios_scratch_engine_hung(rdev, false);
3855
3856 return 0;
c476dde2
AD
3857}
3858
123bc183
AD
3859/**
3860 * si_gfx_is_lockup - Check if the GFX engine is locked up
3861 *
3862 * @rdev: radeon_device pointer
3863 * @ring: radeon_ring structure holding ring information
3864 *
3865 * Check if the GFX engine is locked up.
3866 * Returns true if the engine appears to be locked up, false if not.
3867 */
3868bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3869{
3870 u32 reset_mask = si_gpu_check_soft_reset(rdev);
3871
3872 if (!(reset_mask & (RADEON_RESET_GFX |
3873 RADEON_RESET_COMPUTE |
3874 RADEON_RESET_CP))) {
3875 radeon_ring_lockup_update(ring);
3876 return false;
3877 }
3878 /* force CP activities */
3879 radeon_ring_force_activity(rdev, ring);
3880 return radeon_ring_test_lockup(rdev, ring);
3881}
3882
d2800ee5
AD
3883/* MC */
3884static void si_mc_program(struct radeon_device *rdev)
3885{
3886 struct evergreen_mc_save save;
3887 u32 tmp;
3888 int i, j;
3889
3890 /* Initialize HDP */
3891 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3892 WREG32((0x2c14 + j), 0x00000000);
3893 WREG32((0x2c18 + j), 0x00000000);
3894 WREG32((0x2c1c + j), 0x00000000);
3895 WREG32((0x2c20 + j), 0x00000000);
3896 WREG32((0x2c24 + j), 0x00000000);
3897 }
3898 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
3899
3900 evergreen_mc_stop(rdev, &save);
3901 if (radeon_mc_wait_for_idle(rdev)) {
3902 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3903 }
5153550a
AD
3904 if (!ASIC_IS_NODCE(rdev))
3905 /* Lockout access through VGA aperture*/
3906 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
d2800ee5
AD
3907 /* Update configuration */
3908 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
3909 rdev->mc.vram_start >> 12);
3910 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
3911 rdev->mc.vram_end >> 12);
3912 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR,
3913 rdev->vram_scratch.gpu_addr >> 12);
3914 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3915 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3916 WREG32(MC_VM_FB_LOCATION, tmp);
3917 /* XXX double check these! */
3918 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3919 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3920 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3921 WREG32(MC_VM_AGP_BASE, 0);
3922 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3923 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3924 if (radeon_mc_wait_for_idle(rdev)) {
3925 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3926 }
3927 evergreen_mc_resume(rdev, &save);
5153550a
AD
3928 if (!ASIC_IS_NODCE(rdev)) {
3929 /* we need to own VRAM, so turn off the VGA renderer here
3930 * to stop it overwriting our objects */
3931 rv515_vga_render_disable(rdev);
3932 }
d2800ee5
AD
3933}
3934
1c49165d
AD
3935void si_vram_gtt_location(struct radeon_device *rdev,
3936 struct radeon_mc *mc)
d2800ee5
AD
3937{
3938 if (mc->mc_vram_size > 0xFFC0000000ULL) {
3939 /* leave room for at least 1024M GTT */
3940 dev_warn(rdev->dev, "limiting VRAM\n");
3941 mc->real_vram_size = 0xFFC0000000ULL;
3942 mc->mc_vram_size = 0xFFC0000000ULL;
3943 }
9ed8b1f9 3944 radeon_vram_location(rdev, &rdev->mc, 0);
d2800ee5 3945 rdev->mc.gtt_base_align = 0;
9ed8b1f9 3946 radeon_gtt_location(rdev, mc);
d2800ee5
AD
3947}
3948
3949static int si_mc_init(struct radeon_device *rdev)
3950{
3951 u32 tmp;
3952 int chansize, numchan;
3953
3954 /* Get VRAM informations */
3955 rdev->mc.vram_is_ddr = true;
3956 tmp = RREG32(MC_ARB_RAMCFG);
3957 if (tmp & CHANSIZE_OVERRIDE) {
3958 chansize = 16;
3959 } else if (tmp & CHANSIZE_MASK) {
3960 chansize = 64;
3961 } else {
3962 chansize = 32;
3963 }
3964 tmp = RREG32(MC_SHARED_CHMAP);
3965 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3966 case 0:
3967 default:
3968 numchan = 1;
3969 break;
3970 case 1:
3971 numchan = 2;
3972 break;
3973 case 2:
3974 numchan = 4;
3975 break;
3976 case 3:
3977 numchan = 8;
3978 break;
3979 case 4:
3980 numchan = 3;
3981 break;
3982 case 5:
3983 numchan = 6;
3984 break;
3985 case 6:
3986 numchan = 10;
3987 break;
3988 case 7:
3989 numchan = 12;
3990 break;
3991 case 8:
3992 numchan = 16;
3993 break;
3994 }
3995 rdev->mc.vram_width = numchan * chansize;
3996 /* Could aper size report 0 ? */
3997 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3998 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3999 /* size in MB on si */
0ca223b0
AD
4000 tmp = RREG32(CONFIG_MEMSIZE);
4001 /* some boards may have garbage in the upper 16 bits */
4002 if (tmp & 0xffff0000) {
4003 DRM_INFO("Probable bad vram size: 0x%08x\n", tmp);
4004 if (tmp & 0xffff)
4005 tmp &= 0xffff;
4006 }
4007 rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
4008 rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
d2800ee5
AD
4009 rdev->mc.visible_vram_size = rdev->mc.aper_size;
4010 si_vram_gtt_location(rdev, &rdev->mc);
4011 radeon_update_bandwidth_info(rdev);
4012
4013 return 0;
4014}
4015
4016/*
4017 * GART
4018 */
4019void si_pcie_gart_tlb_flush(struct radeon_device *rdev)
4020{
4021 /* flush hdp cache */
4022 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
4023
4024 /* bits 0-15 are the VM contexts0-15 */
4025 WREG32(VM_INVALIDATE_REQUEST, 1);
4026}
4027
1109ca09 4028static int si_pcie_gart_enable(struct radeon_device *rdev)
d2800ee5
AD
4029{
4030 int r, i;
4031
4032 if (rdev->gart.robj == NULL) {
4033 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
4034 return -EINVAL;
4035 }
4036 r = radeon_gart_table_vram_pin(rdev);
4037 if (r)
4038 return r;
4039 radeon_gart_restore(rdev);
4040 /* Setup TLB control */
4041 WREG32(MC_VM_MX_L1_TLB_CNTL,
4042 (0xA << 7) |
4043 ENABLE_L1_TLB |
4044 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4045 ENABLE_ADVANCED_DRIVER_MODEL |
4046 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4047 /* Setup L2 cache */
4048 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE |
4049 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4050 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4051 EFFECTIVE_L2_QUEUE_SIZE(7) |
4052 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4053 WREG32(VM_L2_CNTL2, INVALIDATE_ALL_L1_TLBS | INVALIDATE_L2_CACHE);
4054 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4055 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4056 /* setup context0 */
4057 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
4058 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
4059 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
4060 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
4061 (u32)(rdev->dummy_page.addr >> 12));
4062 WREG32(VM_CONTEXT0_CNTL2, 0);
4063 WREG32(VM_CONTEXT0_CNTL, (ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
4064 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT));
4065
4066 WREG32(0x15D4, 0);
4067 WREG32(0x15D8, 0);
4068 WREG32(0x15DC, 0);
4069
4070 /* empty context1-15 */
d2800ee5
AD
4071 /* set vm size, must be a multiple of 4 */
4072 WREG32(VM_CONTEXT1_PAGE_TABLE_START_ADDR, 0);
c21b328e 4073 WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn);
23d4f1f2
AD
4074 /* Assign the pt base to something valid for now; the pts used for
4075 * the VMs are determined by the application and setup and assigned
4076 * on the fly in the vm part of radeon_gart.c
4077 */
d2800ee5
AD
4078 for (i = 1; i < 16; i++) {
4079 if (i < 8)
4080 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (i << 2),
4081 rdev->gart.table_addr >> 12);
4082 else
4083 WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2),
4084 rdev->gart.table_addr >> 12);
4085 }
4086
4087 /* enable context1-15 */
4088 WREG32(VM_CONTEXT1_PROTECTION_FAULT_DEFAULT_ADDR,
4089 (u32)(rdev->dummy_page.addr >> 12));
ae133a11 4090 WREG32(VM_CONTEXT1_CNTL2, 4);
fa87e62d 4091 WREG32(VM_CONTEXT1_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(1) |
ae133a11
CK
4092 RANGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4093 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4094 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4095 DUMMY_PAGE_PROTECTION_FAULT_ENABLE_DEFAULT |
4096 PDE0_PROTECTION_FAULT_ENABLE_INTERRUPT |
4097 PDE0_PROTECTION_FAULT_ENABLE_DEFAULT |
4098 VALID_PROTECTION_FAULT_ENABLE_INTERRUPT |
4099 VALID_PROTECTION_FAULT_ENABLE_DEFAULT |
4100 READ_PROTECTION_FAULT_ENABLE_INTERRUPT |
4101 READ_PROTECTION_FAULT_ENABLE_DEFAULT |
4102 WRITE_PROTECTION_FAULT_ENABLE_INTERRUPT |
4103 WRITE_PROTECTION_FAULT_ENABLE_DEFAULT);
d2800ee5
AD
4104
4105 si_pcie_gart_tlb_flush(rdev);
4106 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
4107 (unsigned)(rdev->mc.gtt_size >> 20),
4108 (unsigned long long)rdev->gart.table_addr);
4109 rdev->gart.ready = true;
4110 return 0;
4111}
4112
1109ca09 4113static void si_pcie_gart_disable(struct radeon_device *rdev)
d2800ee5
AD
4114{
4115 /* Disable all tables */
4116 WREG32(VM_CONTEXT0_CNTL, 0);
4117 WREG32(VM_CONTEXT1_CNTL, 0);
4118 /* Setup TLB control */
4119 WREG32(MC_VM_MX_L1_TLB_CNTL, SYSTEM_ACCESS_MODE_NOT_IN_SYS |
4120 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
4121 /* Setup L2 cache */
4122 WREG32(VM_L2_CNTL, ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
4123 ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
4124 EFFECTIVE_L2_QUEUE_SIZE(7) |
4125 CONTEXT1_IDENTITY_ACCESS_MODE(1));
4126 WREG32(VM_L2_CNTL2, 0);
4127 WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
4128 L2_CACHE_BIGK_FRAGMENT_SIZE(0));
4129 radeon_gart_table_vram_unpin(rdev);
4130}
4131
/* Full GART teardown: disable it, free the table VRAM, release the GART. */
static void si_pcie_gart_fini(struct radeon_device *rdev)
{
	si_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
4138
498dd8b3
AD
4139/* vm parser */
4140static bool si_vm_reg_valid(u32 reg)
4141{
4142 /* context regs are fine */
4143 if (reg >= 0x28000)
4144 return true;
4145
4146 /* check config regs */
4147 switch (reg) {
4148 case GRBM_GFX_INDEX:
f418b88a 4149 case CP_STRMOUT_CNTL:
498dd8b3
AD
4150 case VGT_VTX_VECT_EJECT_REG:
4151 case VGT_CACHE_INVALIDATION:
4152 case VGT_ESGS_RING_SIZE:
4153 case VGT_GSVS_RING_SIZE:
4154 case VGT_GS_VERTEX_REUSE:
4155 case VGT_PRIMITIVE_TYPE:
4156 case VGT_INDEX_TYPE:
4157 case VGT_NUM_INDICES:
4158 case VGT_NUM_INSTANCES:
4159 case VGT_TF_RING_SIZE:
4160 case VGT_HS_OFFCHIP_PARAM:
4161 case VGT_TF_MEMORY_BASE:
4162 case PA_CL_ENHANCE:
4163 case PA_SU_LINE_STIPPLE_VALUE:
4164 case PA_SC_LINE_STIPPLE_STATE:
4165 case PA_SC_ENHANCE:
4166 case SQC_CACHES:
4167 case SPI_STATIC_THREAD_MGMT_1:
4168 case SPI_STATIC_THREAD_MGMT_2:
4169 case SPI_STATIC_THREAD_MGMT_3:
4170 case SPI_PS_MAX_WAVE_ID:
4171 case SPI_CONFIG_CNTL:
4172 case SPI_CONFIG_CNTL_1:
4173 case TA_CNTL_AUX:
4174 return true;
4175 default:
4176 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
4177 return false;
4178 }
4179}
4180
4181static int si_vm_packet3_ce_check(struct radeon_device *rdev,
4182 u32 *ib, struct radeon_cs_packet *pkt)
4183{
4184 switch (pkt->opcode) {
4185 case PACKET3_NOP:
4186 case PACKET3_SET_BASE:
4187 case PACKET3_SET_CE_DE_COUNTERS:
4188 case PACKET3_LOAD_CONST_RAM:
4189 case PACKET3_WRITE_CONST_RAM:
4190 case PACKET3_WRITE_CONST_RAM_OFFSET:
4191 case PACKET3_DUMP_CONST_RAM:
4192 case PACKET3_INCREMENT_CE_COUNTER:
4193 case PACKET3_WAIT_ON_DE_COUNTER:
4194 case PACKET3_CE_WRITE:
4195 break;
4196 default:
4197 DRM_ERROR("Invalid CE packet3: 0x%x\n", pkt->opcode);
4198 return -EINVAL;
4199 }
4200 return 0;
4201}
4202
e5b9e750
TS
4203static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)
4204{
4205 u32 start_reg, reg, i;
4206 u32 command = ib[idx + 4];
4207 u32 info = ib[idx + 1];
4208 u32 idx_value = ib[idx];
4209 if (command & PACKET3_CP_DMA_CMD_SAS) {
4210 /* src address space is register */
4211 if (((info & 0x60000000) >> 29) == 0) {
4212 start_reg = idx_value << 2;
4213 if (command & PACKET3_CP_DMA_CMD_SAIC) {
4214 reg = start_reg;
4215 if (!si_vm_reg_valid(reg)) {
4216 DRM_ERROR("CP DMA Bad SRC register\n");
4217 return -EINVAL;
4218 }
4219 } else {
4220 for (i = 0; i < (command & 0x1fffff); i++) {
4221 reg = start_reg + (4 * i);
4222 if (!si_vm_reg_valid(reg)) {
4223 DRM_ERROR("CP DMA Bad SRC register\n");
4224 return -EINVAL;
4225 }
4226 }
4227 }
4228 }
4229 }
4230 if (command & PACKET3_CP_DMA_CMD_DAS) {
4231 /* dst address space is register */
4232 if (((info & 0x00300000) >> 20) == 0) {
4233 start_reg = ib[idx + 2];
4234 if (command & PACKET3_CP_DMA_CMD_DAIC) {
4235 reg = start_reg;
4236 if (!si_vm_reg_valid(reg)) {
4237 DRM_ERROR("CP DMA Bad DST register\n");
4238 return -EINVAL;
4239 }
4240 } else {
4241 for (i = 0; i < (command & 0x1fffff); i++) {
4242 reg = start_reg + (4 * i);
4243 if (!si_vm_reg_valid(reg)) {
4244 DRM_ERROR("CP DMA Bad DST register\n");
4245 return -EINVAL;
4246 }
4247 }
4248 }
4249 }
4250 }
4251 return 0;
4252}
4253
498dd8b3
AD
4254static int si_vm_packet3_gfx_check(struct radeon_device *rdev,
4255 u32 *ib, struct radeon_cs_packet *pkt)
4256{
e5b9e750 4257 int r;
498dd8b3
AD
4258 u32 idx = pkt->idx + 1;
4259 u32 idx_value = ib[idx];
4260 u32 start_reg, end_reg, reg, i;
4261
4262 switch (pkt->opcode) {
4263 case PACKET3_NOP:
4264 case PACKET3_SET_BASE:
4265 case PACKET3_CLEAR_STATE:
4266 case PACKET3_INDEX_BUFFER_SIZE:
4267 case PACKET3_DISPATCH_DIRECT:
4268 case PACKET3_DISPATCH_INDIRECT:
4269 case PACKET3_ALLOC_GDS:
4270 case PACKET3_WRITE_GDS_RAM:
4271 case PACKET3_ATOMIC_GDS:
4272 case PACKET3_ATOMIC:
4273 case PACKET3_OCCLUSION_QUERY:
4274 case PACKET3_SET_PREDICATION:
4275 case PACKET3_COND_EXEC:
4276 case PACKET3_PRED_EXEC:
4277 case PACKET3_DRAW_INDIRECT:
4278 case PACKET3_DRAW_INDEX_INDIRECT:
4279 case PACKET3_INDEX_BASE:
4280 case PACKET3_DRAW_INDEX_2:
4281 case PACKET3_CONTEXT_CONTROL:
4282 case PACKET3_INDEX_TYPE:
4283 case PACKET3_DRAW_INDIRECT_MULTI:
4284 case PACKET3_DRAW_INDEX_AUTO:
4285 case PACKET3_DRAW_INDEX_IMMD:
4286 case PACKET3_NUM_INSTANCES:
4287 case PACKET3_DRAW_INDEX_MULTI_AUTO:
4288 case PACKET3_STRMOUT_BUFFER_UPDATE:
4289 case PACKET3_DRAW_INDEX_OFFSET_2:
4290 case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
4291 case PACKET3_DRAW_INDEX_INDIRECT_MULTI:
4292 case PACKET3_MPEG_INDEX:
4293 case PACKET3_WAIT_REG_MEM:
4294 case PACKET3_MEM_WRITE:
4295 case PACKET3_PFP_SYNC_ME:
4296 case PACKET3_SURFACE_SYNC:
4297 case PACKET3_EVENT_WRITE:
4298 case PACKET3_EVENT_WRITE_EOP:
4299 case PACKET3_EVENT_WRITE_EOS:
4300 case PACKET3_SET_CONTEXT_REG:
4301 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4302 case PACKET3_SET_SH_REG:
4303 case PACKET3_SET_SH_REG_OFFSET:
4304 case PACKET3_INCREMENT_DE_COUNTER:
4305 case PACKET3_WAIT_ON_CE_COUNTER:
4306 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4307 case PACKET3_ME_WRITE:
4308 break;
4309 case PACKET3_COPY_DATA:
4310 if ((idx_value & 0xf00) == 0) {
4311 reg = ib[idx + 3] * 4;
4312 if (!si_vm_reg_valid(reg))
4313 return -EINVAL;
4314 }
4315 break;
4316 case PACKET3_WRITE_DATA:
4317 if ((idx_value & 0xf00) == 0) {
4318 start_reg = ib[idx + 1] * 4;
4319 if (idx_value & 0x10000) {
4320 if (!si_vm_reg_valid(start_reg))
4321 return -EINVAL;
4322 } else {
4323 for (i = 0; i < (pkt->count - 2); i++) {
4324 reg = start_reg + (4 * i);
4325 if (!si_vm_reg_valid(reg))
4326 return -EINVAL;
4327 }
4328 }
4329 }
4330 break;
4331 case PACKET3_COND_WRITE:
4332 if (idx_value & 0x100) {
4333 reg = ib[idx + 5] * 4;
4334 if (!si_vm_reg_valid(reg))
4335 return -EINVAL;
4336 }
4337 break;
4338 case PACKET3_COPY_DW:
4339 if (idx_value & 0x2) {
4340 reg = ib[idx + 3] * 4;
4341 if (!si_vm_reg_valid(reg))
4342 return -EINVAL;
4343 }
4344 break;
4345 case PACKET3_SET_CONFIG_REG:
4346 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
4347 end_reg = 4 * pkt->count + start_reg - 4;
4348 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
4349 (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
4350 (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
4351 DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
4352 return -EINVAL;
4353 }
4354 for (i = 0; i < pkt->count; i++) {
4355 reg = start_reg + (4 * i);
4356 if (!si_vm_reg_valid(reg))
4357 return -EINVAL;
4358 }
4359 break;
5aa709be 4360 case PACKET3_CP_DMA:
e5b9e750
TS
4361 r = si_vm_packet3_cp_dma_check(ib, idx);
4362 if (r)
4363 return r;
5aa709be 4364 break;
498dd8b3
AD
4365 default:
4366 DRM_ERROR("Invalid GFX packet3: 0x%x\n", pkt->opcode);
4367 return -EINVAL;
4368 }
4369 return 0;
4370}
4371
4372static int si_vm_packet3_compute_check(struct radeon_device *rdev,
4373 u32 *ib, struct radeon_cs_packet *pkt)
4374{
e5b9e750 4375 int r;
498dd8b3
AD
4376 u32 idx = pkt->idx + 1;
4377 u32 idx_value = ib[idx];
4378 u32 start_reg, reg, i;
4379
4380 switch (pkt->opcode) {
4381 case PACKET3_NOP:
4382 case PACKET3_SET_BASE:
4383 case PACKET3_CLEAR_STATE:
4384 case PACKET3_DISPATCH_DIRECT:
4385 case PACKET3_DISPATCH_INDIRECT:
4386 case PACKET3_ALLOC_GDS:
4387 case PACKET3_WRITE_GDS_RAM:
4388 case PACKET3_ATOMIC_GDS:
4389 case PACKET3_ATOMIC:
4390 case PACKET3_OCCLUSION_QUERY:
4391 case PACKET3_SET_PREDICATION:
4392 case PACKET3_COND_EXEC:
4393 case PACKET3_PRED_EXEC:
4394 case PACKET3_CONTEXT_CONTROL:
4395 case PACKET3_STRMOUT_BUFFER_UPDATE:
4396 case PACKET3_WAIT_REG_MEM:
4397 case PACKET3_MEM_WRITE:
4398 case PACKET3_PFP_SYNC_ME:
4399 case PACKET3_SURFACE_SYNC:
4400 case PACKET3_EVENT_WRITE:
4401 case PACKET3_EVENT_WRITE_EOP:
4402 case PACKET3_EVENT_WRITE_EOS:
4403 case PACKET3_SET_CONTEXT_REG:
4404 case PACKET3_SET_CONTEXT_REG_INDIRECT:
4405 case PACKET3_SET_SH_REG:
4406 case PACKET3_SET_SH_REG_OFFSET:
4407 case PACKET3_INCREMENT_DE_COUNTER:
4408 case PACKET3_WAIT_ON_CE_COUNTER:
4409 case PACKET3_WAIT_ON_AVAIL_BUFFER:
4410 case PACKET3_ME_WRITE:
4411 break;
4412 case PACKET3_COPY_DATA:
4413 if ((idx_value & 0xf00) == 0) {
4414 reg = ib[idx + 3] * 4;
4415 if (!si_vm_reg_valid(reg))
4416 return -EINVAL;
4417 }
4418 break;
4419 case PACKET3_WRITE_DATA:
4420 if ((idx_value & 0xf00) == 0) {
4421 start_reg = ib[idx + 1] * 4;
4422 if (idx_value & 0x10000) {
4423 if (!si_vm_reg_valid(start_reg))
4424 return -EINVAL;
4425 } else {
4426 for (i = 0; i < (pkt->count - 2); i++) {
4427 reg = start_reg + (4 * i);
4428 if (!si_vm_reg_valid(reg))
4429 return -EINVAL;
4430 }
4431 }
4432 }
4433 break;
4434 case PACKET3_COND_WRITE:
4435 if (idx_value & 0x100) {
4436 reg = ib[idx + 5] * 4;
4437 if (!si_vm_reg_valid(reg))
4438 return -EINVAL;
4439 }
4440 break;
4441 case PACKET3_COPY_DW:
4442 if (idx_value & 0x2) {
4443 reg = ib[idx + 3] * 4;
4444 if (!si_vm_reg_valid(reg))
4445 return -EINVAL;
4446 }
4447 break;
e5b9e750
TS
4448 case PACKET3_CP_DMA:
4449 r = si_vm_packet3_cp_dma_check(ib, idx);
4450 if (r)
4451 return r;
4452 break;
498dd8b3
AD
4453 default:
4454 DRM_ERROR("Invalid Compute packet3: 0x%x\n", pkt->opcode);
4455 return -EINVAL;
4456 }
4457 return 0;
4458}
4459
4460int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
4461{
4462 int ret = 0;
4463 u32 idx = 0;
4464 struct radeon_cs_packet pkt;
4465
4466 do {
4467 pkt.idx = idx;
4e872ae2
IH
4468 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
4469 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
498dd8b3
AD
4470 pkt.one_reg_wr = 0;
4471 switch (pkt.type) {
4e872ae2 4472 case RADEON_PACKET_TYPE0:
498dd8b3
AD
4473 dev_err(rdev->dev, "Packet0 not allowed!\n");
4474 ret = -EINVAL;
4475 break;
4e872ae2 4476 case RADEON_PACKET_TYPE2:
498dd8b3
AD
4477 idx += 1;
4478 break;
4e872ae2
IH
4479 case RADEON_PACKET_TYPE3:
4480 pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
498dd8b3
AD
4481 if (ib->is_const_ib)
4482 ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
4483 else {
876dc9f3 4484 switch (ib->ring) {
498dd8b3
AD
4485 case RADEON_RING_TYPE_GFX_INDEX:
4486 ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
4487 break;
4488 case CAYMAN_RING_TYPE_CP1_INDEX:
4489 case CAYMAN_RING_TYPE_CP2_INDEX:
4490 ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
4491 break;
4492 default:
876dc9f3 4493 dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
498dd8b3
AD
4494 ret = -EINVAL;
4495 break;
4496 }
4497 }
4498 idx += pkt.count + 2;
4499 break;
4500 default:
4501 dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type);
4502 ret = -EINVAL;
4503 break;
4504 }
4505 if (ret)
4506 break;
4507 } while (idx < ib->length_dw);
4508
4509 return ret;
4510}
4511
d2800ee5
AD
4512/*
4513 * vm
4514 */
4515int si_vm_init(struct radeon_device *rdev)
4516{
4517 /* number of VMs */
4518 rdev->vm_manager.nvm = 16;
4519 /* base offset of vram pages */
4520 rdev->vm_manager.vram_base_offset = 0;
4521
4522 return 0;
4523}
4524
/* VM manager teardown for SI; si_vm_init() allocates nothing to free. */
void si_vm_fini(struct radeon_device *rdev)
{
}
4528
fbf6dc7a
AD
4529/**
4530 * si_vm_decode_fault - print human readable fault info
4531 *
4532 * @rdev: radeon_device pointer
4533 * @status: VM_CONTEXT1_PROTECTION_FAULT_STATUS register value
4534 * @addr: VM_CONTEXT1_PROTECTION_FAULT_ADDR register value
4535 *
4536 * Print human readable fault information (SI).
4537 */
4538static void si_vm_decode_fault(struct radeon_device *rdev,
4539 u32 status, u32 addr)
4540{
4541 u32 mc_id = (status & MEMORY_CLIENT_ID_MASK) >> MEMORY_CLIENT_ID_SHIFT;
4542 u32 vmid = (status & FAULT_VMID_MASK) >> FAULT_VMID_SHIFT;
4543 u32 protections = (status & PROTECTIONS_MASK) >> PROTECTIONS_SHIFT;
4544 char *block;
4545
4546 if (rdev->family == CHIP_TAHITI) {
4547 switch (mc_id) {
4548 case 160:
4549 case 144:
4550 case 96:
4551 case 80:
4552 case 224:
4553 case 208:
4554 case 32:
4555 case 16:
4556 block = "CB";
4557 break;
4558 case 161:
4559 case 145:
4560 case 97:
4561 case 81:
4562 case 225:
4563 case 209:
4564 case 33:
4565 case 17:
4566 block = "CB_FMASK";
4567 break;
4568 case 162:
4569 case 146:
4570 case 98:
4571 case 82:
4572 case 226:
4573 case 210:
4574 case 34:
4575 case 18:
4576 block = "CB_CMASK";
4577 break;
4578 case 163:
4579 case 147:
4580 case 99:
4581 case 83:
4582 case 227:
4583 case 211:
4584 case 35:
4585 case 19:
4586 block = "CB_IMMED";
4587 break;
4588 case 164:
4589 case 148:
4590 case 100:
4591 case 84:
4592 case 228:
4593 case 212:
4594 case 36:
4595 case 20:
4596 block = "DB";
4597 break;
4598 case 165:
4599 case 149:
4600 case 101:
4601 case 85:
4602 case 229:
4603 case 213:
4604 case 37:
4605 case 21:
4606 block = "DB_HTILE";
4607 break;
4608 case 167:
4609 case 151:
4610 case 103:
4611 case 87:
4612 case 231:
4613 case 215:
4614 case 39:
4615 case 23:
4616 block = "DB_STEN";
4617 break;
4618 case 72:
4619 case 68:
4620 case 64:
4621 case 8:
4622 case 4:
4623 case 0:
4624 case 136:
4625 case 132:
4626 case 128:
4627 case 200:
4628 case 196:
4629 case 192:
4630 block = "TC";
4631 break;
4632 case 112:
4633 case 48:
4634 block = "CP";
4635 break;
4636 case 49:
4637 case 177:
4638 case 50:
4639 case 178:
4640 block = "SH";
4641 break;
4642 case 53:
4643 case 190:
4644 block = "VGT";
4645 break;
4646 case 117:
4647 block = "IH";
4648 break;
4649 case 51:
4650 case 115:
4651 block = "RLC";
4652 break;
4653 case 119:
4654 case 183:
4655 block = "DMA0";
4656 break;
4657 case 61:
4658 block = "DMA1";
4659 break;
4660 case 248:
4661 case 120:
4662 block = "HDP";
4663 break;
4664 default:
4665 block = "unknown";
4666 break;
4667 }
4668 } else {
4669 switch (mc_id) {
4670 case 32:
4671 case 16:
4672 case 96:
4673 case 80:
4674 case 160:
4675 case 144:
4676 case 224:
4677 case 208:
4678 block = "CB";
4679 break;
4680 case 33:
4681 case 17:
4682 case 97:
4683 case 81:
4684 case 161:
4685 case 145:
4686 case 225:
4687 case 209:
4688 block = "CB_FMASK";
4689 break;
4690 case 34:
4691 case 18:
4692 case 98:
4693 case 82:
4694 case 162:
4695 case 146:
4696 case 226:
4697 case 210:
4698 block = "CB_CMASK";
4699 break;
4700 case 35:
4701 case 19:
4702 case 99:
4703 case 83:
4704 case 163:
4705 case 147:
4706 case 227:
4707 case 211:
4708 block = "CB_IMMED";
4709 break;
4710 case 36:
4711 case 20:
4712 case 100:
4713 case 84:
4714 case 164:
4715 case 148:
4716 case 228:
4717 case 212:
4718 block = "DB";
4719 break;
4720 case 37:
4721 case 21:
4722 case 101:
4723 case 85:
4724 case 165:
4725 case 149:
4726 case 229:
4727 case 213:
4728 block = "DB_HTILE";
4729 break;
4730 case 39:
4731 case 23:
4732 case 103:
4733 case 87:
4734 case 167:
4735 case 151:
4736 case 231:
4737 case 215:
4738 block = "DB_STEN";
4739 break;
4740 case 72:
4741 case 68:
4742 case 8:
4743 case 4:
4744 case 136:
4745 case 132:
4746 case 200:
4747 case 196:
4748 block = "TC";
4749 break;
4750 case 112:
4751 case 48:
4752 block = "CP";
4753 break;
4754 case 49:
4755 case 177:
4756 case 50:
4757 case 178:
4758 block = "SH";
4759 break;
4760 case 53:
4761 block = "VGT";
4762 break;
4763 case 117:
4764 block = "IH";
4765 break;
4766 case 51:
4767 case 115:
4768 block = "RLC";
4769 break;
4770 case 119:
4771 case 183:
4772 block = "DMA0";
4773 break;
4774 case 61:
4775 block = "DMA1";
4776 break;
4777 case 248:
4778 case 120:
4779 block = "HDP";
4780 break;
4781 default:
4782 block = "unknown";
4783 break;
4784 }
4785 }
4786
4787 printk("VM fault (0x%02x, vmid %d) at page %u, %s from %s (%d)\n",
4788 protections, vmid, addr,
4789 (status & MEMORY_CLIENT_RW_MASK) ? "write" : "read",
4790 block, mc_id);
4791}
4792
498522b4 4793void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
d2800ee5 4794{
498522b4 4795 struct radeon_ring *ring = &rdev->ring[ridx];
d2800ee5 4796
ee60e29f 4797 if (vm == NULL)
d2800ee5
AD
4798 return;
4799
76c44f2c
AD
4800 /* write new base address */
4801 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4802 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4803 WRITE_DATA_DST_SEL(0)));
4804
ee60e29f 4805 if (vm->id < 8) {
76c44f2c
AD
4806 radeon_ring_write(ring,
4807 (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2);
ee60e29f 4808 } else {
76c44f2c
AD
4809 radeon_ring_write(ring,
4810 (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm->id - 8) << 2)) >> 2);
ee60e29f 4811 }
76c44f2c 4812 radeon_ring_write(ring, 0);
fa87e62d 4813 radeon_ring_write(ring, vm->pd_gpu_addr >> 12);
ee60e29f 4814
d2800ee5 4815 /* flush hdp cache */
76c44f2c
AD
4816 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4817 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4818 WRITE_DATA_DST_SEL(0)));
4819 radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
4820 radeon_ring_write(ring, 0);
ee60e29f
CK
4821 radeon_ring_write(ring, 0x1);
4822
d2800ee5 4823 /* bits 0-15 are the VM contexts0-15 */
76c44f2c
AD
4824 radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
4825 radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
4826 WRITE_DATA_DST_SEL(0)));
4827 radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
4828 radeon_ring_write(ring, 0);
498522b4 4829 radeon_ring_write(ring, 1 << vm->id);
58f8cf56
CK
4830
4831 /* sync PFP to ME, otherwise we might get invalid PFP reads */
4832 radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
4833 radeon_ring_write(ring, 0x0);
d2800ee5
AD
4834}
4835
f8f84ac5
AD
4836/*
4837 * Power and clock gating
4838 */
4839static void si_wait_for_rlc_serdes(struct radeon_device *rdev)
4840{
4841 int i;
4842
4843 for (i = 0; i < rdev->usec_timeout; i++) {
4844 if (RREG32(RLC_SERDES_MASTER_BUSY_0) == 0)
4845 break;
4846 udelay(1);
4847 }
4848
4849 for (i = 0; i < rdev->usec_timeout; i++) {
4850 if (RREG32(RLC_SERDES_MASTER_BUSY_1) == 0)
4851 break;
4852 udelay(1);
4853 }
4854}
4855
4856static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
4857 bool enable)
4858{
4859 u32 tmp = RREG32(CP_INT_CNTL_RING0);
4860 u32 mask;
4861 int i;
4862
4863 if (enable)
4864 tmp |= (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4865 else
4866 tmp &= ~(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4867 WREG32(CP_INT_CNTL_RING0, tmp);
4868
4869 if (!enable) {
4870 /* read a gfx register */
4871 tmp = RREG32(DB_DEPTH_INFO);
4872
4873 mask = RLC_BUSY_STATUS | GFX_POWER_STATUS | GFX_CLOCK_STATUS | GFX_LS_STATUS;
4874 for (i = 0; i < rdev->usec_timeout; i++) {
4875 if ((RREG32(RLC_STAT) & mask) == (GFX_CLOCK_STATUS | GFX_POWER_STATUS))
4876 break;
4877 udelay(1);
4878 }
4879 }
4880}
4881
/* Configure UVD dynamic clock mode (DCM).
 * @sw_mode: true  = software-driven gating (clear the upper gate bits in
 *                   UVD_CGC_CTRL, program CGC_CTRL2 for dynamic gating),
 *           false = hardware-driven gating (set all upper gate bits,
 *                   clear CGC_CTRL2).
 * Always enables DCM with CG_DT(1)/CLK_OD(4) divider settings.
 */
static void si_set_uvd_dcm(struct radeon_device *rdev,
			   bool sw_mode)
{
	u32 tmp, tmp2;

	tmp = RREG32(UVD_CGC_CTRL);
	tmp &= ~(CLK_OD_MASK | CG_DT_MASK);
	tmp |= DCM | CG_DT(1) | CLK_OD(4);

	if (sw_mode) {
		tmp &= ~0x7ffff800;	/* clear per-block gate bits */
		tmp2 = DYN_OR_EN | DYN_RR_EN | G_DIV_ID(7);
	} else {
		tmp |= 0x7ffff800;	/* gate all blocks in hw mode */
		tmp2 = 0;
	}

	WREG32(UVD_CGC_CTRL, tmp);
	WREG32_UVD_CTX(UVD_CGC_CTRL2, tmp2);
}
4902
/* Initialize UVD internal clock gating.  hw_mode is hard-wired true, so
 * this always selects hardware DCM via si_set_uvd_dcm(rdev, false); the
 * else branch (disable DCM) is currently dead code kept for reference.
 */
void si_init_uvd_internal_cg(struct radeon_device *rdev)
{
	bool hw_mode = true;

	if (hw_mode) {
		si_set_uvd_dcm(rdev, false);
	} else {
		u32 tmp = RREG32(UVD_CGC_CTRL);
		tmp &= ~DCM;
		WREG32(UVD_CGC_CTRL, tmp);
	}
}
4915
/* Halt the RLC (if running) and wait for its serdes to go idle.
 * Returns the previous RLC_CNTL value so the caller can restore it
 * later with si_update_rlc().
 */
static u32 si_halt_rlc(struct radeon_device *rdev)
{
	u32 data, orig;

	orig = data = RREG32(RLC_CNTL);

	if (data & RLC_ENABLE) {
		data &= ~RLC_ENABLE;
		WREG32(RLC_CNTL, data);

		si_wait_for_rlc_serdes(rdev);
	}

	return orig;
}
4931
/* Restore RLC_CNTL to @rlc (typically the value saved by si_halt_rlc()),
 * skipping the register write if it already matches.
 */
static void si_update_rlc(struct radeon_device *rdev, u32 rlc)
{
	u32 tmp;

	tmp = RREG32(RLC_CNTL);
	if (tmp != rlc)
		WREG32(RLC_CNTL, rlc);
}
4940
/* Enable/disable DMA (sDMA) power gating.  Only enables when the device
 * advertises RADEON_PG_SUPPORT_SDMA; write is skipped if unchanged.
 */
static void si_enable_dma_pg(struct radeon_device *rdev, bool enable)
{
	u32 data, orig;

	orig = data = RREG32(DMA_PG);
	if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA))
		data |= PG_CNTL_ENABLE;
	else
		data &= ~PG_CNTL_ENABLE;
	if (orig != data)
		WREG32(DMA_PG, data);
}
4953
/* Initialize the DMA power-gating state machine: program the PGFSM
 * config, then flush with five zero writes to the write port.
 * Magic values come from AMD hardware programming sequences.
 */
static void si_init_dma_pg(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(DMA_PGFSM_WRITE, 0x00002000);
	WREG32(DMA_PGFSM_CONFIG, 0x100010ff);

	for (tmp = 0; tmp < 5; tmp++)
		WREG32(DMA_PGFSM_WRITE, 0);
}
4964
/* Enable/disable gfx coarse-grain power gating.  When enabling (and
 * RADEON_PG_SUPPORT_GFX_PG is set) programs the RLC power-up/down delay
 * thresholds, turns on GFX_PG_ENABLE and automatic power gating.  When
 * disabling, clears auto-PG and performs a dummy DB_RENDER_CONTROL read
 * (result intentionally unused) to settle the gfx block.
 */
static void si_enable_gfx_cgpg(struct radeon_device *rdev,
			       bool enable)
{
	u32 tmp;

	if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
		/* power up/down/thread/memory delay thresholds */
		tmp = RLC_PUD(0x10) | RLC_PDD(0x10) | RLC_TTPD(0x10) | RLC_MSD(0x10);
		WREG32(RLC_TTOP_D, tmp);

		tmp = RREG32(RLC_PG_CNTL);
		tmp |= GFX_PG_ENABLE;
		WREG32(RLC_PG_CNTL, tmp);

		tmp = RREG32(RLC_AUTO_PG_CTRL);
		tmp |= AUTO_PG_EN;
		WREG32(RLC_AUTO_PG_CTRL, tmp);
	} else {
		tmp = RREG32(RLC_AUTO_PG_CTRL);
		tmp &= ~AUTO_PG_EN;
		WREG32(RLC_AUTO_PG_CTRL, tmp);

		/* dummy read */
		tmp = RREG32(DB_RENDER_CONTROL);
	}
}
4989
/* Initialize gfx coarse-grain power gating: point the RLC at the
 * save/restore and clear-state buffers (256-byte aligned GPU addresses,
 * hence the >> 8), select save/restore as the PG source, and set the
 * GRBM register save/group idle threshold in RLC_AUTO_PG_CTRL.
 */
static void si_init_gfx_cgpg(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);

	tmp = RREG32(RLC_PG_CNTL);
	tmp |= GFX_PG_SRC;
	WREG32(RLC_PG_CNTL, tmp);

	WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);

	tmp = RREG32(RLC_AUTO_PG_CTRL);

	tmp &= ~GRBM_REG_SGIT_MASK;
	tmp |= GRBM_REG_SGIT(0x700);
	tmp &= ~PG_AFTER_GRBM_REG_ST_MASK;
	WREG32(RLC_AUTO_PG_CTRL, tmp);
}
5009
/* Return a bitmap of active (non-disabled) compute units for the given
 * shader engine/array.  Combines the fixed and user CU-disable fuses
 * (disable bits live in the upper 16 bits of the config registers) and
 * inverts them, masked to max_cu_per_sh bits: 1 = CU active.
 */
static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
{
	u32 mask = 0, tmp, tmp1;
	int i;

	si_select_se_sh(rdev, se, sh);
	tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
	tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
	/* restore broadcast addressing */
	si_select_se_sh(rdev, 0xffffffff, 0xffffffff);

	tmp &= 0xffff0000;	/* keep only the CU-disable field */

	tmp |= tmp1;
	tmp >>= 16;

	/* build a max_cu_per_sh-wide mask of ones */
	for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) {
		mask <<= 1;
		mask |= 1;
	}

	return (~tmp) & mask;	/* disabled bits inverted -> active bits */
}
5032
/* Program the always-on CU mask and the max power-gateable CU count.
 * For each SE/SH pair, marks up to the first two active CUs as
 * always-on (packed 8 bits per SH, 16 per SE into RLC_PG_AO_CU_MASK)
 * and accumulates the total active CU count into RLC_MAX_PG_CU.
 */
static void si_init_ao_cu_mask(struct radeon_device *rdev)
{
	u32 i, j, k, active_cu_number = 0;
	u32 mask, counter, cu_bitmap;
	u32 tmp = 0;

	for (i = 0; i < rdev->config.si.max_shader_engines; i++) {
		for (j = 0; j < rdev->config.si.max_sh_per_se; j++) {
			mask = 1;
			cu_bitmap = 0;
			counter = 0;
			for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) {
				if (si_get_cu_active_bitmap(rdev, i, j) & mask) {
					if (counter < 2)
						cu_bitmap |= mask;	/* first two CUs stay on */
					counter++;
				}
				mask <<= 1;
			}

			active_cu_number += counter;
			tmp |= (cu_bitmap << (i * 16 + j * 8));
		}
	}

	WREG32(RLC_PG_AO_CU_MASK, tmp);

	tmp = RREG32(RLC_MAX_PG_CU);
	tmp &= ~MAX_PU_CU_MASK;
	tmp |= MAX_PU_CU(active_cu_number);
	WREG32(RLC_MAX_PG_CU, tmp);
}
5065
/* Enable/disable gfx coarse-grain clock gating (CGCG) and clock/light
 * sleep (CGLS).  Enable path: halt the RLC, broadcast a serdes write to
 * all CUs, wait for the serdes, restore the RLC, then set the enable
 * bits.  Disable path: a few dummy CB clock-gating reads to settle the
 * block, then clear the enable bits.  Final write skipped if unchanged.
 */
static void si_enable_cgcg(struct radeon_device *rdev,
			   bool enable)
{
	u32 data, orig, tmp;

	orig = data = RREG32(RLC_CGCG_CGLS_CTRL);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
		si_enable_gui_idle_interrupt(rdev, true);

		WREG32(RLC_GCPM_GENERAL_3, 0x00000080);

		tmp = si_halt_rlc(rdev);

		/* broadcast serdes command to all SEs/SHs */
		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00b000ff);

		si_wait_for_rlc_serdes(rdev);

		si_update_rlc(rdev, tmp);

		WREG32(RLC_SERDES_WR_CTRL, 0x007000ff);

		data |= CGCG_EN | CGLS_EN;
	} else {
		si_enable_gui_idle_interrupt(rdev, false);

		/* dummy reads to flush CB clock-gating state */
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);

		data &= ~(CGCG_EN | CGLS_EN);
	}

	if (orig != data)
		WREG32(RLC_CGCG_CGLS_CTRL, data);
}
5105
/* Enable/disable gfx medium-grain clock gating (MGCG), optional CP
 * memory light sleep, and the RLC MGCG override.  Both paths halt the
 * RLC, issue a broadcast serdes write (0x00d000ff on enable,
 * 0x00e000ff on disable), and restore the RLC afterwards.
 */
static void si_enable_mgcg(struct radeon_device *rdev,
			   bool enable)
{
	u32 data, orig, tmp = 0;

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
		orig = data = RREG32(CGTS_SM_CTRL_REG);
		data = 0x96940200;	/* AMD-recommended SM gating config */
		if (orig != data)
			WREG32(CGTS_SM_CTRL_REG, data);

		if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
			orig = data = RREG32(CP_MEM_SLP_CNTL);
			data |= CP_MEM_LS_EN;
			if (orig != data)
				WREG32(CP_MEM_SLP_CNTL, data);
		}

		orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
		data &= 0xffffffc0;	/* clear override bits 0-5 */
		if (orig != data)
			WREG32(RLC_CGTT_MGCG_OVERRIDE, data);

		tmp = si_halt_rlc(rdev);

		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00d000ff);

		si_update_rlc(rdev, tmp);
	} else {
		orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
		data |= 0x00000003;	/* force override on */
		if (orig != data)
			WREG32(RLC_CGTT_MGCG_OVERRIDE, data);

		data = RREG32(CP_MEM_SLP_CNTL);
		if (data & CP_MEM_LS_EN) {
			data &= ~CP_MEM_LS_EN;
			WREG32(CP_MEM_SLP_CNTL, data);
		}
		orig = data = RREG32(CGTS_SM_CTRL_REG);
		data |= LS_OVERRIDE | OVERRIDE;
		if (orig != data)
			WREG32(CGTS_SM_CTRL_REG, data);

		tmp = si_halt_rlc(rdev);

		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00e000ff);

		si_update_rlc(rdev, tmp);
	}
}
5161
/* Enable/disable UVD medium-grain clock gating: toggle the per-memory
 * gating bits (low 14 bits of UVD_CGC_MEM_CTRL), the DCM bit in
 * UVD_CGC_CTRL, and the SMC-side CGTT local registers (0 = gated,
 * all-ones = ungated).
 */
static void si_enable_uvd_mgcg(struct radeon_device *rdev,
			       bool enable)
{
	u32 orig, data, tmp;

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
		tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
		tmp |= 0x3fff;
		WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);

		orig = data = RREG32(UVD_CGC_CTRL);
		data |= DCM;
		if (orig != data)
			WREG32(UVD_CGC_CTRL, data);

		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0);
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0);
	} else {
		tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
		tmp &= ~0x3fff;
		WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);

		orig = data = RREG32(UVD_CGC_CTRL);
		data &= ~DCM;
		if (orig != data)
			WREG32(UVD_CGC_CTRL, data);

		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0xffffffff);
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0xffffffff);
	}
}
5193
/* Memory-controller/ATC/VM registers that carry the per-block clock
 * gating (MC_CG_ENABLE) and light-sleep (MC_LS_ENABLE) bits; iterated
 * by si_enable_mc_ls() and si_enable_mc_mgcg().
 */
static const u32 mc_cg_registers[] =
{
	MC_HUB_MISC_HUB_CG,
	MC_HUB_MISC_SIP_CG,
	MC_HUB_MISC_VM_CG,
	MC_XPB_CLK_GAT,
	ATC_MISC_CG,
	MC_CITF_MISC_WR_CG,
	MC_CITF_MISC_RD_CG,
	MC_CITF_MISC_VM_CG,
	VM_L2_CG,
};
5206
/* Enable/disable memory-controller light sleep by toggling MC_LS_ENABLE
 * in every register of mc_cg_registers[]; writes skipped if unchanged.
 */
static void si_enable_mc_ls(struct radeon_device *rdev,
			    bool enable)
{
	int i;
	u32 orig, data;

	for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
		orig = data = RREG32(mc_cg_registers[i]);
		if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
			data |= MC_LS_ENABLE;
		else
			data &= ~MC_LS_ENABLE;
		if (data != orig)
			WREG32(mc_cg_registers[i], data);
	}
}
5223
e16866ec
AD
/* Enable/disable memory-controller medium-grain clock gating by
 * toggling MC_CG_ENABLE in every register of mc_cg_registers[].
 */
static void si_enable_mc_mgcg(struct radeon_device *rdev,
			      bool enable)
{
	int i;
	u32 orig, data;

	for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
		orig = data = RREG32(mc_cg_registers[i]);
		if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
			data |= MC_CG_ENABLE;
		else
			data &= ~MC_CG_ENABLE;
		if (data != orig)
			WREG32(mc_cg_registers[i], data);
	}
}
5240
e16866ec
AD
/* Enable/disable DMA engine medium-grain clock gating for both DMA
 * instances.  Enable: clear the memory power override and program the
 * dynamic clock control value 0x00000100.  Disable: set the override
 * and force clocks on via 0xff000000.
 */
static void si_enable_dma_mgcg(struct radeon_device *rdev,
			       bool enable)
{
	u32 orig, data, offset;
	int i;

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
		for (i = 0; i < 2; i++) {
			if (i == 0)
				offset = DMA0_REGISTER_OFFSET;
			else
				offset = DMA1_REGISTER_OFFSET;
			orig = data = RREG32(DMA_POWER_CNTL + offset);
			data &= ~MEM_POWER_OVERRIDE;
			if (data != orig)
				WREG32(DMA_POWER_CNTL + offset, data);
			WREG32(DMA_CLK_CTRL + offset, 0x00000100);
		}
	} else {
		for (i = 0; i < 2; i++) {
			if (i == 0)
				offset = DMA0_REGISTER_OFFSET;
			else
				offset = DMA1_REGISTER_OFFSET;
			orig = data = RREG32(DMA_POWER_CNTL + offset);
			data |= MEM_POWER_OVERRIDE;
			if (data != orig)
				WREG32(DMA_POWER_CNTL + offset, data);

			orig = data = RREG32(DMA_CLK_CTRL + offset);
			data = 0xff000000;	/* force all DMA clocks on */
			if (data != orig)
				WREG32(DMA_CLK_CTRL + offset, data);
		}
	}
}
5277
e16866ec
AD
/* Enable/disable BIF (bus interface) memory light sleep on the PCIE
 * slave/master/replay memories; write skipped if unchanged.
 */
static void si_enable_bif_mgls(struct radeon_device *rdev,
			       bool enable)
{
	u32 orig, data;

	orig = data = RREG32_PCIE(PCIE_CNTL2);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
		data |= SLV_MEM_LS_EN | MST_MEM_LS_EN |
			REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN;
	else
		data &= ~(SLV_MEM_LS_EN | MST_MEM_LS_EN |
			  REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN);

	if (orig != data)
		WREG32_PCIE(PCIE_CNTL2, data);
}
5295
e16866ec
AD
/* Enable/disable HDP (host data path) medium-grain clock gating.
 * Note the bit is a gating *disable*, so enable clears it.
 */
static void si_enable_hdp_mgcg(struct radeon_device *rdev,
			       bool enable)
{
	u32 orig, data;

	orig = data = RREG32(HDP_HOST_PATH_CNTL);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
		data &= ~CLOCK_GATING_DIS;
	else
		data |= CLOCK_GATING_DIS;

	if (orig != data)
		WREG32(HDP_HOST_PATH_CNTL, data);
}
5311
e16866ec
AD
/* Enable/disable HDP memory light sleep; write skipped if unchanged. */
static void si_enable_hdp_ls(struct radeon_device *rdev,
			     bool enable)
{
	u32 orig, data;

	orig = data = RREG32(HDP_MEM_POWER_LS);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
		data |= HDP_LS_ENABLE;
	else
		data &= ~HDP_LS_ENABLE;

	if (orig != data)
		WREG32(HDP_MEM_POWER_LS, data);
}
5327
68e3a092
AD
/* Enable or disable clock gating for the IP blocks selected in @block
 * (bitmask of RADEON_CG_BLOCK_*).  For gfx, MGCG/CGCG must be toggled
 * in opposite order on enable vs. disable, and GUI-idle interrupts are
 * suppressed during the transition.  UVD gating is applied only when
 * the device actually has a UVD block.
 */
static void si_update_cg(struct radeon_device *rdev,
			 u32 block, bool enable)
{
	if (block & RADEON_CG_BLOCK_GFX) {
		si_enable_gui_idle_interrupt(rdev, false);
		/* order matters! */
		if (enable) {
			si_enable_mgcg(rdev, true);
			si_enable_cgcg(rdev, true);
		} else {
			si_enable_cgcg(rdev, false);
			si_enable_mgcg(rdev, false);
		}
		si_enable_gui_idle_interrupt(rdev, true);
	}

	if (block & RADEON_CG_BLOCK_MC) {
		si_enable_mc_mgcg(rdev, enable);
		si_enable_mc_ls(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_SDMA) {
		si_enable_dma_mgcg(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_BIF) {
		si_enable_bif_mgls(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_UVD) {
		if (rdev->has_uvd) {
			si_enable_uvd_mgcg(rdev, enable);
		}
	}

	if (block & RADEON_CG_BLOCK_HDP) {
		si_enable_hdp_mgcg(rdev, enable);
		si_enable_hdp_ls(rdev, enable);
	}
}
f8f84ac5
AD
5368
/* Turn on clock gating for all supported blocks at init time; UVD
 * gating and its internal CG setup only when the device has UVD.
 */
static void si_init_cg(struct radeon_device *rdev)
{
	si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
			    RADEON_CG_BLOCK_MC |
			    RADEON_CG_BLOCK_SDMA |
			    RADEON_CG_BLOCK_BIF |
			    RADEON_CG_BLOCK_HDP), true);
	if (rdev->has_uvd) {
		si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true);
		si_init_uvd_internal_cg(rdev);
	}
}
bd8cd539 5381
f8f84ac5
AD
/* Tear down clock gating: disable UVD gating first (if present), then
 * the remaining blocks — reverse of si_init_cg().
 */
static void si_fini_cg(struct radeon_device *rdev)
{
	if (rdev->has_uvd) {
		si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false);
	}
	si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
			    RADEON_CG_BLOCK_MC |
			    RADEON_CG_BLOCK_SDMA |
			    RADEON_CG_BLOCK_BIF |
			    RADEON_CG_BLOCK_HDP), false);
}
5393
59a82d0e
AD
/* Compute the dword size of the clear-state buffer built by
 * si_get_csb_buffer(): preamble begin (2) + context control (3) +
 * per-extent SET_CONTEXT_REG packets (2 + reg_count each) +
 * pa_sc_raster_config (3) + preamble end (2) + clear state (2).
 * Returns 0 if there is no cs_data or a non-CONTEXT section is found.
 */
u32 si_get_csb_size(struct radeon_device *rdev)
{
	u32 count = 0;
	const struct cs_section_def *sect = NULL;
	const struct cs_extent_def *ext = NULL;

	if (rdev->rlc.cs_data == NULL)
		return 0;

	/* begin clear state */
	count += 2;
	/* context control state */
	count += 3;

	for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
		for (ext = sect->section; ext->extent != NULL; ++ext) {
			if (sect->id == SECT_CONTEXT)
				count += 2 + ext->reg_count;
			else
				return 0;
		}
	}
	/* pa_sc_raster_config */
	count += 3;
	/* end clear state */
	count += 2;
	/* clear state */
	count += 2;

	return count;
}
5425
/* Fill @buffer with the PM4 clear-state packet stream (little-endian):
 * preamble begin, context control, one SET_CONTEXT_REG packet per
 * extent in rdev->rlc.cs_data, the per-family PA_SC_RASTER_CONFIG
 * value, preamble end, and a final CLEAR_STATE packet.  Packet count
 * must match si_get_csb_size().  Bails out silently on NULL cs_data,
 * NULL buffer, or an unexpected non-CONTEXT section.
 */
void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
{
	u32 count = 0, i;
	const struct cs_section_def *sect = NULL;
	const struct cs_extent_def *ext = NULL;

	if (rdev->rlc.cs_data == NULL)
		return;
	if (buffer == NULL)
		return;

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CONTEXT_CONTROL, 1));
	buffer[count++] = cpu_to_le32(0x80000000);
	buffer[count++] = cpu_to_le32(0x80000000);

	for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
		for (ext = sect->section; ext->extent != NULL; ++ext) {
			if (sect->id == SECT_CONTEXT) {
				buffer[count++] =
					cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, ext->reg_count));
				/* reg_index is a dword offset; 0xa000 is the context reg base */
				buffer[count++] = cpu_to_le32(ext->reg_index - 0xa000);
				for (i = 0; i < ext->reg_count; i++)
					buffer[count++] = cpu_to_le32(ext->extent[i]);
			} else {
				return;
			}
		}
	}

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, 1));
	buffer[count++] = cpu_to_le32(PA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
	/* per-asic raster config (rasterizer/SE layout) */
	switch (rdev->family) {
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
		buffer[count++] = cpu_to_le32(0x2a00126a);
		break;
	case CHIP_VERDE:
		buffer[count++] = cpu_to_le32(0x0000124a);
		break;
	case CHIP_OLAND:
		buffer[count++] = cpu_to_le32(0x00000082);
		break;
	case CHIP_HAINAN:
		buffer[count++] = cpu_to_le32(0x00000000);
		break;
	default:
		buffer[count++] = cpu_to_le32(0x00000000);
		break;
	}

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_END_CLEAR_STATE);

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CLEAR_STATE, 0));
	buffer[count++] = cpu_to_le32(0);
}
5485
f8f84ac5
AD
/* Initialize power gating.  If any PG feature is supported: set up DMA
 * PG (when supported), program the always-on CU mask, initialize gfx
 * CGPG (or just the RLC save/restore + clear-state base addresses when
 * gfx PG is unsupported), then enable DMA and gfx PG.  With no PG
 * support, only the RLC base addresses are programmed (>> 8 because the
 * registers take 256-byte-aligned GPU addresses).
 */
static void si_init_pg(struct radeon_device *rdev)
{
	if (rdev->pg_flags) {
		if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) {
			si_init_dma_pg(rdev);
		}
		si_init_ao_cu_mask(rdev);
		if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
			si_init_gfx_cgpg(rdev);
		} else {
			WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
			WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
		}
		si_enable_dma_pg(rdev, true);
		si_enable_gfx_cgpg(rdev, true);
	} else {
		WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	}
}
5506
/* Disable DMA and gfx power gating if any PG feature was enabled. */
static void si_fini_pg(struct radeon_device *rdev)
{
	if (rdev->pg_flags) {
		si_enable_dma_pg(rdev, false);
		si_enable_gfx_cgpg(rdev, false);
	}
}
5514
347e7592
AD
5515/*
5516 * RLC
5517 */
/* Soft-reset the RLC via GRBM: assert SOFT_RESET_RLC, hold 50us,
 * deassert, then wait another 50us for the block to come back.
 */
void si_rlc_reset(struct radeon_device *rdev)
{
	u32 tmp = RREG32(GRBM_SOFT_RESET);

	tmp |= SOFT_RESET_RLC;
	WREG32(GRBM_SOFT_RESET, tmp);
	udelay(50);
	tmp &= ~SOFT_RESET_RLC;
	WREG32(GRBM_SOFT_RESET, tmp);
	udelay(50);
}
5529
347e7592
AD
/* Stop the RLC: clear RLC_CNTL, suppress GUI-idle interrupts, and wait
 * for the serdes to go idle before returning.
 */
static void si_rlc_stop(struct radeon_device *rdev)
{
	WREG32(RLC_CNTL, 0);

	si_enable_gui_idle_interrupt(rdev, false);

	si_wait_for_rlc_serdes(rdev);
}
5538
/* Start the RLC, re-enable GUI-idle interrupts, and give the block
 * 50us to come up.
 */
static void si_rlc_start(struct radeon_device *rdev)
{
	WREG32(RLC_CNTL, RLC_ENABLE);

	si_enable_gui_idle_interrupt(rdev, true);

	udelay(50);
}
5547
/* Report whether load-balancing per watt (LBPW) can be used: true only
 * when the memory type field of MC_SEQ_MISC0 indicates DDR3 (0xB).
 */
static bool si_lbpw_supported(struct radeon_device *rdev)
{
	u32 tmp;

	/* Enable LBPW only for DDR3 */
	tmp = RREG32(MC_SEQ_MISC0);
	if ((tmp & 0xF0000000) == 0xB0000000)
		return true;
	return false;
}
5558
/* Enable/disable RLC load balancing (LBPW).  When disabling, reset the
 * SE/SH selection to broadcast and restore the default SPI CU mask.
 */
static void si_enable_lbpw(struct radeon_device *rdev, bool enable)
{
	u32 tmp;

	tmp = RREG32(RLC_LB_CNTL);
	if (enable)
		tmp |= LOAD_BALANCE_ENABLE;
	else
		tmp &= ~LOAD_BALANCE_ENABLE;
	WREG32(RLC_LB_CNTL, tmp);

	if (!enable) {
		si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
		WREG32(SPI_LB_CU_MASK, 0x00ff);
	}
}
5575
/* Bring up the RLC: stop and soft-reset it, (re)initialize power and
 * clock gating, clear the load-balancing registers, upload the RLC
 * microcode (big-endian words from rdev->rlc_fw), enable LBPW when the
 * memory type supports it, and start the RLC.
 * Returns 0 on success or -EINVAL if no RLC firmware is loaded.
 */
static int si_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	si_rlc_stop(rdev);

	si_rlc_reset(rdev);

	si_init_pg(rdev);

	si_init_cg(rdev);

	WREG32(RLC_RL_BASE, 0);
	WREG32(RLC_RL_SIZE, 0);
	WREG32(RLC_LB_CNTL, 0);
	WREG32(RLC_LB_CNTR_MAX, 0xffffffff);
	WREG32(RLC_LB_CNTR_INIT, 0);
	WREG32(RLC_LB_INIT_CU_MASK, 0xffffffff);

	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the RLC ucode word by word through the ADDR/DATA ports */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	for (i = 0; i < SI_RLC_UCODE_SIZE; i++) {
		WREG32(RLC_UCODE_ADDR, i);
		WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
	}
	WREG32(RLC_UCODE_ADDR, 0);

	si_enable_lbpw(rdev, si_lbpw_supported(rdev));

	si_rlc_start(rdev);

	return 0;
}
5615
25a857fb
AD
/* Turn on the interrupt handler (IH): enable the controller and its
 * ring buffer, and record the enabled state in rdev->ih.
 */
static void si_enable_interrupts(struct radeon_device *rdev)
{
	u32 ih_cntl = RREG32(IH_CNTL);
	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);

	ih_cntl |= ENABLE_INTR;
	ih_rb_cntl |= IH_RB_ENABLE;
	WREG32(IH_CNTL, ih_cntl);
	WREG32(IH_RB_CNTL, ih_rb_cntl);
	rdev->ih.enabled = true;
}
5627
/* Turn off the interrupt handler: disable the ring and controller,
 * reset the ring read/write pointers to 0, and mirror the state in
 * rdev->ih.
 */
static void si_disable_interrupts(struct radeon_device *rdev)
{
	u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
	u32 ih_cntl = RREG32(IH_CNTL);

	ih_rb_cntl &= ~IH_RB_ENABLE;
	ih_cntl &= ~ENABLE_INTR;
	WREG32(IH_RB_CNTL, ih_rb_cntl);
	WREG32(IH_CNTL, ih_cntl);
	/* set rptr, wptr to 0 */
	WREG32(IH_RB_RPTR, 0);
	WREG32(IH_RB_WPTR, 0);
	rdev->ih.enabled = false;
	rdev->ih.rptr = 0;
}
5643
/* Mask off every interrupt source: CP rings (preserving only the
 * context busy/empty bits on ring0), both DMA engines, GRBM, all
 * CRTC vblank and pageflip sources present on this asic, and — on
 * parts with a display controller — DAC autodetect and the HPD pads
 * (preserving only HPD polarity bits).
 */
static void si_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32(CP_INT_CNTL_RING0) &
		(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	WREG32(CP_INT_CNTL_RING0, tmp);
	WREG32(CP_INT_CNTL_RING1, 0);
	WREG32(CP_INT_CNTL_RING2, 0);
	tmp = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, tmp);
	tmp = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	if (rdev->num_crtc >= 2) {
		WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	if (rdev->num_crtc >= 2) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(DAC_AUTODETECT_INT_CONTROL, 0);

		tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}
}
5701
/* One-time interrupt controller bring-up: allocate the IH ring, resume
 * the RLC, program the IH ring base/size/writeback and IH_CNTL
 * defaults, mask all sources, enable bus mastering, and finally enable
 * interrupts.  Returns 0 on success or a negative errno from ring
 * allocation / RLC resume.
 */
static int si_irq_init(struct radeon_device *rdev)
{
	int ret = 0;
	int rb_bufsz;
	u32 interrupt_cntl, ih_cntl, ih_rb_cntl;

	/* allocate ring */
	ret = r600_ih_ring_alloc(rdev);
	if (ret)
		return ret;

	/* disable irqs */
	si_disable_interrupts(rdev);

	/* init rlc */
	ret = si_rlc_resume(rdev);
	if (ret) {
		r600_ih_ring_fini(rdev);
		return ret;
	}

	/* setup interrupt control */
	/* set dummy read address to ring address */
	WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
	interrupt_cntl = RREG32(INTERRUPT_CNTL);
	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
	 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
	 */
	interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
	interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
	WREG32(INTERRUPT_CNTL, interrupt_cntl);

	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
	rb_bufsz = order_base_2(rdev->ih.ring_size / 4);

	ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
		      IH_WPTR_OVERFLOW_CLEAR |
		      (rb_bufsz << 1));

	if (rdev->wb.enabled)
		ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;

	/* set the writeback address whether it's enabled or not */
	WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);

	WREG32(IH_RB_CNTL, ih_rb_cntl);

	/* set rptr, wptr to 0 */
	WREG32(IH_RB_RPTR, 0);
	WREG32(IH_RB_WPTR, 0);

	/* Default settings for IH_CNTL (disabled at first) */
	ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
	/* RPTR_REARM only works if msi's are enabled */
	if (rdev->msi_enabled)
		ih_cntl |= RPTR_REARM;
	WREG32(IH_CNTL, ih_cntl);

	/* force the active interrupt state to all disabled */
	si_disable_interrupt_state(rdev);

	pci_set_master(rdev->pdev);

	/* enable irqs */
	si_enable_interrupts(rdev);

	return ret;
}
5772
/* Apply the currently requested interrupt configuration from rdev->irq
 * to the hardware: CP ring fences, both DMA engine traps, per-CRTC
 * vblank/pageflip, HPD pads, and the DPM thermal interrupt.  Each
 * enable register is first read with its enable bits masked off, then
 * the requested bits are OR-ed back in and everything is written out.
 * Returns 0, or -EINVAL if no IRQ handler is installed; if the IH is
 * disabled, all sources are masked instead.
 */
int si_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1 = 0, hpd2 = 0, hpd3 = 0, hpd4 = 0, hpd5 = 0, hpd6 = 0;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 dma_cntl, dma_cntl1;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		si_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		si_disable_interrupt_state(rdev);
		return 0;
	}

	/* keep only the context busy/empty bits managed elsewhere */
	cp_int_cntl = RREG32(CP_INT_CNTL_RING0) &
		(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);

	if (!ASIC_IS_NODCE(rdev)) {
		hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	}

	dma_cntl = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
	dma_cntl1 = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;

	thermal_int = RREG32(CG_THERMAL_INT) &
		~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* enable CP interrupts on all rings */
	if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int gfx\n");
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int cp1\n");
		cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int cp2\n");
		cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int dma1\n");
		dma_cntl1 |= TRAP_ENABLE;
	}
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("si_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("si_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("si_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("si_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("si_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("si_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("si_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("si_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("si_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("si_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("si_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("si_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}

	WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
	WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
	WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);

	WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, dma_cntl);
	WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	if (rdev->num_crtc >= 2) {
		WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
		WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	if (rdev->num_crtc >= 2) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(DC_HPD1_INT_CONTROL, hpd1);
		WREG32(DC_HPD2_INT_CONTROL, hpd2);
		WREG32(DC_HPD3_INT_CONTROL, hpd3);
		WREG32(DC_HPD4_INT_CONTROL, hpd4);
		WREG32(DC_HPD5_INT_CONTROL, hpd5);
		WREG32(DC_HPD6_INT_CONTROL, hpd6);
	}

	WREG32(CG_THERMAL_INT, thermal_int);

	return 0;
}
5944
5945static inline void si_irq_ack(struct radeon_device *rdev)
5946{
5947 u32 tmp;
5948
5153550a
AD
5949 if (ASIC_IS_NODCE(rdev))
5950 return;
5951
25a857fb
AD
5952 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
5953 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
5954 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
5955 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
5956 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
5957 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
5958 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
5959 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
5960 if (rdev->num_crtc >= 4) {
5961 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
5962 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
5963 }
5964 if (rdev->num_crtc >= 6) {
5965 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
5966 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
5967 }
5968
5969 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
5970 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5971 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
5972 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5973 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
5974 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
5975 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
5976 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
5977 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
5978 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
5979 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
5980 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
5981
5982 if (rdev->num_crtc >= 4) {
5983 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
5984 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5985 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
5986 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5987 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
5988 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
5989 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
5990 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
5991 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
5992 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
5993 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
5994 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
5995 }
5996
5997 if (rdev->num_crtc >= 6) {
5998 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
5999 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6000 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
6001 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6002 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
6003 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
6004 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
6005 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
6006 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
6007 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
6008 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
6009 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
6010 }
6011
6012 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6013 tmp = RREG32(DC_HPD1_INT_CONTROL);
6014 tmp |= DC_HPDx_INT_ACK;
6015 WREG32(DC_HPD1_INT_CONTROL, tmp);
6016 }
6017 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6018 tmp = RREG32(DC_HPD2_INT_CONTROL);
6019 tmp |= DC_HPDx_INT_ACK;
6020 WREG32(DC_HPD2_INT_CONTROL, tmp);
6021 }
6022 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6023 tmp = RREG32(DC_HPD3_INT_CONTROL);
6024 tmp |= DC_HPDx_INT_ACK;
6025 WREG32(DC_HPD3_INT_CONTROL, tmp);
6026 }
6027 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6028 tmp = RREG32(DC_HPD4_INT_CONTROL);
6029 tmp |= DC_HPDx_INT_ACK;
6030 WREG32(DC_HPD4_INT_CONTROL, tmp);
6031 }
6032 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6033 tmp = RREG32(DC_HPD5_INT_CONTROL);
6034 tmp |= DC_HPDx_INT_ACK;
6035 WREG32(DC_HPD5_INT_CONTROL, tmp);
6036 }
6037 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6038 tmp = RREG32(DC_HPD5_INT_CONTROL);
6039 tmp |= DC_HPDx_INT_ACK;
6040 WREG32(DC_HPD6_INT_CONTROL, tmp);
6041 }
6042}
6043
/**
 * si_irq_disable - disable interrupt generation and clear pending state
 *
 * @rdev: radeon_device pointer
 *
 * Masks interrupt delivery, waits 1 ms for in-flight interrupts to land,
 * acks whatever is still pending, then clears the per-source enable
 * state.  The ordering (disable -> delay -> ack -> state) is deliberate;
 * do not reorder.
 */
static void si_irq_disable(struct radeon_device *rdev)
{
	si_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	si_irq_ack(rdev);
	si_disable_interrupt_state(rdev);
}
6052
/**
 * si_irq_suspend - quiesce the interrupt hardware for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables interrupts and stops the RLC so no new interrupts are raised
 * while the device is suspended.
 */
static void si_irq_suspend(struct radeon_device *rdev)
{
	si_irq_disable(rdev);
	si_rlc_stop(rdev);
}
6058
/**
 * si_irq_fini - tear down the interrupt handling path
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces the interrupt hardware and then frees the IH ring buffer.
 */
static void si_irq_fini(struct radeon_device *rdev)
{
	si_irq_suspend(rdev);
	r600_ih_ring_fini(rdev);
}
6064
25a857fb
AD
6065static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
6066{
6067 u32 wptr, tmp;
6068
6069 if (rdev->wb.enabled)
6070 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
6071 else
6072 wptr = RREG32(IH_RB_WPTR);
6073
6074 if (wptr & RB_OVERFLOW) {
6075 /* When a ring buffer overflow happen start parsing interrupt
6076 * from the last not overwritten vector (wptr + 16). Hopefully
6077 * this should allow us to catchup.
6078 */
6079 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
6080 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
6081 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
6082 tmp = RREG32(IH_RB_CNTL);
6083 tmp |= IH_WPTR_OVERFLOW_CLEAR;
6084 WREG32(IH_RB_CNTL, tmp);
6085 }
6086 return (wptr & rdev->ih.ptr_mask);
6087}
6088
/* SI IV Ring
 * Each IV ring entry is 128 bits:
 * [7:0]    - interrupt source id
 * [31:8]   - reserved
 * [59:32]  - interrupt source data
 * [63:60]  - reserved
 * [71:64]  - RINGID
 * [79:72]  - VMID
 * [127:80] - reserved
 */
/**
 * si_irq_process - main interrupt service routine for SI parts
 *
 * @rdev: radeon_device pointer
 *
 * Drains the IH (interrupt handler) ring and dispatches each vector:
 * vblank/vline per crtc, hotplug, UVD, VM protection faults, CP and DMA
 * fence/trap events and thermal transitions.  Hotplug and thermal work
 * is deferred to workqueues.  Re-checks wptr after releasing the lock to
 * avoid losing vectors that arrived during processing.
 * Returns IRQ_HANDLED if anything was processed, IRQ_NONE otherwise.
 */
int si_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data, ring_id;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_thermal = false;
	u32 status, addr;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	wptr = si_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	si_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
		ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[0]) {
						drm_handle_vblank(rdev->ddev, 0);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[0]))
						radeon_crtc_handle_flip(rdev, 0);
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1: /* D1 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[1]) {
						drm_handle_vblank(rdev->ddev, 1);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[1]))
						radeon_crtc_handle_flip(rdev, 1);
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1: /* D2 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			case 0: /* D3 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[2]) {
						drm_handle_vblank(rdev->ddev, 2);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[2]))
						radeon_crtc_handle_flip(rdev, 2);
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D3 vblank\n");
				}
				break;
			case 1: /* D3 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D3 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			case 0: /* D4 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[3]) {
						drm_handle_vblank(rdev->ddev, 3);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[3]))
						radeon_crtc_handle_flip(rdev, 3);
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D4 vblank\n");
				}
				break;
			case 1: /* D4 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D4 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			case 0: /* D5 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[4]) {
						drm_handle_vblank(rdev->ddev, 4);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[4]))
						radeon_crtc_handle_flip(rdev, 4);
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D5 vblank\n");
				}
				break;
			case 1: /* D5 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D5 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			case 0: /* D6 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[5]) {
						drm_handle_vblank(rdev->ddev, 5);
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
					}
					if (atomic_read(&rdev->irq.pflip[5]))
						radeon_crtc_handle_flip(rdev, 5);
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D6 vblank\n");
				}
				break;
			case 1: /* D6 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D6 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 42: /* HPD hotplug */
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 2:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 3:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 146: /* VM protection fault (write) */
		case 147: /* VM protection fault (read); decode then clear */
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
				addr);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
				status);
			si_vm_decode_fault(rdev, status, addr);
			/* reset addr and status */
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
			break;
		case 176: /* RINGID0 CP_INT */
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 177: /* RINGID1 CP_INT */
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
			break;
		case 178: /* RINGID2 CP_INT */
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			/* ring_id selects which CP ring signalled the fence */
			switch (ring_id) {
			case 0:
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
				break;
			case 1:
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
				break;
			case 2:
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
				break;
			}
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		case 244: /* DMA trap event */
			DRM_DEBUG("IH: DMA1 trap\n");
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	if (queue_hotplug)
		schedule_work(&rdev->hotplug_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = si_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}
6426
9b136d51
AD
6427/*
6428 * startup/shutdown callbacks
6429 */
/**
 * si_startup - bring up the hardware (common init/resume path)
 *
 * @rdev: radeon_device pointer
 *
 * Performs the ordered hardware bring-up: PCIe link/ASPM, VRAM scratch,
 * MC programming and (if dpm is not managing it) MC microcode, GART,
 * GPU/RLC init, writeback, fence rings, IRQs, CP/DMA/UVD rings, IB pool,
 * VM manager and audio.  The sequence is order-dependent; do not
 * rearrange the steps.
 * Returns 0 on success, negative error code on the first failing step.
 */
static int si_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2/3 link */
	si_pcie_gen3_enable(rdev);
	/* enable aspm */
	si_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	si_mc_program(rdev);

	/* when dpm is enabled, dpm loads the MC microcode itself */
	if (!rdev->pm.dpm_enabled) {
		r = si_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	r = si_pcie_gart_enable(rdev);
	if (r)
		return r;
	si_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->family == CHIP_VERDE) {
		rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
	}
	rdev->rlc.cs_data = si_cs_data;
	r = sumo_rlc_init(rdev);
	if (r) {
		DRM_ERROR("Failed to init rlc BOs!\n");
		return r;
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is non-fatal: just zero the ring size and carry on */
	if (rdev->has_uvd) {
		r = uvd_v2_2_resume(rdev);
		if (!r) {
			r = radeon_fence_driver_start_ring(rdev,
							   R600_RING_TYPE_UVD_INDEX);
			if (r)
				dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
		}
		if (r)
			rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
	}

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = si_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	si_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
	if (r)
		return r;

	r = si_cp_load_microcode(rdev);
	if (r)
		return r;
	r = si_cp_resume(rdev);
	if (r)
		return r;

	r = cayman_dma_resume(rdev);
	if (r)
		return r;

	/* UVD ring only started if uvd_v2_2_resume() above succeeded */
	if (rdev->has_uvd) {
		ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
		if (ring->ring_size) {
			r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
					     RADEON_CP_PACKET2);
			if (!r)
				r = uvd_v1_0_init(rdev);
			if (r)
				DRM_ERROR("radeon: failed initializing UVD (%d).\n", r);
		}
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_vm_manager_init(rdev);
	if (r) {
		dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
		return r;
	}

	r = dce6_audio_init(rdev);
	if (r)
		return r;

	return 0;
}
6606
/**
 * si_resume - resume the asic after suspend
 *
 * @rdev: radeon_device pointer
 *
 * Re-posts the card via the atom BIOS tables, restores the golden
 * register settings, resumes power management and re-runs the common
 * si_startup() bring-up.  accel_working is cleared on failure so
 * callers fall back to non-accelerated operation.
 * Returns 0 on success, error code on failure.
 */
int si_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	si_init_golden_registers(rdev);

	radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		DRM_ERROR("si startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
6634
/**
 * si_suspend - quiesce the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops power management, audio, the VM manager, the CP and DMA
 * engines, UVD, powergating/clockgating, interrupts, writeback and
 * finally the GART.  Teardown order mirrors the reverse of
 * si_startup(); do not reorder.
 * Always returns 0.
 */
int si_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	dce6_audio_fini(rdev);
	radeon_vm_manager_fini(rdev);
	si_cp_enable(rdev, false);
	cayman_dma_stop(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	si_pcie_gart_disable(rdev);
	return 0;
}
6653
6654/* Plan is to move initialization in that function and use
6655 * helper function so that radeon_device_init pretty much
6656 * do nothing more than calling asic specific function. This
6657 * should also allow to remove a bunch of callback function
6658 * like vram_info.
6659 */
/**
 * si_init - one-time asic init at driver load
 *
 * @rdev: radeon_device pointer
 *
 * Reads and validates the (ATOM) BIOS, posts the card if needed,
 * initializes golden registers, scratch/surface registers, clocks,
 * fences, the memory controller and buffer manager, loads microcode if
 * not already present, sets up power management, sizes all rings
 * (GFX/CP1/CP2/DMA0/DMA1/UVD/IH), initializes the GART and finally runs
 * si_startup().  A startup failure disables acceleration but is not
 * fatal here; missing MC ucode is.
 * Returns 0 on success, negative error code on failure.
 */
int si_init(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;

	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	si_init_golden_registers(rdev);
	/* Initialize scratch registers */
	si_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);

	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;

	/* initialize memory controller */
	r = si_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* load CP/RLC/MC microcode unless a previous init already did */
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
	    !rdev->rlc_fw || !rdev->mc_fw) {
		r = si_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 64 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 64 * 1024);

	if (rdev->has_uvd) {
		r = radeon_uvd_init(rdev);
		if (!r) {
			ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
			ring->ring_obj = NULL;
			r600_ring_init(rdev, ring, 4096);
		}
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		/* startup failure: tear down accel state, keep modesetting */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		si_cp_fini(rdev);
		cayman_dma_fini(rdev);
		si_irq_fini(rdev);
		sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_vm_manager_fini(rdev);
		radeon_irq_kms_fini(rdev);
		si_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (!rdev->mc_fw) {
		DRM_ERROR("radeon: MC ucode required for NI+.\n");
		return -EINVAL;
	}

	return 0;
}
6786
/**
 * si_fini - final asic teardown at driver unload
 *
 * @rdev: radeon_device pointer
 *
 * Tears down, in order: power management, CP, DMA, powergating,
 * clockgating, IRQs, RLC, writeback, VM manager, IB pool, KMS irq,
 * UVD, GART, VRAM scratch, GEM, fences, buffer manager, atombios, and
 * finally frees the cached BIOS image.  Order mirrors init in reverse;
 * do not rearrange.
 */
void si_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	si_cp_fini(rdev);
	cayman_dma_fini(rdev);
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_fini(rdev);
	sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_vm_manager_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_fini(rdev);
	}
	si_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
6813
6759a0a7 6814/**
d0418894 6815 * si_get_gpu_clock_counter - return GPU clock counter snapshot
6759a0a7
MO
6816 *
6817 * @rdev: radeon_device pointer
6818 *
6819 * Fetches a GPU clock counter snapshot (SI).
6820 * Returns the 64 bit clock counter snapshot.
6821 */
d0418894 6822uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
6759a0a7
MO
6823{
6824 uint64_t clock;
6825
6826 mutex_lock(&rdev->gpu_clock_mutex);
6827 WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
6828 clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
6829 ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
6830 mutex_unlock(&rdev->gpu_clock_mutex);
6831 return clock;
6832}
2539eb02 6833
/**
 * si_set_uvd_clocks - program the UVD PLL for the requested clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (kHz); 0 puts the PLL to sleep
 * @dclk: requested UVD decode clock (kHz); 0 puts the PLL to sleep
 *
 * Switches VCLK/DCLK to the bypass clock, reprograms the UPLL dividers
 * computed by radeon_uvd_calc_upll_dividers(), waits for the PLL to
 * lock (radeon_uvd_send_upll_ctlreq) and switches the clock muxes back.
 * The register write sequence and mdelay() settle times follow the
 * hardware programming sequence; do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb/post dividers for the requested vclk/dclk */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set RESET_ANTI_MUX to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* NOTE(review): 307200 threshold selects an ISPARE9 spare-bit
	 * setting for large feedback dividers — per vendor sequence */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
b9d305df
AD
6924
/**
 * si_pcie_gen3_enable - raise the PCIe link speed to gen2/gen3 if possible
 * @rdev: radeon_device pointer
 *
 * Checks what the platform (root port + GPU) supports and, unless disabled
 * via the radeon.pcie_gen2 module parameter, retrains the link at the
 * highest commonly supported rate.  For a gen3 target this also re-runs
 * link equalization (the retry loop below) before forcing the speed change.
 * All register pokes go through the PCIE indirect port registers; link
 * partner state is read/written through PCI config space on both the
 * bridge (root) and the GPU.
 */
static void si_pcie_gen3_enable(struct radeon_device *rdev)
{
	struct pci_dev *root = rdev->pdev->bus->self;
	int bridge_pos, gpu_pos;
	u32 speed_cntl, mask, current_data_rate;
	int ret, i;
	u16 tmp16;

	/* user opted out via radeon.pcie_gen2=0 */
	if (radeon_pcie_gen2 == 0)
		return;

	/* integrated GPUs have no external PCIe link to train */
	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* mask = link speeds supported by both ends of the link */
	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
	if (ret != 0)
		return;

	/* nothing faster than gen1 available; leave the link alone */
	if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
		return;

	/* current rate: 0 = gen1, 1 = gen2, 2 = gen3 */
	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
		LC_CURRENT_DATA_RATE_SHIFT;
	if (mask & DRM_PCIE_SPEED_80) {
		if (current_data_rate == 2) {
			DRM_INFO("PCIE gen 3 link speeds already enabled\n");
			return;
		}
		DRM_INFO("enabling PCIE gen 3 link speeds, disable with radeon.pcie_gen2=0\n");
	} else if (mask & DRM_PCIE_SPEED_50) {
		if (current_data_rate == 1) {
			DRM_INFO("PCIE gen 2 link speeds already enabled\n");
			return;
		}
		DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
	}

	/* offsets of the PCIe capability blocks; bail if either is absent */
	bridge_pos = pci_pcie_cap(root);
	if (!bridge_pos)
		return;

	gpu_pos = pci_pcie_cap(rdev->pdev);
	if (!gpu_pos)
		return;

	if (mask & DRM_PCIE_SPEED_80) {
		/* re-try equalization if gen3 is not already enabled */
		if (current_data_rate != 2) {
			u16 bridge_cfg, gpu_cfg;
			u16 bridge_cfg2, gpu_cfg2;
			u32 max_lw, current_lw, tmp;

			/* save LNKCTL on both ends, then assert HAWD
			 * (hardware autonomous width disable) while we train */
			pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
			pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);

			tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
			pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);

			tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
			pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);

			tmp = RREG32_PCIE(PCIE_LC_STATUS1);
			max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
			current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;

			/* widen the link back to the detected maximum first */
			if (current_lw < max_lw) {
				tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
				if (tmp & LC_RENEGOTIATION_SUPPORT) {
					tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
					tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
					tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
					WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
				}
			}

			/* up to 10 equalization retries; each pass quiesces the
			 * link, redoes EQ, then restores LNKCTL/LNKCTL2 state
			 * captured before the retry */
			for (i = 0; i < 10; i++) {
				/* check status */
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16);
				if (tmp16 & PCI_EXP_DEVSTA_TRPND)
					break;

				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);

				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &bridge_cfg2);
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp |= LC_SET_QUIESCE;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp |= LC_REDO_EQ;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);

				mdelay(100);

				/* linkctl */
				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &tmp16);
				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
				tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
				pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);

				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16);
				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
				tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
				pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);

				/* linkctl2 - (1 << 4) | (7 << 9) presumably covers the
				 * enter-compliance and transmit-margin fields of
				 * LNKCTL2 — verify against the PCIe spec */
				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &tmp16);
				tmp16 &= ~((1 << 4) | (7 << 9));
				tmp16 |= (bridge_cfg2 & ((1 << 4) | (7 << 9)));
				pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, tmp16);

				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
				tmp16 &= ~((1 << 4) | (7 << 9));
				tmp16 |= (gpu_cfg2 & ((1 << 4) | (7 << 9)));
				pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp &= ~LC_SET_QUIESCE;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
			}
		}
	}

	/* set the link speed */
	speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
	speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	/* target link speed in LNKCTL2 bits [3:0]: 1/2/3 = gen1/2/3 */
	pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
	tmp16 &= ~0xf;
	if (mask & DRM_PCIE_SPEED_80)
		tmp16 |= 3; /* gen3 */
	else if (mask & DRM_PCIE_SPEED_50)
		tmp16 |= 2; /* gen2 */
	else
		tmp16 |= 1; /* gen1 */
	pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);

	/* kick off the software-initiated speed change ... */
	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	/* ... and poll until the hardware clears the initiate bit */
	for (i = 0; i < rdev->usec_timeout; i++) {
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
			break;
		udelay(1);
	}
}
7081
e0bcf165
AD
/**
 * si_program_aspm - program PCIe Active State Power Management
 * @rdev: radeon_device pointer
 *
 * Configures L0s/L1 link power states, PLL power-down while in L1, and
 * CLKREQ#-based reference-clock gating.  The four local bools act as
 * compile-time policy switches: with all of them false (as written) the
 * full feature set is enabled.  Skipped entirely when the user disables
 * ASPM via the radeon.aspm module parameter or the device is not PCIE.
 */
static void si_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
	bool disable_clkreq = false;

	/* user opted out via radeon.aspm=0 */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* override the transmitted N_FTS (fast training sequence) count */
	orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
	data &= ~LC_XMIT_N_FTS_MASK;
	data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
	if (orig != data)
		WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);

	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
	data |= LC_GO_TO_RECOVERY;
	if (orig != data)
		WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);

	orig = data = RREG32_PCIE(PCIE_P_CNTL);
	data |= P_IGNORE_EDB_ERR;
	if (orig != data)
		WREG32_PCIE(PCIE_P_CNTL, data);

	/* build the LC_CNTL value: clear both inactivity timers, then
	 * re-enable the ones policy allows below */
	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	data |= LC_PMI_TO_L1_DIS;
	if (!disable_l0s)
		data |= LC_L0S_INACTIVITY(7);

	if (!disable_l1) {
		data |= LC_L1_INACTIVITY(7);
		data &= ~LC_PMI_TO_L1_DIS;
		if (orig != data)
			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);

		if (!disable_plloff_in_l1) {
			bool clk_req_support;

			/* power down the PHY PLLs (both PIF pads, lanes 0-7
			 * and 8-15) when the link is in OFF or TXS2 state */
			orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* zero the PLL ramp-up times on everything except
			 * Oland/Hainan (those parts keep the reset defaults) */
			if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2);
				data &= ~PLL_RAMP_UP_TIME_2_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3);
				data &= ~PLL_RAMP_UP_TIME_3_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2);
				data &= ~PLL_RAMP_UP_TIME_2_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3);
				data &= ~PLL_RAMP_UP_TIME_3_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3, data);
			}
			orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (orig != data)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* Oland/Hainan need a non-zero LS2 exit time; others
			 * clear the field */
			orig = data = RREG32_PIF_PHY0(PB0_PIF_CNTL);
			data &= ~LS2_EXIT_TIME_MASK;
			if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
				data |= LS2_EXIT_TIME(5);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_CNTL);
			data &= ~LS2_EXIT_TIME_MASK;
			if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
				data |= LS2_EXIT_TIME(5);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_CNTL, data);

			/* CLKREQ# is only usable if the upstream bridge
			 * advertises clock power management in LNKCAP.
			 * NOTE(review): root (bus->self) is assumed non-NULL
			 * here — true for a discrete GPU behind a bridge;
			 * confirm for root-complex-integrated topologies. */
			if (!disable_clkreq) {
				struct pci_dev *root = rdev->pdev->bus->self;
				u32 lnkcap;

				clk_req_support = false;
				pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
				if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
					clk_req_support = true;
			} else {
				clk_req_support = false;
			}

			if (clk_req_support) {
				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
				data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
				if (orig != data)
					WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);

				/* switch thermal-monitor, deep-sleep, zclk and
				 * bypass clocks off the PCIe refclk so it can
				 * be gated via CLKREQ# */
				orig = data = RREG32(THM_CLK_CNTL);
				data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
				data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
				if (orig != data)
					WREG32(THM_CLK_CNTL, data);

				orig = data = RREG32(MISC_CLK_CNTL);
				data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
				data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
				if (orig != data)
					WREG32(MISC_CLK_CNTL, data);

				orig = data = RREG32(CG_CLKPIN_CNTL);
				data &= ~BCLK_AS_XCLK;
				if (orig != data)
					WREG32(CG_CLKPIN_CNTL, data);

				orig = data = RREG32(CG_CLKPIN_CNTL_2);
				data &= ~FORCE_BIF_REFCLK_EN;
				if (orig != data)
					WREG32(CG_CLKPIN_CNTL_2, data);

				orig = data = RREG32(MPLL_BYPASSCLK_SEL);
				data &= ~MPLL_CLKOUT_SEL_MASK;
				data |= MPLL_CLKOUT_SEL(4);
				if (orig != data)
					WREG32(MPLL_BYPASSCLK_SEL, data);

				orig = data = RREG32(SPLL_CNTL_MODE);
				data &= ~SPLL_REFCLK_SEL_MASK;
				if (orig != data)
					WREG32(SPLL_CNTL_MODE, data);
			}
		}
	} else {
		/* L1 disabled: commit the LC_CNTL value built above
		 * (L0s timer and PMI_TO_L1_DIS only) */
		if (orig != data)
			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
	}

	/* enable memory light sleep for the BIF slave/master/replay RAMs */
	orig = data = RREG32_PCIE(PCIE_CNTL2);
	data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
	if (orig != data)
		WREG32_PCIE(PCIE_CNTL2, data);

	/* L0s is only viable if the received N_FTS is saturated and the
	 * link is reversed in both directions; otherwise clear the L0s
	 * inactivity timer that was set earlier */
	if (!disable_l0s) {
		data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
		if((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
			data = RREG32_PCIE(PCIE_LC_STATUS1);
			if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
				data &= ~LC_L0S_INACTIVITY_MASK;
				if (orig != data)
					WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
			}
		}
	}
}