Commit | Line | Data |
---|---|---|
265280b9 AP |
1 | /* |
2 | * Copyright 2021 Advanced Micro Devices, Inc. | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice shall be included in | |
12 | * all copies or substantial portions of the Software. | |
13 | * | |
14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
17 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR | |
18 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | |
19 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
20 | * OTHER DEALINGS IN THE SOFTWARE. | |
21 | * | |
22 | * Authors: AMD | |
23 | * | |
24 | */ | |
25 | ||
26 | #include "dccg.h" | |
27 | #include "clk_mgr_internal.h" | |
265280b9 AP |
28 | #include "dcn32/dcn32_clk_mgr_smu_msg.h" |
29 | #include "dcn20/dcn20_clk_mgr.h" | |
30 | #include "dce100/dce_clk_mgr.h" | |
405bb9ee | 31 | #include "dcn31/dcn31_clk_mgr.h" |
9312f9d7 | 32 | #include "dcn32/dcn32_clk_mgr.h" |
265280b9 AP |
33 | #include "reg_helper.h" |
34 | #include "core_types.h" | |
35 | #include "dm_helpers.h" | |
d5a43956 | 36 | #include "link.h" |
a71e1310 | 37 | #include "dc_state_priv.h" |
265280b9 AP |
38 | #include "atomfirmware.h" |
39 | #include "smu13_driver_if.h" | |
40 | ||
41 | #include "dcn/dcn_3_2_0_offset.h" | |
42 | #include "dcn/dcn_3_2_0_sh_mask.h" | |
43 | ||
44 | #include "dcn32/dcn32_clk_mgr.h" | |
041a1109 | 45 | #include "dml/dcn32/dcn32_fpu.h" |
265280b9 AP |
46 | |
47 | #define DCN_BASE__INST0_SEG1 0x000000C0 | |
48 | ||
49 | #define mmCLK1_CLK_PLL_REQ 0x16E37 | |
50 | #define mmCLK1_CLK0_DFS_CNTL 0x16E69 | |
51 | #define mmCLK1_CLK1_DFS_CNTL 0x16E6C | |
52 | #define mmCLK1_CLK2_DFS_CNTL 0x16E6F | |
53 | #define mmCLK1_CLK3_DFS_CNTL 0x16E72 | |
54 | #define mmCLK1_CLK4_DFS_CNTL 0x16E75 | |
55 | ||
a6db1993 CL |
56 | #define mmCLK1_CLK0_CURRENT_CNT 0x16EE7 |
57 | #define mmCLK1_CLK1_CURRENT_CNT 0x16EE8 | |
58 | #define mmCLK1_CLK2_CURRENT_CNT 0x16EE9 | |
59 | #define mmCLK1_CLK3_CURRENT_CNT 0x16EEA | |
60 | #define mmCLK1_CLK4_CURRENT_CNT 0x16EEB | |
61 | ||
62 | #define mmCLK4_CLK0_CURRENT_CNT 0x1B0C9 | |
63 | ||
265280b9 AP |
64 | #define CLK1_CLK_PLL_REQ__FbMult_int_MASK 0x000001ffUL |
65 | #define CLK1_CLK_PLL_REQ__PllSpineDiv_MASK 0x0000f000UL | |
66 | #define CLK1_CLK_PLL_REQ__FbMult_frac_MASK 0xffff0000UL | |
67 | #define CLK1_CLK_PLL_REQ__FbMult_int__SHIFT 0x00000000 | |
68 | #define CLK1_CLK_PLL_REQ__PllSpineDiv__SHIFT 0x0000000c | |
69 | #define CLK1_CLK_PLL_REQ__FbMult_frac__SHIFT 0x00000010 | |
70 | ||
71 | #define mmCLK01_CLK0_CLK_PLL_REQ 0x16E37 | |
72 | #define mmCLK01_CLK0_CLK0_DFS_CNTL 0x16E64 | |
73 | #define mmCLK01_CLK0_CLK1_DFS_CNTL 0x16E67 | |
74 | #define mmCLK01_CLK0_CLK2_DFS_CNTL 0x16E6A | |
75 | #define mmCLK01_CLK0_CLK3_DFS_CNTL 0x16E6D | |
76 | #define mmCLK01_CLK0_CLK4_DFS_CNTL 0x16E70 | |
77 | ||
78 | #define CLK0_CLK_PLL_REQ__FbMult_int_MASK 0x000001ffL | |
79 | #define CLK0_CLK_PLL_REQ__PllSpineDiv_MASK 0x0000f000L | |
80 | #define CLK0_CLK_PLL_REQ__FbMult_frac_MASK 0xffff0000L | |
81 | #define CLK0_CLK_PLL_REQ__FbMult_int__SHIFT 0x00000000 | |
82 | #define CLK0_CLK_PLL_REQ__PllSpineDiv__SHIFT 0x0000000c | |
83 | #define CLK0_CLK_PLL_REQ__FbMult_frac__SHIFT 0x00000010 | |
84 | ||
85 | #undef FN | |
86 | #define FN(reg_name, field_name) \ | |
87 | clk_mgr->clk_mgr_shift->field_name, clk_mgr->clk_mgr_mask->field_name | |
88 | ||
89 | #define REG(reg) \ | |
90 | (clk_mgr->regs->reg) | |
91 | ||
92 | #define BASE_INNER(seg) DCN_BASE__INST0_SEG ## seg | |
93 | ||
94 | #define BASE(seg) BASE_INNER(seg) | |
95 | ||
96 | #define SR(reg_name)\ | |
97 | .reg_name = BASE(reg ## reg_name ## _BASE_IDX) + \ | |
98 | reg ## reg_name | |
99 | ||
100 | #define CLK_SR_DCN32(reg_name)\ | |
101 | .reg_name = mm ## reg_name | |
102 | ||
/* DCN3.2 clock-manager register offset, shift and mask tables.
 * The contents are fully generated by the CLK_* list macros (see
 * dcn32_clk_mgr.h); the CLK_SR_DCN32/CLK_SR_DCN321 helpers above map
 * each entry to the mm* offsets defined at the top of this file.
 */
static const struct clk_mgr_registers clk_mgr_regs_dcn32 = {
	CLK_REG_LIST_DCN32()
};

static const struct clk_mgr_shift clk_mgr_shift_dcn32 = {
	CLK_COMMON_MASK_SH_LIST_DCN32(__SHIFT)
};

static const struct clk_mgr_mask clk_mgr_mask_dcn32 = {
	CLK_COMMON_MASK_SH_LIST_DCN32(_MASK)
};


/* DCN3.21 variant: register names are additionally prefixed by block. */
#define CLK_SR_DCN321(reg_name, block, inst)\
	.reg_name = mm ## block ## _ ## reg_name

static const struct clk_mgr_registers clk_mgr_regs_dcn321 = {
	CLK_REG_LIST_DCN321()
};

static const struct clk_mgr_shift clk_mgr_shift_dcn321 = {
	CLK_COMMON_MASK_SH_LIST_DCN321(__SHIFT)
};

static const struct clk_mgr_mask clk_mgr_mask_dcn321 = {
	CLK_COMMON_MASK_SH_LIST_DCN321(_MASK)
};
130 | ||
131 | ||
132 | /* Query SMU for all clock states for a particular clock */ | |
133 | static void dcn32_init_single_clock(struct clk_mgr_internal *clk_mgr, PPCLK_e clk, unsigned int *entry_0, | |
134 | unsigned int *num_levels) | |
135 | { | |
136 | unsigned int i; | |
137 | char *entry_i = (char *)entry_0; | |
138 | ||
139 | uint32_t ret = dcn30_smu_get_dpm_freq_by_index(clk_mgr, clk, 0xFF); | |
140 | ||
141 | if (ret & (1 << 31)) | |
142 | /* fine-grained, only min and max */ | |
143 | *num_levels = 2; | |
144 | else | |
145 | /* discrete, a number of fixed states */ | |
146 | /* will set num_levels to 0 on failure */ | |
147 | *num_levels = ret & 0xFF; | |
148 | ||
149 | /* if the initial message failed, num_levels will be 0 */ | |
150 | for (i = 0; i < *num_levels; i++) { | |
151 | *((unsigned int *)entry_i) = (dcn30_smu_get_dpm_freq_by_index(clk_mgr, clk, i) & 0xFFFF); | |
152 | entry_i += sizeof(clk_mgr->base.bw_params->clk_table.entries[0]); | |
153 | } | |
154 | } | |
155 | ||
/* Rebuild the watermark range table.
 * The construction itself is floating-point code, so it lives in the
 * DML FPU translation unit and must run inside a DC_FP critical section.
 */
static void dcn32_build_wm_range_table(struct clk_mgr_internal *clk_mgr)
{
	DC_FP_START();
	dcn32_build_wm_range_table_fpu(clk_mgr);
	DC_FP_END();
}
162 | ||
163 | void dcn32_init_clocks(struct clk_mgr *clk_mgr_base) | |
164 | { | |
165 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
166 | unsigned int num_levels; | |
d6170e41 | 167 | struct clk_limit_num_entries *num_entries_per_clk = &clk_mgr_base->bw_params->clk_table.num_entries_per_clk; |
f7f69740 | 168 | unsigned int i; |
265280b9 AP |
169 | |
170 | memset(&(clk_mgr_base->clks), 0, sizeof(struct dc_clocks)); | |
171 | clk_mgr_base->clks.p_state_change_support = true; | |
172 | clk_mgr_base->clks.prev_p_state_change_support = true; | |
173 | clk_mgr_base->clks.fclk_prev_p_state_change_support = true; | |
174 | clk_mgr->smu_present = false; | |
3141d6cb | 175 | clk_mgr->dpm_present = false; |
265280b9 AP |
176 | |
177 | if (!clk_mgr_base->bw_params) | |
178 | return; | |
179 | ||
180 | if (!clk_mgr_base->force_smu_not_present && dcn30_smu_get_smu_version(clk_mgr, &clk_mgr->smu_ver)) | |
181 | clk_mgr->smu_present = true; | |
182 | ||
183 | if (!clk_mgr->smu_present) | |
184 | return; | |
185 | ||
186 | dcn30_smu_check_driver_if_version(clk_mgr); | |
187 | dcn30_smu_check_msg_header_version(clk_mgr); | |
188 | ||
189 | /* DCFCLK */ | |
190 | dcn32_init_single_clock(clk_mgr, PPCLK_DCFCLK, | |
191 | &clk_mgr_base->bw_params->clk_table.entries[0].dcfclk_mhz, | |
d6170e41 | 192 | &num_entries_per_clk->num_dcfclk_levels); |
3b718dca | 193 | clk_mgr_base->bw_params->dc_mode_limit.dcfclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DCFCLK); |
265280b9 AP |
194 | |
195 | /* SOCCLK */ | |
196 | dcn32_init_single_clock(clk_mgr, PPCLK_SOCCLK, | |
197 | &clk_mgr_base->bw_params->clk_table.entries[0].socclk_mhz, | |
d6170e41 | 198 | &num_entries_per_clk->num_socclk_levels); |
3b718dca | 199 | clk_mgr_base->bw_params->dc_mode_limit.socclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_SOCCLK); |
d6170e41 | 200 | |
265280b9 | 201 | /* DTBCLK */ |
3b718dca | 202 | if (!clk_mgr->base.ctx->dc->debug.disable_dtb_ref_clk_switch) { |
405bb9ee AL |
203 | dcn32_init_single_clock(clk_mgr, PPCLK_DTBCLK, |
204 | &clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz, | |
d6170e41 | 205 | &num_entries_per_clk->num_dtbclk_levels); |
3b718dca AZ |
206 | clk_mgr_base->bw_params->dc_mode_limit.dtbclk_mhz = |
207 | dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DTBCLK); | |
208 | } | |
265280b9 AP |
209 | |
210 | /* DISPCLK */ | |
211 | dcn32_init_single_clock(clk_mgr, PPCLK_DISPCLK, | |
212 | &clk_mgr_base->bw_params->clk_table.entries[0].dispclk_mhz, | |
d6170e41 DV |
213 | &num_entries_per_clk->num_dispclk_levels); |
214 | num_levels = num_entries_per_clk->num_dispclk_levels; | |
3b718dca AZ |
215 | clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DISPCLK); |
216 | //HW recommends limit of 1950 MHz in display clock for all DCN3.2.x | |
217 | if (clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz > 1950) | |
218 | clk_mgr_base->bw_params->dc_mode_limit.dispclk_mhz = 1950; | |
3141d6cb | 219 | |
4f5b8d78 DV |
220 | /* DPPCLK */ |
221 | dcn32_init_single_clock(clk_mgr, PPCLK_DPPCLK, | |
222 | &clk_mgr_base->bw_params->clk_table.entries[0].dppclk_mhz, | |
223 | &num_entries_per_clk->num_dppclk_levels); | |
224 | num_levels = num_entries_per_clk->num_dppclk_levels; | |
225 | clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_DPPCLK); | |
226 | //HW recommends limit of 1950 MHz in display clock for all DCN3.2.x | |
227 | if (clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz > 1950) | |
228 | clk_mgr_base->bw_params->dc_mode_limit.dppclk_mhz = 1950; | |
229 | ||
d6170e41 DV |
230 | if (num_entries_per_clk->num_dcfclk_levels && |
231 | num_entries_per_clk->num_dtbclk_levels && | |
232 | num_entries_per_clk->num_dispclk_levels) | |
3141d6cb | 233 | clk_mgr->dpm_present = true; |
265280b9 | 234 | |
074efb5c | 235 | if (clk_mgr_base->ctx->dc->debug.min_disp_clk_khz) { |
074efb5c RS |
236 | for (i = 0; i < num_levels; i++) |
237 | if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz | |
238 | < khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_disp_clk_khz)) | |
239 | clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz | |
240 | = khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_disp_clk_khz); | |
241 | } | |
f7f69740 JL |
242 | for (i = 0; i < num_levels; i++) |
243 | if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz > 1950) | |
244 | clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz = 1950; | |
074efb5c RS |
245 | |
246 | if (clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz) { | |
074efb5c RS |
247 | for (i = 0; i < num_levels; i++) |
248 | if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz | |
249 | < khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz)) | |
250 | clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz | |
251 | = khz_to_mhz_ceil(clk_mgr_base->ctx->dc->debug.min_dpp_clk_khz); | |
252 | } | |
265280b9 | 253 | |
4f5b8d78 DV |
254 | for (i = 0; i < num_levels; i++) |
255 | if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz > 1950) | |
256 | clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz = 1950; | |
257 | ||
265280b9 AP |
258 | /* Get UCLK, update bounding box */ |
259 | clk_mgr_base->funcs->get_memclk_states_from_smu(clk_mgr_base); | |
260 | ||
261 | /* WM range table */ | |
262 | dcn32_build_wm_range_table(clk_mgr); | |
263 | } | |
264 | ||
128c1ca0 AL |
265 | static void dcn32_update_clocks_update_dtb_dto(struct clk_mgr_internal *clk_mgr, |
266 | struct dc_state *context, | |
267 | int ref_dtbclk_khz) | |
268 | { | |
269 | struct dccg *dccg = clk_mgr->dccg; | |
270 | uint32_t tg_mask = 0; | |
271 | int i; | |
272 | ||
273 | for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) { | |
274 | struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; | |
275 | struct dtbclk_dto_params dto_params = {0}; | |
276 | ||
277 | /* use mask to program DTO once per tg */ | |
278 | if (pipe_ctx->stream_res.tg && | |
279 | !(tg_mask & (1 << pipe_ctx->stream_res.tg->inst))) { | |
280 | tg_mask |= (1 << pipe_ctx->stream_res.tg->inst); | |
281 | ||
282 | dto_params.otg_inst = pipe_ctx->stream_res.tg->inst; | |
283 | dto_params.ref_dtbclk_khz = ref_dtbclk_khz; | |
284 | ||
285 | dccg->funcs->set_dtbclk_dto(clk_mgr->dccg, &dto_params); | |
286 | //dccg->funcs->set_audio_dtbclk_dto(clk_mgr->dccg, &dto_params); | |
287 | } | |
288 | } | |
289 | } | |
290 | ||
04e6931a AL |
291 | /* Since DPPCLK request to PMFW needs to be exact (due to DPP DTO programming), |
292 | * update DPPCLK to be the exact frequency that will be set after the DPPCLK | |
293 | * divider is updated. This will prevent rounding issues that could cause DPP | |
294 | * refclk and DPP DTO to not match up. | |
295 | */ | |
296 | static void dcn32_update_dppclk_dispclk_freq(struct clk_mgr_internal *clk_mgr, struct dc_clocks *new_clocks) | |
297 | { | |
298 | int dpp_divider = 0; | |
299 | int disp_divider = 0; | |
300 | ||
a3a88587 GS |
301 | if (new_clocks->dppclk_khz) { |
302 | dpp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR | |
303 | * clk_mgr->base.dentist_vco_freq_khz / new_clocks->dppclk_khz; | |
304 | new_clocks->dppclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / dpp_divider; | |
305 | } | |
306 | if (new_clocks->dispclk_khz > 0) { | |
307 | disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR | |
308 | * clk_mgr->base.dentist_vco_freq_khz / new_clocks->dispclk_khz; | |
309 | new_clocks->dispclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / disp_divider; | |
310 | } | |
04e6931a AL |
311 | } |
312 | ||
cd487b6d AL |
313 | void dcn32_update_clocks_update_dpp_dto(struct clk_mgr_internal *clk_mgr, |
314 | struct dc_state *context, bool safe_to_lower) | |
315 | { | |
316 | int i; | |
317 | ||
318 | clk_mgr->dccg->ref_dppclk = clk_mgr->base.clks.dppclk_khz; | |
319 | for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) { | |
3999edf8 | 320 | int dpp_inst = 0, dppclk_khz, prev_dppclk_khz; |
cd487b6d AL |
321 | |
322 | dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz; | |
323 | ||
324 | if (context->res_ctx.pipe_ctx[i].plane_res.dpp) | |
325 | dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst; | |
326 | else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz == 0) { | |
327 | /* dpp == NULL && dppclk_khz == 0 is valid because of pipe harvesting. | |
328 | * In this case just continue in loop | |
329 | */ | |
330 | continue; | |
331 | } else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz > 0) { | |
332 | /* The software state is not valid if dpp resource is NULL and | |
333 | * dppclk_khz > 0. | |
334 | */ | |
335 | ASSERT(false); | |
336 | continue; | |
337 | } | |
338 | ||
339 | prev_dppclk_khz = clk_mgr->dccg->pipe_dppclk_khz[i]; | |
340 | ||
341 | if (safe_to_lower || prev_dppclk_khz < dppclk_khz) | |
342 | clk_mgr->dccg->funcs->update_dpp_dto( | |
343 | clk_mgr->dccg, dpp_inst, dppclk_khz); | |
344 | } | |
345 | } | |
346 | ||
fc41c734 DV |
347 | static void dcn32_update_clocks_update_dentist( |
348 | struct clk_mgr_internal *clk_mgr, | |
7bd571b2 | 349 | struct dc_state *context) |
fc41c734 DV |
350 | { |
351 | uint32_t new_disp_divider = 0; | |
fc41c734 DV |
352 | uint32_t new_dispclk_wdivider = 0; |
353 | uint32_t old_dispclk_wdivider = 0; | |
354 | uint32_t i; | |
7bd571b2 AL |
355 | uint32_t dentist_dispclk_wdivider_readback = 0; |
356 | struct dc *dc = clk_mgr->base.ctx->dc; | |
fc41c734 | 357 | |
7bd571b2 | 358 | if (clk_mgr->base.clks.dispclk_khz == 0) |
fc41c734 DV |
359 | return; |
360 | ||
361 | new_disp_divider = DENTIST_DIVIDER_RANGE_SCALE_FACTOR | |
362 | * clk_mgr->base.dentist_vco_freq_khz / clk_mgr->base.clks.dispclk_khz; | |
fc41c734 DV |
363 | |
364 | new_dispclk_wdivider = dentist_get_did_from_divider(new_disp_divider); | |
7bd571b2 AL |
365 | REG_GET(DENTIST_DISPCLK_CNTL, |
366 | DENTIST_DISPCLK_WDIVIDER, &old_dispclk_wdivider); | |
fc41c734 DV |
367 | |
368 | /* When changing divider to or from 127, some extra programming is required to prevent corruption */ | |
369 | if (old_dispclk_wdivider == 127 && new_dispclk_wdivider != 127) { | |
370 | for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) { | |
371 | struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; | |
372 | uint32_t fifo_level; | |
373 | struct dccg *dccg = clk_mgr->base.ctx->dc->res_pool->dccg; | |
374 | struct stream_encoder *stream_enc = pipe_ctx->stream_res.stream_enc; | |
375 | int32_t N; | |
376 | int32_t j; | |
377 | ||
b3551d0b | 378 | if (!resource_is_pipe_type(pipe_ctx, OTG_MASTER)) |
fc41c734 DV |
379 | continue; |
380 | /* Virtual encoders don't have this function */ | |
381 | if (!stream_enc->funcs->get_fifo_cal_average_level) | |
382 | continue; | |
383 | fifo_level = stream_enc->funcs->get_fifo_cal_average_level( | |
384 | stream_enc); | |
385 | N = fifo_level / 4; | |
386 | dccg->funcs->set_fifo_errdet_ovr_en( | |
387 | dccg, | |
388 | true); | |
389 | for (j = 0; j < N - 4; j++) | |
390 | dccg->funcs->otg_drop_pixel( | |
391 | dccg, | |
392 | pipe_ctx->stream_res.tg->inst); | |
393 | dccg->funcs->set_fifo_errdet_ovr_en( | |
394 | dccg, | |
395 | false); | |
396 | } | |
397 | } else if (new_dispclk_wdivider == 127 && old_dispclk_wdivider != 127) { | |
398 | /* request clock with 126 divider first */ | |
399 | uint32_t temp_disp_divider = dentist_get_divider_from_did(126); | |
400 | uint32_t temp_dispclk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / temp_disp_divider; | |
401 | ||
402 | if (clk_mgr->smu_present) | |
3fc39411 WL |
403 | /* |
404 | * SMU uses discrete dispclk presets. We applied | |
405 | * the same formula to increase our dppclk_khz | |
406 | * to the next matching discrete value. By | |
407 | * contract, we should use the preset dispclk | |
408 | * floored in Mhz to describe the intended clock. | |
409 | */ | |
410 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DISPCLK, | |
411 | khz_to_mhz_floor(temp_dispclk_khz)); | |
fc41c734 | 412 | |
7bd571b2 AL |
413 | if (dc->debug.override_dispclk_programming) { |
414 | REG_GET(DENTIST_DISPCLK_CNTL, | |
415 | DENTIST_DISPCLK_WDIVIDER, &dentist_dispclk_wdivider_readback); | |
416 | ||
417 | if (dentist_dispclk_wdivider_readback != 126) { | |
418 | REG_UPDATE(DENTIST_DISPCLK_CNTL, | |
419 | DENTIST_DISPCLK_WDIVIDER, 126); | |
420 | REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 50, 2000); | |
421 | } | |
422 | } | |
423 | ||
fc41c734 DV |
424 | for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) { |
425 | struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; | |
426 | struct dccg *dccg = clk_mgr->base.ctx->dc->res_pool->dccg; | |
427 | struct stream_encoder *stream_enc = pipe_ctx->stream_res.stream_enc; | |
428 | uint32_t fifo_level; | |
429 | int32_t N; | |
430 | int32_t j; | |
431 | ||
b3551d0b | 432 | if (!resource_is_pipe_type(pipe_ctx, OTG_MASTER)) |
fc41c734 DV |
433 | continue; |
434 | /* Virtual encoders don't have this function */ | |
435 | if (!stream_enc->funcs->get_fifo_cal_average_level) | |
436 | continue; | |
437 | fifo_level = stream_enc->funcs->get_fifo_cal_average_level( | |
438 | stream_enc); | |
439 | N = fifo_level / 4; | |
440 | dccg->funcs->set_fifo_errdet_ovr_en(dccg, true); | |
441 | for (j = 0; j < 12 - N; j++) | |
442 | dccg->funcs->otg_add_pixel(dccg, | |
443 | pipe_ctx->stream_res.tg->inst); | |
444 | dccg->funcs->set_fifo_errdet_ovr_en(dccg, false); | |
445 | } | |
446 | } | |
447 | ||
448 | /* do requested DISPCLK updates*/ | |
449 | if (clk_mgr->smu_present) | |
3fc39411 WL |
450 | /* |
451 | * SMU uses discrete dispclk presets. We applied | |
452 | * the same formula to increase our dppclk_khz | |
453 | * to the next matching discrete value. By | |
454 | * contract, we should use the preset dispclk | |
455 | * floored in Mhz to describe the intended clock. | |
456 | */ | |
457 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DISPCLK, | |
458 | khz_to_mhz_floor(clk_mgr->base.clks.dispclk_khz)); | |
7bd571b2 AL |
459 | |
460 | if (dc->debug.override_dispclk_programming) { | |
461 | REG_GET(DENTIST_DISPCLK_CNTL, | |
462 | DENTIST_DISPCLK_WDIVIDER, &dentist_dispclk_wdivider_readback); | |
463 | ||
464 | if (dentist_dispclk_wdivider_readback > new_dispclk_wdivider) { | |
465 | REG_UPDATE(DENTIST_DISPCLK_CNTL, | |
466 | DENTIST_DISPCLK_WDIVIDER, new_dispclk_wdivider); | |
467 | REG_WAIT(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_CHG_DONE, 1, 50, 2000); | |
468 | } | |
469 | } | |
470 | ||
fc41c734 DV |
471 | } |
472 | ||
d170e938 AL |
473 | static int dcn32_get_dispclk_from_dentist(struct clk_mgr *clk_mgr_base) |
474 | { | |
475 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
476 | uint32_t dispclk_wdivider; | |
477 | int disp_divider; | |
478 | ||
479 | REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DISPCLK_WDIVIDER, &dispclk_wdivider); | |
480 | disp_divider = dentist_get_divider_from_did(dispclk_wdivider); | |
481 | ||
482 | /* Return DISPCLK freq in Khz */ | |
483 | if (disp_divider) | |
484 | return (DENTIST_DIVIDER_RANGE_SCALE_FACTOR * clk_mgr->base.dentist_vco_freq_khz) / disp_divider; | |
485 | ||
486 | return 0; | |
487 | } | |
488 | ||
a71e1310 RV |
489 | static bool dcn32_check_native_scaling(struct pipe_ctx *pipe) |
490 | { | |
491 | bool is_native_scaling = false; | |
492 | int width = pipe->plane_state->src_rect.width; | |
493 | int height = pipe->plane_state->src_rect.height; | |
494 | ||
495 | if (pipe->stream->timing.h_addressable == width && | |
496 | pipe->stream->timing.v_addressable == height && | |
497 | pipe->plane_state->dst_rect.width == width && | |
498 | pipe->plane_state->dst_rect.height == height) | |
499 | is_native_scaling = true; | |
500 | ||
501 | return is_native_scaling; | |
502 | } | |
503 | ||
504 | static void dcn32_auto_dpm_test_log( | |
505 | struct dc_clocks *new_clocks, | |
506 | struct clk_mgr_internal *clk_mgr, | |
507 | struct dc_state *context) | |
77ad5f6f | 508 | { |
43484c4b | 509 | unsigned int dispclk_khz_reg, dppclk_khz_reg, dprefclk_khz_reg, dcfclk_khz_reg, dtbclk_khz_reg, |
a71e1310 | 510 | fclk_khz_reg, mall_ss_size_bytes; |
43484c4b RV |
511 | int dramclk_khz_override, fclk_khz_override, num_fclk_levels; |
512 | ||
a71e1310 RV |
513 | struct pipe_ctx *pipe_ctx_list[MAX_PIPES]; |
514 | int active_pipe_count = 0; | |
515 | ||
516 | for (int i = 0; i < MAX_PIPES; i++) { | |
517 | struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; | |
518 | ||
519 | if (pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) { | |
520 | pipe_ctx_list[active_pipe_count] = pipe_ctx; | |
521 | active_pipe_count++; | |
522 | } | |
523 | } | |
524 | ||
461bf81a EB |
525 | msleep(5); |
526 | ||
a71e1310 | 527 | mall_ss_size_bytes = context->bw_ctx.bw.dcn.mall_ss_size_bytes; |
43484c4b RV |
528 | |
529 | dispclk_khz_reg = REG_READ(CLK1_CLK0_CURRENT_CNT); // DISPCLK | |
530 | dppclk_khz_reg = REG_READ(CLK1_CLK1_CURRENT_CNT); // DPPCLK | |
531 | dprefclk_khz_reg = REG_READ(CLK1_CLK2_CURRENT_CNT); // DPREFCLK | |
532 | dcfclk_khz_reg = REG_READ(CLK1_CLK3_CURRENT_CNT); // DCFCLK | |
533 | dtbclk_khz_reg = REG_READ(CLK1_CLK4_CURRENT_CNT); // DTBCLK | |
534 | fclk_khz_reg = REG_READ(CLK4_CLK0_CURRENT_CNT); // FCLK | |
a6db1993 CL |
535 | |
536 | // Overrides for these clocks in case there is no p_state change support | |
43484c4b RV |
537 | dramclk_khz_override = new_clocks->dramclk_khz; |
538 | fclk_khz_override = new_clocks->fclk_khz; | |
a6db1993 | 539 | |
43484c4b | 540 | num_fclk_levels = clk_mgr->base.bw_params->clk_table.num_entries_per_clk.num_fclk_levels - 1; |
a6db1993 CL |
541 | |
542 | if (!new_clocks->p_state_change_support) { | |
543 | dramclk_khz_override = clk_mgr->base.bw_params->max_memclk_mhz * 1000; | |
544 | } | |
545 | if (!new_clocks->fclk_p_state_change_support) { | |
546 | fclk_khz_override = clk_mgr->base.bw_params->clk_table.entries[num_fclk_levels].fclk_mhz * 1000; | |
547 | } | |
548 | ||
77ad5f6f EB |
549 | //////////////////////////////////////////////////////////////////////////// |
550 | // IMPORTANT: When adding more clocks to these logs, do NOT put a newline | |
551 | // anywhere other than at the very end of the string. | |
552 | // | |
553 | // Formatting example (make sure to have " - " between each entry): | |
554 | // | |
555 | // AutoDPMTest: clk1:%d - clk2:%d - clk3:%d - clk4:%d\n" | |
556 | //////////////////////////////////////////////////////////////////////////// | |
a71e1310 | 557 | if (new_clocks && active_pipe_count > 0 && |
77ad5f6f EB |
558 | new_clocks->dramclk_khz > 0 && |
559 | new_clocks->fclk_khz > 0 && | |
560 | new_clocks->dcfclk_khz > 0 && | |
561 | new_clocks->dppclk_khz > 0) { | |
562 | ||
a71e1310 RV |
563 | uint32_t pix_clk_list[MAX_PIPES] = {0}; |
564 | int p_state_list[MAX_PIPES] = {0}; | |
565 | int disp_src_width_list[MAX_PIPES] = {0}; | |
566 | int disp_src_height_list[MAX_PIPES] = {0}; | |
567 | uint64_t disp_src_refresh_list[MAX_PIPES] = {0}; | |
568 | bool is_scaled_list[MAX_PIPES] = {0}; | |
569 | ||
570 | for (int i = 0; i < active_pipe_count; i++) { | |
571 | struct pipe_ctx *curr_pipe_ctx = pipe_ctx_list[i]; | |
572 | uint64_t refresh_rate; | |
573 | ||
574 | pix_clk_list[i] = curr_pipe_ctx->stream->timing.pix_clk_100hz; | |
575 | p_state_list[i] = curr_pipe_ctx->p_state_type; | |
576 | ||
577 | refresh_rate = (curr_pipe_ctx->stream->timing.pix_clk_100hz * (uint64_t)100 + | |
578 | curr_pipe_ctx->stream->timing.v_total * curr_pipe_ctx->stream->timing.h_total - (uint64_t)1); | |
579 | refresh_rate = div_u64(refresh_rate, curr_pipe_ctx->stream->timing.v_total); | |
580 | refresh_rate = div_u64(refresh_rate, curr_pipe_ctx->stream->timing.h_total); | |
581 | disp_src_refresh_list[i] = refresh_rate; | |
582 | ||
583 | if (curr_pipe_ctx->plane_state) { | |
584 | is_scaled_list[i] = !(dcn32_check_native_scaling(curr_pipe_ctx)); | |
585 | disp_src_width_list[i] = curr_pipe_ctx->plane_state->src_rect.width; | |
586 | disp_src_height_list[i] = curr_pipe_ctx->plane_state->src_rect.height; | |
587 | } | |
588 | } | |
589 | ||
a6db1993 CL |
590 | DC_LOG_AUTO_DPM_TEST("AutoDPMTest: dramclk:%d - fclk:%d - " |
591 | "dcfclk:%d - dppclk:%d - dispclk_hw:%d - " | |
592 | "dppclk_hw:%d - dprefclk_hw:%d - dcfclk_hw:%d - " | |
a71e1310 RV |
593 | "dtbclk_hw:%d - fclk_hw:%d - pix_clk_0:%d - pix_clk_1:%d - " |
594 | "pix_clk_2:%d - pix_clk_3:%d - mall_ss_size:%d - p_state_type_0:%d - " | |
595 | "p_state_type_1:%d - p_state_type_2:%d - p_state_type_3:%d - " | |
596 | "pix_width_0:%d - pix_height_0:%d - refresh_rate_0:%lld - is_scaled_0:%d - " | |
597 | "pix_width_1:%d - pix_height_1:%d - refresh_rate_1:%lld - is_scaled_1:%d - " | |
598 | "pix_width_2:%d - pix_height_2:%d - refresh_rate_2:%lld - is_scaled_2:%d - " | |
4069d43b | 599 | "pix_width_3:%d - pix_height_3:%d - refresh_rate_3:%lld - is_scaled_3:%d - LOG_END\n", |
a6db1993 CL |
600 | dramclk_khz_override, |
601 | fclk_khz_override, | |
602 | new_clocks->dcfclk_khz, | |
603 | new_clocks->dppclk_khz, | |
604 | dispclk_khz_reg, | |
605 | dppclk_khz_reg, | |
606 | dprefclk_khz_reg, | |
607 | dcfclk_khz_reg, | |
608 | dtbclk_khz_reg, | |
a71e1310 RV |
609 | fclk_khz_reg, |
610 | pix_clk_list[0], pix_clk_list[1], pix_clk_list[3], pix_clk_list[2], | |
611 | mall_ss_size_bytes, | |
612 | p_state_list[0], p_state_list[1], p_state_list[2], p_state_list[3], | |
613 | disp_src_width_list[0], disp_src_height_list[0], disp_src_refresh_list[0], is_scaled_list[0], | |
614 | disp_src_width_list[1], disp_src_height_list[1], disp_src_refresh_list[1], is_scaled_list[1], | |
615 | disp_src_width_list[2], disp_src_height_list[2], disp_src_refresh_list[2], is_scaled_list[2], | |
616 | disp_src_width_list[3], disp_src_height_list[3], disp_src_refresh_list[3], is_scaled_list[3]); | |
77ad5f6f EB |
617 | } |
618 | } | |
d170e938 | 619 | |
265280b9 AP |
620 | static void dcn32_update_clocks(struct clk_mgr *clk_mgr_base, |
621 | struct dc_state *context, | |
622 | bool safe_to_lower) | |
623 | { | |
624 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
625 | struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk; | |
626 | struct dc *dc = clk_mgr_base->ctx->dc; | |
627 | int display_count; | |
628 | bool update_dppclk = false; | |
629 | bool update_dispclk = false; | |
630 | bool enter_display_off = false; | |
631 | bool dpp_clock_lowered = false; | |
632 | struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu; | |
633 | bool force_reset = false; | |
0c9ed604 | 634 | bool update_uclk = false, update_fclk = false; |
265280b9 AP |
635 | bool p_state_change_support; |
636 | bool fclk_p_state_change_support; | |
265280b9 AP |
637 | |
638 | if (clk_mgr_base->clks.dispclk_khz == 0 || | |
639 | (dc->debug.force_clock_mode & 0x1)) { | |
640 | /* This is from resume or boot up, if forced_clock cfg option used, | |
641 | * we bypass program dispclk and DPPCLK, but need set them for S3. | |
642 | */ | |
643 | force_reset = true; | |
644 | ||
645 | dcn2_read_clocks_from_hw_dentist(clk_mgr_base); | |
646 | ||
647 | /* Force_clock_mode 0x1: force reset the clock even it is the same clock | |
648 | * as long as it is in Passive level. | |
649 | */ | |
650 | } | |
651 | display_count = clk_mgr_helper_get_active_display_cnt(dc, context); | |
652 | ||
653 | if (display_count == 0) | |
654 | enter_display_off = true; | |
655 | ||
e127306d JL |
656 | if (clk_mgr->smu_present) { |
657 | if (enter_display_off == safe_to_lower) | |
658 | dcn30_smu_set_num_of_displays(clk_mgr, display_count); | |
265280b9 | 659 | |
c1969fba DV |
660 | clk_mgr_base->clks.fclk_prev_p_state_change_support = clk_mgr_base->clks.fclk_p_state_change_support; |
661 | ||
c8cefb99 | 662 | fclk_p_state_change_support = new_clocks->fclk_p_state_change_support; |
c1969fba | 663 | |
1d8355ad AL |
664 | if (should_update_pstate_support(safe_to_lower, fclk_p_state_change_support, clk_mgr_base->clks.fclk_p_state_change_support) && |
665 | !dc->work_arounds.clock_update_disable_mask.fclk) { | |
c1969fba DV |
666 | clk_mgr_base->clks.fclk_p_state_change_support = fclk_p_state_change_support; |
667 | ||
3867bbd4 DV |
668 | /* To enable FCLK P-state switching, send FCLK_PSTATE_SUPPORTED message to PMFW */ |
669 | if (clk_mgr_base->ctx->dce_version != DCN_VERSION_3_21 && clk_mgr_base->clks.fclk_p_state_change_support) { | |
c1969fba DV |
670 | /* Handle the code for sending a message to PMFW that FCLK P-state change is supported */ |
671 | dcn32_smu_send_fclk_pstate_message(clk_mgr, FCLK_PSTATE_SUPPORTED); | |
672 | } | |
673 | } | |
674 | ||
e127306d JL |
675 | if (dc->debug.force_min_dcfclk_mhz > 0) |
676 | new_clocks->dcfclk_khz = (new_clocks->dcfclk_khz > (dc->debug.force_min_dcfclk_mhz * 1000)) ? | |
677 | new_clocks->dcfclk_khz : (dc->debug.force_min_dcfclk_mhz * 1000); | |
265280b9 | 678 | |
1d8355ad AL |
679 | if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, clk_mgr_base->clks.dcfclk_khz) && |
680 | !dc->work_arounds.clock_update_disable_mask.dcfclk) { | |
e127306d | 681 | clk_mgr_base->clks.dcfclk_khz = new_clocks->dcfclk_khz; |
405bb9ee | 682 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DCFCLK, khz_to_mhz_ceil(clk_mgr_base->clks.dcfclk_khz)); |
e127306d | 683 | } |
265280b9 | 684 | |
1d8355ad AL |
685 | if (should_set_clock(safe_to_lower, new_clocks->dcfclk_deep_sleep_khz, clk_mgr_base->clks.dcfclk_deep_sleep_khz) && |
686 | !dc->work_arounds.clock_update_disable_mask.dcfclk_ds) { | |
e127306d JL |
687 | clk_mgr_base->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz; |
688 | dcn30_smu_set_min_deep_sleep_dcef_clk(clk_mgr, khz_to_mhz_ceil(clk_mgr_base->clks.dcfclk_deep_sleep_khz)); | |
689 | } | |
690 | ||
691 | if (should_set_clock(safe_to_lower, new_clocks->socclk_khz, clk_mgr_base->clks.socclk_khz)) | |
692 | /* We don't actually care about socclk, don't notify SMU of hard min */ | |
693 | clk_mgr_base->clks.socclk_khz = new_clocks->socclk_khz; | |
265280b9 | 694 | |
e127306d | 695 | clk_mgr_base->clks.prev_p_state_change_support = clk_mgr_base->clks.p_state_change_support; |
e127306d | 696 | clk_mgr_base->clks.prev_num_ways = clk_mgr_base->clks.num_ways; |
265280b9 | 697 | |
e127306d JL |
698 | if (clk_mgr_base->clks.num_ways != new_clocks->num_ways && |
699 | clk_mgr_base->clks.num_ways < new_clocks->num_ways) { | |
700 | clk_mgr_base->clks.num_ways = new_clocks->num_ways; | |
701 | dcn32_smu_send_cab_for_uclk_message(clk_mgr, clk_mgr_base->clks.num_ways); | |
702 | } | |
265280b9 | 703 | |
c8cefb99 | 704 | p_state_change_support = new_clocks->p_state_change_support; |
1d8355ad AL |
705 | if (should_update_pstate_support(safe_to_lower, p_state_change_support, clk_mgr_base->clks.p_state_change_support) && |
706 | !dc->work_arounds.clock_update_disable_mask.uclk) { | |
e127306d | 707 | clk_mgr_base->clks.p_state_change_support = p_state_change_support; |
265280b9 | 708 | |
e127306d | 709 | /* to disable P-State switching, set UCLK min = max */ |
12a6e62b AL |
710 | if (!clk_mgr_base->clks.p_state_change_support) { |
711 | if (dc->clk_mgr->dc_mode_softmax_enabled) { | |
712 | /* On DCN32x we will never have the functional UCLK min above the softmax | |
713 | * since we calculate mode support based on softmax being the max UCLK | |
714 | * frequency. | |
715 | */ | |
716 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, | |
717 | dc->clk_mgr->bw_params->dc_mode_softmax_memclk); | |
718 | } else { | |
719 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, dc->clk_mgr->bw_params->max_memclk_mhz); | |
720 | } | |
721 | } | |
e127306d | 722 | } |
265280b9 | 723 | |
aa298b30 AL |
724 | if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) |
725 | dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr, true); | |
726 | else | |
727 | dcn32_smu_wait_for_dmub_ack_mclk(clk_mgr, false); | |
728 | ||
c1969fba DV |
729 | /* Always update saved value, even if new value not set due to P-State switching unsupported. Also check safe_to_lower for FCLK */ |
730 | if (safe_to_lower && (clk_mgr_base->clks.fclk_p_state_change_support != clk_mgr_base->clks.fclk_prev_p_state_change_support)) { | |
731 | update_fclk = true; | |
732 | } | |
265280b9 | 733 | |
1d8355ad AL |
734 | if (clk_mgr_base->ctx->dce_version != DCN_VERSION_3_21 && !clk_mgr_base->clks.fclk_p_state_change_support && update_fclk && |
735 | !dc->work_arounds.clock_update_disable_mask.fclk) { | |
c1969fba DV |
736 | /* Handle code for sending a message to PMFW that FCLK P-state change is not supported */ |
737 | dcn32_smu_send_fclk_pstate_message(clk_mgr, FCLK_PSTATE_NOTSUPPORTED); | |
265280b9 | 738 | } |
265280b9 | 739 | |
e127306d | 740 | /* Always update saved value, even if new value not set due to P-State switching unsupported */ |
1d8355ad AL |
741 | if (should_set_clock(safe_to_lower, new_clocks->dramclk_khz, clk_mgr_base->clks.dramclk_khz) && |
742 | !dc->work_arounds.clock_update_disable_mask.uclk) { | |
e127306d JL |
743 | clk_mgr_base->clks.dramclk_khz = new_clocks->dramclk_khz; |
744 | update_uclk = true; | |
745 | } | |
746 | ||
747 | /* set UCLK to requested value if P-State switching is supported, or to re-enable P-State switching */ | |
748 | if (clk_mgr_base->clks.p_state_change_support && | |
1d8355ad AL |
749 | (update_uclk || !clk_mgr_base->clks.prev_p_state_change_support) && |
750 | !dc->work_arounds.clock_update_disable_mask.uclk) | |
405bb9ee | 751 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, khz_to_mhz_ceil(clk_mgr_base->clks.dramclk_khz)); |
265280b9 | 752 | |
e127306d JL |
753 | if (clk_mgr_base->clks.num_ways != new_clocks->num_ways && |
754 | clk_mgr_base->clks.num_ways > new_clocks->num_ways) { | |
755 | clk_mgr_base->clks.num_ways = new_clocks->num_ways; | |
756 | dcn32_smu_send_cab_for_uclk_message(clk_mgr, clk_mgr_base->clks.num_ways); | |
757 | } | |
265280b9 AP |
758 | } |
759 | ||
04e6931a | 760 | dcn32_update_dppclk_dispclk_freq(clk_mgr, new_clocks); |
265280b9 AP |
761 | if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr_base->clks.dppclk_khz)) { |
762 | if (clk_mgr_base->clks.dppclk_khz > new_clocks->dppclk_khz) | |
763 | dpp_clock_lowered = true; | |
764 | ||
765 | clk_mgr_base->clks.dppclk_khz = new_clocks->dppclk_khz; | |
e127306d | 766 | |
04e6931a | 767 | if (clk_mgr->smu_present && !dpp_clock_lowered) |
3fc39411 WL |
768 | /* |
769 | * SMU uses discrete dppclk presets. We applied | |
770 | * the same formula to increase our dppclk_khz | |
771 | * to the next matching discrete value. By | |
772 | * contract, we should use the preset dppclk | |
773 | * floored in Mhz to describe the intended clock. | |
774 | */ | |
775 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DPPCLK, | |
776 | khz_to_mhz_floor(clk_mgr_base->clks.dppclk_khz)); | |
e127306d | 777 | |
265280b9 AP |
778 | update_dppclk = true; |
779 | } | |
780 | ||
781 | if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) { | |
782 | clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz; | |
e127306d | 783 | |
265280b9 AP |
784 | update_dispclk = true; |
785 | } | |
786 | ||
405bb9ee | 787 | if (!new_clocks->dtbclk_en) { |
f6015da7 | 788 | new_clocks->ref_dtbclk_khz = clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz * 1000; |
405bb9ee AL |
789 | } |
790 | ||
791 | /* clock limits are received with MHz precision, divide by 1000 to prevent setting clocks at every call */ | |
792 | if (!dc->debug.disable_dtb_ref_clk_switch && | |
793 | should_set_clock(safe_to_lower, new_clocks->ref_dtbclk_khz / 1000, clk_mgr_base->clks.ref_dtbclk_khz / 1000)) { | |
794 | /* DCCG requires KHz precision for DTBCLK */ | |
795 | clk_mgr_base->clks.ref_dtbclk_khz = | |
796 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DTBCLK, khz_to_mhz_ceil(new_clocks->ref_dtbclk_khz)); | |
a71e1310 | 797 | |
128c1ca0 | 798 | dcn32_update_clocks_update_dtb_dto(clk_mgr, context, clk_mgr_base->clks.ref_dtbclk_khz); |
405bb9ee AL |
799 | } |
800 | ||
265280b9 AP |
801 | if (dc->config.forced_clocks == false || (force_reset && safe_to_lower)) { |
802 | if (dpp_clock_lowered) { | |
803 | /* if clock is being lowered, increase DTO before lowering refclk */ | |
cd487b6d | 804 | dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower); |
7bd571b2 | 805 | dcn32_update_clocks_update_dentist(clk_mgr, context); |
04e6931a | 806 | if (clk_mgr->smu_present) |
3fc39411 WL |
807 | /* |
808 | * SMU uses discrete dppclk presets. We applied | |
809 | * the same formula to increase our dppclk_khz | |
810 | * to the next matching discrete value. By | |
811 | * contract, we should use the preset dppclk | |
812 | * floored in Mhz to describe the intended clock. | |
813 | */ | |
814 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_DPPCLK, | |
815 | khz_to_mhz_floor(clk_mgr_base->clks.dppclk_khz)); | |
265280b9 AP |
816 | } else { |
817 | /* if clock is being raised, increase refclk before lowering DTO */ | |
818 | if (update_dppclk || update_dispclk) | |
7bd571b2 | 819 | dcn32_update_clocks_update_dentist(clk_mgr, context); |
265280b9 AP |
820 | /* There is a check inside dcn20_update_clocks_update_dpp_dto which ensures |
821 | * that we do not lower dto when it is not safe to lower. We do not need to | |
822 | * compare the current and new dppclk before calling this function. | |
823 | */ | |
cd487b6d | 824 | dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower); |
265280b9 AP |
825 | } |
826 | } | |
827 | ||
828 | if (update_dispclk && dmcu && dmcu->funcs->is_dmcu_initialized(dmcu)) | |
829 | /*update dmcu for wait_loop count*/ | |
830 | dmcu->funcs->set_psr_wait_loop(dmcu, | |
831 | clk_mgr_base->clks.dispclk_khz / 1000 / 7); | |
77ad5f6f | 832 | |
838a59ca | 833 | if (dc->config.enable_auto_dpm_test_logs) { |
a71e1310 | 834 | dcn32_auto_dpm_test_log(new_clocks, clk_mgr, context); |
77ad5f6f | 835 | } |
265280b9 AP |
836 | } |
837 | ||
3e838f7c RS |
/*
 * Read back the DENTIST VCO frequency (in kHz) from the PLL request register.
 *
 * The register holds the DFS feedback multiplier as a fixed-point value:
 * integer bits masked by FbMult_int, fractional bits masked by FbMult_frac.
 * Multiplying the multiplier by the DFS reference frequency (dfs_ref_freq_khz)
 * yields the VCO frequency; the result is floored to an integer kHz value.
 */
static uint32_t dcn32_get_vco_frequency_from_reg(struct clk_mgr_internal *clk_mgr)
{
	struct fixed31_32 pll_req;
	uint32_t pll_req_reg = 0;

	/* get FbMult value — GC 11.0.2 (DCN3.2.1) uses the CLK0 register bank,
	 * all other DCN32 parts use CLK1
	 */
	if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev))
		pll_req_reg = REG_READ(CLK0_CLK_PLL_REQ);
	else
		pll_req_reg = REG_READ(CLK1_CLK_PLL_REQ);

	/* set up a fixed-point number
	 * this works because the int part is on the right edge of the register
	 * and the frac part is on the left edge
	 */
	pll_req = dc_fixpt_from_int(pll_req_reg & clk_mgr->clk_mgr_mask->FbMult_int);
	pll_req.value |= pll_req_reg & clk_mgr->clk_mgr_mask->FbMult_frac;

	/* multiply by REFCLK period */
	pll_req = dc_fixpt_mul_int(pll_req, clk_mgr->dfs_ref_freq_khz);

	/* discard the fractional remainder; callers expect whole kHz */
	return dc_fixpt_floor(pll_req);
}
861 | ||
/*
 * Snapshot the current hardware clock state by reading the DFS (DENTIST)
 * divider registers and converting each DID back to a frequency in kHz.
 *
 * Results are written into @regs_and_bypass; @log_info is currently unused
 * by this implementation.  Frequency = DENTIST_DIVIDER_RANGE_SCALE_FACTOR *
 * dentist_vco_freq_khz / divider for each DFS slice.
 */
static void dcn32_dump_clk_registers(struct clk_state_registers_and_bypass *regs_and_bypass,
		struct clk_mgr *clk_mgr_base, struct clk_log_info *log_info)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	uint32_t dprefclk_did = 0;
	uint32_t dcfclk_did = 0;
	uint32_t dtbclk_did = 0;
	uint32_t dispclk_did = 0;
	uint32_t dppclk_did = 0;
	uint32_t target_div = 0;

	/* GC 11.0.2 (DCN3.2.1) exposes the DFS slices in the CLK0 bank;
	 * other DCN32 parts use CLK1.  Slice assignment is identical.
	 */
	if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev)) {
		/* DFS Slice 0 is used for DISPCLK */
		dispclk_did = REG_READ(CLK0_CLK0_DFS_CNTL);
		/* DFS Slice 1 is used for DPPCLK */
		dppclk_did = REG_READ(CLK0_CLK1_DFS_CNTL);
		/* DFS Slice 2 is used for DPREFCLK */
		dprefclk_did = REG_READ(CLK0_CLK2_DFS_CNTL);
		/* DFS Slice 3 is used for DCFCLK */
		dcfclk_did = REG_READ(CLK0_CLK3_DFS_CNTL);
		/* DFS Slice 4 is used for DTBCLK */
		dtbclk_did = REG_READ(CLK0_CLK4_DFS_CNTL);
	} else {
		/* DFS Slice 0 is used for DISPCLK */
		dispclk_did = REG_READ(CLK1_CLK0_DFS_CNTL);
		/* DFS Slice 1 is used for DPPCLK */
		dppclk_did = REG_READ(CLK1_CLK1_DFS_CNTL);
		/* DFS Slice 2 is used for DPREFCLK */
		dprefclk_did = REG_READ(CLK1_CLK2_DFS_CNTL);
		/* DFS Slice 3 is used for DCFCLK */
		dcfclk_did = REG_READ(CLK1_CLK3_DFS_CNTL);
		/* DFS Slice 4 is used for DTBCLK */
		dtbclk_did = REG_READ(CLK1_CLK4_DFS_CNTL);
	}

	/* Convert DISPCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dispclk_did);
	//Get dispclk in khz
	regs_and_bypass->dispclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DPPCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dppclk_did);
	//Get dppclk in khz
	regs_and_bypass->dppclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DPREFCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dprefclk_did);
	//Get dprefclk in khz
	regs_and_bypass->dprefclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DCFCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dcfclk_did);
	//Get dcfclk in khz
	regs_and_bypass->dcfclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;

	/* Convert DTBCLK DFS Slice DID to divider*/
	target_div = dentist_get_divider_from_did(dtbclk_did);
	//Get dtbclk in khz
	regs_and_bypass->dtbclk = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
			* clk_mgr->base.dentist_vco_freq_khz) / target_div;
}
927 | ||
b94b02d7 | 928 | static void dcn32_clock_read_ss_info(struct clk_mgr_internal *clk_mgr) |
265280b9 AP |
929 | { |
930 | struct dc_bios *bp = clk_mgr->base.ctx->dc_bios; | |
931 | int ss_info_num = bp->funcs->get_ss_entry_number( | |
932 | bp, AS_SIGNAL_TYPE_GPU_PLL); | |
933 | ||
934 | if (ss_info_num) { | |
935 | struct spread_spectrum_info info = { { 0 } }; | |
936 | enum bp_result result = bp->funcs->get_spread_spectrum_info( | |
937 | bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info); | |
938 | ||
939 | /* SSInfo.spreadSpectrumPercentage !=0 would be sign | |
940 | * that SS is enabled | |
941 | */ | |
942 | if (result == BP_RESULT_OK && | |
943 | info.spread_spectrum_percentage != 0) { | |
944 | clk_mgr->ss_on_dprefclk = true; | |
945 | clk_mgr->dprefclk_ss_divider = info.spread_percentage_divider; | |
946 | ||
947 | if (info.type.CENTER_MODE == 0) { | |
948 | /* Currently for DP Reference clock we | |
949 | * need only SS percentage for | |
950 | * downspread | |
951 | */ | |
952 | clk_mgr->dprefclk_ss_percentage = | |
953 | info.spread_spectrum_percentage; | |
954 | } | |
955 | } | |
956 | } | |
957 | } | |
958 | static void dcn32_notify_wm_ranges(struct clk_mgr *clk_mgr_base) | |
959 | { | |
e06c5f59 | 960 | unsigned int i; |
265280b9 AP |
961 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); |
962 | WatermarksExternal_t *table = (WatermarksExternal_t *) clk_mgr->wm_range_table; | |
963 | ||
964 | if (!clk_mgr->smu_present) | |
965 | return; | |
966 | ||
967 | if (!table) | |
968 | return; | |
969 | ||
970 | memset(table, 0, sizeof(*table)); | |
971 | ||
e06c5f59 AL |
972 | /* collect valid ranges, place in pmfw table */ |
973 | for (i = 0; i < WM_SET_COUNT; i++) | |
974 | if (clk_mgr->base.bw_params->wm_table.nv_entries[i].valid) { | |
975 | table->Watermarks.WatermarkRow[i].WmSetting = i; | |
976 | table->Watermarks.WatermarkRow[i].Flags = clk_mgr->base.bw_params->wm_table.nv_entries[i].pmfw_breakdown.wm_type; | |
977 | } | |
265280b9 AP |
978 | dcn30_smu_set_dram_addr_high(clk_mgr, clk_mgr->wm_range_table_addr >> 32); |
979 | dcn30_smu_set_dram_addr_low(clk_mgr, clk_mgr->wm_range_table_addr & 0xFFFFFFFF); | |
980 | dcn32_smu_transfer_wm_table_dram_2_smu(clk_mgr); | |
981 | } | |
982 | ||
983 | /* Set min memclk to minimum, either constrained by the current mode or DPM0 */ | |
984 | static void dcn32_set_hard_min_memclk(struct clk_mgr *clk_mgr_base, bool current_mode) | |
985 | { | |
986 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
987 | ||
988 | if (!clk_mgr->smu_present) | |
989 | return; | |
990 | ||
991 | if (current_mode) { | |
992 | if (clk_mgr_base->clks.p_state_change_support) | |
405bb9ee | 993 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, |
265280b9 AP |
994 | khz_to_mhz_ceil(clk_mgr_base->clks.dramclk_khz)); |
995 | else | |
405bb9ee | 996 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, |
2b1b838e | 997 | clk_mgr_base->bw_params->max_memclk_mhz); |
265280b9 | 998 | } else { |
405bb9ee | 999 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, |
265280b9 AP |
1000 | clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz); |
1001 | } | |
1002 | } | |
1003 | ||
1004 | /* Set max memclk to highest DPM value */ | |
1005 | static void dcn32_set_hard_max_memclk(struct clk_mgr *clk_mgr_base) | |
1006 | { | |
1007 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
1008 | ||
1009 | if (!clk_mgr->smu_present) | |
1010 | return; | |
1011 | ||
12a6e62b | 1012 | dcn30_smu_set_hard_max_by_freq(clk_mgr, PPCLK_UCLK, clk_mgr_base->bw_params->max_memclk_mhz); |
265280b9 AP |
1013 | } |
1014 | ||
/* Get current memclk states, update bounding box */
static void dcn32_get_memclk_states_from_smu(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);
	struct clk_limit_num_entries *num_entries_per_clk = &clk_mgr_base->bw_params->clk_table.num_entries_per_clk;
	unsigned int num_levels;

	if (!clk_mgr->smu_present)
		return;

	/* Refresh memclk and fclk states */
	dcn32_init_single_clock(clk_mgr, PPCLK_UCLK,
			&clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz,
			&num_entries_per_clk->num_memclk_levels);
	/* DC-mode limit comes from PMFW; softmax tracks it */
	clk_mgr_base->bw_params->dc_mode_limit.memclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_UCLK);
	clk_mgr_base->bw_params->dc_mode_softmax_memclk = clk_mgr_base->bw_params->dc_mode_limit.memclk_mhz;

	/* memclk must have at least one level */
	num_entries_per_clk->num_memclk_levels = num_entries_per_clk->num_memclk_levels ? num_entries_per_clk->num_memclk_levels : 1;

	dcn32_init_single_clock(clk_mgr, PPCLK_FCLK,
			&clk_mgr_base->bw_params->clk_table.entries[0].fclk_mhz,
			&num_entries_per_clk->num_fclk_levels);
	clk_mgr_base->bw_params->dc_mode_limit.fclk_mhz = dcn30_smu_get_dc_mode_max_dpm_freq(clk_mgr, PPCLK_FCLK);

	/* overall table size is the larger of the memclk and fclk level counts */
	if (num_entries_per_clk->num_memclk_levels >= num_entries_per_clk->num_fclk_levels) {
		num_levels = num_entries_per_clk->num_memclk_levels;
	} else {
		num_levels = num_entries_per_clk->num_fclk_levels;
	}
	/* cache the highest memclk DPM entry as the absolute max */
	clk_mgr_base->bw_params->max_memclk_mhz =
			clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_memclk_levels - 1].memclk_mhz;
	clk_mgr_base->bw_params->clk_table.num_entries = num_levels ? num_levels : 1;

	/* no levels reported while DPM was thought present -> DPM is gone */
	if (clk_mgr->dpm_present && !num_levels)
		clk_mgr->dpm_present = false;

	if (!clk_mgr->dpm_present)
		dcn32_patch_dpm_table(clk_mgr_base->bw_params);

	/* bounding-box update uses FP math; must be bracketed by DC_FP_START/END */
	DC_FP_START();
	/* Refresh bounding box */
	clk_mgr_base->ctx->dc->res_pool->funcs->update_bw_bounding_box(
			clk_mgr->base.ctx->dc, clk_mgr_base->bw_params);
	DC_FP_END();
}
1061 | ||
1062 | static bool dcn32_are_clock_states_equal(struct dc_clocks *a, | |
1063 | struct dc_clocks *b) | |
1064 | { | |
1065 | if (a->dispclk_khz != b->dispclk_khz) | |
1066 | return false; | |
1067 | else if (a->dppclk_khz != b->dppclk_khz) | |
1068 | return false; | |
1069 | else if (a->dcfclk_khz != b->dcfclk_khz) | |
1070 | return false; | |
1071 | else if (a->dcfclk_deep_sleep_khz != b->dcfclk_deep_sleep_khz) | |
1072 | return false; | |
1073 | else if (a->dramclk_khz != b->dramclk_khz) | |
1074 | return false; | |
1075 | else if (a->p_state_change_support != b->p_state_change_support) | |
1076 | return false; | |
1077 | else if (a->fclk_p_state_change_support != b->fclk_p_state_change_support) | |
1078 | return false; | |
1079 | ||
1080 | return true; | |
1081 | } | |
1082 | ||
1083 | static void dcn32_enable_pme_wa(struct clk_mgr *clk_mgr_base) | |
1084 | { | |
1085 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
1086 | ||
1087 | if (!clk_mgr->smu_present) | |
1088 | return; | |
1089 | ||
aeb73c60 | 1090 | dcn32_smu_set_pme_workaround(clk_mgr); |
265280b9 AP |
1091 | } |
1092 | ||
1093 | static bool dcn32_is_smu_present(struct clk_mgr *clk_mgr_base) | |
1094 | { | |
1095 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
1096 | return clk_mgr->smu_present; | |
1097 | } | |
1098 | ||
12a6e62b AL |
1099 | static void dcn32_set_max_memclk(struct clk_mgr *clk_mgr_base, unsigned int memclk_mhz) |
1100 | { | |
1101 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
1102 | ||
1103 | if (!clk_mgr->smu_present) | |
1104 | return; | |
1105 | ||
1106 | dcn30_smu_set_hard_max_by_freq(clk_mgr, PPCLK_UCLK, memclk_mhz); | |
1107 | } | |
1108 | ||
1109 | static void dcn32_set_min_memclk(struct clk_mgr *clk_mgr_base, unsigned int memclk_mhz) | |
1110 | { | |
1111 | struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base); | |
1112 | ||
1113 | if (!clk_mgr->smu_present) | |
1114 | return; | |
1115 | ||
1116 | dcn32_smu_set_hard_min_by_freq(clk_mgr, PPCLK_UCLK, memclk_mhz); | |
1117 | } | |
265280b9 AP |
1118 | |
/* clk_mgr vtable for DCN3.2 / DCN3.2.1; installed in dcn32_clk_mgr_construct() */
static struct clk_mgr_funcs dcn32_funcs = {
	.get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
	.get_dtb_ref_clk_frequency = dcn31_get_dtb_ref_freq_khz,
	.update_clocks = dcn32_update_clocks,
	.dump_clk_registers = dcn32_dump_clk_registers,
	.init_clocks = dcn32_init_clocks,
	.notify_wm_ranges = dcn32_notify_wm_ranges,
	.set_hard_min_memclk = dcn32_set_hard_min_memclk,
	.set_hard_max_memclk = dcn32_set_hard_max_memclk,
	.set_max_memclk = dcn32_set_max_memclk,
	.set_min_memclk = dcn32_set_min_memclk,
	.get_memclk_states_from_smu = dcn32_get_memclk_states_from_smu,
	.are_clock_states_equal = dcn32_are_clock_states_equal,
	.enable_pme_wa = dcn32_enable_pme_wa,
	.is_smu_present = dcn32_is_smu_present,
	.get_dispclk_from_dentist = dcn32_get_dispclk_from_dentist,
};
1136 | ||
/*
 * Initialize a DCN3.2/DCN3.2.1 clock manager: select the per-ASIC register
 * set, seed default clock/SS state, read the VCO frequency from hardware,
 * snapshot boot clocks, and allocate the bw_params and PMFW watermark table.
 *
 * NOTE(review): kzalloc() for bw_params and dm_helpers_allocate_gpu_mem()
 * for the watermark table are not checked for failure here; downstream users
 * (e.g. dcn32_notify_wm_ranges) guard on a NULL wm_range_table, but bw_params
 * is dereferenced unconditionally elsewhere — confirm callers tolerate OOM.
 */
void dcn32_clk_mgr_construct(
		struct dc_context *ctx,
		struct clk_mgr_internal *clk_mgr,
		struct pp_smu_funcs *pp_smu,
		struct dccg *dccg)
{
	struct clk_log_info log_info = {0};

	clk_mgr->base.ctx = ctx;
	clk_mgr->base.funcs = &dcn32_funcs;
	/* GC 11.0.2 (DCN3.2.1) has its own register/shift/mask tables */
	if (ASICREV_IS_GC_11_0_2(clk_mgr->base.ctx->asic_id.hw_internal_rev)) {
		clk_mgr->regs = &clk_mgr_regs_dcn321;
		clk_mgr->clk_mgr_shift = &clk_mgr_shift_dcn321;
		clk_mgr->clk_mgr_mask = &clk_mgr_mask_dcn321;
	} else {
		clk_mgr->regs = &clk_mgr_regs_dcn32;
		clk_mgr->clk_mgr_shift = &clk_mgr_shift_dcn32;
		clk_mgr->clk_mgr_mask = &clk_mgr_mask_dcn32;
	}

	clk_mgr->dccg = dccg;
	clk_mgr->dfs_bypass_disp_clk = 0;

	/* default spread-spectrum state; refined by dcn32_clock_read_ss_info() below */
	clk_mgr->dprefclk_ss_percentage = 0;
	clk_mgr->dprefclk_ss_divider = 1000;
	clk_mgr->ss_on_dprefclk = false;
	clk_mgr->dfs_ref_freq_khz = 100000;

	/* Changed from DCN3.2_clock_frequency doc to match
	 * dcn32_dump_clk_registers from 4 * dentist_vco_freq_khz /
	 * dprefclk DID divider
	 */
	clk_mgr->base.dprefclk_khz = 716666;
	if (ctx->dc->debug.disable_dtb_ref_clk_switch) {
		//initialize DTB ref clock value if DPM disabled
		if (ctx->dce_version == DCN_VERSION_3_21)
			clk_mgr->base.clks.ref_dtbclk_khz = 477800;
		else
			clk_mgr->base.clks.ref_dtbclk_khz = 268750;
	}


	/* integer part is now VCO frequency in kHz */
	clk_mgr->base.dentist_vco_freq_khz = dcn32_get_vco_frequency_from_reg(clk_mgr);

	/* in case we don't get a value from the register, use default */
	if (clk_mgr->base.dentist_vco_freq_khz == 0)
		clk_mgr->base.dentist_vco_freq_khz = 4300000; /* Updated as per HW docs */

	/* capture boot-time clock state for later comparison/fallbacks */
	dcn32_dump_clk_registers(&clk_mgr->base.boot_snapshot, &clk_mgr->base, &log_info);

	/* with DTB ref clk switching disabled, trust the boot snapshot over the default */
	if (ctx->dc->debug.disable_dtb_ref_clk_switch &&
			clk_mgr->base.clks.ref_dtbclk_khz != clk_mgr->base.boot_snapshot.dtbclk) {
		clk_mgr->base.clks.ref_dtbclk_khz = clk_mgr->base.boot_snapshot.dtbclk;
	}

	/* prefer the dprefclk the VBIOS actually programmed, when available */
	if (clk_mgr->base.boot_snapshot.dprefclk != 0) {
		clk_mgr->base.dprefclk_khz = clk_mgr->base.boot_snapshot.dprefclk;
	}
	dcn32_clock_read_ss_info(clk_mgr);

	clk_mgr->dfs_bypass_enabled = false;

	/* SMU presence is established later; start pessimistic */
	clk_mgr->smu_present = false;

	clk_mgr->base.bw_params = kzalloc(sizeof(*clk_mgr->base.bw_params), GFP_KERNEL);

	/* need physical address of table to give to PMFW */
	clk_mgr->wm_range_table = dm_helpers_allocate_gpu_mem(clk_mgr->base.ctx,
			DC_MEM_ALLOC_TYPE_GART, sizeof(WatermarksExternal_t),
			&clk_mgr->wm_range_table_addr);
}
1209 | ||
/*
 * Release resources allocated by dcn32_clk_mgr_construct(): the bw_params
 * heap allocation (kfree tolerates NULL) and, if it was successfully
 * allocated, the GART-backed PMFW watermark range table.
 */
void dcn32_clk_mgr_destroy(struct clk_mgr_internal *clk_mgr)
{
	kfree(clk_mgr->base.bw_params);

	if (clk_mgr->wm_range_table)
		dm_helpers_free_gpu_mem(clk_mgr->base.ctx, DC_MEM_ALLOC_TYPE_GART,
				clk_mgr->wm_range_table);
}
1218 |