Commit | Line | Data |
---|---|---|
fcfe0bdc MC |
1 | /* |
2 | * Copyright © 2018 Intel Corporation | |
3 | * | |
4 | * Permission is hereby granted, free of charge, to any person obtaining a | |
5 | * copy of this software and associated documentation files (the "Software"), | |
6 | * to deal in the Software without restriction, including without limitation | |
7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
8 | * and/or sell copies of the Software, and to permit persons to whom the | |
9 | * Software is furnished to do so, subject to the following conditions: | |
10 | * | |
11 | * The above copyright notice and this permission notice (including the next | |
12 | * paragraph) shall be included in all copies or substantial portions of the | |
13 | * Software. | |
14 | * | |
15 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
16 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
17 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
18 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
19 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |
20 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | |
21 | * DEALINGS IN THE SOFTWARE. | |
22 | * | |
23 | * Authors: | |
24 | * Madhav Chauhan <madhav.chauhan@intel.com> | |
25 | * Jani Nikula <jani.nikula@intel.com> | |
26 | */ | |
27 | ||
#include <linux/errno.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>

#include "intel_dsi.h"
31 | ||
32bbc3d4 MC |
32 | static inline int header_credits_available(struct drm_i915_private *dev_priv, |
33 | enum transcoder dsi_trans) | |
34 | { | |
35 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK) | |
36 | >> FREE_HEADER_CREDIT_SHIFT; | |
37 | } | |
38 | ||
39 | static inline int payload_credits_available(struct drm_i915_private *dev_priv, | |
40 | enum transcoder dsi_trans) | |
41 | { | |
42 | return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK) | |
43 | >> FREE_PLOAD_CREDIT_SHIFT; | |
44 | } | |
45 | ||
46 | static void wait_for_header_credits(struct drm_i915_private *dev_priv, | |
47 | enum transcoder dsi_trans) | |
48 | { | |
49 | if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >= | |
50 | MAX_HEADER_CREDIT, 100)) | |
51 | DRM_ERROR("DSI header credits not released\n"); | |
52 | } | |
53 | ||
54 | static void wait_for_payload_credits(struct drm_i915_private *dev_priv, | |
55 | enum transcoder dsi_trans) | |
56 | { | |
57 | if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >= | |
58 | MAX_PLOAD_CREDIT, 100)) | |
59 | DRM_ERROR("DSI payload credits not released\n"); | |
60 | } | |
61 | ||
d364dc66 | 62 | static enum transcoder dsi_port_to_transcoder(enum port port) |
ca8fc99f MC |
63 | { |
64 | if (port == PORT_A) | |
65 | return TRANSCODER_DSI_0; | |
66 | else | |
67 | return TRANSCODER_DSI_1; | |
68 | } | |
69 | ||
32bbc3d4 MC |
/*
 * Flush outstanding DSI commands to the panel: wait for all
 * header/payload credits to be returned, send a DCS NOP in LP mode on
 * each link as a fence, wait for the NOP's header credit to come back,
 * then wait for the LP TX engine to go idle.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send nop DCS command */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM; /* NOP goes out in LP mode */
		dsi->channel = 0; /* virtual channel 0 */
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			DRM_ERROR("error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			DRM_ERROR("LPTX bit not cleared\n");
	}
}
110 | ||
c5f9c934 MC |
111 | static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data, |
112 | u32 len) | |
113 | { | |
114 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
115 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
116 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
117 | int free_credits; | |
118 | int i, j; | |
119 | ||
120 | for (i = 0; i < len; i += 4) { | |
121 | u32 tmp = 0; | |
122 | ||
123 | free_credits = payload_credits_available(dev_priv, dsi_trans); | |
124 | if (free_credits < 1) { | |
125 | DRM_ERROR("Payload credit not available\n"); | |
126 | return false; | |
127 | } | |
128 | ||
129 | for (j = 0; j < min_t(u32, len - i, 4); j++) | |
130 | tmp |= *data++ << 8 * j; | |
131 | ||
132 | I915_WRITE(DSI_CMD_TXPYLD(dsi_trans), tmp); | |
133 | } | |
134 | ||
135 | return true; | |
136 | } | |
137 | ||
138 | static int dsi_send_pkt_hdr(struct intel_dsi_host *host, | |
139 | struct mipi_dsi_packet pkt, bool enable_lpdt) | |
140 | { | |
141 | struct intel_dsi *intel_dsi = host->intel_dsi; | |
142 | struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev); | |
143 | enum transcoder dsi_trans = dsi_port_to_transcoder(host->port); | |
144 | u32 tmp; | |
145 | int free_credits; | |
146 | ||
147 | /* check if header credit available */ | |
148 | free_credits = header_credits_available(dev_priv, dsi_trans); | |
149 | if (free_credits < 1) { | |
150 | DRM_ERROR("send pkt header failed, not enough hdr credits\n"); | |
151 | return -1; | |
152 | } | |
153 | ||
154 | tmp = I915_READ(DSI_CMD_TXHDR(dsi_trans)); | |
155 | ||
156 | if (pkt.payload) | |
157 | tmp |= PAYLOAD_PRESENT; | |
158 | else | |
159 | tmp &= ~PAYLOAD_PRESENT; | |
160 | ||
161 | tmp &= ~VBLANK_FENCE; | |
162 | ||
163 | if (enable_lpdt) | |
164 | tmp |= LP_DATA_TRANSFER; | |
165 | ||
166 | tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK); | |
167 | tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT); | |
168 | tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT); | |
169 | tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT); | |
170 | tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT); | |
171 | I915_WRITE(DSI_CMD_TXHDR(dsi_trans), tmp); | |
172 | ||
173 | return 0; | |
174 | } | |
175 | ||
176 | static int dsi_send_pkt_payld(struct intel_dsi_host *host, | |
177 | struct mipi_dsi_packet pkt) | |
178 | { | |
179 | /* payload queue can accept *256 bytes*, check limit */ | |
180 | if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) { | |
181 | DRM_ERROR("payload size exceeds max queue limit\n"); | |
182 | return -1; | |
183 | } | |
184 | ||
185 | /* load data into command payload queue */ | |
186 | if (!add_payld_to_queue(host, pkt.payload, | |
187 | pkt.payload_length)) { | |
188 | DRM_ERROR("adding payload to queue failed\n"); | |
189 | return -1; | |
190 | } | |
191 | ||
192 | return 0; | |
193 | } | |
194 | ||
3f4b9d9d MC |
195 | static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder) |
196 | { | |
197 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
198 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
199 | enum port port; | |
200 | u32 tmp; | |
201 | int lane; | |
202 | ||
203 | for_each_dsi_port(port, intel_dsi->ports) { | |
204 | ||
205 | /* | |
206 | * Program voltage swing and pre-emphasis level values as per | |
207 | * table in BSPEC under DDI buffer programing | |
208 | */ | |
209 | tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port)); | |
210 | tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK); | |
211 | tmp |= SCALING_MODE_SEL(0x2); | |
212 | tmp |= TAP2_DISABLE | TAP3_DISABLE; | |
213 | tmp |= RTERM_SELECT(0x6); | |
214 | I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp); | |
215 | ||
216 | tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port)); | |
217 | tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK); | |
218 | tmp |= SCALING_MODE_SEL(0x2); | |
219 | tmp |= TAP2_DISABLE | TAP3_DISABLE; | |
220 | tmp |= RTERM_SELECT(0x6); | |
221 | I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp); | |
222 | ||
223 | tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port)); | |
224 | tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | | |
225 | RCOMP_SCALAR_MASK); | |
226 | tmp |= SWING_SEL_UPPER(0x2); | |
227 | tmp |= SWING_SEL_LOWER(0x2); | |
228 | tmp |= RCOMP_SCALAR(0x98); | |
229 | I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp); | |
230 | ||
231 | tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port)); | |
232 | tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | | |
233 | RCOMP_SCALAR_MASK); | |
234 | tmp |= SWING_SEL_UPPER(0x2); | |
235 | tmp |= SWING_SEL_LOWER(0x2); | |
236 | tmp |= RCOMP_SCALAR(0x98); | |
237 | I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp); | |
238 | ||
239 | tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port)); | |
240 | tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | | |
241 | CURSOR_COEFF_MASK); | |
242 | tmp |= POST_CURSOR_1(0x0); | |
243 | tmp |= POST_CURSOR_2(0x0); | |
244 | tmp |= CURSOR_COEFF(0x3f); | |
245 | I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp); | |
246 | ||
247 | for (lane = 0; lane <= 3; lane++) { | |
248 | /* Bspec: must not use GRP register for write */ | |
249 | tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane)); | |
250 | tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | | |
251 | CURSOR_COEFF_MASK); | |
252 | tmp |= POST_CURSOR_1(0x0); | |
253 | tmp |= POST_CURSOR_2(0x0); | |
254 | tmp |= CURSOR_COEFF(0x3f); | |
255 | I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp); | |
256 | } | |
257 | } | |
258 | } | |
259 | ||
5a8507b5 MC |
260 | static void configure_dual_link_mode(struct intel_encoder *encoder, |
261 | const struct intel_crtc_state *pipe_config) | |
262 | { | |
263 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
264 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
265 | u32 dss_ctl1; | |
266 | ||
267 | dss_ctl1 = I915_READ(DSS_CTL1); | |
268 | dss_ctl1 |= SPLITTER_ENABLE; | |
269 | dss_ctl1 &= ~OVERLAP_PIXELS_MASK; | |
270 | dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap); | |
271 | ||
272 | if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) { | |
273 | const struct drm_display_mode *adjusted_mode = | |
274 | &pipe_config->base.adjusted_mode; | |
275 | u32 dss_ctl2; | |
276 | u16 hactive = adjusted_mode->crtc_hdisplay; | |
277 | u16 dl_buffer_depth; | |
278 | ||
279 | dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE; | |
280 | dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap; | |
281 | ||
282 | if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH) | |
283 | DRM_ERROR("DL buffer depth exceed max value\n"); | |
284 | ||
285 | dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK; | |
286 | dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth); | |
287 | dss_ctl2 = I915_READ(DSS_CTL2); | |
288 | dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK; | |
289 | dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth); | |
290 | I915_WRITE(DSS_CTL2, dss_ctl2); | |
291 | } else { | |
292 | /* Interleave */ | |
293 | dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE; | |
294 | } | |
295 | ||
296 | I915_WRITE(DSS_CTL1, dss_ctl1); | |
297 | } | |
298 | ||
fcfe0bdc MC |
/*
 * Program the escape clock dividers so the escape clock derived from
 * the AFE (8X) clock does not exceed DSI_MAX_ESC_CLK.  The divider is
 * written both to the DSI and the DPHY copies of the register.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	u32 afe_clk_khz; /* 8X Clock */
	u32 esc_clk_div_m;

	/* AFE clock = pixel clock * bpp / lane count */
	afe_clk_khz = DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp,
					intel_dsi->lane_count);

	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
	}
}
325 | ||
b1cb21a5 MC |
326 | static void gen11_dsi_enable_io_power(struct intel_encoder *encoder) |
327 | { | |
328 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
329 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
330 | enum port port; | |
331 | u32 tmp; | |
332 | ||
333 | for_each_dsi_port(port, intel_dsi->ports) { | |
334 | tmp = I915_READ(ICL_DSI_IO_MODECTL(port)); | |
335 | tmp |= COMBO_PHY_MODE_DSI; | |
336 | I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp); | |
337 | } | |
338 | ||
339 | for_each_dsi_port(port, intel_dsi->ports) { | |
340 | intel_display_power_get(dev_priv, port == PORT_A ? | |
341 | POWER_DOMAIN_PORT_DDI_A_IO : | |
342 | POWER_DOMAIN_PORT_DDI_B_IO); | |
343 | } | |
344 | } | |
345 | ||
45f09f7a MC |
346 | static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder) |
347 | { | |
348 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
349 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
350 | enum port port; | |
351 | u32 tmp; | |
352 | u32 lane_mask; | |
353 | ||
354 | switch (intel_dsi->lane_count) { | |
355 | case 1: | |
356 | lane_mask = PWR_DOWN_LN_3_1_0; | |
357 | break; | |
358 | case 2: | |
359 | lane_mask = PWR_DOWN_LN_3_1; | |
360 | break; | |
361 | case 3: | |
362 | lane_mask = PWR_DOWN_LN_3; | |
363 | break; | |
364 | case 4: | |
365 | default: | |
366 | lane_mask = PWR_UP_ALL_LANES; | |
367 | break; | |
368 | } | |
369 | ||
370 | for_each_dsi_port(port, intel_dsi->ports) { | |
371 | tmp = I915_READ(ICL_PORT_CL_DW10(port)); | |
372 | tmp &= ~PWR_DOWN_LN_MASK; | |
373 | I915_WRITE(ICL_PORT_CL_DW10(port), tmp | lane_mask); | |
374 | } | |
375 | } | |
376 | ||
fc41001d MC |
377 | static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder) |
378 | { | |
379 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
380 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
381 | enum port port; | |
382 | u32 tmp; | |
383 | int lane; | |
384 | ||
385 | /* Step 4b(i) set loadgen select for transmit and aux lanes */ | |
386 | for_each_dsi_port(port, intel_dsi->ports) { | |
387 | tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port)); | |
388 | tmp &= ~LOADGEN_SELECT; | |
389 | I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp); | |
390 | for (lane = 0; lane <= 3; lane++) { | |
391 | tmp = I915_READ(ICL_PORT_TX_DW4_LN(port, lane)); | |
392 | tmp &= ~LOADGEN_SELECT; | |
393 | if (lane != 2) | |
394 | tmp |= LOADGEN_SELECT; | |
395 | I915_WRITE(ICL_PORT_TX_DW4_LN(port, lane), tmp); | |
396 | } | |
397 | } | |
398 | ||
399 | /* Step 4b(ii) set latency optimization for transmit and aux lanes */ | |
400 | for_each_dsi_port(port, intel_dsi->ports) { | |
401 | tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port)); | |
402 | tmp &= ~FRC_LATENCY_OPTIM_MASK; | |
403 | tmp |= FRC_LATENCY_OPTIM_VAL(0x5); | |
404 | I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp); | |
405 | tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port)); | |
406 | tmp &= ~FRC_LATENCY_OPTIM_MASK; | |
407 | tmp |= FRC_LATENCY_OPTIM_VAL(0x5); | |
408 | I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp); | |
409 | } | |
410 | ||
411 | } | |
412 | ||
3f4b9d9d MC |
413 | static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder) |
414 | { | |
415 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
416 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
417 | u32 tmp; | |
418 | enum port port; | |
419 | ||
420 | /* clear common keeper enable bit */ | |
421 | for_each_dsi_port(port, intel_dsi->ports) { | |
422 | tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(port)); | |
423 | tmp &= ~COMMON_KEEPER_EN; | |
424 | I915_WRITE(ICL_PORT_PCS_DW1_GRP(port), tmp); | |
425 | tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(port)); | |
426 | tmp &= ~COMMON_KEEPER_EN; | |
427 | I915_WRITE(ICL_PORT_PCS_DW1_AUX(port), tmp); | |
428 | } | |
429 | ||
430 | /* | |
431 | * Set SUS Clock Config bitfield to 11b | |
432 | * Note: loadgen select program is done | |
433 | * as part of lane phy sequence configuration | |
434 | */ | |
435 | for_each_dsi_port(port, intel_dsi->ports) { | |
436 | tmp = I915_READ(ICL_PORT_CL_DW5(port)); | |
437 | tmp |= SUS_CLOCK_CONFIG; | |
438 | I915_WRITE(ICL_PORT_CL_DW5(port), tmp); | |
439 | } | |
440 | ||
441 | /* Clear training enable to change swing values */ | |
442 | for_each_dsi_port(port, intel_dsi->ports) { | |
443 | tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port)); | |
444 | tmp &= ~TX_TRAINING_EN; | |
445 | I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp); | |
446 | tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port)); | |
447 | tmp &= ~TX_TRAINING_EN; | |
448 | I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp); | |
449 | } | |
450 | ||
451 | /* Program swing and de-emphasis */ | |
452 | dsi_program_swing_and_deemphasis(encoder); | |
453 | ||
454 | /* Set training enable to trigger update */ | |
455 | for_each_dsi_port(port, intel_dsi->ports) { | |
456 | tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port)); | |
457 | tmp |= TX_TRAINING_EN; | |
458 | I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp); | |
459 | tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port)); | |
460 | tmp |= TX_TRAINING_EN; | |
461 | I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp); | |
462 | } | |
463 | } | |
464 | ||
ba3df888 MC |
465 | static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder) |
466 | { | |
467 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
468 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
469 | u32 tmp; | |
470 | enum port port; | |
471 | ||
472 | for_each_dsi_port(port, intel_dsi->ports) { | |
473 | tmp = I915_READ(DDI_BUF_CTL(port)); | |
474 | tmp |= DDI_BUF_CTL_ENABLE; | |
475 | I915_WRITE(DDI_BUF_CTL(port), tmp); | |
476 | ||
477 | if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) & | |
478 | DDI_BUF_IS_IDLE), | |
479 | 500)) | |
480 | DRM_ERROR("DDI port:%c buffer idle\n", port_name(port)); | |
481 | } | |
482 | } | |
483 | ||
70a7b836 MC |
484 | static void gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder) |
485 | { | |
486 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
487 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
488 | u32 tmp; | |
489 | enum port port; | |
490 | ||
491 | /* Program T-INIT master registers */ | |
492 | for_each_dsi_port(port, intel_dsi->ports) { | |
493 | tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port)); | |
494 | tmp &= ~MASTER_INIT_TIMER_MASK; | |
495 | tmp |= intel_dsi->init_count; | |
496 | I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp); | |
497 | } | |
e72cce53 MC |
498 | |
499 | /* Program DPHY clock lanes timings */ | |
500 | for_each_dsi_port(port, intel_dsi->ports) { | |
501 | I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg); | |
502 | ||
503 | /* shadow register inside display core */ | |
504 | I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg); | |
505 | } | |
506 | ||
507 | /* Program DPHY data lanes timings */ | |
508 | for_each_dsi_port(port, intel_dsi->ports) { | |
509 | I915_WRITE(DPHY_DATA_TIMING_PARAM(port), | |
510 | intel_dsi->dphy_data_lane_reg); | |
511 | ||
512 | /* shadow register inside display core */ | |
513 | I915_WRITE(DSI_DATA_TIMING_PARAM(port), | |
514 | intel_dsi->dphy_data_lane_reg); | |
515 | } | |
5fea8645 MC |
516 | |
517 | /* | |
518 | * If DSI link operating at or below an 800 MHz, | |
519 | * TA_SURE should be override and programmed to | |
520 | * a value '0' inside TA_PARAM_REGISTERS otherwise | |
521 | * leave all fields at HW default values. | |
522 | */ | |
523 | if (intel_dsi_bitrate(intel_dsi) <= 800000) { | |
524 | for_each_dsi_port(port, intel_dsi->ports) { | |
525 | tmp = I915_READ(DPHY_TA_TIMING_PARAM(port)); | |
526 | tmp &= ~TA_SURE_MASK; | |
527 | tmp |= TA_SURE_OVERRIDE | TA_SURE(0); | |
528 | I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp); | |
529 | ||
530 | /* shadow register inside display core */ | |
531 | tmp = I915_READ(DSI_TA_TIMING_PARAM(port)); | |
532 | tmp &= ~TA_SURE_MASK; | |
533 | tmp |= TA_SURE_OVERRIDE | TA_SURE(0); | |
534 | I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp); | |
535 | } | |
536 | } | |
70a7b836 MC |
537 | } |
538 | ||
32250c8e MC |
539 | static void gen11_dsi_gate_clocks(struct intel_encoder *encoder) |
540 | { | |
541 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
542 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
543 | u32 tmp; | |
544 | enum port port; | |
545 | ||
546 | mutex_lock(&dev_priv->dpll_lock); | |
547 | tmp = I915_READ(DPCLKA_CFGCR0_ICL); | |
548 | for_each_dsi_port(port, intel_dsi->ports) { | |
549 | tmp |= DPCLKA_CFGCR0_DDI_CLK_OFF(port); | |
550 | } | |
551 | ||
552 | I915_WRITE(DPCLKA_CFGCR0_ICL, tmp); | |
553 | mutex_unlock(&dev_priv->dpll_lock); | |
554 | } | |
555 | ||
1026bea0 MC |
556 | static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder) |
557 | { | |
558 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
559 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
560 | u32 tmp; | |
561 | enum port port; | |
562 | ||
563 | mutex_lock(&dev_priv->dpll_lock); | |
564 | tmp = I915_READ(DPCLKA_CFGCR0_ICL); | |
565 | for_each_dsi_port(port, intel_dsi->ports) { | |
566 | tmp &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port); | |
567 | } | |
568 | ||
569 | I915_WRITE(DPCLKA_CFGCR0_ICL, tmp); | |
570 | mutex_unlock(&dev_priv->dpll_lock); | |
571 | } | |
572 | ||
949fc52a JN |
573 | static void gen11_dsi_map_pll(struct intel_encoder *encoder, |
574 | const struct intel_crtc_state *crtc_state) | |
575 | { | |
576 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
577 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
578 | struct intel_shared_dpll *pll = crtc_state->shared_dpll; | |
579 | enum port port; | |
580 | u32 val; | |
581 | ||
582 | mutex_lock(&dev_priv->dpll_lock); | |
583 | ||
584 | val = I915_READ(DPCLKA_CFGCR0_ICL); | |
585 | for_each_dsi_port(port, intel_dsi->ports) { | |
586 | val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port); | |
587 | val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port); | |
588 | } | |
589 | I915_WRITE(DPCLKA_CFGCR0_ICL, val); | |
590 | POSTING_READ(DPCLKA_CFGCR0_ICL); | |
591 | ||
592 | mutex_unlock(&dev_priv->dpll_lock); | |
593 | } | |
594 | ||
70f4f502 MC |
595 | static void |
596 | gen11_dsi_configure_transcoder(struct intel_encoder *encoder, | |
597 | const struct intel_crtc_state *pipe_config) | |
d364dc66 MC |
598 | { |
599 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
600 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
70f4f502 MC |
601 | struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc); |
602 | enum pipe pipe = intel_crtc->pipe; | |
d364dc66 MC |
603 | u32 tmp; |
604 | enum port port; | |
605 | enum transcoder dsi_trans; | |
606 | ||
607 | for_each_dsi_port(port, intel_dsi->ports) { | |
608 | dsi_trans = dsi_port_to_transcoder(port); | |
609 | tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)); | |
610 | ||
611 | if (intel_dsi->eotp_pkt) | |
612 | tmp &= ~EOTP_DISABLED; | |
613 | else | |
614 | tmp |= EOTP_DISABLED; | |
615 | ||
616 | /* enable link calibration if freq > 1.5Gbps */ | |
617 | if (intel_dsi_bitrate(intel_dsi) >= 1500 * 1000) { | |
618 | tmp &= ~LINK_CALIBRATION_MASK; | |
619 | tmp |= CALIBRATION_ENABLED_INITIAL_ONLY; | |
620 | } | |
621 | ||
622 | /* configure continuous clock */ | |
623 | tmp &= ~CONTINUOUS_CLK_MASK; | |
624 | if (intel_dsi->clock_stop) | |
625 | tmp |= CLK_ENTER_LP_AFTER_DATA; | |
626 | else | |
627 | tmp |= CLK_HS_CONTINUOUS; | |
628 | ||
629 | /* configure buffer threshold limit to minimum */ | |
630 | tmp &= ~PIX_BUF_THRESHOLD_MASK; | |
631 | tmp |= PIX_BUF_THRESHOLD_1_4; | |
632 | ||
633 | /* set virtual channel to '0' */ | |
634 | tmp &= ~PIX_VIRT_CHAN_MASK; | |
635 | tmp |= PIX_VIRT_CHAN(0); | |
636 | ||
637 | /* program BGR transmission */ | |
638 | if (intel_dsi->bgr_enabled) | |
639 | tmp |= BGR_TRANSMISSION; | |
640 | ||
641 | /* select pixel format */ | |
642 | tmp &= ~PIX_FMT_MASK; | |
643 | switch (intel_dsi->pixel_format) { | |
644 | default: | |
645 | MISSING_CASE(intel_dsi->pixel_format); | |
646 | /* fallthrough */ | |
647 | case MIPI_DSI_FMT_RGB565: | |
648 | tmp |= PIX_FMT_RGB565; | |
649 | break; | |
650 | case MIPI_DSI_FMT_RGB666_PACKED: | |
651 | tmp |= PIX_FMT_RGB666_PACKED; | |
652 | break; | |
653 | case MIPI_DSI_FMT_RGB666: | |
654 | tmp |= PIX_FMT_RGB666_LOOSE; | |
655 | break; | |
656 | case MIPI_DSI_FMT_RGB888: | |
657 | tmp |= PIX_FMT_RGB888; | |
658 | break; | |
659 | } | |
660 | ||
661 | /* program DSI operation mode */ | |
662 | if (is_vid_mode(intel_dsi)) { | |
663 | tmp &= ~OP_MODE_MASK; | |
664 | switch (intel_dsi->video_mode_format) { | |
665 | default: | |
666 | MISSING_CASE(intel_dsi->video_mode_format); | |
667 | /* fallthrough */ | |
668 | case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS: | |
669 | tmp |= VIDEO_MODE_SYNC_EVENT; | |
670 | break; | |
671 | case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE: | |
672 | tmp |= VIDEO_MODE_SYNC_PULSE; | |
673 | break; | |
674 | } | |
675 | } | |
676 | ||
677 | I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp); | |
678 | } | |
70f4f502 MC |
679 | |
680 | /* enable port sync mode if dual link */ | |
681 | if (intel_dsi->dual_link) { | |
682 | for_each_dsi_port(port, intel_dsi->ports) { | |
683 | dsi_trans = dsi_port_to_transcoder(port); | |
684 | tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans)); | |
685 | tmp |= PORT_SYNC_MODE_ENABLE; | |
686 | I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp); | |
687 | } | |
688 | ||
5a8507b5 MC |
689 | /* configure stream splitting */ |
690 | configure_dual_link_mode(encoder, pipe_config); | |
70f4f502 MC |
691 | } |
692 | ||
693 | for_each_dsi_port(port, intel_dsi->ports) { | |
694 | dsi_trans = dsi_port_to_transcoder(port); | |
695 | ||
696 | /* select data lane width */ | |
697 | tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans)); | |
698 | tmp &= ~DDI_PORT_WIDTH_MASK; | |
699 | tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count); | |
700 | ||
701 | /* select input pipe */ | |
702 | tmp &= ~TRANS_DDI_EDP_INPUT_MASK; | |
703 | switch (pipe) { | |
704 | default: | |
705 | MISSING_CASE(pipe); | |
706 | /* fallthrough */ | |
707 | case PIPE_A: | |
708 | tmp |= TRANS_DDI_EDP_INPUT_A_ON; | |
709 | break; | |
710 | case PIPE_B: | |
711 | tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF; | |
712 | break; | |
713 | case PIPE_C: | |
714 | tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF; | |
715 | break; | |
716 | } | |
717 | ||
718 | /* enable DDI buffer */ | |
719 | tmp |= TRANS_DDI_FUNC_ENABLE; | |
720 | I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp); | |
721 | } | |
722 | ||
723 | /* wait for link ready */ | |
724 | for_each_dsi_port(port, intel_dsi->ports) { | |
725 | dsi_trans = dsi_port_to_transcoder(port); | |
726 | if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) & | |
727 | LINK_READY), 2500)) | |
728 | DRM_ERROR("DSI link not ready\n"); | |
729 | } | |
d364dc66 MC |
730 | } |
731 | ||
d1aeb5f3 MC |
732 | static void |
733 | gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder, | |
734 | const struct intel_crtc_state *pipe_config) | |
735 | { | |
736 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
737 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
738 | const struct drm_display_mode *adjusted_mode = | |
739 | &pipe_config->base.adjusted_mode; | |
740 | enum port port; | |
741 | enum transcoder dsi_trans; | |
742 | /* horizontal timings */ | |
743 | u16 htotal, hactive, hsync_start, hsync_end, hsync_size; | |
744 | u16 hfront_porch, hback_porch; | |
745 | /* vertical timings */ | |
746 | u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift; | |
747 | ||
748 | hactive = adjusted_mode->crtc_hdisplay; | |
749 | htotal = adjusted_mode->crtc_htotal; | |
750 | hsync_start = adjusted_mode->crtc_hsync_start; | |
751 | hsync_end = adjusted_mode->crtc_hsync_end; | |
752 | hsync_size = hsync_end - hsync_start; | |
753 | hfront_porch = (adjusted_mode->crtc_hsync_start - | |
754 | adjusted_mode->crtc_hdisplay); | |
755 | hback_porch = (adjusted_mode->crtc_htotal - | |
756 | adjusted_mode->crtc_hsync_end); | |
757 | vactive = adjusted_mode->crtc_vdisplay; | |
758 | vtotal = adjusted_mode->crtc_vtotal; | |
759 | vsync_start = adjusted_mode->crtc_vsync_start; | |
760 | vsync_end = adjusted_mode->crtc_vsync_end; | |
761 | vsync_shift = hsync_start - htotal / 2; | |
762 | ||
763 | if (intel_dsi->dual_link) { | |
764 | hactive /= 2; | |
765 | if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) | |
766 | hactive += intel_dsi->pixel_overlap; | |
767 | htotal /= 2; | |
768 | } | |
769 | ||
770 | /* minimum hactive as per bspec: 256 pixels */ | |
771 | if (adjusted_mode->crtc_hdisplay < 256) | |
772 | DRM_ERROR("hactive is less then 256 pixels\n"); | |
773 | ||
774 | /* if RGB666 format, then hactive must be multiple of 4 pixels */ | |
775 | if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0) | |
776 | DRM_ERROR("hactive pixels are not multiple of 4\n"); | |
777 | ||
778 | /* program TRANS_HTOTAL register */ | |
779 | for_each_dsi_port(port, intel_dsi->ports) { | |
780 | dsi_trans = dsi_port_to_transcoder(port); | |
781 | I915_WRITE(HTOTAL(dsi_trans), | |
782 | (hactive - 1) | ((htotal - 1) << 16)); | |
783 | } | |
784 | ||
785 | /* TRANS_HSYNC register to be programmed only for video mode */ | |
786 | if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) { | |
787 | if (intel_dsi->video_mode_format == | |
788 | VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) { | |
789 | /* BSPEC: hsync size should be atleast 16 pixels */ | |
790 | if (hsync_size < 16) | |
791 | DRM_ERROR("hsync size < 16 pixels\n"); | |
792 | } | |
793 | ||
794 | if (hback_porch < 16) | |
795 | DRM_ERROR("hback porch < 16 pixels\n"); | |
796 | ||
797 | if (intel_dsi->dual_link) { | |
798 | hsync_start /= 2; | |
799 | hsync_end /= 2; | |
800 | } | |
801 | ||
802 | for_each_dsi_port(port, intel_dsi->ports) { | |
803 | dsi_trans = dsi_port_to_transcoder(port); | |
804 | I915_WRITE(HSYNC(dsi_trans), | |
805 | (hsync_start - 1) | ((hsync_end - 1) << 16)); | |
806 | } | |
807 | } | |
808 | ||
809 | /* program TRANS_VTOTAL register */ | |
810 | for_each_dsi_port(port, intel_dsi->ports) { | |
811 | dsi_trans = dsi_port_to_transcoder(port); | |
812 | /* | |
813 | * FIXME: Programing this by assuming progressive mode, since | |
814 | * non-interlaced info from VBT is not saved inside | |
815 | * struct drm_display_mode. | |
816 | * For interlace mode: program required pixel minus 2 | |
817 | */ | |
818 | I915_WRITE(VTOTAL(dsi_trans), | |
819 | (vactive - 1) | ((vtotal - 1) << 16)); | |
820 | } | |
821 | ||
822 | if (vsync_end < vsync_start || vsync_end > vtotal) | |
823 | DRM_ERROR("Invalid vsync_end value\n"); | |
824 | ||
825 | if (vsync_start < vactive) | |
826 | DRM_ERROR("vsync_start less than vactive\n"); | |
827 | ||
828 | /* program TRANS_VSYNC register */ | |
829 | for_each_dsi_port(port, intel_dsi->ports) { | |
830 | dsi_trans = dsi_port_to_transcoder(port); | |
831 | I915_WRITE(VSYNC(dsi_trans), | |
832 | (vsync_start - 1) | ((vsync_end - 1) << 16)); | |
833 | } | |
834 | ||
835 | /* | |
836 | * FIXME: It has to be programmed only for interlaced | |
837 | * modes. Put the check condition here once interlaced | |
838 | * info available as described above. | |
839 | * program TRANS_VSYNCSHIFT register | |
840 | */ | |
841 | for_each_dsi_port(port, intel_dsi->ports) { | |
842 | dsi_trans = dsi_port_to_transcoder(port); | |
843 | I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift); | |
844 | } | |
845 | } | |
846 | ||
303e347c MC |
847 | static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder) |
848 | { | |
849 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
850 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
851 | enum port port; | |
852 | enum transcoder dsi_trans; | |
853 | u32 tmp; | |
854 | ||
855 | for_each_dsi_port(port, intel_dsi->ports) { | |
856 | dsi_trans = dsi_port_to_transcoder(port); | |
857 | tmp = I915_READ(PIPECONF(dsi_trans)); | |
858 | tmp |= PIPECONF_ENABLE; | |
859 | I915_WRITE(PIPECONF(dsi_trans), tmp); | |
860 | ||
861 | /* wait for transcoder to be enabled */ | |
862 | if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans), | |
863 | I965_PIPECONF_ACTIVE, | |
864 | I965_PIPECONF_ACTIVE, 10)) | |
865 | DRM_ERROR("DSI transcoder not enabled\n"); | |
866 | } | |
867 | } | |
868 | ||
5a4712f4 MC |
/*
 * Program the DSI protocol timeouts (HS TX, LP RX and bus turnaround)
 * for each DSI transcoder. The VBT values are byte-clock counts; they
 * are converted into escape-clock counts as the registers expect.
 */
static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;

	/*
	 * escape clock count calculation:
	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
	 * UI (nsec) = (10^6)/Bitrate
	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
	 * ESCAPE_CLK_COUNT = TIME_NS/ESC_CLK_NS
	 */
	divisor = intel_dsi_tlpx_ns(intel_dsi) * intel_dsi_bitrate(intel_dsi) * 1000;
	mul = 8 * 1000000;
	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
				     divisor);
	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* program hst_tx_timeout */
		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);

		/* FIXME: DSI_CALIB_TO */

		/* program lp_rx_host timeout */
		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);

		/* FIXME: DSI_PWAIT_TO */

		/* program turn around timeout */
		tmp = I915_READ(DSI_TA_TO(dsi_trans));
		tmp &= ~TA_TIMEOUT_VALUE_MASK;
		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
	}
}
917 | ||
70f4f502 MC |
/*
 * Bspec display init step 4: bring up the DSI port and the combo PHY,
 * configure the transcoder, then gate the DDI clocks again. The
 * sub-steps below must run in exactly this order.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, pipe_config);

	/* Step 4l: Gate DDI clocks */
	gen11_dsi_gate_clocks(encoder);
}
946 | ||
bfee32bf MC |
/*
 * Bspec init step 5: program the maximum return packet size for each
 * DSI host, run the panel power-on VBT sequences, and make sure every
 * command has reached the panel before the transcoder is enabled.
 */
static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;
	int ret;

	/* set maximum return packet size */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/*
		 * FIXME: This uses the number of DW's currently in the payload
		 * receive queue. This is probably not what we want here.
		 */
		tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans));
		tmp &= NUMBER_RX_PLOAD_DW_MASK;
		/* multiply "Number Rx Payload DW" by 4 to get max value */
		tmp = tmp * 4;
		dsi = intel_dsi->dsi_hosts[port]->device;
		ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
		if (ret < 0)
			DRM_ERROR("error setting max return pkt size%d\n", tmp);
	}

	/* panel power on related mipi dsi vbt sequences */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
	intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);

	/* ensure all panel commands dispatched before enabling transcoder */
	wait_for_cmds_dispatched_to_panel(encoder);
}
985 | ||
95f2f4db VK |
/*
 * Encoder hook run before the DSI PLL is enabled: power up the DSI IO
 * and program the escape clock divider (Bspec init steps 2 and 3).
 */
static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config,
				     const struct drm_connector_state *conn_state)
{
	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder);
}
996 | ||
/*
 * Main enable sequence for the gen11 DSI encoder: map the PLL to the
 * port, bring up port and PHY, power up the panel, program transcoder
 * timings, enable the transcoder and finally turn on the backlight.
 * Step numbers follow the Bspec display initialization sequence.
 */
static void gen11_dsi_pre_enable(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step3b */
	gen11_dsi_map_pll(encoder, pipe_config);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_panel_enable_backlight(pipe_config, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
}
d9d996b6 | 1022 | |
4e123bd3 MC |
1023 | static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder) |
1024 | { | |
1025 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
1026 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1027 | enum port port; | |
1028 | enum transcoder dsi_trans; | |
1029 | u32 tmp; | |
1030 | ||
1031 | for_each_dsi_port(port, intel_dsi->ports) { | |
1032 | dsi_trans = dsi_port_to_transcoder(port); | |
1033 | ||
1034 | /* disable transcoder */ | |
1035 | tmp = I915_READ(PIPECONF(dsi_trans)); | |
1036 | tmp &= ~PIPECONF_ENABLE; | |
1037 | I915_WRITE(PIPECONF(dsi_trans), tmp); | |
1038 | ||
1039 | /* wait for transcoder to be disabled */ | |
1040 | if (intel_wait_for_register(dev_priv, PIPECONF(dsi_trans), | |
1041 | I965_PIPECONF_ACTIVE, 0, 50)) | |
1042 | DRM_ERROR("DSI trancoder not disabled\n"); | |
1043 | } | |
1044 | } | |
1045 | ||
522cc3f7 MC |
/*
 * Run the panel power-off VBT sequences and wait until every resulting
 * command has been dispatched to the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
1057 | ||
4769b598 MC |
/*
 * Teardown counterpart of the transcoder configuration: put the DSI
 * link into ULPS, disable the DDI function, and for dual link also
 * disable port sync mode.
 *
 * NOTE(review): "trancoder" in the function name is a typo for
 * "transcoder"; kept as-is because callers use this spelling.
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);

		/* wait up to 10 us for the link to report ULPS entry */
		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
				LINK_IN_ULPS),
				10))
			DRM_ERROR("DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
1098 | ||
019cec36 MC |
1099 | static void gen11_dsi_disable_port(struct intel_encoder *encoder) |
1100 | { | |
1101 | struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); | |
1102 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1103 | u32 tmp; | |
1104 | enum port port; | |
1105 | ||
1026bea0 | 1106 | gen11_dsi_ungate_clocks(encoder); |
019cec36 MC |
1107 | for_each_dsi_port(port, intel_dsi->ports) { |
1108 | tmp = I915_READ(DDI_BUF_CTL(port)); | |
1109 | tmp &= ~DDI_BUF_CTL_ENABLE; | |
1110 | I915_WRITE(DDI_BUF_CTL(port), tmp); | |
1111 | ||
1112 | if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) & | |
1113 | DDI_BUF_IS_IDLE), | |
1114 | 8)) | |
1115 | DRM_ERROR("DDI port:%c buffer not idle\n", | |
1116 | port_name(port)); | |
1117 | } | |
1026bea0 | 1118 | gen11_dsi_ungate_clocks(encoder); |
019cec36 MC |
1119 | } |
1120 | ||
0f0fe849 MC |
/*
 * Release the DDI IO power references taken during enable and switch
 * the combo PHY pins back to DDI mode.
 *
 * NOTE(review): the DDI A IO reference is dropped unconditionally —
 * this assumes a gen11 DSI encoder always includes port A; verify
 * against the enable path and gen11_dsi_get_power_domains().
 */
static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_A_IO);

	if (intel_dsi->dual_link)
		intel_display_power_put(dev_priv, POWER_DOMAIN_PORT_DDI_B_IO);

	/* set mode to DDI */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp &= ~COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}
}
1140 | ||
e2758048 MC |
/*
 * Encoder disable hook: the reverse of gen11_dsi_pre_enable(). Step
 * numbers follow the Bspec display disable sequence; the order of the
 * calls below must not change.
 */
static void gen11_dsi_disable(struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}
bf4d57ff | 1166 | |
8327af28 VK |
/*
 * Encoder get_config hook: derive the port clock from the committed
 * shared DPLL and report the fixed panel pixel clock and output type.
 */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 pll_id;

	/* FIXME: adapt icl_ddi_clock_get() for DSI and use that? */
	pll_id = intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll);
	pipe_config->port_clock = cnl_calc_wrpll_link(dev_priv, pll_id);
	pipe_config->base.adjusted_mode.crtc_clock = intel_dsi->pclk;
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
}
1180 | ||
d04afb15 MC |
/*
 * Encoder compute_config hook: fix up the adjusted mode from the VBT
 * fixed panel mode, select the DSI transcoder and derive the port
 * clock from the DSI bitrate.
 */
static bool gen11_dsi_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
						   base);
	struct intel_connector *intel_connector = intel_dsi->attached_connector;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
	const struct drm_display_mode *fixed_mode =
					intel_connector->panel.fixed_mode;
	struct drm_display_mode *adjusted_mode =
					&pipe_config->base.adjusted_mode;

	intel_fixed_panel_mode(fixed_mode, adjusted_mode);
	intel_pch_panel_fitting(crtc, pipe_config, conn_state->scaling_mode);

	adjusted_mode->flags = 0;

	/*
	 * Single link on port B uses transcoder DSI 1; everything else
	 * (port A alone, or dual link) goes to transcoder DSI 0.
	 */
	if (intel_dsi->ports == BIT(PORT_B))
		pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
	else
		pipe_config->cpu_transcoder = TRANSCODER_DSI_0;

	pipe_config->clock_set = true;
	/* port clock is the DSI bitrate / 5 — assumes AFE clock ratio; TODO confirm vs bspec */
	pipe_config->port_clock = intel_dsi_bitrate(intel_dsi) / 5;

	return true;
}
1210 | ||
ab841148 MC |
1211 | static u64 gen11_dsi_get_power_domains(struct intel_encoder *encoder, |
1212 | struct intel_crtc_state *crtc_state) | |
1213 | { | |
1214 | struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base); | |
1215 | u64 domains = 0; | |
1216 | enum port port; | |
1217 | ||
1218 | for_each_dsi_port(port, intel_dsi->ports) | |
1219 | if (port == PORT_A) | |
1220 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_A_IO); | |
1221 | else | |
1222 | domains |= BIT_ULL(POWER_DOMAIN_PORT_DDI_B_IO); | |
1223 | ||
1224 | return domains; | |
1225 | } | |
1226 | ||
/*
 * Encoder get_hw_state hook: report whether the DSI transcoder(s) are
 * enabled and which pipe feeds them.
 *
 * NOTE(review): for dual link, *pipe and the return value reflect the
 * last port iterated — this assumes both links are configured
 * identically; verify.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;
	bool ret = false;

	/* bail out if the power well backing this encoder is off */
	if (!intel_display_power_get_if_enabled(dev_priv,
						encoder->power_domain))
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		default:
			DRM_ERROR("Invalid PIPE input\n");
			goto out;
		}

		tmp = I915_READ(PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	intel_display_power_put(dev_priv, encoder->power_domain);
	return ret;
}
1266 | ||
e2758048 MC |
/* drm_encoder_funcs.destroy: frees the encoder allocated in icl_dsi_init() */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1271 | ||
/* DRM encoder vtable for the gen11 DSI encoder */
static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};
1275 | ||
/* DRM connector vtable: standard atomic helpers plus i915 connector hooks */
static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1286 | ||
/* Connector helper vtable: mode listing/validation comes from intel_dsi */
static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = intel_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};
1292 | ||
c5f9c934 MC |
/* mipi_dsi_host_ops.attach: nothing to do, panel setup is driven by VBT */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1298 | ||
/* mipi_dsi_host_ops.detach: nothing to undo, attach did nothing */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1304 | ||
/*
 * mipi_dsi_host_ops.transfer: build a DSI packet from @msg, push the
 * header (and, for long packets, the payload) to the hardware command
 * FIFOs, and return the number of bytes handed to the hardware, or a
 * negative error code.
 */
static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
				       const struct mipi_dsi_msg *msg)
{
	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
	struct mipi_dsi_packet dsi_pkt;
	ssize_t ret;
	bool enable_lpdt = false;

	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
	if (ret < 0)
		return ret;

	/* transmit in low-power mode when the caller asks for it */
	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
		enable_lpdt = true;

	/* send packet header */
	ret = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt);
	if (ret < 0)
		return ret;

	/* only long packet contains payload */
	if (mipi_dsi_packet_format_is_long(msg->type)) {
		ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt);
		if (ret < 0)
			return ret;
	}

	//TODO: add payload receive code if needed

	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;

	return ret;
}
1338 | ||
/* MIPI DSI host callbacks registered per DSI port in icl_dsi_init() */
static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};
1344 | ||
bf4d57ff MC |
/*
 * Entry point: probe the VBT for a DSI panel and, if present, create
 * and register the gen11 DSI encoder and connector, pull the fixed
 * panel mode from the VBT, and initialize one DSI host per port.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	struct drm_display_mode *scan, *fixed_mode = NULL;
	enum port port;

	/* nothing to do when the VBT declares no DSI panel */
	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	encoder->crtc_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C);
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	/* fill mode info from VBT */
	mutex_lock(&dev->mode_config.mutex);
	intel_dsi_vbt_get_modes(intel_dsi);
	list_for_each_entry(scan, &connector->probed_modes, head) {
		if (scan->type & DRM_MODE_TYPE_PREFERRED) {
			fixed_mode = drm_mode_duplicate(dev, scan);
			break;
		}
	}
	mutex_unlock(&dev->mode_config.mutex);

	if (!fixed_mode) {
		DRM_ERROR("DSI fixed mode info missing\n");
		goto err;
	}

	connector->display_info.width_mm = fixed_mode->width_mm;
	connector->display_info.height_mm = fixed_mode->height_mm;
	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
	intel_panel_setup_backlight(connector, INVALID_PIPE);

	/* dual link always uses both ports A and B */
	if (dev_priv->vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;

	/* one MIPI DSI host per active port */
	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		DRM_DEBUG_KMS("no device found\n");
		goto err;
	}

	return;

err:
	/*
	 * NOTE(review): drm_connector_init() has succeeded by the time we
	 * can reach here, but drm_connector_cleanup() is never called
	 * before freeing intel_connector — verify whether connector state
	 * leaks on this path.
	 */
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}