1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright (c) 2015-2018 The Linux Foundation. All rights reserved.
6 #define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__
7 #include "dpu_encoder_phys.h"
8 #include "dpu_hw_interrupts.h"
9 #include "dpu_core_irq.h"
10 #include "dpu_formats.h"
11 #include "dpu_trace.h"
/* Debug/error print helpers that tag messages with encoder id and intf index;
 * both tolerate a NULL encoder or parent by printing -1.
 */
#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

/* number of consecutive pingpong-done timeouts before declaring panel dead */
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, may want to allow panels to override
 * these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000
38 static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
40 return (phys_enc->split_role != ENC_ROLE_SLAVE) ? true : false;
43 static bool dpu_encoder_phys_cmd_mode_fixup(
44 struct dpu_encoder_phys *phys_enc,
45 const struct drm_display_mode *mode,
46 struct drm_display_mode *adj_mode)
49 DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc), "\n");
53 static void _dpu_encoder_phys_cmd_update_intf_cfg(
54 struct dpu_encoder_phys *phys_enc)
56 struct dpu_encoder_phys_cmd *cmd_enc =
57 to_dpu_encoder_phys_cmd(phys_enc);
58 struct dpu_hw_ctl *ctl;
59 struct dpu_hw_intf_cfg intf_cfg = { 0 };
64 ctl = phys_enc->hw_ctl;
65 if (!ctl || !ctl->ops.setup_intf_cfg)
68 intf_cfg.intf = phys_enc->intf_idx;
69 intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
70 intf_cfg.stream_sel = cmd_enc->stream_sel;
71 intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
72 ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
75 static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
77 struct dpu_encoder_phys *phys_enc = arg;
78 unsigned long lock_flags;
80 u32 event = DPU_ENCODER_FRAME_EVENT_DONE;
82 if (!phys_enc || !phys_enc->hw_pp)
85 DPU_ATRACE_BEGIN("pp_done_irq");
86 /* notify all synchronous clients first, then asynchronous clients */
87 if (phys_enc->parent_ops->handle_frame_done)
88 phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
91 spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
92 new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
93 spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);
95 trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
96 phys_enc->hw_pp->idx - PINGPONG_0,
99 /* Signal any waiting atomic commit thread */
100 wake_up_all(&phys_enc->pending_kickoff_wq);
101 DPU_ATRACE_END("pp_done_irq");
104 static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
106 struct dpu_encoder_phys *phys_enc = arg;
107 struct dpu_encoder_phys_cmd *cmd_enc;
109 if (!phys_enc || !phys_enc->hw_pp)
112 DPU_ATRACE_BEGIN("rd_ptr_irq");
113 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
115 if (phys_enc->parent_ops->handle_vblank_virt)
116 phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
119 atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
120 wake_up_all(&cmd_enc->pending_vblank_wq);
121 DPU_ATRACE_END("rd_ptr_irq");
124 static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
126 struct dpu_encoder_phys *phys_enc = arg;
127 struct dpu_encoder_phys_cmd *cmd_enc;
129 if (!phys_enc || !phys_enc->hw_ctl)
132 DPU_ATRACE_BEGIN("ctl_start_irq");
133 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
135 atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);
137 /* Signal any waiting ctl start interrupt */
138 wake_up_all(&phys_enc->pending_kickoff_wq);
139 DPU_ATRACE_END("ctl_start_irq");
142 static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
144 struct dpu_encoder_phys *phys_enc = arg;
149 if (phys_enc->parent_ops->handle_underrun_virt)
150 phys_enc->parent_ops->handle_underrun_virt(phys_enc->parent,
154 static void _dpu_encoder_phys_cmd_setup_irq_hw_idx(
155 struct dpu_encoder_phys *phys_enc)
157 struct dpu_encoder_irq *irq;
159 irq = &phys_enc->irq[INTR_IDX_CTL_START];
160 irq->hw_idx = phys_enc->hw_ctl->idx;
161 irq->irq_idx = -EINVAL;
163 irq = &phys_enc->irq[INTR_IDX_PINGPONG];
164 irq->hw_idx = phys_enc->hw_pp->idx;
165 irq->irq_idx = -EINVAL;
167 irq = &phys_enc->irq[INTR_IDX_RDPTR];
168 irq->hw_idx = phys_enc->hw_pp->idx;
169 irq->irq_idx = -EINVAL;
171 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
172 irq->hw_idx = phys_enc->intf_idx;
173 irq->irq_idx = -EINVAL;
176 static void dpu_encoder_phys_cmd_mode_set(
177 struct dpu_encoder_phys *phys_enc,
178 struct drm_display_mode *mode,
179 struct drm_display_mode *adj_mode)
181 struct dpu_encoder_phys_cmd *cmd_enc =
182 to_dpu_encoder_phys_cmd(phys_enc);
184 if (!phys_enc || !mode || !adj_mode) {
185 DPU_ERROR("invalid args\n");
188 phys_enc->cached_mode = *adj_mode;
189 DPU_DEBUG_CMDENC(cmd_enc, "caching mode:\n");
190 drm_mode_debug_printmodeline(adj_mode);
192 _dpu_encoder_phys_cmd_setup_irq_hw_idx(phys_enc);
195 static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
196 struct dpu_encoder_phys *phys_enc)
198 struct dpu_encoder_phys_cmd *cmd_enc =
199 to_dpu_encoder_phys_cmd(phys_enc);
200 u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
203 if (!phys_enc || !phys_enc->hw_pp || !phys_enc->hw_ctl)
206 cmd_enc->pp_timeout_report_cnt++;
207 if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
208 frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
210 } else if (cmd_enc->pp_timeout_report_cnt == 1) {
214 trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(phys_enc->parent),
215 phys_enc->hw_pp->idx - PINGPONG_0,
216 cmd_enc->pp_timeout_report_cnt,
217 atomic_read(&phys_enc->pending_kickoff_cnt),
220 /* to avoid flooding, only log first time, and "dead" time */
222 DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
223 DRMID(phys_enc->parent),
224 phys_enc->hw_pp->idx - PINGPONG_0,
225 phys_enc->hw_ctl->idx - CTL_0,
226 cmd_enc->pp_timeout_report_cnt,
227 atomic_read(&phys_enc->pending_kickoff_cnt));
229 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_RDPTR);
232 atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
234 /* request a ctl reset before the next kickoff */
235 phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;
237 if (phys_enc->parent_ops->handle_frame_done)
238 phys_enc->parent_ops->handle_frame_done(
239 phys_enc->parent, phys_enc, frame_event);
244 static int _dpu_encoder_phys_cmd_wait_for_idle(
245 struct dpu_encoder_phys *phys_enc)
247 struct dpu_encoder_phys_cmd *cmd_enc =
248 to_dpu_encoder_phys_cmd(phys_enc);
249 struct dpu_encoder_wait_info wait_info;
253 DPU_ERROR("invalid encoder\n");
257 wait_info.wq = &phys_enc->pending_kickoff_wq;
258 wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
259 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
261 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_PINGPONG,
263 if (ret == -ETIMEDOUT)
264 _dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
266 cmd_enc->pp_timeout_report_cnt = 0;
271 static int dpu_encoder_phys_cmd_control_vblank_irq(
272 struct dpu_encoder_phys *phys_enc,
278 if (!phys_enc || !phys_enc->hw_pp) {
279 DPU_ERROR("invalid encoder\n");
283 refcount = atomic_read(&phys_enc->vblank_refcount);
285 /* Slave encoders don't report vblank */
286 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
289 /* protect against negative */
290 if (!enable && refcount == 0) {
295 DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
296 phys_enc->hw_pp->idx - PINGPONG_0,
297 enable ? "true" : "false", refcount);
299 if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
300 ret = dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_RDPTR);
301 else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
302 ret = dpu_encoder_helper_unregister_irq(phys_enc,
307 DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
308 DRMID(phys_enc->parent),
309 phys_enc->hw_pp->idx - PINGPONG_0, ret,
310 enable ? "true" : "false", refcount);
316 static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
319 struct dpu_encoder_phys_cmd *cmd_enc;
324 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
326 trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
327 phys_enc->hw_pp->idx - PINGPONG_0,
328 enable, atomic_read(&phys_enc->vblank_refcount));
331 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_PINGPONG);
332 dpu_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
333 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);
335 if (dpu_encoder_phys_cmd_is_master(phys_enc))
336 dpu_encoder_helper_register_irq(phys_enc,
339 if (dpu_encoder_phys_cmd_is_master(phys_enc))
340 dpu_encoder_helper_unregister_irq(phys_enc,
343 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
344 dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
345 dpu_encoder_helper_unregister_irq(phys_enc, INTR_IDX_PINGPONG);
349 static void dpu_encoder_phys_cmd_tearcheck_config(
350 struct dpu_encoder_phys *phys_enc)
352 struct dpu_encoder_phys_cmd *cmd_enc =
353 to_dpu_encoder_phys_cmd(phys_enc);
354 struct dpu_hw_tear_check tc_cfg = { 0 };
355 struct drm_display_mode *mode;
356 bool tc_enable = true;
358 struct msm_drm_private *priv;
359 struct dpu_kms *dpu_kms;
361 if (!phys_enc || !phys_enc->hw_pp) {
362 DPU_ERROR("invalid encoder\n");
365 mode = &phys_enc->cached_mode;
367 DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
369 if (!phys_enc->hw_pp->ops.setup_tearcheck ||
370 !phys_enc->hw_pp->ops.enable_tearcheck) {
371 DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
375 dpu_kms = phys_enc->dpu_kms;
376 if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
377 DPU_ERROR("invalid device\n");
380 priv = dpu_kms->dev->dev_private;
383 * TE default: dsi byte clock calculated base on 70 fps;
384 * around 14 ms to complete a kickoff cycle if te disabled;
385 * vclk_line base on 60 fps; write is faster than read;
386 * init == start == rdptr;
388 * vsync_count is ratio of MDP VSYNC clock frequency to LCD panel
389 * frequency divided by the no. of rows (lines) in the LCDpanel.
391 vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
393 DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
398 tc_cfg.vsync_count = vsync_hz /
399 (mode->vtotal * drm_mode_vrefresh(mode));
401 /* enable external TE after kickoff to avoid premature autorefresh */
402 tc_cfg.hw_vsync_mode = 0;
405 * By setting sync_cfg_height to near max register value, we essentially
406 * disable dpu hw generated TE signal, since hw TE will arrive first.
407 * Only caveat is if due to error, we hit wrap-around.
409 tc_cfg.sync_cfg_height = 0xFFF0;
410 tc_cfg.vsync_init_val = mode->vdisplay;
411 tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
412 tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
413 tc_cfg.start_pos = mode->vdisplay;
414 tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
416 DPU_DEBUG_CMDENC(cmd_enc,
417 "tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
418 phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
419 mode->vtotal, drm_mode_vrefresh(mode));
420 DPU_DEBUG_CMDENC(cmd_enc,
421 "tc %d enable %u start_pos %u rd_ptr_irq %u\n",
422 phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
424 DPU_DEBUG_CMDENC(cmd_enc,
425 "tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
426 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
427 tc_cfg.vsync_count, tc_cfg.vsync_init_val);
428 DPU_DEBUG_CMDENC(cmd_enc,
429 "tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
430 phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
431 tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);
433 phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
434 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
437 static void _dpu_encoder_phys_cmd_pingpong_config(
438 struct dpu_encoder_phys *phys_enc)
440 struct dpu_encoder_phys_cmd *cmd_enc =
441 to_dpu_encoder_phys_cmd(phys_enc);
443 if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp
444 || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
445 DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != 0);
449 DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
450 phys_enc->hw_pp->idx - PINGPONG_0);
451 drm_mode_debug_printmodeline(&phys_enc->cached_mode);
453 _dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
454 dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
457 static bool dpu_encoder_phys_cmd_needs_single_flush(
458 struct dpu_encoder_phys *phys_enc)
461 * we do separate flush for each CTL and let
462 * CTL_START synchronize them
467 static void dpu_encoder_phys_cmd_enable_helper(
468 struct dpu_encoder_phys *phys_enc)
470 struct dpu_hw_ctl *ctl;
473 if (!phys_enc || !phys_enc->hw_ctl || !phys_enc->hw_pp) {
474 DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
478 dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);
480 _dpu_encoder_phys_cmd_pingpong_config(phys_enc);
482 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
485 ctl = phys_enc->hw_ctl;
486 ctl->ops.get_bitmask_intf(ctl, &flush_mask, phys_enc->intf_idx);
487 ctl->ops.update_pending_flush(ctl, flush_mask);
490 static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
492 struct dpu_encoder_phys_cmd *cmd_enc =
493 to_dpu_encoder_phys_cmd(phys_enc);
495 if (!phys_enc || !phys_enc->hw_pp) {
496 DPU_ERROR("invalid phys encoder\n");
500 DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
502 if (phys_enc->enable_state == DPU_ENC_ENABLED) {
503 DPU_ERROR("already enabled\n");
507 dpu_encoder_phys_cmd_enable_helper(phys_enc);
508 phys_enc->enable_state = DPU_ENC_ENABLED;
511 static void _dpu_encoder_phys_cmd_connect_te(
512 struct dpu_encoder_phys *phys_enc, bool enable)
514 if (!phys_enc || !phys_enc->hw_pp ||
515 !phys_enc->hw_pp->ops.connect_external_te)
518 trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
519 phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
522 static void dpu_encoder_phys_cmd_prepare_idle_pc(
523 struct dpu_encoder_phys *phys_enc)
525 _dpu_encoder_phys_cmd_connect_te(phys_enc, false);
528 static int dpu_encoder_phys_cmd_get_line_count(
529 struct dpu_encoder_phys *phys_enc)
531 struct dpu_hw_pingpong *hw_pp;
533 if (!phys_enc || !phys_enc->hw_pp)
536 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
539 hw_pp = phys_enc->hw_pp;
540 if (!hw_pp->ops.get_line_count)
543 return hw_pp->ops.get_line_count(hw_pp);
546 static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
548 struct dpu_encoder_phys_cmd *cmd_enc =
549 to_dpu_encoder_phys_cmd(phys_enc);
551 if (!phys_enc || !phys_enc->hw_pp) {
552 DPU_ERROR("invalid encoder\n");
555 DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
556 phys_enc->hw_pp->idx - PINGPONG_0,
557 phys_enc->enable_state);
559 if (phys_enc->enable_state == DPU_ENC_DISABLED) {
560 DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
564 if (phys_enc->hw_pp->ops.enable_tearcheck)
565 phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);
566 phys_enc->enable_state = DPU_ENC_DISABLED;
/* Free the phys encoder allocated by dpu_encoder_phys_cmd_init(). */
static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	kfree(cmd_enc);
}
581 static void dpu_encoder_phys_cmd_get_hw_resources(
582 struct dpu_encoder_phys *phys_enc,
583 struct dpu_encoder_hw_resources *hw_res)
585 hw_res->intfs[phys_enc->intf_idx - INTF_0] = INTF_MODE_CMD;
588 static void dpu_encoder_phys_cmd_prepare_for_kickoff(
589 struct dpu_encoder_phys *phys_enc)
591 struct dpu_encoder_phys_cmd *cmd_enc =
592 to_dpu_encoder_phys_cmd(phys_enc);
595 if (!phys_enc || !phys_enc->hw_pp) {
596 DPU_ERROR("invalid encoder\n");
599 DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
600 phys_enc->hw_pp->idx - PINGPONG_0,
601 atomic_read(&phys_enc->pending_kickoff_cnt));
604 * Mark kickoff request as outstanding. If there are more than one,
605 * outstanding, then we have to wait for the previous one to complete
607 ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
609 /* force pending_kickoff_cnt 0 to discard failed kickoff */
610 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
611 DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
612 DRMID(phys_enc->parent), ret,
613 phys_enc->hw_pp->idx - PINGPONG_0);
616 DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
617 phys_enc->hw_pp->idx - PINGPONG_0,
618 atomic_read(&phys_enc->pending_kickoff_cnt));
621 static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
622 struct dpu_encoder_phys *phys_enc)
624 struct dpu_encoder_phys_cmd *cmd_enc =
625 to_dpu_encoder_phys_cmd(phys_enc);
626 struct dpu_encoder_wait_info wait_info;
629 if (!phys_enc || !phys_enc->hw_ctl) {
630 DPU_ERROR("invalid argument(s)\n");
634 wait_info.wq = &phys_enc->pending_kickoff_wq;
635 wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
636 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
638 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_CTL_START,
640 if (ret == -ETIMEDOUT) {
641 DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
649 static int dpu_encoder_phys_cmd_wait_for_tx_complete(
650 struct dpu_encoder_phys *phys_enc)
653 struct dpu_encoder_phys_cmd *cmd_enc;
658 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
660 rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
662 DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
663 DRMID(phys_enc->parent), rc,
664 phys_enc->intf_idx - INTF_0);
670 static int dpu_encoder_phys_cmd_wait_for_commit_done(
671 struct dpu_encoder_phys *phys_enc)
674 struct dpu_encoder_phys_cmd *cmd_enc;
679 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
681 /* only required for master controller */
682 if (dpu_encoder_phys_cmd_is_master(phys_enc))
683 rc = _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
685 /* required for both controllers */
686 if (!rc && cmd_enc->serialize_wait4pp)
687 dpu_encoder_phys_cmd_prepare_for_kickoff(phys_enc);
692 static int dpu_encoder_phys_cmd_wait_for_vblank(
693 struct dpu_encoder_phys *phys_enc)
696 struct dpu_encoder_phys_cmd *cmd_enc;
697 struct dpu_encoder_wait_info wait_info;
702 cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);
704 /* only required for master controller */
705 if (!dpu_encoder_phys_cmd_is_master(phys_enc))
708 wait_info.wq = &cmd_enc->pending_vblank_wq;
709 wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
710 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
712 atomic_inc(&cmd_enc->pending_vblank_cnt);
714 rc = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_RDPTR,
720 static void dpu_encoder_phys_cmd_handle_post_kickoff(
721 struct dpu_encoder_phys *phys_enc)
727 * re-enable external TE, either for the first time after enabling
728 * or if disabled for Autorefresh
730 _dpu_encoder_phys_cmd_connect_te(phys_enc, true);
/* Kick the CTL_START trigger via the shared encoder helper. */
static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;

	dpu_encoder_helper_trigger_start(phys_enc);
}
742 static void dpu_encoder_phys_cmd_init_ops(
743 struct dpu_encoder_phys_ops *ops)
745 ops->is_master = dpu_encoder_phys_cmd_is_master;
746 ops->mode_set = dpu_encoder_phys_cmd_mode_set;
747 ops->mode_fixup = dpu_encoder_phys_cmd_mode_fixup;
748 ops->enable = dpu_encoder_phys_cmd_enable;
749 ops->disable = dpu_encoder_phys_cmd_disable;
750 ops->destroy = dpu_encoder_phys_cmd_destroy;
751 ops->get_hw_resources = dpu_encoder_phys_cmd_get_hw_resources;
752 ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
753 ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
754 ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
755 ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
756 ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
757 ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
758 ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
759 ops->irq_control = dpu_encoder_phys_cmd_irq_control;
760 ops->restore = dpu_encoder_phys_cmd_enable_helper;
761 ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
762 ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
763 ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
766 struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
767 struct dpu_enc_phys_init_params *p)
769 struct dpu_encoder_phys *phys_enc = NULL;
770 struct dpu_encoder_phys_cmd *cmd_enc = NULL;
771 struct dpu_encoder_irq *irq;
774 DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);
776 cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
779 DPU_ERROR("failed to allocate\n");
782 phys_enc = &cmd_enc->base;
783 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
784 phys_enc->intf_idx = p->intf_idx;
786 dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
787 phys_enc->parent = p->parent;
788 phys_enc->parent_ops = p->parent_ops;
789 phys_enc->dpu_kms = p->dpu_kms;
790 phys_enc->split_role = p->split_role;
791 phys_enc->intf_mode = INTF_MODE_CMD;
792 phys_enc->enc_spinlock = p->enc_spinlock;
793 cmd_enc->stream_sel = 0;
794 phys_enc->enable_state = DPU_ENC_DISABLED;
795 for (i = 0; i < INTR_IDX_MAX; i++) {
796 irq = &phys_enc->irq[i];
797 INIT_LIST_HEAD(&irq->cb.list);
798 irq->irq_idx = -EINVAL;
799 irq->hw_idx = -EINVAL;
800 irq->cb.arg = phys_enc;
803 irq = &phys_enc->irq[INTR_IDX_CTL_START];
804 irq->name = "ctl_start";
805 irq->intr_type = DPU_IRQ_TYPE_CTL_START;
806 irq->intr_idx = INTR_IDX_CTL_START;
807 irq->cb.func = dpu_encoder_phys_cmd_ctl_start_irq;
809 irq = &phys_enc->irq[INTR_IDX_PINGPONG];
810 irq->name = "pp_done";
811 irq->intr_type = DPU_IRQ_TYPE_PING_PONG_COMP;
812 irq->intr_idx = INTR_IDX_PINGPONG;
813 irq->cb.func = dpu_encoder_phys_cmd_pp_tx_done_irq;
815 irq = &phys_enc->irq[INTR_IDX_RDPTR];
816 irq->name = "pp_rd_ptr";
817 irq->intr_type = DPU_IRQ_TYPE_PING_PONG_RD_PTR;
818 irq->intr_idx = INTR_IDX_RDPTR;
819 irq->cb.func = dpu_encoder_phys_cmd_pp_rd_ptr_irq;
821 irq = &phys_enc->irq[INTR_IDX_UNDERRUN];
822 irq->name = "underrun";
823 irq->intr_type = DPU_IRQ_TYPE_INTF_UNDER_RUN;
824 irq->intr_idx = INTR_IDX_UNDERRUN;
825 irq->cb.func = dpu_encoder_phys_cmd_underrun_irq;
827 atomic_set(&phys_enc->vblank_refcount, 0);
828 atomic_set(&phys_enc->pending_kickoff_cnt, 0);
829 atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
830 atomic_set(&cmd_enc->pending_vblank_cnt, 0);
831 init_waitqueue_head(&phys_enc->pending_kickoff_wq);
832 init_waitqueue_head(&cmd_enc->pending_vblank_wq);
834 DPU_DEBUG_CMDENC(cmd_enc, "created\n");