drm/msm: dpu: Allow planes to extend past active display
[linux-2.6-block.git] / drivers / gpu / drm / msm / disp / dpu1 / dpu_crtc.c
1 /*
2  * Copyright (c) 2014-2018 The Linux Foundation. All rights reserved.
3  * Copyright (C) 2013 Red Hat
4  * Author: Rob Clark <robdclark@gmail.com>
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms of the GNU General Public License version 2 as published by
8  * the Free Software Foundation.
9  *
10  * This program is distributed in the hope that it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  *
15  * You should have received a copy of the GNU General Public License along with
16  * this program.  If not, see <http://www.gnu.org/licenses/>.
17  */
18
19 #define pr_fmt(fmt)     "[drm:%s:%d] " fmt, __func__, __LINE__
20 #include <linux/sort.h>
21 #include <linux/debugfs.h>
22 #include <linux/ktime.h>
23 #include <drm/drm_mode.h>
24 #include <drm/drm_crtc.h>
25 #include <drm/drm_crtc_helper.h>
26 #include <drm/drm_flip_work.h>
27 #include <drm/drm_rect.h>
28
29 #include "dpu_kms.h"
30 #include "dpu_hw_lm.h"
31 #include "dpu_hw_ctl.h"
32 #include "dpu_crtc.h"
33 #include "dpu_plane.h"
34 #include "dpu_encoder.h"
35 #include "dpu_vbif.h"
36 #include "dpu_power_handle.h"
37 #include "dpu_core_perf.h"
38 #include "dpu_trace.h"
39
40 #define DPU_DRM_BLEND_OP_NOT_DEFINED    0
41 #define DPU_DRM_BLEND_OP_OPAQUE         1
42 #define DPU_DRM_BLEND_OP_PREMULTIPLIED  2
43 #define DPU_DRM_BLEND_OP_COVERAGE       3
44 #define DPU_DRM_BLEND_OP_MAX            4
45
46 /* layer mixer index on dpu_crtc */
47 #define LEFT_MIXER 0
48 #define RIGHT_MIXER 1
49
50 #define MISR_BUFF_SIZE                  256
51
52 static inline struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)
53 {
54         struct msm_drm_private *priv;
55
56         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
57                 DPU_ERROR("invalid crtc\n");
58                 return NULL;
59         }
60         priv = crtc->dev->dev_private;
61         if (!priv || !priv->kms) {
62                 DPU_ERROR("invalid kms\n");
63                 return NULL;
64         }
65
66         return to_dpu_kms(priv->kms);
67 }
68
69 static inline int _dpu_crtc_power_enable(struct dpu_crtc *dpu_crtc, bool enable)
70 {
71         struct drm_crtc *crtc;
72         struct msm_drm_private *priv;
73         struct dpu_kms *dpu_kms;
74
75         if (!dpu_crtc) {
76                 DPU_ERROR("invalid dpu crtc\n");
77                 return -EINVAL;
78         }
79
80         crtc = &dpu_crtc->base;
81         if (!crtc->dev || !crtc->dev->dev_private) {
82                 DPU_ERROR("invalid drm device\n");
83                 return -EINVAL;
84         }
85
86         priv = crtc->dev->dev_private;
87         if (!priv->kms) {
88                 DPU_ERROR("invalid kms\n");
89                 return -EINVAL;
90         }
91
92         dpu_kms = to_dpu_kms(priv->kms);
93
94         if (enable)
95                 pm_runtime_get_sync(&dpu_kms->pdev->dev);
96         else
97                 pm_runtime_put_sync(&dpu_kms->pdev->dev);
98
99         return 0;
100 }
101
102 /**
103  * _dpu_crtc_rp_to_crtc - get crtc from resource pool object
104  * @rp: Pointer to resource pool
105  * return: Pointer to drm crtc if success; null otherwise
106  */
107 static struct drm_crtc *_dpu_crtc_rp_to_crtc(struct dpu_crtc_respool *rp)
108 {
109         if (!rp)
110                 return NULL;
111
112         return container_of(rp, struct dpu_crtc_state, rp)->base.crtc;
113 }
114
115 /**
116  * _dpu_crtc_rp_reclaim - reclaim unused, or all if forced, resources in pool
117  * @rp: Pointer to resource pool
118  * @force: True to reclaim all resources; otherwise, reclaim only unused ones
119  * return: None
120  */
121 static void _dpu_crtc_rp_reclaim(struct dpu_crtc_respool *rp, bool force)
122 {
123         struct dpu_crtc_res *res, *next;
124         struct drm_crtc *crtc;
125
126         crtc = _dpu_crtc_rp_to_crtc(rp);
127         if (!crtc) {
128                 DPU_ERROR("invalid crtc\n");
129                 return;
130         }
131
132         DPU_DEBUG("crtc%d.%u %s\n", crtc->base.id, rp->sequence_id,
133                         force ? "destroy" : "free_unused");
134
135         list_for_each_entry_safe(res, next, &rp->res_list, list) {
136                 if (!force && !(res->flags & DPU_CRTC_RES_FLAG_FREE))
137                         continue;
138                 DPU_DEBUG("crtc%d.%u reclaim res:0x%x/0x%llx/%pK/%d\n",
139                                 crtc->base.id, rp->sequence_id,
140                                 res->type, res->tag, res->val,
141                                 atomic_read(&res->refcount));
142                 list_del(&res->list);
143                 if (res->ops.put)
144                         res->ops.put(res->val);
145                 kfree(res);
146         }
147 }
148
/**
 * _dpu_crtc_rp_free_unused - free unused resource in pool
 * @rp: Pointer to resource pool
 *
 * Takes rp->rp_lock and reclaims only entries flagged
 * DPU_CRTC_RES_FLAG_FREE; in-use entries are left in the pool.
 *
 * return: none
 */
static void _dpu_crtc_rp_free_unused(struct dpu_crtc_respool *rp)
{
	mutex_lock(rp->rp_lock);
	_dpu_crtc_rp_reclaim(rp, false);
	mutex_unlock(rp->rp_lock);
}
160
/**
 * _dpu_crtc_rp_destroy - destroy resource pool
 * @rp: Pointer to resource pool
 *
 * Unlinks the pool from the crtc's pool list and force-reclaims every
 * entry, regardless of its FREE flag.
 *
 * return: None
 */
static void _dpu_crtc_rp_destroy(struct dpu_crtc_respool *rp)
{
	mutex_lock(rp->rp_lock);
	list_del_init(&rp->rp_list);
	_dpu_crtc_rp_reclaim(rp, true);
	mutex_unlock(rp->rp_lock);
}
173
/**
 * _dpu_crtc_hw_blk_get - get callback for hardware block
 * @val: Resource handle
 * @type: Resource type
 * @tag: Search tag for given resource
 *
 * Thin tracing wrapper around dpu_hw_blk_get(), installed as the
 * pool's ops.get callback by _dpu_crtc_rp_reset().
 *
 * return: Resource handle
 */
static void *_dpu_crtc_hw_blk_get(void *val, u32 type, u64 tag)
{
	DPU_DEBUG("res:%d/0x%llx/%pK\n", type, tag, val);
	return dpu_hw_blk_get(val, type, tag);
}
186
/**
 * _dpu_crtc_hw_blk_put - put callback for hardware block
 * @val: Resource handle
 *
 * Thin tracing wrapper around dpu_hw_blk_put(), installed as the
 * pool's ops.put callback by _dpu_crtc_rp_reset().
 *
 * return: None
 */
static void _dpu_crtc_hw_blk_put(void *val)
{
	DPU_DEBUG("res://%pK\n", val);
	dpu_hw_blk_put(val);
}
197
198 /**
199  * _dpu_crtc_rp_duplicate - duplicate resource pool and reset reference count
200  * @rp: Pointer to original resource pool
201  * @dup_rp: Pointer to duplicated resource pool
202  * return: None
203  */
204 static void _dpu_crtc_rp_duplicate(struct dpu_crtc_respool *rp,
205                 struct dpu_crtc_respool *dup_rp)
206 {
207         struct dpu_crtc_res *res, *dup_res;
208         struct drm_crtc *crtc;
209
210         if (!rp || !dup_rp || !rp->rp_head) {
211                 DPU_ERROR("invalid resource pool\n");
212                 return;
213         }
214
215         crtc = _dpu_crtc_rp_to_crtc(rp);
216         if (!crtc) {
217                 DPU_ERROR("invalid crtc\n");
218                 return;
219         }
220
221         DPU_DEBUG("crtc%d.%u duplicate\n", crtc->base.id, rp->sequence_id);
222
223         mutex_lock(rp->rp_lock);
224         dup_rp->sequence_id = rp->sequence_id + 1;
225         INIT_LIST_HEAD(&dup_rp->res_list);
226         dup_rp->ops = rp->ops;
227         list_for_each_entry(res, &rp->res_list, list) {
228                 dup_res = kzalloc(sizeof(struct dpu_crtc_res), GFP_KERNEL);
229                 if (!dup_res) {
230                         mutex_unlock(rp->rp_lock);
231                         return;
232                 }
233                 INIT_LIST_HEAD(&dup_res->list);
234                 atomic_set(&dup_res->refcount, 0);
235                 dup_res->type = res->type;
236                 dup_res->tag = res->tag;
237                 dup_res->val = res->val;
238                 dup_res->ops = res->ops;
239                 dup_res->flags = DPU_CRTC_RES_FLAG_FREE;
240                 DPU_DEBUG("crtc%d.%u dup res:0x%x/0x%llx/%pK/%d\n",
241                                 crtc->base.id, dup_rp->sequence_id,
242                                 dup_res->type, dup_res->tag, dup_res->val,
243                                 atomic_read(&dup_res->refcount));
244                 list_add_tail(&dup_res->list, &dup_rp->res_list);
245                 if (dup_res->ops.get)
246                         dup_res->ops.get(dup_res->val, 0, -1);
247         }
248
249         dup_rp->rp_lock = rp->rp_lock;
250         dup_rp->rp_head = rp->rp_head;
251         INIT_LIST_HEAD(&dup_rp->rp_list);
252         list_add_tail(&dup_rp->rp_list, rp->rp_head);
253         mutex_unlock(rp->rp_lock);
254 }
255
256 /**
257  * _dpu_crtc_rp_reset - reset resource pool after allocation
258  * @rp: Pointer to original resource pool
259  * @rp_lock: Pointer to serialization resource pool lock
260  * @rp_head: Pointer to crtc resource pool head
261  * return: None
262  */
263 static void _dpu_crtc_rp_reset(struct dpu_crtc_respool *rp,
264                 struct mutex *rp_lock, struct list_head *rp_head)
265 {
266         if (!rp || !rp_lock || !rp_head) {
267                 DPU_ERROR("invalid resource pool\n");
268                 return;
269         }
270
271         mutex_lock(rp_lock);
272         rp->rp_lock = rp_lock;
273         rp->rp_head = rp_head;
274         INIT_LIST_HEAD(&rp->rp_list);
275         rp->sequence_id = 0;
276         INIT_LIST_HEAD(&rp->res_list);
277         rp->ops.get = _dpu_crtc_hw_blk_get;
278         rp->ops.put = _dpu_crtc_hw_blk_put;
279         list_add_tail(&rp->rp_list, rp->rp_head);
280         mutex_unlock(rp_lock);
281 }
282
283 static void dpu_crtc_destroy(struct drm_crtc *crtc)
284 {
285         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
286
287         DPU_DEBUG("\n");
288
289         if (!crtc)
290                 return;
291
292         dpu_crtc->phandle = NULL;
293
294         drm_crtc_cleanup(crtc);
295         mutex_destroy(&dpu_crtc->crtc_lock);
296         kfree(dpu_crtc);
297 }
298
299 static void _dpu_crtc_setup_blend_cfg(struct dpu_crtc_mixer *mixer,
300                 struct dpu_plane_state *pstate, struct dpu_format *format)
301 {
302         struct dpu_hw_mixer *lm = mixer->hw_lm;
303         uint32_t blend_op;
304         struct drm_format_name_buf format_name;
305
306         /* default to opaque blending */
307         blend_op = DPU_BLEND_FG_ALPHA_FG_CONST |
308                 DPU_BLEND_BG_ALPHA_BG_CONST;
309
310         if (format->alpha_enable) {
311                 /* coverage blending */
312                 blend_op = DPU_BLEND_FG_ALPHA_FG_PIXEL |
313                         DPU_BLEND_BG_ALPHA_FG_PIXEL |
314                         DPU_BLEND_BG_INV_ALPHA;
315         }
316
317         lm->ops.setup_blend_config(lm, pstate->stage,
318                                 0xFF, 0, blend_op);
319
320         DPU_DEBUG("format:%s, alpha_en:%u blend_op:0x%x\n",
321                 drm_get_format_name(format->base.pixel_format, &format_name),
322                 format->alpha_enable, blend_op);
323 }
324
325 static void _dpu_crtc_program_lm_output_roi(struct drm_crtc *crtc)
326 {
327         struct dpu_crtc *dpu_crtc;
328         struct dpu_crtc_state *crtc_state;
329         int lm_idx, lm_horiz_position;
330
331         dpu_crtc = to_dpu_crtc(crtc);
332         crtc_state = to_dpu_crtc_state(crtc->state);
333
334         lm_horiz_position = 0;
335         for (lm_idx = 0; lm_idx < dpu_crtc->num_mixers; lm_idx++) {
336                 const struct drm_rect *lm_roi = &crtc_state->lm_bounds[lm_idx];
337                 struct dpu_hw_mixer *hw_lm = dpu_crtc->mixers[lm_idx].hw_lm;
338                 struct dpu_hw_mixer_cfg cfg;
339
340                 if (!lm_roi || !drm_rect_visible(lm_roi))
341                         continue;
342
343                 cfg.out_width = drm_rect_width(lm_roi);
344                 cfg.out_height = drm_rect_height(lm_roi);
345                 cfg.right_mixer = lm_horiz_position++;
346                 cfg.flags = 0;
347                 hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
348         }
349 }
350
351 static void _dpu_crtc_blend_setup_mixer(struct drm_crtc *crtc,
352         struct dpu_crtc *dpu_crtc, struct dpu_crtc_mixer *mixer)
353 {
354         struct drm_plane *plane;
355         struct drm_framebuffer *fb;
356         struct drm_plane_state *state;
357         struct dpu_crtc_state *cstate;
358         struct dpu_plane_state *pstate = NULL;
359         struct dpu_format *format;
360         struct dpu_hw_ctl *ctl;
361         struct dpu_hw_mixer *lm;
362         struct dpu_hw_stage_cfg *stage_cfg;
363
364         u32 flush_mask;
365         uint32_t stage_idx, lm_idx;
366         int zpos_cnt[DPU_STAGE_MAX + 1] = { 0 };
367         bool bg_alpha_enable = false;
368
369         if (!dpu_crtc || !mixer) {
370                 DPU_ERROR("invalid dpu_crtc or mixer\n");
371                 return;
372         }
373
374         ctl = mixer->hw_ctl;
375         lm = mixer->hw_lm;
376         stage_cfg = &dpu_crtc->stage_cfg;
377         cstate = to_dpu_crtc_state(crtc->state);
378
379         drm_atomic_crtc_for_each_plane(plane, crtc) {
380                 state = plane->state;
381                 if (!state)
382                         continue;
383
384                 pstate = to_dpu_plane_state(state);
385                 fb = state->fb;
386
387                 dpu_plane_get_ctl_flush(plane, ctl, &flush_mask);
388
389                 DPU_DEBUG("crtc %d stage:%d - plane %d sspp %d fb %d\n",
390                                 crtc->base.id,
391                                 pstate->stage,
392                                 plane->base.id,
393                                 dpu_plane_pipe(plane) - SSPP_VIG0,
394                                 state->fb ? state->fb->base.id : -1);
395
396                 format = to_dpu_format(msm_framebuffer_format(pstate->base.fb));
397                 if (!format) {
398                         DPU_ERROR("invalid format\n");
399                         return;
400                 }
401
402                 if (pstate->stage == DPU_STAGE_BASE && format->alpha_enable)
403                         bg_alpha_enable = true;
404
405                 stage_idx = zpos_cnt[pstate->stage]++;
406                 stage_cfg->stage[pstate->stage][stage_idx] =
407                                         dpu_plane_pipe(plane);
408                 stage_cfg->multirect_index[pstate->stage][stage_idx] =
409                                         pstate->multirect_index;
410
411                 trace_dpu_crtc_setup_mixer(DRMID(crtc), DRMID(plane),
412                                            state, pstate, stage_idx,
413                                            dpu_plane_pipe(plane) - SSPP_VIG0,
414                                            format->base.pixel_format,
415                                            fb ? fb->modifier : 0);
416
417                 /* blend config update */
418                 for (lm_idx = 0; lm_idx < dpu_crtc->num_mixers; lm_idx++) {
419                         _dpu_crtc_setup_blend_cfg(mixer + lm_idx,
420                                                 pstate, format);
421
422                         mixer[lm_idx].flush_mask |= flush_mask;
423
424                         if (bg_alpha_enable && !format->alpha_enable)
425                                 mixer[lm_idx].mixer_op_mode = 0;
426                         else
427                                 mixer[lm_idx].mixer_op_mode |=
428                                                 1 << pstate->stage;
429                 }
430         }
431
432          _dpu_crtc_program_lm_output_roi(crtc);
433 }
434
435 /**
436  * _dpu_crtc_blend_setup - configure crtc mixers
437  * @crtc: Pointer to drm crtc structure
438  */
439 static void _dpu_crtc_blend_setup(struct drm_crtc *crtc)
440 {
441         struct dpu_crtc *dpu_crtc;
442         struct dpu_crtc_state *dpu_crtc_state;
443         struct dpu_crtc_mixer *mixer;
444         struct dpu_hw_ctl *ctl;
445         struct dpu_hw_mixer *lm;
446
447         int i;
448
449         if (!crtc)
450                 return;
451
452         dpu_crtc = to_dpu_crtc(crtc);
453         dpu_crtc_state = to_dpu_crtc_state(crtc->state);
454         mixer = dpu_crtc->mixers;
455
456         DPU_DEBUG("%s\n", dpu_crtc->name);
457
458         if (dpu_crtc->num_mixers > CRTC_DUAL_MIXERS) {
459                 DPU_ERROR("invalid number mixers: %d\n", dpu_crtc->num_mixers);
460                 return;
461         }
462
463         for (i = 0; i < dpu_crtc->num_mixers; i++) {
464                 if (!mixer[i].hw_lm || !mixer[i].hw_ctl) {
465                         DPU_ERROR("invalid lm or ctl assigned to mixer\n");
466                         return;
467                 }
468                 mixer[i].mixer_op_mode = 0;
469                 mixer[i].flush_mask = 0;
470                 if (mixer[i].hw_ctl->ops.clear_all_blendstages)
471                         mixer[i].hw_ctl->ops.clear_all_blendstages(
472                                         mixer[i].hw_ctl);
473         }
474
475         /* initialize stage cfg */
476         memset(&dpu_crtc->stage_cfg, 0, sizeof(struct dpu_hw_stage_cfg));
477
478         _dpu_crtc_blend_setup_mixer(crtc, dpu_crtc, mixer);
479
480         for (i = 0; i < dpu_crtc->num_mixers; i++) {
481                 ctl = mixer[i].hw_ctl;
482                 lm = mixer[i].hw_lm;
483
484                 lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);
485
486                 mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
487                         mixer[i].hw_lm->idx);
488
489                 /* stage config flush mask */
490                 ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);
491
492                 DPU_DEBUG("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
493                         mixer[i].hw_lm->idx - LM_0,
494                         mixer[i].mixer_op_mode,
495                         ctl->idx - CTL_0,
496                         mixer[i].flush_mask);
497
498                 ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
499                         &dpu_crtc->stage_cfg);
500         }
501 }
502
503 /**
504  *  _dpu_crtc_complete_flip - signal pending page_flip events
505  * Any pending vblank events are added to the vblank_event_list
506  * so that the next vblank interrupt shall signal them.
507  * However PAGE_FLIP events are not handled through the vblank_event_list.
508  * This API signals any pending PAGE_FLIP events requested through
509  * DRM_IOCTL_MODE_PAGE_FLIP and are cached in the dpu_crtc->event.
510  * @crtc: Pointer to drm crtc structure
511  */
512 static void _dpu_crtc_complete_flip(struct drm_crtc *crtc)
513 {
514         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
515         struct drm_device *dev = crtc->dev;
516         unsigned long flags;
517
518         spin_lock_irqsave(&dev->event_lock, flags);
519         if (dpu_crtc->event) {
520                 DRM_DEBUG_VBL("%s: send event: %pK\n", dpu_crtc->name,
521                               dpu_crtc->event);
522                 trace_dpu_crtc_complete_flip(DRMID(crtc));
523                 drm_crtc_send_vblank_event(crtc, dpu_crtc->event);
524                 dpu_crtc->event = NULL;
525         }
526         spin_unlock_irqrestore(&dev->event_lock, flags);
527 }
528
529 enum dpu_intf_mode dpu_crtc_get_intf_mode(struct drm_crtc *crtc)
530 {
531         struct drm_encoder *encoder;
532
533         if (!crtc || !crtc->dev) {
534                 DPU_ERROR("invalid crtc\n");
535                 return INTF_MODE_NONE;
536         }
537
538         drm_for_each_encoder(encoder, crtc->dev)
539                 if (encoder->crtc == crtc)
540                         return dpu_encoder_get_intf_mode(encoder);
541
542         return INTF_MODE_NONE;
543 }
544
545 static void dpu_crtc_vblank_cb(void *data)
546 {
547         struct drm_crtc *crtc = (struct drm_crtc *)data;
548         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
549
550         /* keep statistics on vblank callback - with auto reset via debugfs */
551         if (ktime_compare(dpu_crtc->vblank_cb_time, ktime_set(0, 0)) == 0)
552                 dpu_crtc->vblank_cb_time = ktime_get();
553         else
554                 dpu_crtc->vblank_cb_count++;
555         _dpu_crtc_complete_flip(crtc);
556         drm_crtc_handle_vblank(crtc);
557         trace_dpu_crtc_vblank_cb(DRMID(crtc));
558 }
559
560 static void dpu_crtc_frame_event_work(struct kthread_work *work)
561 {
562         struct msm_drm_private *priv;
563         struct dpu_crtc_frame_event *fevent;
564         struct drm_crtc *crtc;
565         struct dpu_crtc *dpu_crtc;
566         struct dpu_kms *dpu_kms;
567         unsigned long flags;
568         bool frame_done = false;
569
570         if (!work) {
571                 DPU_ERROR("invalid work handle\n");
572                 return;
573         }
574
575         fevent = container_of(work, struct dpu_crtc_frame_event, work);
576         if (!fevent->crtc || !fevent->crtc->state) {
577                 DPU_ERROR("invalid crtc\n");
578                 return;
579         }
580
581         crtc = fevent->crtc;
582         dpu_crtc = to_dpu_crtc(crtc);
583
584         dpu_kms = _dpu_crtc_get_kms(crtc);
585         if (!dpu_kms) {
586                 DPU_ERROR("invalid kms handle\n");
587                 return;
588         }
589         priv = dpu_kms->dev->dev_private;
590         DPU_ATRACE_BEGIN("crtc_frame_event");
591
592         DRM_DEBUG_KMS("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
593                         ktime_to_ns(fevent->ts));
594
595         if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
596                                 | DPU_ENCODER_FRAME_EVENT_ERROR
597                                 | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
598
599                 if (atomic_read(&dpu_crtc->frame_pending) < 1) {
600                         /* this should not happen */
601                         DRM_ERROR("crtc%d ev:%u ts:%lld frame_pending:%d\n",
602                                         crtc->base.id,
603                                         fevent->event,
604                                         ktime_to_ns(fevent->ts),
605                                         atomic_read(&dpu_crtc->frame_pending));
606                 } else if (atomic_dec_return(&dpu_crtc->frame_pending) == 0) {
607                         /* release bandwidth and other resources */
608                         trace_dpu_crtc_frame_event_done(DRMID(crtc),
609                                                         fevent->event);
610                         dpu_core_perf_crtc_release_bw(crtc);
611                 } else {
612                         trace_dpu_crtc_frame_event_more_pending(DRMID(crtc),
613                                                                 fevent->event);
614                 }
615
616                 if (fevent->event & DPU_ENCODER_FRAME_EVENT_DONE)
617                         dpu_core_perf_crtc_update(crtc, 0, false);
618
619                 if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
620                                         | DPU_ENCODER_FRAME_EVENT_ERROR))
621                         frame_done = true;
622         }
623
624         if (fevent->event & DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)
625                 DPU_ERROR("crtc%d ts:%lld received panel dead event\n",
626                                 crtc->base.id, ktime_to_ns(fevent->ts));
627
628         if (frame_done)
629                 complete_all(&dpu_crtc->frame_done_comp);
630
631         spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
632         list_add_tail(&fevent->list, &dpu_crtc->frame_event_list);
633         spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);
634         DPU_ATRACE_END("crtc_frame_event");
635 }
636
/*
 * dpu_crtc_frame_event_cb - crtc frame event callback API. CRTC module
 * registers this API to encoder for all frame event callbacks like
 * frame_error, frame_done, idle_timeout, etc. Encoder may call different events
 * from different context - IRQ, user thread, commit_thread, etc. Each event
 * should be carefully reviewed and should be processed in proper task context
 * to avoid schedulin delay or properly manage the irq context's bottom half
 * processing.
 */
static void dpu_crtc_frame_event_cb(void *data, u32 event)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct dpu_crtc *dpu_crtc;
	struct msm_drm_private *priv;
	struct dpu_crtc_frame_event *fevent;
	unsigned long flags;
	u32 crtc_id;

	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	/* Nothing to do on idle event */
	if (event & DPU_ENCODER_FRAME_EVENT_IDLE)
		return;

	dpu_crtc = to_dpu_crtc(crtc);
	priv = crtc->dev->dev_private;
	crtc_id = drm_crtc_index(crtc);

	trace_dpu_crtc_frame_event_cb(DRMID(crtc), event);

	/* grab a pre-allocated event object; may run in IRQ context, so
	 * no allocation is done here
	 */
	spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
	fevent = list_first_entry_or_null(&dpu_crtc->frame_event_list,
			struct dpu_crtc_frame_event, list);
	if (fevent)
		list_del_init(&fevent->list);
	spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);

	if (!fevent) {
		/* free list exhausted: event is dropped, only logged */
		DRM_ERROR("crtc%d event %d overflow\n", crtc->base.id, event);
		return;
	}

	/* defer the heavy lifting to the per-crtc event thread */
	fevent->event = event;
	fevent->crtc = crtc;
	fevent->ts = ktime_get();
	kthread_queue_work(&priv->event_thread[crtc_id].worker, &fevent->work);
}
687
/**
 * dpu_crtc_complete_commit - post-commit hook for the crtc
 * @crtc: Pointer to drm crtc structure
 * @old_state: Previous crtc state (unused beyond validation)
 *
 * Currently only emits a trace point; kept as an extension hook for
 * commit-completion bookkeeping.
 */
void dpu_crtc_complete_commit(struct drm_crtc *crtc,
		struct drm_crtc_state *old_state)
{
	if (!crtc || !crtc->state) {
		DPU_ERROR("invalid crtc\n");
		return;
	}
	trace_dpu_crtc_complete_commit(DRMID(crtc));
}
697
698 static void _dpu_crtc_setup_mixer_for_encoder(
699                 struct drm_crtc *crtc,
700                 struct drm_encoder *enc)
701 {
702         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
703         struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc);
704         struct dpu_rm *rm = &dpu_kms->rm;
705         struct dpu_crtc_mixer *mixer;
706         struct dpu_hw_ctl *last_valid_ctl = NULL;
707         int i;
708         struct dpu_rm_hw_iter lm_iter, ctl_iter;
709
710         dpu_rm_init_hw_iter(&lm_iter, enc->base.id, DPU_HW_BLK_LM);
711         dpu_rm_init_hw_iter(&ctl_iter, enc->base.id, DPU_HW_BLK_CTL);
712
713         /* Set up all the mixers and ctls reserved by this encoder */
714         for (i = dpu_crtc->num_mixers; i < ARRAY_SIZE(dpu_crtc->mixers); i++) {
715                 mixer = &dpu_crtc->mixers[i];
716
717                 if (!dpu_rm_get_hw(rm, &lm_iter))
718                         break;
719                 mixer->hw_lm = (struct dpu_hw_mixer *)lm_iter.hw;
720
721                 /* CTL may be <= LMs, if <, multiple LMs controlled by 1 CTL */
722                 if (!dpu_rm_get_hw(rm, &ctl_iter)) {
723                         DPU_DEBUG("no ctl assigned to lm %d, using previous\n",
724                                         mixer->hw_lm->idx - LM_0);
725                         mixer->hw_ctl = last_valid_ctl;
726                 } else {
727                         mixer->hw_ctl = (struct dpu_hw_ctl *)ctl_iter.hw;
728                         last_valid_ctl = mixer->hw_ctl;
729                 }
730
731                 /* Shouldn't happen, mixers are always >= ctls */
732                 if (!mixer->hw_ctl) {
733                         DPU_ERROR("no valid ctls found for lm %d\n",
734                                         mixer->hw_lm->idx - LM_0);
735                         return;
736                 }
737
738                 mixer->encoder = enc;
739
740                 dpu_crtc->num_mixers++;
741                 DPU_DEBUG("setup mixer %d: lm %d\n",
742                                 i, mixer->hw_lm->idx - LM_0);
743                 DPU_DEBUG("setup mixer %d: ctl %d\n",
744                                 i, mixer->hw_ctl->idx - CTL_0);
745         }
746 }
747
748 static void _dpu_crtc_setup_mixers(struct drm_crtc *crtc)
749 {
750         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
751         struct drm_encoder *enc;
752
753         dpu_crtc->num_mixers = 0;
754         dpu_crtc->mixers_swapped = false;
755         memset(dpu_crtc->mixers, 0, sizeof(dpu_crtc->mixers));
756
757         mutex_lock(&dpu_crtc->crtc_lock);
758         /* Check for mixers on all encoders attached to this crtc */
759         list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
760                 if (enc->crtc != crtc)
761                         continue;
762
763                 _dpu_crtc_setup_mixer_for_encoder(crtc, enc);
764         }
765
766         mutex_unlock(&dpu_crtc->crtc_lock);
767 }
768
769 static void _dpu_crtc_setup_lm_bounds(struct drm_crtc *crtc,
770                 struct drm_crtc_state *state)
771 {
772         struct dpu_crtc *dpu_crtc;
773         struct dpu_crtc_state *cstate;
774         struct drm_display_mode *adj_mode;
775         u32 crtc_split_width;
776         int i;
777
778         if (!crtc || !state) {
779                 DPU_ERROR("invalid args\n");
780                 return;
781         }
782
783         dpu_crtc = to_dpu_crtc(crtc);
784         cstate = to_dpu_crtc_state(state);
785
786         adj_mode = &state->adjusted_mode;
787         crtc_split_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, adj_mode);
788
789         for (i = 0; i < dpu_crtc->num_mixers; i++) {
790                 struct drm_rect *r = &cstate->lm_bounds[i];
791                 r->x1 = crtc_split_width * i;
792                 r->y1 = 0;
793                 r->x2 = r->x1 + crtc_split_width;
794                 r->y2 = dpu_crtc_get_mixer_height(dpu_crtc, cstate, adj_mode);
795
796                 trace_dpu_crtc_setup_lm_bounds(DRMID(crtc), i, r);
797         }
798
799         drm_mode_debug_printmodeline(adj_mode);
800 }
801
802 static void dpu_crtc_atomic_begin(struct drm_crtc *crtc,
803                 struct drm_crtc_state *old_state)
804 {
805         struct dpu_crtc *dpu_crtc;
806         struct drm_encoder *encoder;
807         struct drm_device *dev;
808         unsigned long flags;
809         struct dpu_crtc_smmu_state_data *smmu_state;
810
811         if (!crtc) {
812                 DPU_ERROR("invalid crtc\n");
813                 return;
814         }
815
816         if (!crtc->state->enable) {
817                 DPU_DEBUG("crtc%d -> enable %d, skip atomic_begin\n",
818                                 crtc->base.id, crtc->state->enable);
819                 return;
820         }
821
822         DPU_DEBUG("crtc%d\n", crtc->base.id);
823
824         dpu_crtc = to_dpu_crtc(crtc);
825         dev = crtc->dev;
826         smmu_state = &dpu_crtc->smmu_state;
827
828         if (!dpu_crtc->num_mixers) {
829                 _dpu_crtc_setup_mixers(crtc);
830                 _dpu_crtc_setup_lm_bounds(crtc, crtc->state);
831         }
832
833         if (dpu_crtc->event) {
834                 WARN_ON(dpu_crtc->event);
835         } else {
836                 spin_lock_irqsave(&dev->event_lock, flags);
837                 dpu_crtc->event = crtc->state->event;
838                 crtc->state->event = NULL;
839                 spin_unlock_irqrestore(&dev->event_lock, flags);
840         }
841
842         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
843                 if (encoder->crtc != crtc)
844                         continue;
845
846                 /* encoder will trigger pending mask now */
847                 dpu_encoder_trigger_kickoff_pending(encoder);
848         }
849
850         /*
851          * If no mixers have been allocated in dpu_crtc_atomic_check(),
852          * it means we are trying to flush a CRTC whose state is disabled:
853          * nothing else needs to be done.
854          */
855         if (unlikely(!dpu_crtc->num_mixers))
856                 return;
857
858         _dpu_crtc_blend_setup(crtc);
859
860         /*
861          * PP_DONE irq is only used by command mode for now.
862          * It is better to request pending before FLUSH and START trigger
863          * to make sure no pp_done irq missed.
864          * This is safe because no pp_done will happen before SW trigger
865          * in command mode.
866          */
867 }
868
869 static void dpu_crtc_atomic_flush(struct drm_crtc *crtc,
870                 struct drm_crtc_state *old_crtc_state)
871 {
872         struct dpu_crtc *dpu_crtc;
873         struct drm_device *dev;
874         struct drm_plane *plane;
875         struct msm_drm_private *priv;
876         struct msm_drm_thread *event_thread;
877         unsigned long flags;
878         struct dpu_crtc_state *cstate;
879
880         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
881                 DPU_ERROR("invalid crtc\n");
882                 return;
883         }
884
885         if (!crtc->state->enable) {
886                 DPU_DEBUG("crtc%d -> enable %d, skip atomic_flush\n",
887                                 crtc->base.id, crtc->state->enable);
888                 return;
889         }
890
891         DPU_DEBUG("crtc%d\n", crtc->base.id);
892
893         dpu_crtc = to_dpu_crtc(crtc);
894         cstate = to_dpu_crtc_state(crtc->state);
895         dev = crtc->dev;
896         priv = dev->dev_private;
897
898         if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
899                 DPU_ERROR("invalid crtc index[%d]\n", crtc->index);
900                 return;
901         }
902
903         event_thread = &priv->event_thread[crtc->index];
904
905         if (dpu_crtc->event) {
906                 DPU_DEBUG("already received dpu_crtc->event\n");
907         } else {
908                 spin_lock_irqsave(&dev->event_lock, flags);
909                 dpu_crtc->event = crtc->state->event;
910                 crtc->state->event = NULL;
911                 spin_unlock_irqrestore(&dev->event_lock, flags);
912         }
913
914         /*
915          * If no mixers has been allocated in dpu_crtc_atomic_check(),
916          * it means we are trying to flush a CRTC whose state is disabled:
917          * nothing else needs to be done.
918          */
919         if (unlikely(!dpu_crtc->num_mixers))
920                 return;
921
922         /*
923          * For planes without commit update, drm framework will not add
924          * those planes to current state since hardware update is not
925          * required. However, if those planes were power collapsed since
926          * last commit cycle, driver has to restore the hardware state
927          * of those planes explicitly here prior to plane flush.
928          */
929         drm_atomic_crtc_for_each_plane(plane, crtc)
930                 dpu_plane_restore(plane);
931
932         /* update performance setting before crtc kickoff */
933         dpu_core_perf_crtc_update(crtc, 1, false);
934
935         /*
936          * Final plane updates: Give each plane a chance to complete all
937          *                      required writes/flushing before crtc's "flush
938          *                      everything" call below.
939          */
940         drm_atomic_crtc_for_each_plane(plane, crtc) {
941                 if (dpu_crtc->smmu_state.transition_error)
942                         dpu_plane_set_error(plane, true);
943                 dpu_plane_flush(plane);
944         }
945
946         /* Kickoff will be scheduled by outer layer */
947 }
948
949 /**
950  * dpu_crtc_destroy_state - state destroy hook
951  * @crtc: drm CRTC
952  * @state: CRTC state object to release
953  */
954 static void dpu_crtc_destroy_state(struct drm_crtc *crtc,
955                 struct drm_crtc_state *state)
956 {
957         struct dpu_crtc *dpu_crtc;
958         struct dpu_crtc_state *cstate;
959
960         if (!crtc || !state) {
961                 DPU_ERROR("invalid argument(s)\n");
962                 return;
963         }
964
965         dpu_crtc = to_dpu_crtc(crtc);
966         cstate = to_dpu_crtc_state(state);
967
968         DPU_DEBUG("crtc%d\n", crtc->base.id);
969
970         _dpu_crtc_rp_destroy(&cstate->rp);
971
972         __drm_atomic_helper_crtc_destroy_state(state);
973
974         kfree(cstate);
975 }
976
977 static int _dpu_crtc_wait_for_frame_done(struct drm_crtc *crtc)
978 {
979         struct dpu_crtc *dpu_crtc;
980         int ret, rc = 0;
981
982         if (!crtc) {
983                 DPU_ERROR("invalid argument\n");
984                 return -EINVAL;
985         }
986         dpu_crtc = to_dpu_crtc(crtc);
987
988         if (!atomic_read(&dpu_crtc->frame_pending)) {
989                 DPU_DEBUG("no frames pending\n");
990                 return 0;
991         }
992
993         DPU_ATRACE_BEGIN("frame done completion wait");
994         ret = wait_for_completion_timeout(&dpu_crtc->frame_done_comp,
995                         msecs_to_jiffies(DPU_FRAME_DONE_TIMEOUT));
996         if (!ret) {
997                 DRM_ERROR("frame done wait timed out, ret:%d\n", ret);
998                 rc = -ETIMEDOUT;
999         }
1000         DPU_ATRACE_END("frame done completion wait");
1001
1002         return rc;
1003 }
1004
1005 void dpu_crtc_commit_kickoff(struct drm_crtc *crtc)
1006 {
1007         struct drm_encoder *encoder;
1008         struct drm_device *dev;
1009         struct dpu_crtc *dpu_crtc;
1010         struct msm_drm_private *priv;
1011         struct dpu_kms *dpu_kms;
1012         struct dpu_crtc_state *cstate;
1013         int ret;
1014
1015         if (!crtc) {
1016                 DPU_ERROR("invalid argument\n");
1017                 return;
1018         }
1019         dev = crtc->dev;
1020         dpu_crtc = to_dpu_crtc(crtc);
1021         dpu_kms = _dpu_crtc_get_kms(crtc);
1022
1023         if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
1024                 DPU_ERROR("invalid argument\n");
1025                 return;
1026         }
1027
1028         priv = dpu_kms->dev->dev_private;
1029         cstate = to_dpu_crtc_state(crtc->state);
1030
1031         /*
1032          * If no mixers has been allocated in dpu_crtc_atomic_check(),
1033          * it means we are trying to start a CRTC whose state is disabled:
1034          * nothing else needs to be done.
1035          */
1036         if (unlikely(!dpu_crtc->num_mixers))
1037                 return;
1038
1039         DPU_ATRACE_BEGIN("crtc_commit");
1040
1041         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1042                 struct dpu_encoder_kickoff_params params = { 0 };
1043
1044                 if (encoder->crtc != crtc)
1045                         continue;
1046
1047                 /*
1048                  * Encoder will flush/start now, unless it has a tx pending.
1049                  * If so, it may delay and flush at an irq event (e.g. ppdone)
1050                  */
1051                 dpu_encoder_prepare_for_kickoff(encoder, &params);
1052         }
1053
1054         /* wait for frame_event_done completion */
1055         DPU_ATRACE_BEGIN("wait_for_frame_done_event");
1056         ret = _dpu_crtc_wait_for_frame_done(crtc);
1057         DPU_ATRACE_END("wait_for_frame_done_event");
1058         if (ret) {
1059                 DPU_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
1060                                 crtc->base.id,
1061                                 atomic_read(&dpu_crtc->frame_pending));
1062                 goto end;
1063         }
1064
1065         if (atomic_inc_return(&dpu_crtc->frame_pending) == 1) {
1066                 /* acquire bandwidth and other resources */
1067                 DPU_DEBUG("crtc%d first commit\n", crtc->base.id);
1068         } else
1069                 DPU_DEBUG("crtc%d commit\n", crtc->base.id);
1070
1071         dpu_crtc->play_count++;
1072
1073         dpu_vbif_clear_errors(dpu_kms);
1074
1075         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1076                 if (encoder->crtc != crtc)
1077                         continue;
1078
1079                 dpu_encoder_kickoff(encoder);
1080         }
1081
1082 end:
1083         reinit_completion(&dpu_crtc->frame_done_comp);
1084         DPU_ATRACE_END("crtc_commit");
1085 }
1086
/**
 * _dpu_crtc_vblank_enable_no_lock - update power resource and vblank request
 * @dpu_crtc: Pointer to dpu crtc structure
 * @enable: Whether to enable/disable vblanks
 *
 * NOTE(review): despite the "_no_lock" name, the caller appears to hold
 * dpu_crtc->crtc_lock; this function temporarily drops and re-acquires it
 * around the power enable/disable calls because the power crtc callback
 * may try to take the same lock — confirm all callers hold crtc_lock.
 *
 * @Return: error code
 */
static int _dpu_crtc_vblank_enable_no_lock(
		struct dpu_crtc *dpu_crtc, bool enable)
{
	struct drm_device *dev;
	struct drm_crtc *crtc;
	struct drm_encoder *enc;

	if (!dpu_crtc) {
		DPU_ERROR("invalid crtc\n");
		return -EINVAL;
	}

	crtc = &dpu_crtc->base;
	dev = crtc->dev;

	if (enable) {
		int ret;

		/* drop lock since power crtc cb may try to re-acquire lock */
		mutex_unlock(&dpu_crtc->crtc_lock);
		ret = _dpu_crtc_power_enable(dpu_crtc, true);
		mutex_lock(&dpu_crtc->crtc_lock);
		if (ret)
			return ret;

		/* hook the vblank callback on every encoder of this crtc */
		list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
			if (enc->crtc != crtc)
				continue;

			trace_dpu_crtc_vblank_enable(DRMID(&dpu_crtc->base),
						     DRMID(enc), enable,
						     dpu_crtc);

			dpu_encoder_register_vblank_callback(enc,
					dpu_crtc_vblank_cb, (void *)crtc);
		}
	} else {
		/* unhook the callbacks before releasing the power vote */
		list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
			if (enc->crtc != crtc)
				continue;

			trace_dpu_crtc_vblank_enable(DRMID(&dpu_crtc->base),
						     DRMID(enc), enable,
						     dpu_crtc);

			dpu_encoder_register_vblank_callback(enc, NULL, NULL);
		}

		/* drop lock since power crtc cb may try to re-acquire lock */
		mutex_unlock(&dpu_crtc->crtc_lock);
		_dpu_crtc_power_enable(dpu_crtc, false);
		mutex_lock(&dpu_crtc->crtc_lock);
	}

	return 0;
}
1150
1151 /**
1152  * _dpu_crtc_set_suspend - notify crtc of suspend enable/disable
1153  * @crtc: Pointer to drm crtc object
1154  * @enable: true to enable suspend, false to indicate resume
1155  */
1156 static void _dpu_crtc_set_suspend(struct drm_crtc *crtc, bool enable)
1157 {
1158         struct dpu_crtc *dpu_crtc;
1159         struct msm_drm_private *priv;
1160         struct dpu_kms *dpu_kms;
1161         int ret = 0;
1162
1163         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
1164                 DPU_ERROR("invalid crtc\n");
1165                 return;
1166         }
1167         dpu_crtc = to_dpu_crtc(crtc);
1168         priv = crtc->dev->dev_private;
1169
1170         if (!priv->kms) {
1171                 DPU_ERROR("invalid crtc kms\n");
1172                 return;
1173         }
1174         dpu_kms = to_dpu_kms(priv->kms);
1175
1176         DRM_DEBUG_KMS("crtc%d suspend = %d\n", crtc->base.id, enable);
1177
1178         mutex_lock(&dpu_crtc->crtc_lock);
1179
1180         /*
1181          * If the vblank is enabled, release a power reference on suspend
1182          * and take it back during resume (if it is still enabled).
1183          */
1184         trace_dpu_crtc_set_suspend(DRMID(&dpu_crtc->base), enable, dpu_crtc);
1185         if (dpu_crtc->suspend == enable)
1186                 DPU_DEBUG("crtc%d suspend already set to %d, ignoring update\n",
1187                                 crtc->base.id, enable);
1188         else if (dpu_crtc->enabled && dpu_crtc->vblank_requested) {
1189                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, !enable);
1190                 if (ret)
1191                         DPU_ERROR("%s vblank enable failed: %d\n",
1192                                         dpu_crtc->name, ret);
1193         }
1194
1195         dpu_crtc->suspend = enable;
1196         mutex_unlock(&dpu_crtc->crtc_lock);
1197 }
1198
1199 /**
1200  * dpu_crtc_duplicate_state - state duplicate hook
1201  * @crtc: Pointer to drm crtc structure
1202  * @Returns: Pointer to new drm_crtc_state structure
1203  */
1204 static struct drm_crtc_state *dpu_crtc_duplicate_state(struct drm_crtc *crtc)
1205 {
1206         struct dpu_crtc *dpu_crtc;
1207         struct dpu_crtc_state *cstate, *old_cstate;
1208
1209         if (!crtc || !crtc->state) {
1210                 DPU_ERROR("invalid argument(s)\n");
1211                 return NULL;
1212         }
1213
1214         dpu_crtc = to_dpu_crtc(crtc);
1215         old_cstate = to_dpu_crtc_state(crtc->state);
1216         cstate = kmemdup(old_cstate, sizeof(*old_cstate), GFP_KERNEL);
1217         if (!cstate) {
1218                 DPU_ERROR("failed to allocate state\n");
1219                 return NULL;
1220         }
1221
1222         /* duplicate base helper */
1223         __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
1224
1225         _dpu_crtc_rp_duplicate(&old_cstate->rp, &cstate->rp);
1226
1227         return &cstate->base;
1228 }
1229
1230 /**
1231  * dpu_crtc_reset - reset hook for CRTCs
1232  * Resets the atomic state for @crtc by freeing the state pointer (which might
1233  * be NULL, e.g. at driver load time) and allocating a new empty state object.
1234  * @crtc: Pointer to drm crtc structure
1235  */
1236 static void dpu_crtc_reset(struct drm_crtc *crtc)
1237 {
1238         struct dpu_crtc *dpu_crtc;
1239         struct dpu_crtc_state *cstate;
1240
1241         if (!crtc) {
1242                 DPU_ERROR("invalid crtc\n");
1243                 return;
1244         }
1245
1246         /* revert suspend actions, if necessary */
1247         if (dpu_kms_is_suspend_state(crtc->dev))
1248                 _dpu_crtc_set_suspend(crtc, false);
1249
1250         /* remove previous state, if present */
1251         if (crtc->state) {
1252                 dpu_crtc_destroy_state(crtc, crtc->state);
1253                 crtc->state = 0;
1254         }
1255
1256         dpu_crtc = to_dpu_crtc(crtc);
1257         cstate = kzalloc(sizeof(*cstate), GFP_KERNEL);
1258         if (!cstate) {
1259                 DPU_ERROR("failed to allocate state\n");
1260                 return;
1261         }
1262
1263         _dpu_crtc_rp_reset(&cstate->rp, &dpu_crtc->rp_lock,
1264                         &dpu_crtc->rp_head);
1265
1266         cstate->base.crtc = crtc;
1267         crtc->state = &cstate->base;
1268 }
1269
/*
 * dpu_crtc_handle_power_event - power-event callback for this crtc
 * @event_type: one of the DPU_POWER_EVENT_* values
 * @arg: opaque pointer, the drm_crtc registered with the handler
 *
 * Restores encoder state after power-up and saves per-mixer MISR
 * signatures before power-down, all under crtc_lock.
 */
static void dpu_crtc_handle_power_event(u32 event_type, void *arg)
{
	struct drm_crtc *crtc = arg;
	struct dpu_crtc *dpu_crtc;
	struct drm_encoder *encoder;
	struct dpu_crtc_mixer *m;
	u32 i, misr_status;

	if (!crtc) {
		DPU_ERROR("invalid crtc\n");
		return;
	}
	dpu_crtc = to_dpu_crtc(crtc);

	mutex_lock(&dpu_crtc->crtc_lock);

	trace_dpu_crtc_handle_power_event(DRMID(crtc), event_type);

	switch (event_type) {
	case DPU_POWER_EVENT_POST_ENABLE:
		/* restore encoder; crtc will be programmed during commit */
		drm_for_each_encoder(encoder, crtc->dev) {
			if (encoder->crtc != crtc)
				continue;

			dpu_encoder_virt_restore(encoder);
		}

		/* re-arm MISR collection on each mixer, if enabled */
		for (i = 0; i < dpu_crtc->num_mixers; ++i) {
			m = &dpu_crtc->mixers[i];
			if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
					!dpu_crtc->misr_enable)
				continue;

			m->hw_lm->ops.setup_misr(m->hw_lm, true,
					dpu_crtc->misr_frame_count);
		}
		break;
	case DPU_POWER_EVENT_PRE_DISABLE:
		/*
		 * Save MISR values before the hardware loses power; keep
		 * the previously cached value if the read returns zero.
		 */
		for (i = 0; i < dpu_crtc->num_mixers; ++i) {
			m = &dpu_crtc->mixers[i];
			if (!m->hw_lm || !m->hw_lm->ops.collect_misr ||
					!dpu_crtc->misr_enable)
				continue;

			misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
			dpu_crtc->misr_data[i] = misr_status ? misr_status :
							dpu_crtc->misr_data[i];
		}
		break;
	case DPU_POWER_EVENT_POST_DISABLE:
		/**
		 * Nothing to do. All the planes on the CRTC will be
		 * programmed for every frame
		 */
		break;
	default:
		DPU_DEBUG("event:%d not handled\n", event_type);
		break;
	}

	mutex_unlock(&dpu_crtc->crtc_lock);
}
1333
1334 static void dpu_crtc_disable(struct drm_crtc *crtc)
1335 {
1336         struct dpu_crtc *dpu_crtc;
1337         struct dpu_crtc_state *cstate;
1338         struct drm_display_mode *mode;
1339         struct drm_encoder *encoder;
1340         struct msm_drm_private *priv;
1341         int ret;
1342         unsigned long flags;
1343
1344         if (!crtc || !crtc->dev || !crtc->dev->dev_private || !crtc->state) {
1345                 DPU_ERROR("invalid crtc\n");
1346                 return;
1347         }
1348         dpu_crtc = to_dpu_crtc(crtc);
1349         cstate = to_dpu_crtc_state(crtc->state);
1350         mode = &cstate->base.adjusted_mode;
1351         priv = crtc->dev->dev_private;
1352
1353         DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);
1354
1355         if (dpu_kms_is_suspend_state(crtc->dev))
1356                 _dpu_crtc_set_suspend(crtc, true);
1357
1358         /* Disable/save vblank irq handling */
1359         drm_crtc_vblank_off(crtc);
1360
1361         mutex_lock(&dpu_crtc->crtc_lock);
1362
1363         /* wait for frame_event_done completion */
1364         if (_dpu_crtc_wait_for_frame_done(crtc))
1365                 DPU_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
1366                                 crtc->base.id,
1367                                 atomic_read(&dpu_crtc->frame_pending));
1368
1369         trace_dpu_crtc_disable(DRMID(crtc), false, dpu_crtc);
1370         if (dpu_crtc->enabled && !dpu_crtc->suspend &&
1371                         dpu_crtc->vblank_requested) {
1372                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, false);
1373                 if (ret)
1374                         DPU_ERROR("%s vblank enable failed: %d\n",
1375                                         dpu_crtc->name, ret);
1376         }
1377         dpu_crtc->enabled = false;
1378
1379         if (atomic_read(&dpu_crtc->frame_pending)) {
1380                 trace_dpu_crtc_disable_frame_pending(DRMID(crtc),
1381                                      atomic_read(&dpu_crtc->frame_pending));
1382                 dpu_core_perf_crtc_release_bw(crtc);
1383                 atomic_set(&dpu_crtc->frame_pending, 0);
1384         }
1385
1386         dpu_core_perf_crtc_update(crtc, 0, true);
1387
1388         drm_for_each_encoder(encoder, crtc->dev) {
1389                 if (encoder->crtc != crtc)
1390                         continue;
1391                 dpu_encoder_register_frame_event_callback(encoder, NULL, NULL);
1392         }
1393
1394         if (dpu_crtc->power_event)
1395                 dpu_power_handle_unregister_event(dpu_crtc->phandle,
1396                                 dpu_crtc->power_event);
1397
1398         memset(dpu_crtc->mixers, 0, sizeof(dpu_crtc->mixers));
1399         dpu_crtc->num_mixers = 0;
1400         dpu_crtc->mixers_swapped = false;
1401
1402         /* disable clk & bw control until clk & bw properties are set */
1403         cstate->bw_control = false;
1404         cstate->bw_split_vote = false;
1405
1406         mutex_unlock(&dpu_crtc->crtc_lock);
1407
1408         if (crtc->state->event && !crtc->state->active) {
1409                 spin_lock_irqsave(&crtc->dev->event_lock, flags);
1410                 drm_crtc_send_vblank_event(crtc, crtc->state->event);
1411                 crtc->state->event = NULL;
1412                 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
1413         }
1414 }
1415
1416 static void dpu_crtc_enable(struct drm_crtc *crtc,
1417                 struct drm_crtc_state *old_crtc_state)
1418 {
1419         struct dpu_crtc *dpu_crtc;
1420         struct drm_encoder *encoder;
1421         struct msm_drm_private *priv;
1422         int ret;
1423
1424         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
1425                 DPU_ERROR("invalid crtc\n");
1426                 return;
1427         }
1428         priv = crtc->dev->dev_private;
1429
1430         DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);
1431         dpu_crtc = to_dpu_crtc(crtc);
1432
1433         drm_for_each_encoder(encoder, crtc->dev) {
1434                 if (encoder->crtc != crtc)
1435                         continue;
1436                 dpu_encoder_register_frame_event_callback(encoder,
1437                                 dpu_crtc_frame_event_cb, (void *)crtc);
1438         }
1439
1440         mutex_lock(&dpu_crtc->crtc_lock);
1441         trace_dpu_crtc_enable(DRMID(crtc), true, dpu_crtc);
1442         if (!dpu_crtc->enabled && !dpu_crtc->suspend &&
1443                         dpu_crtc->vblank_requested) {
1444                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, true);
1445                 if (ret)
1446                         DPU_ERROR("%s vblank enable failed: %d\n",
1447                                         dpu_crtc->name, ret);
1448         }
1449         dpu_crtc->enabled = true;
1450
1451         mutex_unlock(&dpu_crtc->crtc_lock);
1452
1453         /* Enable/restore vblank irq handling */
1454         drm_crtc_vblank_on(crtc);
1455
1456         dpu_crtc->power_event = dpu_power_handle_register_event(
1457                 dpu_crtc->phandle,
1458                 DPU_POWER_EVENT_POST_ENABLE | DPU_POWER_EVENT_POST_DISABLE |
1459                 DPU_POWER_EVENT_PRE_DISABLE,
1460                 dpu_crtc_handle_power_event, crtc, dpu_crtc->name);
1461
1462 }
1463
/*
 * struct plane_state - scratch bookkeeping used during atomic check
 * @dpu_pstate: dpu-specific plane state for the staged plane
 * @drm_pstate: base drm plane state the dpu state wraps
 * @stage: blend stage (z order) assigned to the plane
 * @pipe_id: id of the SSPP hardware pipe backing the plane
 */
struct plane_state {
	struct dpu_plane_state *dpu_pstate;
	const struct drm_plane_state *drm_pstate;
	int stage;
	u32 pipe_id;
};
1470
1471 static int dpu_crtc_atomic_check(struct drm_crtc *crtc,
1472                 struct drm_crtc_state *state)
1473 {
1474         struct dpu_crtc *dpu_crtc;
1475         struct plane_state *pstates;
1476         struct dpu_crtc_state *cstate;
1477
1478         const struct drm_plane_state *pstate;
1479         struct drm_plane *plane;
1480         struct drm_display_mode *mode;
1481
1482         int cnt = 0, rc = 0, mixer_width, i, z_pos;
1483
1484         struct dpu_multirect_plane_states multirect_plane[DPU_STAGE_MAX * 2];
1485         int multirect_count = 0;
1486         const struct drm_plane_state *pipe_staged[SSPP_MAX];
1487         int left_zpos_cnt = 0, right_zpos_cnt = 0;
1488         struct drm_rect crtc_rect = { 0 };
1489
1490         if (!crtc) {
1491                 DPU_ERROR("invalid crtc\n");
1492                 return -EINVAL;
1493         }
1494
1495         pstates = kzalloc(sizeof(*pstates) * DPU_STAGE_MAX * 4, GFP_KERNEL);
1496
1497         dpu_crtc = to_dpu_crtc(crtc);
1498         cstate = to_dpu_crtc_state(state);
1499
1500         if (!state->enable || !state->active) {
1501                 DPU_DEBUG("crtc%d -> enable %d, active %d, skip atomic_check\n",
1502                                 crtc->base.id, state->enable, state->active);
1503                 goto end;
1504         }
1505
1506         mode = &state->adjusted_mode;
1507         DPU_DEBUG("%s: check", dpu_crtc->name);
1508
1509         /* force a full mode set if active state changed */
1510         if (state->active_changed)
1511                 state->mode_changed = true;
1512
1513         memset(pipe_staged, 0, sizeof(pipe_staged));
1514
1515         mixer_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, mode);
1516
1517         _dpu_crtc_setup_lm_bounds(crtc, state);
1518
1519         crtc_rect.x2 = mode->hdisplay;
1520         crtc_rect.y2 = mode->vdisplay;
1521
1522          /* get plane state for all drm planes associated with crtc state */
1523         drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
1524                 struct drm_rect dst, clip = crtc_rect;
1525
1526                 if (IS_ERR_OR_NULL(pstate)) {
1527                         rc = PTR_ERR(pstate);
1528                         DPU_ERROR("%s: failed to get plane%d state, %d\n",
1529                                         dpu_crtc->name, plane->base.id, rc);
1530                         goto end;
1531                 }
1532                 if (cnt >= DPU_STAGE_MAX * 4)
1533                         continue;
1534
1535                 pstates[cnt].dpu_pstate = to_dpu_plane_state(pstate);
1536                 pstates[cnt].drm_pstate = pstate;
1537                 pstates[cnt].stage = pstate->normalized_zpos;
1538                 pstates[cnt].pipe_id = dpu_plane_pipe(plane);
1539
1540                 if (pipe_staged[pstates[cnt].pipe_id]) {
1541                         multirect_plane[multirect_count].r0 =
1542                                 pipe_staged[pstates[cnt].pipe_id];
1543                         multirect_plane[multirect_count].r1 = pstate;
1544                         multirect_count++;
1545
1546                         pipe_staged[pstates[cnt].pipe_id] = NULL;
1547                 } else {
1548                         pipe_staged[pstates[cnt].pipe_id] = pstate;
1549                 }
1550
1551                 cnt++;
1552
1553                 dst = drm_plane_state_dest(pstate);
1554                 if (!drm_rect_intersect(&clip, &dst)) {
1555                         DPU_ERROR("invalid vertical/horizontal destination\n");
1556                         DPU_ERROR("display: " DRM_RECT_FMT " plane: "
1557                                   DRM_RECT_FMT "\n", DRM_RECT_ARG(&crtc_rect),
1558                                   DRM_RECT_ARG(&dst));
1559                         rc = -E2BIG;
1560                         goto end;
1561                 }
1562         }
1563
1564         for (i = 1; i < SSPP_MAX; i++) {
1565                 if (pipe_staged[i]) {
1566                         dpu_plane_clear_multirect(pipe_staged[i]);
1567
1568                         if (is_dpu_plane_virtual(pipe_staged[i]->plane)) {
1569                                 DPU_ERROR(
1570                                         "r1 only virt plane:%d not supported\n",
1571                                         pipe_staged[i]->plane->base.id);
1572                                 rc  = -EINVAL;
1573                                 goto end;
1574                         }
1575                 }
1576         }
1577
1578         z_pos = -1;
1579         for (i = 0; i < cnt; i++) {
1580                 /* reset counts at every new blend stage */
1581                 if (pstates[i].stage != z_pos) {
1582                         left_zpos_cnt = 0;
1583                         right_zpos_cnt = 0;
1584                         z_pos = pstates[i].stage;
1585                 }
1586
1587                 /* verify z_pos setting before using it */
1588                 if (z_pos >= DPU_STAGE_MAX - DPU_STAGE_0) {
1589                         DPU_ERROR("> %d plane stages assigned\n",
1590                                         DPU_STAGE_MAX - DPU_STAGE_0);
1591                         rc = -EINVAL;
1592                         goto end;
1593                 } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
1594                         if (left_zpos_cnt == 2) {
1595                                 DPU_ERROR("> 2 planes @ stage %d on left\n",
1596                                         z_pos);
1597                                 rc = -EINVAL;
1598                                 goto end;
1599                         }
1600                         left_zpos_cnt++;
1601
1602                 } else {
1603                         if (right_zpos_cnt == 2) {
1604                                 DPU_ERROR("> 2 planes @ stage %d on right\n",
1605                                         z_pos);
1606                                 rc = -EINVAL;
1607                                 goto end;
1608                         }
1609                         right_zpos_cnt++;
1610                 }
1611
1612                 pstates[i].dpu_pstate->stage = z_pos + DPU_STAGE_0;
1613                 DPU_DEBUG("%s: zpos %d", dpu_crtc->name, z_pos);
1614         }
1615
1616         for (i = 0; i < multirect_count; i++) {
1617                 if (dpu_plane_validate_multirect_v2(&multirect_plane[i])) {
1618                         DPU_ERROR(
1619                         "multirect validation failed for planes (%d - %d)\n",
1620                                         multirect_plane[i].r0->plane->base.id,
1621                                         multirect_plane[i].r1->plane->base.id);
1622                         rc = -EINVAL;
1623                         goto end;
1624                 }
1625         }
1626
1627         rc = dpu_core_perf_crtc_check(crtc, state);
1628         if (rc) {
1629                 DPU_ERROR("crtc%d failed performance check %d\n",
1630                                 crtc->base.id, rc);
1631                 goto end;
1632         }
1633
1634         /* validate source split:
1635          * use pstates sorted by stage to check planes on same stage
1636          * we assume that all pipes are in source split so its valid to compare
1637          * without taking into account left/right mixer placement
1638          */
1639         for (i = 1; i < cnt; i++) {
1640                 struct plane_state *prv_pstate, *cur_pstate;
1641                 struct drm_rect left_rect, right_rect;
1642                 int32_t left_pid, right_pid;
1643                 int32_t stage;
1644
1645                 prv_pstate = &pstates[i - 1];
1646                 cur_pstate = &pstates[i];
1647                 if (prv_pstate->stage != cur_pstate->stage)
1648                         continue;
1649
1650                 stage = cur_pstate->stage;
1651
1652                 left_pid = prv_pstate->dpu_pstate->base.plane->base.id;
1653                 left_rect = drm_plane_state_dest(prv_pstate->drm_pstate);
1654
1655                 right_pid = cur_pstate->dpu_pstate->base.plane->base.id;
1656                 right_rect = drm_plane_state_dest(cur_pstate->drm_pstate);
1657
1658                 if (right_rect.x1 < left_rect.x1) {
1659                         swap(left_pid, right_pid);
1660                         swap(left_rect, right_rect);
1661                 }
1662
1663                 /**
1664                  * - planes are enumerated in pipe-priority order such that
1665                  *   planes with lower drm_id must be left-most in a shared
1666                  *   blend-stage when using source split.
1667                  * - planes in source split must be contiguous in width
1668                  * - planes in source split must have same dest yoff and height
1669                  */
1670                 if (right_pid < left_pid) {
1671                         DPU_ERROR(
1672                                 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
1673                                 stage, left_pid, right_pid);
1674                         rc = -EINVAL;
1675                         goto end;
1676                 } else if (right_rect.x1 != drm_rect_width(&left_rect)) {
1677                         DPU_ERROR("non-contiguous coordinates for src split. "
1678                                   "stage: %d left: " DRM_RECT_FMT " right: "
1679                                   DRM_RECT_FMT "\n", stage,
1680                                   DRM_RECT_ARG(&left_rect),
1681                                   DRM_RECT_ARG(&right_rect));
1682                         rc = -EINVAL;
1683                         goto end;
1684                 } else if (left_rect.y1 != right_rect.y1 ||
1685                            drm_rect_height(&left_rect) != drm_rect_height(&right_rect)) {
1686                         DPU_ERROR("source split at stage: %d. invalid "
1687                                   "yoff/height: left: " DRM_RECT_FMT " right: "
1688                                   DRM_RECT_FMT "\n", stage,
1689                                   DRM_RECT_ARG(&left_rect),
1690                                   DRM_RECT_ARG(&right_rect));
1691                         rc = -EINVAL;
1692                         goto end;
1693                 }
1694         }
1695
1696 end:
1697         _dpu_crtc_rp_free_unused(&cstate->rp);
1698         kfree(pstates);
1699         return rc;
1700 }
1701
1702 int dpu_crtc_vblank(struct drm_crtc *crtc, bool en)
1703 {
1704         struct dpu_crtc *dpu_crtc;
1705         int ret;
1706
1707         if (!crtc) {
1708                 DPU_ERROR("invalid crtc\n");
1709                 return -EINVAL;
1710         }
1711         dpu_crtc = to_dpu_crtc(crtc);
1712
1713         mutex_lock(&dpu_crtc->crtc_lock);
1714         trace_dpu_crtc_vblank(DRMID(&dpu_crtc->base), en, dpu_crtc);
1715         if (dpu_crtc->enabled && !dpu_crtc->suspend) {
1716                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, en);
1717                 if (ret)
1718                         DPU_ERROR("%s vblank enable failed: %d\n",
1719                                         dpu_crtc->name, ret);
1720         }
1721         dpu_crtc->vblank_requested = en;
1722         mutex_unlock(&dpu_crtc->crtc_lock);
1723
1724         return 0;
1725 }
1726
1727 #ifdef CONFIG_DEBUG_FS
/*
 * _dpu_debugfs_status_show - dump current crtc state into a debugfs seq_file
 * @s: seq_file handle; s->private holds the dpu_crtc (set at file creation)
 * @data: unused
 *
 * Prints the adjusted mode geometry, per-mixer lm/ctl assignment, then for
 * every plane on the crtc its blend stage, framebuffer layout, src/dst
 * rectangles and multirect config, and finally a vblank fps estimate.
 *
 * NOTE: reading this file resets vblank_cb_count/vblank_cb_time, so the fps
 * value is measured over the interval since the previous read.
 */
static int _dpu_debugfs_status_show(struct seq_file *s, void *data)
{
	struct dpu_crtc *dpu_crtc;
	struct dpu_plane_state *pstate = NULL;
	struct dpu_crtc_mixer *m;

	struct drm_crtc *crtc;
	struct drm_plane *plane;
	struct drm_display_mode *mode;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct dpu_crtc_state *cstate;

	int i, out_width;

	if (!s || !s->private)
		return -EINVAL;

	dpu_crtc = s->private;
	crtc = &dpu_crtc->base;
	cstate = to_dpu_crtc_state(crtc->state);

	/* hold crtc_lock so mixer/vblank bookkeeping can't change mid-dump */
	mutex_lock(&dpu_crtc->crtc_lock);
	mode = &crtc->state->adjusted_mode;
	out_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, mode);

	seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
				mode->hdisplay, mode->vdisplay);

	seq_puts(s, "\n");

	/* one line per layer mixer; flag mixers missing their lm or ctl */
	for (i = 0; i < dpu_crtc->num_mixers; ++i) {
		m = &dpu_crtc->mixers[i];
		if (!m->hw_lm)
			seq_printf(s, "\tmixer[%d] has no lm\n", i);
		else if (!m->hw_ctl)
			seq_printf(s, "\tmixer[%d] has no ctl\n", i);
		else
			seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
				m->hw_lm->idx - LM_0, m->hw_ctl->idx - CTL_0,
				out_width, mode->vdisplay);
	}

	seq_puts(s, "\n");

	drm_atomic_crtc_for_each_plane(plane, crtc) {
		pstate = to_dpu_plane_state(plane->state);
		state = plane->state;

		if (!pstate || !state)
			continue;

		seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
			pstate->stage);

		if (plane->state->fb) {
			fb = plane->state->fb;

			/* %4.4s prints the fourcc code as ASCII characters */
			seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u ",
				fb->base.id, (char *) &fb->format->format,
				fb->width, fb->height);
			for (i = 0; i < ARRAY_SIZE(fb->format->cpp); ++i)
				seq_printf(s, "cpp[%d]:%u ",
						i, fb->format->cpp[i]);
			seq_puts(s, "\n\t");

			seq_printf(s, "modifier:%8llu ", fb->modifier);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
				seq_printf(s, "pitches[%d]:%8u ", i,
							fb->pitches[i]);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
				seq_printf(s, "offsets[%d]:%8u ", i,
							fb->offsets[i]);
			seq_puts(s, "\n");
		}

		/* src_* are 16.16 fixed point per DRM plane state convention */
		seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
			state->src_x, state->src_y, state->src_w, state->src_h);

		seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
			state->crtc_x, state->crtc_y, state->crtc_w,
			state->crtc_h);
		seq_printf(s, "\tmultirect: mode: %d index: %d\n",
			pstate->multirect_mode, pstate->multirect_index);

		seq_puts(s, "\n");
	}
	if (dpu_crtc->vblank_cb_count) {
		ktime_t diff = ktime_sub(ktime_get(), dpu_crtc->vblank_cb_time);
		s64 diff_ms = ktime_to_ms(diff);
		/* guard against div-by-zero when the interval is < 1 ms */
		s64 fps = diff_ms ? div_s64(
				dpu_crtc->vblank_cb_count * 1000, diff_ms) : 0;

		seq_printf(s,
			"vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
				fps, dpu_crtc->vblank_cb_count,
				ktime_to_ms(diff), dpu_crtc->play_count);

		/* reset time & count for next measurement */
		dpu_crtc->vblank_cb_count = 0;
		dpu_crtc->vblank_cb_time = ktime_set(0, 0);
	}

	seq_printf(s, "vblank_enable:%d\n", dpu_crtc->vblank_requested);

	mutex_unlock(&dpu_crtc->crtc_lock);

	return 0;
}
1843
/* seq_file open hook for the "status" node; i_private is the dpu_crtc */
static int _dpu_debugfs_status_open(struct inode *inode, struct file *file)
{
	return single_open(file, _dpu_debugfs_status_show, inode->i_private);
}
1848
1849 static ssize_t _dpu_crtc_misr_setup(struct file *file,
1850                 const char __user *user_buf, size_t count, loff_t *ppos)
1851 {
1852         struct dpu_crtc *dpu_crtc;
1853         struct dpu_crtc_mixer *m;
1854         int i = 0, rc;
1855         char buf[MISR_BUFF_SIZE + 1];
1856         u32 frame_count, enable;
1857         size_t buff_copy;
1858
1859         if (!file || !file->private_data)
1860                 return -EINVAL;
1861
1862         dpu_crtc = file->private_data;
1863         buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
1864         if (copy_from_user(buf, user_buf, buff_copy)) {
1865                 DPU_ERROR("buffer copy failed\n");
1866                 return -EINVAL;
1867         }
1868
1869         buf[buff_copy] = 0; /* end of string */
1870
1871         if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
1872                 return -EINVAL;
1873
1874         rc = _dpu_crtc_power_enable(dpu_crtc, true);
1875         if (rc)
1876                 return rc;
1877
1878         mutex_lock(&dpu_crtc->crtc_lock);
1879         dpu_crtc->misr_enable = enable;
1880         dpu_crtc->misr_frame_count = frame_count;
1881         for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1882                 dpu_crtc->misr_data[i] = 0;
1883                 m = &dpu_crtc->mixers[i];
1884                 if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
1885                         continue;
1886
1887                 m->hw_lm->ops.setup_misr(m->hw_lm, enable, frame_count);
1888         }
1889         mutex_unlock(&dpu_crtc->crtc_lock);
1890         _dpu_crtc_power_enable(dpu_crtc, false);
1891
1892         return count;
1893 }
1894
1895 static ssize_t _dpu_crtc_misr_read(struct file *file,
1896                 char __user *user_buff, size_t count, loff_t *ppos)
1897 {
1898         struct dpu_crtc *dpu_crtc;
1899         struct dpu_crtc_mixer *m;
1900         int i = 0, rc;
1901         u32 misr_status;
1902         ssize_t len = 0;
1903         char buf[MISR_BUFF_SIZE + 1] = {'\0'};
1904
1905         if (*ppos)
1906                 return 0;
1907
1908         if (!file || !file->private_data)
1909                 return -EINVAL;
1910
1911         dpu_crtc = file->private_data;
1912         rc = _dpu_crtc_power_enable(dpu_crtc, true);
1913         if (rc)
1914                 return rc;
1915
1916         mutex_lock(&dpu_crtc->crtc_lock);
1917         if (!dpu_crtc->misr_enable) {
1918                 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
1919                         "disabled\n");
1920                 goto buff_check;
1921         }
1922
1923         for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1924                 m = &dpu_crtc->mixers[i];
1925                 if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
1926                         continue;
1927
1928                 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
1929                 dpu_crtc->misr_data[i] = misr_status ? misr_status :
1930                                                         dpu_crtc->misr_data[i];
1931                 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "lm idx:%d\n",
1932                                         m->hw_lm->idx - LM_0);
1933                 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
1934                                                         dpu_crtc->misr_data[i]);
1935         }
1936
1937 buff_check:
1938         if (count <= len) {
1939                 len = 0;
1940                 goto end;
1941         }
1942
1943         if (copy_to_user(user_buff, buf, len)) {
1944                 len = -EFAULT;
1945                 goto end;
1946         }
1947
1948         *ppos += len;   /* increase offset */
1949
1950 end:
1951         mutex_unlock(&dpu_crtc->crtc_lock);
1952         _dpu_crtc_power_enable(dpu_crtc, false);
1953         return len;
1954 }
1955
/*
 * DEFINE_DPU_DEBUGFS_SEQ_FOPS - generate the open() helper and
 * file_operations table for a single-record seq_file debugfs node.
 * Expects a function named <prefix>_show to exist; emits <prefix>_open
 * and <prefix>_fops. inode->i_private is forwarded as the seq_file's
 * private data.
 */
#define DEFINE_DPU_DEBUGFS_SEQ_FOPS(__prefix)                          \
static int __prefix ## _open(struct inode *inode, struct file *file)    \
{                                                                       \
	return single_open(file, __prefix ## _show, inode->i_private);  \
}                                                                       \
static const struct file_operations __prefix ## _fops = {               \
	.owner = THIS_MODULE,                                           \
	.open = __prefix ## _open,                                      \
	.release = single_release,                                      \
	.read = seq_read,                                               \
	.llseek = seq_lseek,                                            \
}
1968
/*
 * dpu_crtc_debugfs_state_show - dump perf state and resource pools
 * @s: seq_file; s->private is the drm_crtc (installed at file creation)
 * @v: unused
 *
 * Prints the client type, interface mode, the currently committed core
 * clock rate and per-data-bus bandwidth figures, then walks every
 * resource pool attached to the crtc and lists its reserved resources
 * as type/tag/value/refcount tuples.
 */
static int dpu_crtc_debugfs_state_show(struct seq_file *s, void *v)
{
	struct drm_crtc *crtc = (struct drm_crtc *) s->private;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_res *res;
	struct dpu_crtc_respool *rp;
	int i;

	seq_printf(s, "client type: %d\n", dpu_crtc_get_client_type(crtc));
	seq_printf(s, "intf_mode: %d\n", dpu_crtc_get_intf_mode(crtc));
	seq_printf(s, "core_clk_rate: %llu\n",
			dpu_crtc->cur_perf.core_clk_rate);
	for (i = DPU_POWER_HANDLE_DBUS_ID_MNOC;
			i < DPU_POWER_HANDLE_DBUS_ID_MAX; i++) {
		seq_printf(s, "bw_ctl[%s]: %llu\n",
				dpu_power_handle_get_dbus_name(i),
				dpu_crtc->cur_perf.bw_ctl[i]);
		seq_printf(s, "max_per_pipe_ib[%s]: %llu\n",
				dpu_power_handle_get_dbus_name(i),
				dpu_crtc->cur_perf.max_per_pipe_ib[i]);
	}

	/* rp_lock guards rp_head against concurrent commit-time updates */
	mutex_lock(&dpu_crtc->rp_lock);
	list_for_each_entry(rp, &dpu_crtc->rp_head, rp_list) {
		seq_printf(s, "rp.%d: ", rp->sequence_id);
		list_for_each_entry(res, &rp->res_list, list)
			seq_printf(s, "0x%x/0x%llx/%pK/%d ",
					res->type, res->tag, res->val,
					atomic_read(&res->refcount));
		seq_puts(s, "\n");
	}
	mutex_unlock(&dpu_crtc->rp_lock);

	return 0;
}
DEFINE_DPU_DEBUGFS_SEQ_FOPS(dpu_crtc_debugfs_state);
2005
2006 static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
2007 {
2008         struct dpu_crtc *dpu_crtc;
2009         struct dpu_kms *dpu_kms;
2010
2011         static const struct file_operations debugfs_status_fops = {
2012                 .open =         _dpu_debugfs_status_open,
2013                 .read =         seq_read,
2014                 .llseek =       seq_lseek,
2015                 .release =      single_release,
2016         };
2017         static const struct file_operations debugfs_misr_fops = {
2018                 .open =         simple_open,
2019                 .read =         _dpu_crtc_misr_read,
2020                 .write =        _dpu_crtc_misr_setup,
2021         };
2022
2023         if (!crtc)
2024                 return -EINVAL;
2025         dpu_crtc = to_dpu_crtc(crtc);
2026
2027         dpu_kms = _dpu_crtc_get_kms(crtc);
2028         if (!dpu_kms)
2029                 return -EINVAL;
2030
2031         dpu_crtc->debugfs_root = debugfs_create_dir(dpu_crtc->name,
2032                         crtc->dev->primary->debugfs_root);
2033         if (!dpu_crtc->debugfs_root)
2034                 return -ENOMEM;
2035
2036         /* don't error check these */
2037         debugfs_create_file("status", 0400,
2038                         dpu_crtc->debugfs_root,
2039                         dpu_crtc, &debugfs_status_fops);
2040         debugfs_create_file("state", 0600,
2041                         dpu_crtc->debugfs_root,
2042                         &dpu_crtc->base,
2043                         &dpu_crtc_debugfs_state_fops);
2044         debugfs_create_file("misr_data", 0600, dpu_crtc->debugfs_root,
2045                                         dpu_crtc, &debugfs_misr_fops);
2046
2047         return 0;
2048 }
2049
2050 static void _dpu_crtc_destroy_debugfs(struct drm_crtc *crtc)
2051 {
2052         struct dpu_crtc *dpu_crtc;
2053
2054         if (!crtc)
2055                 return;
2056         dpu_crtc = to_dpu_crtc(crtc);
2057         debugfs_remove_recursive(dpu_crtc->debugfs_root);
2058 }
2059 #else
/* debugfs disabled: stub that reports success without creating any files */
static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
{
	return 0;
}
2064
/* debugfs disabled: nothing to tear down */
static void _dpu_crtc_destroy_debugfs(struct drm_crtc *crtc)
{
}
2068 #endif /* CONFIG_DEBUG_FS */
2069
/* drm_crtc_funcs.late_register hook: set up debugfs once the crtc is live */
static int dpu_crtc_late_register(struct drm_crtc *crtc)
{
	return _dpu_crtc_init_debugfs(crtc);
}
2074
/* drm_crtc_funcs.early_unregister hook: remove debugfs before teardown */
static void dpu_crtc_early_unregister(struct drm_crtc *crtc)
{
	_dpu_crtc_destroy_debugfs(crtc);
}
2079
/* crtc vtable: atomic helpers for legacy entry points, dpu state handling */
static const struct drm_crtc_funcs dpu_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = dpu_crtc_destroy,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = dpu_crtc_reset,
	.atomic_duplicate_state = dpu_crtc_duplicate_state,
	.atomic_destroy_state = dpu_crtc_destroy_state,
	.late_register = dpu_crtc_late_register,
	.early_unregister = dpu_crtc_early_unregister,
};
2090
/* atomic commit/check helper hooks invoked by the drm atomic framework */
static const struct drm_crtc_helper_funcs dpu_crtc_helper_funcs = {
	.disable = dpu_crtc_disable,
	.atomic_enable = dpu_crtc_enable,
	.atomic_check = dpu_crtc_atomic_check,
	.atomic_begin = dpu_crtc_atomic_begin,
	.atomic_flush = dpu_crtc_atomic_flush,
};
2098
2099 /* initialize crtc */
2100 struct drm_crtc *dpu_crtc_init(struct drm_device *dev, struct drm_plane *plane,
2101                                 struct drm_plane *cursor)
2102 {
2103         struct drm_crtc *crtc = NULL;
2104         struct dpu_crtc *dpu_crtc = NULL;
2105         struct msm_drm_private *priv = NULL;
2106         struct dpu_kms *kms = NULL;
2107         int i;
2108
2109         priv = dev->dev_private;
2110         kms = to_dpu_kms(priv->kms);
2111
2112         dpu_crtc = kzalloc(sizeof(*dpu_crtc), GFP_KERNEL);
2113         if (!dpu_crtc)
2114                 return ERR_PTR(-ENOMEM);
2115
2116         crtc = &dpu_crtc->base;
2117         crtc->dev = dev;
2118
2119         mutex_init(&dpu_crtc->crtc_lock);
2120         spin_lock_init(&dpu_crtc->spin_lock);
2121         atomic_set(&dpu_crtc->frame_pending, 0);
2122
2123         mutex_init(&dpu_crtc->rp_lock);
2124         INIT_LIST_HEAD(&dpu_crtc->rp_head);
2125
2126         init_completion(&dpu_crtc->frame_done_comp);
2127
2128         INIT_LIST_HEAD(&dpu_crtc->frame_event_list);
2129
2130         for (i = 0; i < ARRAY_SIZE(dpu_crtc->frame_events); i++) {
2131                 INIT_LIST_HEAD(&dpu_crtc->frame_events[i].list);
2132                 list_add(&dpu_crtc->frame_events[i].list,
2133                                 &dpu_crtc->frame_event_list);
2134                 kthread_init_work(&dpu_crtc->frame_events[i].work,
2135                                 dpu_crtc_frame_event_work);
2136         }
2137
2138         drm_crtc_init_with_planes(dev, crtc, plane, cursor, &dpu_crtc_funcs,
2139                                 NULL);
2140
2141         drm_crtc_helper_add(crtc, &dpu_crtc_helper_funcs);
2142         plane->crtc = crtc;
2143
2144         /* save user friendly CRTC name for later */
2145         snprintf(dpu_crtc->name, DPU_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);
2146
2147         /* initialize event handling */
2148         spin_lock_init(&dpu_crtc->event_lock);
2149
2150         dpu_crtc->phandle = &kms->phandle;
2151
2152         DPU_DEBUG("%s: successfully initialized crtc\n", dpu_crtc->name);
2153         return crtc;
2154 }