/*
 * (C) COPYRIGHT 2016 ARM Limited. All rights reserved.
 * Author: Liviu Dudau <Liviu.Dudau@arm.com>
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU licence.
 *
 * ARM Mali DP plane manipulation routines.
 */

#include <linux/iommu.h>

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_print.h>

#include "malidp_hw.h"
#include "malidp_drv.h"

/* Layer specific register offsets */
#define MALIDP_LAYER_FORMAT             0x000
#define LAYER_FORMAT_MASK               0x3f
#define MALIDP_LAYER_CONTROL            0x004
#define LAYER_ENABLE                    (1 << 0)
#define LAYER_FLOWCFG_MASK              7
#define LAYER_FLOWCFG(x)                (((x) & LAYER_FLOWCFG_MASK) << 1)
#define LAYER_FLOWCFG_SCALE_SE          3
#define LAYER_ROT_OFFSET                8
#define LAYER_H_FLIP                    (1 << 10)
#define LAYER_V_FLIP                    (1 << 11)
#define LAYER_ROT_MASK                  (0xf << 8)
#define LAYER_COMP_MASK                 (0x3 << 12)
#define LAYER_COMP_PIXEL                (0x3 << 12)
#define LAYER_COMP_PLANE                (0x2 << 12)
#define LAYER_PMUL_ENABLE               (0x1 << 14)
#define LAYER_ALPHA_OFFSET              (16)
#define LAYER_ALPHA_MASK                (0xff)
#define LAYER_ALPHA(x)                  (((x) & LAYER_ALPHA_MASK) << LAYER_ALPHA_OFFSET)
#define MALIDP_LAYER_COMPOSE            0x008
#define MALIDP_LAYER_SIZE               0x00c
#define LAYER_H_VAL(x)                  (((x) & 0x1fff) << 0)
#define LAYER_V_VAL(x)                  (((x) & 0x1fff) << 16)
#define MALIDP_LAYER_COMP_SIZE          0x010
#define MALIDP_LAYER_OFFSET             0x014
#define MALIDP550_LS_ENABLE             0x01c
#define MALIDP550_LS_R1_IN_SIZE         0x020

/*
 * This 4-entry look-up-table is used to determine the full 8-bit alpha value
 * for formats with 1- or 2-bit alpha channels.
 * We set it to give 100%/0% opacity for 1-bit formats and 100%/66%/33%/0%
 * opacity for 2-bit formats.
 */
#define MALIDP_ALPHA_LUT                0xffaa5500

/* page sizes the MMU prefetcher can support */
#define MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES     (SZ_4K | SZ_64K)
#define MALIDP_MMU_PREFETCH_FULL_PGSIZES        (SZ_1M | SZ_2M)

/* readahead for partial-frame prefetch */
#define MALIDP_MMU_PREFETCH_READAHEAD           8

static void malidp_de_plane_destroy(struct drm_plane *plane)
{
        struct malidp_plane *mp = to_malidp_plane(plane);

        drm_plane_cleanup(plane);
        kfree(mp);
}

/*
 * Replicate what the default ->reset hook does: free the state pointer and
 * allocate a new empty object. We just need enough space to store
 * a malidp_plane_state instead of a drm_plane_state.
 */
static void malidp_plane_reset(struct drm_plane *plane)
{
        struct malidp_plane_state *state = to_malidp_plane_state(plane->state);

        if (state)
                __drm_atomic_helper_plane_destroy_state(&state->base);
        kfree(state);
        plane->state = NULL;
        state = kzalloc(sizeof(*state), GFP_KERNEL);
        if (state)
                __drm_atomic_helper_plane_reset(plane, &state->base);
}

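/*
 * Duplicate the plane state for an atomic update, copying the driver
 * specific fields (rotation memory size, format id, plane count and MMU
 * prefetch settings) alongside the core drm_plane_state.
 */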
static struct
drm_plane_state *malidp_duplicate_plane_state(struct drm_plane *plane)
{
        struct malidp_plane_state *state, *m_state;

        if (!plane->state)
                return NULL;

        state = kmalloc(sizeof(*state), GFP_KERNEL);
        if (!state)
                return NULL;

        m_state = to_malidp_plane_state(plane->state);
        __drm_atomic_helper_plane_duplicate_state(plane, &state->base);
        state->rotmem_size = m_state->rotmem_size;
        state->format = m_state->format;
        state->n_planes = m_state->n_planes;

        state->mmu_prefetch_mode = m_state->mmu_prefetch_mode;
        state->mmu_prefetch_pgsize = m_state->mmu_prefetch_pgsize;

        return &state->base;
}

static void malidp_destroy_plane_state(struct drm_plane *plane,
                                       struct drm_plane_state *state)
{
        struct malidp_plane_state *m_state = to_malidp_plane_state(state);

        __drm_atomic_helper_plane_destroy_state(state);
        kfree(m_state);
}

static const char * const prefetch_mode_names[] = {
        [MALIDP_PREFETCH_MODE_NONE] = "MMU_PREFETCH_NONE",
        [MALIDP_PREFETCH_MODE_PARTIAL] = "MMU_PREFETCH_PARTIAL",
        [MALIDP_PREFETCH_MODE_FULL] = "MMU_PREFETCH_FULL",
};

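/* Dump the driver specific plane state for atomic state printing. */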
static void malidp_plane_atomic_print_state(struct drm_printer *p,
                                            const struct drm_plane_state *state)
{
        struct malidp_plane_state *ms = to_malidp_plane_state(state);

        drm_printf(p, "\trotmem_size=%u\n", ms->rotmem_size);
        drm_printf(p, "\tformat_id=%u\n", ms->format);
        drm_printf(p, "\tn_planes=%u\n", ms->n_planes);
        drm_printf(p, "\tmmu_prefetch_mode=%s\n",
                   prefetch_mode_names[ms->mmu_prefetch_mode]);
        drm_printf(p, "\tmmu_prefetch_pgsize=%d\n", ms->mmu_prefetch_pgsize);
}

static const struct drm_plane_funcs malidp_de_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = malidp_de_plane_destroy,
        .reset = malidp_plane_reset,
        .atomic_duplicate_state = malidp_duplicate_plane_state,
        .atomic_destroy_state = malidp_destroy_plane_state,
        .atomic_print_state = malidp_plane_atomic_print_state,
};

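/*
 * Check whether this plane needs the scaling engine and record that in the
 * CRTC state; the actual scaler configuration is worked out later, during
 * the CRTC atomic check.
 */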
static int malidp_se_check_scaling(struct malidp_plane *mp,
                                   struct drm_plane_state *state)
{
        struct drm_crtc_state *crtc_state =
                drm_atomic_get_existing_crtc_state(state->state, state->crtc);
        struct malidp_crtc_state *mc;
        u32 src_w, src_h;
        int ret;

        if (!crtc_state)
                return -EINVAL;

        mc = to_malidp_crtc_state(crtc_state);

        ret = drm_atomic_helper_check_plane_state(state, crtc_state,
                                                  0, INT_MAX, true, true);
        if (ret)
                return ret;

        if (state->rotation & MALIDP_ROTATED_MASK) {
                src_w = state->src_h >> 16;
                src_h = state->src_w >> 16;
        } else {
                src_w = state->src_w >> 16;
                src_h = state->src_h >> 16;
        }

        if ((state->crtc_w == src_w) && (state->crtc_h == src_h)) {
                /* Scaling not necessary for this plane. */
                mc->scaled_planes_mask &= ~(mp->layer->id);
                return 0;
        }

        if (mp->layer->id & (DE_SMART | DE_GRAPHICS2))
                return -EINVAL;

        mc->scaled_planes_mask |= mp->layer->id;
        /* Defer scaling requirements calculation to the crtc check. */
        return 0;
}

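/*
 * Return the page size bitmap of the IOMMU domain used by the display
 * device, or 0 when no IOMMU is present.
 */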
static u32 malidp_get_pgsize_bitmap(struct malidp_plane *mp)
{
        u32 pgsize_bitmap = 0;

        if (iommu_present(&platform_bus_type)) {
                struct iommu_domain *mmu_dom =
                        iommu_get_domain_for_dev(mp->base.dev->dev);

                if (mmu_dom)
                        pgsize_bitmap = mmu_dom->pgsize_bitmap;
        }

        return pgsize_bitmap;
}

/*
 * Check if the framebuffer is entirely made up of pages at least pgsize in
 * size. Only a heuristic: assumes that each scatterlist entry has been aligned
 * to the largest page size smaller than its length and that the MMU maps to
 * the largest page size possible.
 */
static bool malidp_check_pages_threshold(struct malidp_plane_state *ms,
                                         u32 pgsize)
{
        int i;

        for (i = 0; i < ms->n_planes; i++) {
                struct drm_gem_object *obj;
                struct drm_gem_cma_object *cma_obj;
                struct sg_table *sgt;
                struct scatterlist *sgl;

                obj = drm_gem_fb_get_obj(ms->base.fb, i);
                cma_obj = to_drm_gem_cma_obj(obj);

                if (cma_obj->sgt)
                        sgt = cma_obj->sgt;
                else
                        sgt = obj->dev->driver->gem_prime_get_sg_table(obj);

                if (!sgt)
                        return false;

                sgl = sgt->sgl;

                while (sgl) {
                        if (sgl->length < pgsize) {
                                if (!cma_obj->sgt)
                                        kfree(sgt);
                                return false;
                        }

                        sgl = sg_next(sgl);
                }
                if (!cma_obj->sgt)
                        kfree(sgt);
        }

        return true;
}

/*
 * Check if it is possible to enable partial-frame MMU prefetch given the
 * current format, AFBC state and rotation.
 */
static bool malidp_partial_prefetch_supported(u32 format, u64 modifier,
                                              unsigned int rotation)
{
        bool afbc, sparse;

        /* rotation and horizontal flip not supported for partial prefetch */
        if (rotation & (DRM_MODE_ROTATE_90 | DRM_MODE_ROTATE_180 |
                        DRM_MODE_ROTATE_270 | DRM_MODE_REFLECT_X))
                return false;

        afbc = modifier & DRM_FORMAT_MOD_ARM_AFBC(0);
        sparse = modifier & AFBC_FORMAT_MOD_SPARSE;

        switch (format) {
        case DRM_FORMAT_ARGB2101010:
        case DRM_FORMAT_RGBA1010102:
        case DRM_FORMAT_BGRA1010102:
        case DRM_FORMAT_ARGB8888:
        case DRM_FORMAT_RGBA8888:
        case DRM_FORMAT_BGRA8888:
        case DRM_FORMAT_XRGB8888:
        case DRM_FORMAT_XBGR8888:
        case DRM_FORMAT_RGBX8888:
        case DRM_FORMAT_BGRX8888:
        case DRM_FORMAT_RGB888:
        case DRM_FORMAT_RGBA5551:
        case DRM_FORMAT_RGB565:
                /* always supported */
                return true;

        case DRM_FORMAT_ABGR2101010:
        case DRM_FORMAT_ABGR8888:
        case DRM_FORMAT_ABGR1555:
        case DRM_FORMAT_BGR565:
                /* supported, but if AFBC then must be sparse mode */
                return (!afbc) || (afbc && sparse);

        case DRM_FORMAT_BGR888:
                /* supported, but not for AFBC */
                return !afbc;

        case DRM_FORMAT_YUYV:
        case DRM_FORMAT_UYVY:
        case DRM_FORMAT_NV12:
        case DRM_FORMAT_YUV420:
                /* not supported */
                return false;

        default:
                return false;
        }
}

/*
 * Select the preferred MMU prefetch mode. Full-frame prefetch is preferred as
 * long as the framebuffer is all large pages. Otherwise partial-frame prefetch
 * is selected as long as it is supported for the current format. The selected
 * page size for prefetch is returned in pgsize_bitmap.
 */
static enum mmu_prefetch_mode malidp_mmu_prefetch_select_mode
                (struct malidp_plane_state *ms, u32 *pgsize_bitmap)
{
        u32 pgsizes;

        /* get the full-frame prefetch page size(s) supported by the MMU */
        pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_FULL_PGSIZES;

        while (pgsizes) {
                u32 largest_pgsize = 1 << __fls(pgsizes);

                if (malidp_check_pages_threshold(ms, largest_pgsize)) {
                        *pgsize_bitmap = largest_pgsize;
                        return MALIDP_PREFETCH_MODE_FULL;
                }

                pgsizes -= largest_pgsize;
        }

        /* get the partial-frame prefetch page size(s) supported by the MMU */
        pgsizes = *pgsize_bitmap & MALIDP_MMU_PREFETCH_PARTIAL_PGSIZES;

        if (malidp_partial_prefetch_supported(ms->base.fb->format->format,
                                              ms->base.fb->modifier,
                                              ms->base.rotation)) {
                /* partial prefetch using the smallest page size */
                *pgsize_bitmap = 1 << __ffs(pgsizes);
                return MALIDP_PREFETCH_MODE_PARTIAL;
        }
        *pgsize_bitmap = 0;
        return MALIDP_PREFETCH_MODE_NONE;
}

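/*
 * Build the value of the layer's MMU control register from the selected
 * prefetch mode, readahead, plane count and page size.
 */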
static u32 malidp_calc_mmu_control_value(enum mmu_prefetch_mode mode,
                                         u8 readahead, u8 n_planes, u32 pgsize)
{
        u32 mmu_ctrl = 0;

        if (mode != MALIDP_PREFETCH_MODE_NONE) {
                mmu_ctrl |= MALIDP_MMU_CTRL_EN;

                if (mode == MALIDP_PREFETCH_MODE_PARTIAL) {
                        mmu_ctrl |= MALIDP_MMU_CTRL_MODE;
                        mmu_ctrl |= MALIDP_MMU_CTRL_PP_NUM_REQ(readahead);
                }

                if (pgsize == SZ_64K || pgsize == SZ_2M) {
                        int i;

                        for (i = 0; i < n_planes; i++)
                                mmu_ctrl |= MALIDP_MMU_CTRL_PX_PS(i);
                }
        }

        return mmu_ctrl;
}

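/*
 * Pick the MMU prefetch mode and page size for this plane, but only if the
 * layer actually has an MMU control register.
 */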
static void malidp_de_prefetch_settings(struct malidp_plane *mp,
                                        struct malidp_plane_state *ms)
{
        if (!mp->layer->mmu_ctrl_offset)
                return;

        /* get the page sizes supported by the MMU */
        ms->mmu_prefetch_pgsize = malidp_get_pgsize_bitmap(mp);
        ms->mmu_prefetch_mode =
                malidp_mmu_prefetch_select_mode(ms, &ms->mmu_prefetch_pgsize);
}

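/*
 * Validate the plane state: format, pitch alignment, line size limits,
 * scaling, rotation and blending constraints, then pre-compute the MMU
 * prefetch settings for the commit phase.
 */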
static int malidp_de_plane_check(struct drm_plane *plane,
                                 struct drm_plane_state *state)
{
        struct malidp_plane *mp = to_malidp_plane(plane);
        struct malidp_plane_state *ms = to_malidp_plane_state(state);
        bool rotated = state->rotation & MALIDP_ROTATED_MASK;
        struct drm_framebuffer *fb;
        u16 pixel_alpha = state->pixel_blend_mode;
        int i, ret;

        if (!state->crtc || !state->fb)
                return 0;

        fb = state->fb;

        ms->format = malidp_hw_get_format_id(&mp->hwdev->hw->map,
                                             mp->layer->id,
                                             fb->format->format);
        if (ms->format == MALIDP_INVALID_FORMAT_ID)
                return -EINVAL;

        ms->n_planes = fb->format->num_planes;
        for (i = 0; i < ms->n_planes; i++) {
                u8 alignment = malidp_hw_get_pitch_align(mp->hwdev, rotated);

                if (fb->pitches[i] & (alignment - 1)) {
                        DRM_DEBUG_KMS("Invalid pitch %u for plane %d\n",
                                      fb->pitches[i], i);
                        return -EINVAL;
                }
        }

        if ((state->crtc_w > mp->hwdev->max_line_size) ||
            (state->crtc_h > mp->hwdev->max_line_size) ||
            (state->crtc_w < mp->hwdev->min_line_size) ||
            (state->crtc_h < mp->hwdev->min_line_size))
                return -EINVAL;

        /*
         * DP550/650 video layers can accept 3 plane formats only if
         * fb->pitches[1] == fb->pitches[2] since they don't have a
         * third plane stride register.
         */
        if (ms->n_planes == 3 &&
            !(mp->hwdev->hw->features & MALIDP_DEVICE_LV_HAS_3_STRIDES) &&
            (state->fb->pitches[1] != state->fb->pitches[2]))
                return -EINVAL;

        ret = malidp_se_check_scaling(mp, state);
        if (ret)
                return ret;

        /* validate the rotation constraints for each layer */
        if (state->rotation != DRM_MODE_ROTATE_0) {
                if (mp->layer->rot == ROTATE_NONE)
                        return -EINVAL;
                if ((mp->layer->rot == ROTATE_COMPRESSED) && !(fb->modifier))
                        return -EINVAL;
                /*
                 * packed RGB888 / BGR888 can't be rotated or flipped
                 * unless they are stored in a compressed way
                 */
                if ((fb->format->format == DRM_FORMAT_RGB888 ||
                     fb->format->format == DRM_FORMAT_BGR888) && !(fb->modifier))
                        return -EINVAL;
        }

        ms->rotmem_size = 0;
        if (state->rotation & MALIDP_ROTATED_MASK) {
                int val;

                val = mp->hwdev->hw->rotmem_required(mp->hwdev, state->crtc_w,
                                                     state->crtc_h,
                                                     fb->format->format);
                if (val < 0)
                        return val;

                ms->rotmem_size = val;
        }

        /* HW can't support plane + pixel blending */
        if ((state->alpha != DRM_BLEND_ALPHA_OPAQUE) &&
            (pixel_alpha != DRM_MODE_BLEND_PIXEL_NONE) &&
            fb->format->has_alpha)
                return -EINVAL;

        malidp_de_prefetch_settings(mp, ms);

        return 0;
}

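/*
 * Program the per-plane stride registers. Video layers without the third
 * stride register only get two strides written for 3-plane formats.
 */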
static void malidp_de_set_plane_pitches(struct malidp_plane *mp,
                                        int num_planes, unsigned int pitches[3])
{
        int i;
        int num_strides = num_planes;

        if (!mp->layer->stride_offset)
                return;

        if (num_planes == 3)
                num_strides = (mp->hwdev->hw->features &
                               MALIDP_DEVICE_LV_HAS_3_STRIDES) ? 3 : 2;

        for (i = 0; i < num_strides; ++i)
                malidp_hw_write(mp->hwdev, pitches[i],
                                mp->layer->base +
                                mp->layer->stride_offset + i * 4);
}

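/*
 * YUV to RGB conversion matrices, indexed by colour encoding and range.
 * Each entry holds the nine 3x3 matrix coefficients followed by the three
 * input offsets; judging by the values, the coefficients appear to be scaled
 * by 1024 and the offsets expressed for 10-bit components.
 */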
static const s16
malidp_yuv2rgb_coeffs[][DRM_COLOR_RANGE_MAX][MALIDP_COLORADJ_NUM_COEFFS] = {
        [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1192,    0, 1634,
                1192, -401, -832,
                1192, 2066,    0,
                  64,  512,  512
        },
        [DRM_COLOR_YCBCR_BT601][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024,    0, 1436,
                1024, -352, -731,
                1024, 1815,    0,
                   0,  512,  512
        },
        [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1192,    0, 1836,
                1192, -218, -546,
                1192, 2163,    0,
                  64,  512,  512
        },
        [DRM_COLOR_YCBCR_BT709][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024,    0, 1613,
                1024, -192, -479,
                1024, 1900,    0,
                   0,  512,  512
        },
        [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_LIMITED_RANGE] = {
                1024,    0, 1476,
                1024, -165, -572,
                1024, 1884,    0,
                   0,  512,  512
        },
        [DRM_COLOR_YCBCR_BT2020][DRM_COLOR_YCBCR_FULL_RANGE] = {
                1024,    0, 1510,
                1024, -168, -585,
                1024, 1927,    0,
                   0,  512,  512
        }
};

static void malidp_de_set_color_encoding(struct malidp_plane *plane,
                                         enum drm_color_encoding enc,
                                         enum drm_color_range range)
{
        unsigned int i;

        for (i = 0; i < MALIDP_COLORADJ_NUM_COEFFS; i++) {
                /* coefficients are signed, two's complement values */
                malidp_hw_write(plane->hwdev, malidp_yuv2rgb_coeffs[enc][range][i],
                                plane->layer->base + plane->layer->yuv2rgb_offset +
                                i * 4);
        }
}

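/* Write the MMU prefetch control register for layers that have one. */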
static void malidp_de_set_mmu_control(struct malidp_plane *mp,
                                      struct malidp_plane_state *ms)
{
        u32 mmu_ctrl;

        /* check hardware supports MMU prefetch */
        if (!mp->layer->mmu_ctrl_offset)
                return;

        mmu_ctrl = malidp_calc_mmu_control_value(ms->mmu_prefetch_mode,
                                                 MALIDP_MMU_PREFETCH_READAHEAD,
                                                 ms->n_planes,
                                                 ms->mmu_prefetch_pgsize);

        malidp_hw_write(mp->hwdev, mmu_ctrl,
                        mp->layer->base + mp->layer->mmu_ctrl_offset);
}

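/*
 * Program the layer registers from the committed plane state: format,
 * framebuffer addresses, pitches, sizes, offsets, rotation/flip, blending
 * and scaling flow configuration, then enable the layer.
 */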
static void malidp_de_plane_update(struct drm_plane *plane,
                                   struct drm_plane_state *old_state)
{
        struct malidp_plane *mp;
        struct malidp_plane_state *ms = to_malidp_plane_state(plane->state);
        struct drm_plane_state *state = plane->state;
        u16 pixel_alpha = state->pixel_blend_mode;
        u8 plane_alpha = state->alpha >> 8;
        u32 src_w, src_h, dest_w, dest_h, val;
        int i;

        mp = to_malidp_plane(plane);

        /* convert src values from Q16 fixed point to integer */
        src_w = state->src_w >> 16;
        src_h = state->src_h >> 16;
        dest_w = state->crtc_w;
        dest_h = state->crtc_h;

        val = malidp_hw_read(mp->hwdev, mp->layer->base);
        val = (val & ~LAYER_FORMAT_MASK) | ms->format;
        malidp_hw_write(mp->hwdev, val, mp->layer->base);

        for (i = 0; i < ms->n_planes; i++) {
                /* calculate the offset for the layer's plane registers */
                u16 ptr = mp->layer->ptr + (i << 4);
                dma_addr_t fb_addr = drm_fb_cma_get_gem_addr(state->fb,
                                                             state, i);

                malidp_hw_write(mp->hwdev, lower_32_bits(fb_addr), ptr);
                malidp_hw_write(mp->hwdev, upper_32_bits(fb_addr), ptr + 4);
        }

        malidp_de_set_mmu_control(mp, ms);

        malidp_de_set_plane_pitches(mp, ms->n_planes,
                                    state->fb->pitches);

        if ((plane->state->color_encoding != old_state->color_encoding) ||
            (plane->state->color_range != old_state->color_range))
                malidp_de_set_color_encoding(mp, plane->state->color_encoding,
                                             plane->state->color_range);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                        mp->layer->base + MALIDP_LAYER_SIZE);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(dest_w) | LAYER_V_VAL(dest_h),
                        mp->layer->base + MALIDP_LAYER_COMP_SIZE);

        malidp_hw_write(mp->hwdev, LAYER_H_VAL(state->crtc_x) |
                        LAYER_V_VAL(state->crtc_y),
                        mp->layer->base + MALIDP_LAYER_OFFSET);

        if (mp->layer->id == DE_SMART) {
                /*
                 * Enable the first rectangle in the SMART layer to be
                 * able to use it as a drm plane.
                 */
                malidp_hw_write(mp->hwdev, 1,
                                mp->layer->base + MALIDP550_LS_ENABLE);
                malidp_hw_write(mp->hwdev,
                                LAYER_H_VAL(src_w) | LAYER_V_VAL(src_h),
                                mp->layer->base + MALIDP550_LS_R1_IN_SIZE);
        }

        /* first clear the rotation bits */
        val = malidp_hw_read(mp->hwdev, mp->layer->base + MALIDP_LAYER_CONTROL);
        val &= ~LAYER_ROT_MASK;

        /* setup the rotation and axis flip bits */
        if (state->rotation & DRM_MODE_ROTATE_MASK)
                val |= ilog2(plane->state->rotation & DRM_MODE_ROTATE_MASK) <<
                       LAYER_ROT_OFFSET;
        if (state->rotation & DRM_MODE_REFLECT_X)
                val |= LAYER_H_FLIP;
        if (state->rotation & DRM_MODE_REFLECT_Y)
                val |= LAYER_V_FLIP;

        val &= ~(LAYER_COMP_MASK | LAYER_PMUL_ENABLE | LAYER_ALPHA(0xff));

        if (state->alpha != DRM_BLEND_ALPHA_OPAQUE) {
                val |= LAYER_COMP_PLANE;
        } else if (state->fb->format->has_alpha) {
                /* We only care about blend mode if the format has alpha */
                switch (pixel_alpha) {
                case DRM_MODE_BLEND_PREMULTI:
                        val |= LAYER_COMP_PIXEL | LAYER_PMUL_ENABLE;
                        break;
                case DRM_MODE_BLEND_COVERAGE:
                        val |= LAYER_COMP_PIXEL;
                        break;
                }
        }
        val |= LAYER_ALPHA(plane_alpha);

        val &= ~LAYER_FLOWCFG(LAYER_FLOWCFG_MASK);
        if (state->crtc) {
                struct malidp_crtc_state *m =
                        to_malidp_crtc_state(state->crtc->state);

                if (m->scaler_config.scale_enable &&
                    m->scaler_config.plane_src_id == mp->layer->id)
                        val |= LAYER_FLOWCFG(LAYER_FLOWCFG_SCALE_SE);
        }

        /* set the 'enable layer' bit */
        val |= LAYER_ENABLE;

        malidp_hw_write(mp->hwdev, val,
                        mp->layer->base + MALIDP_LAYER_CONTROL);
}

static void malidp_de_plane_disable(struct drm_plane *plane,
                                    struct drm_plane_state *state)
{
        struct malidp_plane *mp = to_malidp_plane(plane);

        malidp_hw_clearbits(mp->hwdev,
                            LAYER_ENABLE | LAYER_FLOWCFG(LAYER_FLOWCFG_MASK),
                            mp->layer->base + MALIDP_LAYER_CONTROL);
}

static const struct drm_plane_helper_funcs malidp_de_plane_helper_funcs = {
        .atomic_check = malidp_de_plane_check,
        .atomic_update = malidp_de_plane_update,
        .atomic_disable = malidp_de_plane_disable,
};

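/*
 * Create one DRM plane per hardware layer, with the format list, rotation,
 * alpha/blend mode and (for video layers) colour encoding properties that
 * the layer supports.
 */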
int malidp_de_planes_init(struct drm_device *drm)
{
        struct malidp_drm *malidp = drm->dev_private;
        const struct malidp_hw_regmap *map = &malidp->dev->hw->map;
        struct malidp_plane *plane = NULL;
        enum drm_plane_type plane_type;
        unsigned long crtcs = 1 << drm->mode_config.num_crtc;
        unsigned long flags = DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_90 |
                              DRM_MODE_ROTATE_180 | DRM_MODE_ROTATE_270 |
                              DRM_MODE_REFLECT_X | DRM_MODE_REFLECT_Y;
        unsigned int blend_caps = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
                                  BIT(DRM_MODE_BLEND_PREMULTI) |
                                  BIT(DRM_MODE_BLEND_COVERAGE);
        u32 *formats;
        int ret, i, j, n;

        formats = kcalloc(map->n_pixel_formats, sizeof(*formats), GFP_KERNEL);
        if (!formats) {
                ret = -ENOMEM;
                goto cleanup;
        }

        for (i = 0; i < map->n_layers; i++) {
                u8 id = map->layers[i].id;

                plane = kzalloc(sizeof(*plane), GFP_KERNEL);
                if (!plane) {
                        ret = -ENOMEM;
                        goto cleanup;
                }

                /* build the list of DRM supported formats based on the map */
                for (n = 0, j = 0; j < map->n_pixel_formats; j++) {
                        if ((map->pixel_formats[j].layer & id) == id)
                                formats[n++] = map->pixel_formats[j].format;
                }

                plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
                                        DRM_PLANE_TYPE_OVERLAY;
                ret = drm_universal_plane_init(drm, &plane->base, crtcs,
                                               &malidp_de_plane_funcs, formats,
                                               n, NULL, plane_type, NULL);
                if (ret < 0)
                        goto cleanup;

                drm_plane_helper_add(&plane->base,
                                     &malidp_de_plane_helper_funcs);
                plane->hwdev = malidp->dev;
                plane->layer = &map->layers[i];

                drm_plane_create_alpha_property(&plane->base);
                drm_plane_create_blend_mode_property(&plane->base, blend_caps);

                if (id == DE_SMART) {
                        /* Skip the features which the SMART layer doesn't have. */
                        continue;
                }

                drm_plane_create_rotation_property(&plane->base,
                                                   DRM_MODE_ROTATE_0, flags);
                malidp_hw_write(malidp->dev, MALIDP_ALPHA_LUT,
                                plane->layer->base + MALIDP_LAYER_COMPOSE);

                /* Attach the YUV->RGB property only to video layers */
                if (id & (DE_VIDEO1 | DE_VIDEO2)) {
                        /* default encoding for YUV->RGB is BT601 NARROW */
                        enum drm_color_encoding enc = DRM_COLOR_YCBCR_BT601;
                        enum drm_color_range range = DRM_COLOR_YCBCR_LIMITED_RANGE;

                        ret = drm_plane_create_color_properties(&plane->base,
                                        BIT(DRM_COLOR_YCBCR_BT601) |
                                        BIT(DRM_COLOR_YCBCR_BT709) |
                                        BIT(DRM_COLOR_YCBCR_BT2020),
                                        BIT(DRM_COLOR_YCBCR_LIMITED_RANGE) |
                                        BIT(DRM_COLOR_YCBCR_FULL_RANGE),
                                        enc, range);
                        if (!ret)
                                /* program the HW registers */
                                malidp_de_set_color_encoding(plane, enc, range);
                        else
                                DRM_WARN("Failed to create video layer %d color properties\n", id);
                }
        }

        kfree(formats);

        return 0;

cleanup:
        kfree(formats);

        return ret;
}