drm/amd/dc: fix semicolon.cocci warnings
drivers/gpu/drm/amd/display/dc/core/dc_resource.c (linux-2.6-block.git)

/*
 * Copyright 2012-15 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
#include "dm_services.h"

#include "resource.h"
#include "include/irq_service_interface.h"
#include "link_encoder.h"
#include "stream_encoder.h"
#include "opp.h"
#include "timing_generator.h"
#include "transform.h"
#include "set_mode_types.h"
#include "virtual/virtual_stream_encoder.h"

#include "dce80/dce80_resource.h"
#include "dce100/dce100_resource.h"
#include "dce110/dce110_resource.h"
#include "dce112/dce112_resource.h"

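/*
 * Map a hardware ASIC id (chip family and internal revision) to the DCE
 * version handled by the corresponding resource implementation.
 */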
enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
{
        enum dce_version dc_version = DCE_VERSION_UNKNOWN;

        switch (asic_id.chip_family) {
        case FAMILY_CI:
        case FAMILY_KV:
                dc_version = DCE_VERSION_8_0;
                break;
        case FAMILY_CZ:
                dc_version = DCE_VERSION_11_0;
                break;

        case FAMILY_VI:
                if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
                                ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
                        dc_version = DCE_VERSION_10_0;
                        break;
                }
                if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
                                ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
                                ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
                        dc_version = DCE_VERSION_11_2;
                }
                break;
        default:
                dc_version = DCE_VERSION_UNKNOWN;
                break;
        }
        return dc_version;
}

struct resource_pool *dc_create_resource_pool(
                                struct core_dc *dc,
                                int num_virtual_links,
                                enum dce_version dc_version,
                                struct hw_asic_id asic_id)
{
        switch (dc_version) {
        case DCE_VERSION_8_0:
                return dce80_create_resource_pool(
                        num_virtual_links, dc);
        case DCE_VERSION_10_0:
                return dce100_create_resource_pool(
                        num_virtual_links, dc);
        case DCE_VERSION_11_0:
                return dce110_create_resource_pool(
                        num_virtual_links, dc, asic_id);
        case DCE_VERSION_11_2:
                return dce112_create_resource_pool(
                        num_virtual_links, dc);
        default:
                break;
        }

        return NULL;
}

void dc_destroy_resource_pool(struct core_dc *dc)
{
        if (dc) {
                if (dc->res_pool)
                        dc->res_pool->funcs->destroy(&dc->res_pool);

                if (dc->hwseq)
                        dm_free(dc->hwseq);
        }
}

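/*
 * Derive the number of usable audio endpoints and the supported audio paths
 * (HDMI native, HDMI audio on dongle, DP audio) from the DCE strap settings.
 */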
static void update_num_audio(
        const struct resource_straps *straps,
        unsigned int *num_audio,
        struct audio_support *aud_support)
{
        if (straps->hdmi_disable == 0) {
                aud_support->hdmi_audio_native = true;
                aud_support->hdmi_audio_on_dongle = true;
                aud_support->dp_audio = true;
        } else {
                if (straps->dc_pinstraps_audio & 0x2) {
                        aud_support->hdmi_audio_on_dongle = true;
                        aud_support->dp_audio = true;
                } else {
                        aud_support->dp_audio = true;
                }
        }

        switch (straps->audio_stream_number) {
        case 0: /* multi streams supported */
                break;
        case 1: /* multi streams not supported */
                *num_audio = 1;
                break;
        default:
                DC_ERR("DC: unexpected audio fuse!\n");
        }
}

bool resource_construct(
        unsigned int num_virtual_links,
        struct core_dc *dc,
        struct resource_pool *pool,
        const struct resource_create_funcs *create_funcs)
{
        struct dc_context *ctx = dc->ctx;
        const struct resource_caps *caps = pool->res_cap;
        int i;
        unsigned int num_audio = caps->num_audio;
        struct resource_straps straps = {0};

        if (create_funcs->read_dce_straps)
                create_funcs->read_dce_straps(dc->ctx, &straps);

        pool->audio_count = 0;
        if (create_funcs->create_audio) {
                /* find the total number of streams available via the
                 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
                 * registers (one for each pin) starting from pin 1
                 * up to the max number of audio pins.
                 * We stop on the first pin where
                 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
                 */
                update_num_audio(&straps, &num_audio, &pool->audio_support);
                for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
                        struct audio *aud = create_funcs->create_audio(ctx, i);

                        if (aud == NULL) {
                                DC_ERR("DC: failed to create audio!\n");
                                return false;
                        }

                        if (!aud->funcs->endpoint_valid(aud)) {
                                aud->funcs->destroy(&aud);
                                break;
                        }

                        pool->audios[i] = aud;
                        pool->audio_count++;
                }
        }

        pool->stream_enc_count = 0;
        if (create_funcs->create_stream_encoder) {
                for (i = 0; i < caps->num_stream_encoder; i++) {
                        pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
                        if (pool->stream_enc[i] == NULL)
                                DC_ERR("DC: failed to create stream_encoder!\n");
                        pool->stream_enc_count++;
                }
        }

        for (i = 0; i < num_virtual_links; i++) {
                pool->stream_enc[pool->stream_enc_count] =
                        virtual_stream_encoder_create(
                                        ctx, ctx->dc_bios);
                if (pool->stream_enc[pool->stream_enc_count] == NULL) {
                        DC_ERR("DC: failed to create stream_encoder!\n");
                        return false;
                }
                pool->stream_enc_count++;
        }

        dc->hwseq = create_funcs->create_hwseq(ctx);

        return true;
}

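/*
 * Drop a reference on a clock source in the given resource context. When the
 * last reference goes away the clock source is powered down, and the caller's
 * pointer is cleared.
 */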
void resource_unreference_clock_source(
                struct resource_context *res_ctx,
                struct clock_source **clock_source)
{
        int i;

        for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
                if (res_ctx->pool->clock_sources[i] != *clock_source)
                        continue;

                res_ctx->clock_source_ref_count[i]--;

                if (res_ctx->clock_source_ref_count[i] == 0)
                        (*clock_source)->funcs->cs_power_down(*clock_source);

                break;
        }

        if (res_ctx->pool->dp_clock_source == *clock_source) {
                res_ctx->dp_clock_source_ref_count--;

                if (res_ctx->dp_clock_source_ref_count == 0)
                        (*clock_source)->funcs->cs_power_down(*clock_source);
        }

        *clock_source = NULL;
}

void resource_reference_clock_source(
                struct resource_context *res_ctx,
                struct clock_source *clock_source)
{
        int i;

        for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
                if (res_ctx->pool->clock_sources[i] != clock_source)
                        continue;

                res_ctx->clock_source_ref_count[i]++;
                break;
        }

        if (res_ctx->pool->dp_clock_source == clock_source)
                res_ctx->dp_clock_source_ref_count++;
}

bool resource_are_streams_timing_synchronizable(
        const struct core_stream *stream1,
        const struct core_stream *stream2)
{
        if (stream1->public.timing.h_total != stream2->public.timing.h_total)
                return false;

        if (stream1->public.timing.v_total != stream2->public.timing.v_total)
                return false;

        if (stream1->public.timing.h_addressable
                                != stream2->public.timing.h_addressable)
                return false;

        if (stream1->public.timing.v_addressable
                                != stream2->public.timing.v_addressable)
                return false;

        if (stream1->public.timing.pix_clk_khz
                                != stream2->public.timing.pix_clk_khz)
                return false;

        if (stream1->phy_pix_clk != stream2->phy_pix_clk
                        && !dc_is_dp_signal(stream1->signal)
                        && !dc_is_dp_signal(stream2->signal))
                return false;

        return true;
}

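/*
 * A pipe may share another pipe's clock source only when both carry non-DP,
 * non-virtual signals of a compatible type (no HDMI/DVI mix) and their stream
 * timings are synchronizable.
 */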
static bool is_sharable_clk_src(
        const struct pipe_ctx *pipe_with_clk_src,
        const struct pipe_ctx *pipe)
{
        if (pipe_with_clk_src->clock_source == NULL)
                return false;

        if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
                return false;

        if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
                return false;

        if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
                        && dc_is_dvi_signal(pipe->stream->signal))
                return false;

        if (dc_is_hdmi_signal(pipe->stream->signal)
                        && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
                return false;

        if (!resource_are_streams_timing_synchronizable(
                        pipe_with_clk_src->stream, pipe->stream))
                return false;

        return true;
}

struct clock_source *resource_find_used_clk_src_for_sharing(
                                        struct resource_context *res_ctx,
                                        struct pipe_ctx *pipe_ctx)
{
        int i;

        for (i = 0; i < MAX_PIPES; i++) {
                if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
                        return res_ctx->pipe_ctx[i].clock_source;
        }

        return NULL;
}

static enum pixel_format convert_pixel_format_to_dalsurface(
                enum surface_pixel_format surface_pixel_format)
{
        enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;

        switch (surface_pixel_format) {
        case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
                dal_pixel_format = PIXEL_FORMAT_INDEX8;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
                dal_pixel_format = PIXEL_FORMAT_RGB565;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
                dal_pixel_format = PIXEL_FORMAT_RGB565;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
                dal_pixel_format = PIXEL_FORMAT_ARGB8888;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
                dal_pixel_format = PIXEL_FORMAT_ARGB8888;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
                dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
                dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
                dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
        case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
                dal_pixel_format = PIXEL_FORMAT_FP16;
                break;
        case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
        case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
                dal_pixel_format = PIXEL_FORMAT_420BPP12;
                break;
        case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
        case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
                dal_pixel_format = PIXEL_FORMAT_420BPP15;
                break;
        case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
        default:
                dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
                break;
        }
        return dal_pixel_format;
}

static void rect_swap_helper(struct rect *rect)
{
        uint32_t temp;

        temp = rect->height;
        rect->height = rect->width;
        rect->width = temp;

        temp = rect->x;
        rect->x = rect->y;
        rect->y = temp;
}

static void calculate_viewport(
                const struct dc_surface *surface,
                struct pipe_ctx *pipe_ctx)
{
        struct rect stream_src = pipe_ctx->stream->public.src;
        struct rect src = surface->src_rect;
        struct rect dst = surface->dst_rect;
        struct rect surface_clip = surface->clip_rect;
        struct rect clip = {0};

        if (surface->rotation == ROTATION_ANGLE_90 ||
            surface->rotation == ROTATION_ANGLE_270) {
                rect_swap_helper(&src);
                rect_swap_helper(&dst);
                rect_swap_helper(&surface_clip);
                rect_swap_helper(&stream_src);
        }

        /* The actual clip is an intersection between stream
         * source and surface clip
         */
        clip.x = stream_src.x > surface_clip.x ?
                        stream_src.x : surface_clip.x;

        clip.width = stream_src.x + stream_src.width <
                        surface_clip.x + surface_clip.width ?
                        stream_src.x + stream_src.width - clip.x :
                        surface_clip.x + surface_clip.width - clip.x;

        clip.y = stream_src.y > surface_clip.y ?
                        stream_src.y : surface_clip.y;

        clip.height = stream_src.y + stream_src.height <
                        surface_clip.y + surface_clip.height ?
                        stream_src.y + stream_src.height - clip.y :
                        surface_clip.y + surface_clip.height - clip.y;

        /* offset = src.ofs + (clip.ofs - dst.ofs) * scl_ratio
         * num_pixels = clip.num_pix * scl_ratio
         */
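        /* Worked example (hypothetical numbers): with a 1920x1080 src rect,
         * a 960x540 dst rect at (0,0) and a 480x270 clip at (480,270), the
         * scale ratio is 2 in each direction, so viewport.x = 0 + 480 * 2 =
         * 960, viewport.y = 540, viewport.width = 960 and viewport.height =
         * 540, i.e. the bottom-right quadrant of the source surface.
         */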
        pipe_ctx->scl_data.viewport.x = src.x + (clip.x - dst.x) *
                        src.width / dst.width;
        pipe_ctx->scl_data.viewport.width = clip.width *
                        src.width / dst.width;

        pipe_ctx->scl_data.viewport.y = src.y + (clip.y - dst.y) *
                        src.height / dst.height;
        pipe_ctx->scl_data.viewport.height = clip.height *
                        src.height / dst.height;

        /* Minimum viewport such that 420/422 chroma vp is non 0 */
        if (pipe_ctx->scl_data.viewport.width < 2)
                pipe_ctx->scl_data.viewport.width = 2;
        if (pipe_ctx->scl_data.viewport.height < 2)
                pipe_ctx->scl_data.viewport.height = 2;
}

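/*
 * The recout is the rectangle the scaler output occupies in stream
 * destination (raster) space: the surface clip mapped through the stream
 * src-to-dst scale and clamped to the stream's destination rectangle.
 */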
static void calculate_recout(
                const struct dc_surface *surface,
                struct pipe_ctx *pipe_ctx)
{
        struct core_stream *stream = pipe_ctx->stream;
        struct rect clip = surface->clip_rect;

        pipe_ctx->scl_data.recout.x = stream->public.dst.x;
        if (stream->public.src.x < clip.x)
                pipe_ctx->scl_data.recout.x += (clip.x
                        - stream->public.src.x) * stream->public.dst.width
                                                / stream->public.src.width;

        pipe_ctx->scl_data.recout.width = clip.width *
                        stream->public.dst.width / stream->public.src.width;
        if (pipe_ctx->scl_data.recout.width + pipe_ctx->scl_data.recout.x >
                        stream->public.dst.x + stream->public.dst.width)
                pipe_ctx->scl_data.recout.width =
                        stream->public.dst.x + stream->public.dst.width
                                                - pipe_ctx->scl_data.recout.x;

        pipe_ctx->scl_data.recout.y = stream->public.dst.y;
        if (stream->public.src.y < clip.y)
                pipe_ctx->scl_data.recout.y += (clip.y
                        - stream->public.src.y) * stream->public.dst.height
                                                / stream->public.src.height;

        pipe_ctx->scl_data.recout.height = clip.height *
                        stream->public.dst.height / stream->public.src.height;
        if (pipe_ctx->scl_data.recout.height + pipe_ctx->scl_data.recout.y >
                        stream->public.dst.y + stream->public.dst.height)
                pipe_ctx->scl_data.recout.height =
                        stream->public.dst.y + stream->public.dst.height
                                                - pipe_ctx->scl_data.recout.y;
}

static void calculate_scaling_ratios(
                const struct dc_surface *surface,
                struct pipe_ctx *pipe_ctx)
{
        struct core_stream *stream = pipe_ctx->stream;
        const uint32_t in_w = stream->public.src.width;
        const uint32_t in_h = stream->public.src.height;
        const uint32_t out_w = stream->public.dst.width;
        const uint32_t out_h = stream->public.dst.height;

        pipe_ctx->scl_data.ratios.horz = dal_fixed31_32_from_fraction(
                                        surface->src_rect.width,
                                        surface->dst_rect.width);
        pipe_ctx->scl_data.ratios.vert = dal_fixed31_32_from_fraction(
                                        surface->src_rect.height,
                                        surface->dst_rect.height);

        if (surface->stereo_format == PLANE_STEREO_FORMAT_SIDE_BY_SIDE)
                pipe_ctx->scl_data.ratios.horz.value *= 2;
        else if (surface->stereo_format == PLANE_STEREO_FORMAT_TOP_AND_BOTTOM)
                pipe_ctx->scl_data.ratios.vert.value *= 2;

        pipe_ctx->scl_data.ratios.vert.value = div64_s64(
                pipe_ctx->scl_data.ratios.vert.value * in_h, out_h);
        pipe_ctx->scl_data.ratios.horz.value = div64_s64(
                pipe_ctx->scl_data.ratios.horz.value * in_w, out_w);

        pipe_ctx->scl_data.ratios.horz_c = pipe_ctx->scl_data.ratios.horz;
        pipe_ctx->scl_data.ratios.vert_c = pipe_ctx->scl_data.ratios.vert;

        if (pipe_ctx->scl_data.format == PIXEL_FORMAT_420BPP12) {
                pipe_ctx->scl_data.ratios.horz_c.value /= 2;
                pipe_ctx->scl_data.ratios.vert_c.value /= 2;
        }
}

bool resource_build_scaling_params(
        const struct dc_surface *surface,
        struct pipe_ctx *pipe_ctx)
{
        bool res;
        struct dc_crtc_timing *timing = &pipe_ctx->stream->public.timing;

        /* Important: scaling ratio calculation requires pixel format,
         * lb depth calculation requires recout and taps require scaling ratios.
         */
        pipe_ctx->scl_data.format = convert_pixel_format_to_dalsurface(surface->format);

        calculate_viewport(surface, pipe_ctx);

        if (pipe_ctx->scl_data.viewport.height < 16 || pipe_ctx->scl_data.viewport.width < 16)
                return false;

        calculate_scaling_ratios(surface, pipe_ctx);

        calculate_recout(surface, pipe_ctx);

        /*
         * Setting line buffer pixel depth to 24bpp yields banding
         * on certain displays, such as the Sharp 4k
         */
        pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;

        pipe_ctx->scl_data.h_active = timing->h_addressable;
        pipe_ctx->scl_data.v_active = timing->v_addressable;

        /* Taps calculations */
        res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
                pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);

        if (!res) {
                /* Try 24 bpp linebuffer */
                pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;

                res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
                        pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);
        }

        dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
                                "%s: Viewport:\nheight:%d width:%d x:%d "
                                "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
                                "y:%d\n",
                                __func__,
                                pipe_ctx->scl_data.viewport.height,
                                pipe_ctx->scl_data.viewport.width,
                                pipe_ctx->scl_data.viewport.x,
                                pipe_ctx->scl_data.viewport.y,
                                surface->dst_rect.height,
                                surface->dst_rect.width,
                                surface->dst_rect.x,
                                surface->dst_rect.y);

        return res;
}

enum dc_status resource_build_scaling_params_for_context(
        const struct core_dc *dc,
        struct validate_context *context)
{
        int i;

        for (i = 0; i < MAX_PIPES; i++) {
                if (context->res_ctx.pipe_ctx[i].surface != NULL &&
                                context->res_ctx.pipe_ctx[i].stream != NULL)
                        if (!resource_build_scaling_params(
                                &context->res_ctx.pipe_ctx[i].surface->public,
                                &context->res_ctx.pipe_ctx[i]))
                                return DC_FAIL_SCALING;
        }

        return DC_OK;
}

static void detach_surfaces_for_stream(
                struct validate_context *context,
                const struct dc_stream *dc_stream)
{
        int i;
        struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);

        for (i = 0; i < context->res_ctx.pool->pipe_count; i++) {
                struct pipe_ctx *cur_pipe = &context->res_ctx.pipe_ctx[i];

                if (cur_pipe->stream == stream) {
                        cur_pipe->surface = NULL;
                        cur_pipe->top_pipe = NULL;
                        cur_pipe->bottom_pipe = NULL;
                }
        }
}

struct pipe_ctx *find_idle_secondary_pipe(struct resource_context *res_ctx)
{
        int i;
        struct pipe_ctx *secondary_pipe = NULL;

        /*
         * search backwards for the second pipe to keep pipe
         * assignment more consistent
         */
        for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
                if (res_ctx->pipe_ctx[i].stream == NULL) {
                        secondary_pipe = &res_ctx->pipe_ctx[i];
                        secondary_pipe->pipe_idx = i;
                        break;
                }
        }

        return secondary_pipe;
}

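/*
 * The head pipe for a stream is the pipe that owns the stream's back end,
 * i.e. the one that has no top_pipe above it.
 */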
struct pipe_ctx *resource_get_head_pipe_for_stream(
                struct resource_context *res_ctx,
                const struct core_stream *stream)
{
        int i;

        for (i = 0; i < res_ctx->pool->pipe_count; i++) {
                if (res_ctx->pipe_ctx[i].stream == stream &&
                                !res_ctx->pipe_ctx[i].top_pipe)
                        return &res_ctx->pipe_ctx[i];
        }
        return NULL;
}

/*
 * A free_pipe for a stream is defined here as a pipe
 * that has no surface attached yet
 */
static struct pipe_ctx *acquire_free_pipe_for_stream(
                struct resource_context *res_ctx,
                const struct dc_stream *dc_stream)
{
        int i;
        struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);
        struct pipe_ctx *head_pipe = NULL;

        /* Find head pipe, which has the back end set up */
        head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);

        if (!head_pipe)
                ASSERT(0);

        if (!head_pipe->surface)
                return head_pipe;

        /* Re-use a pipe already acquired for this stream if available */
        for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
                if (res_ctx->pipe_ctx[i].stream == stream &&
                                !res_ctx->pipe_ctx[i].surface) {
                        return &res_ctx->pipe_ctx[i];
                }
        }

        /*
         * At this point we have no reusable pipe for this stream and we need
         * to acquire an idle one to satisfy the request
         */
        if (!res_ctx->pool->funcs->acquire_idle_pipe_for_layer)
                return NULL;

        return res_ctx->pool->funcs->acquire_idle_pipe_for_layer(res_ctx, stream);
}

static void release_free_pipes_for_stream(
                struct resource_context *res_ctx,
                const struct dc_stream *dc_stream)
{
        int i;
        struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);

        for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
                if (res_ctx->pipe_ctx[i].stream == stream &&
                                !res_ctx->pipe_ctx[i].surface) {
                        res_ctx->pipe_ctx[i].stream = NULL;
                }
        }
}

bool resource_attach_surfaces_to_context(
                const struct dc_surface * const *surfaces,
                int surface_count,
                const struct dc_stream *dc_stream,
                struct validate_context *context)
{
        int i;
        struct pipe_ctx *tail_pipe;
        struct dc_stream_status *stream_status = NULL;

        if (surface_count > MAX_SURFACE_NUM) {
                dm_error("Surface: can not attach %d surfaces! Maximum is: %d\n",
                        surface_count, MAX_SURFACE_NUM);
                return false;
        }

        for (i = 0; i < context->stream_count; i++)
                if (&context->streams[i]->public == dc_stream) {
                        stream_status = &context->stream_status[i];
                        break;
                }

        if (stream_status == NULL) {
                dm_error("Existing stream not found; failed to attach surfaces\n");
                return false;
        }

        /* retain new surfaces */
        for (i = 0; i < surface_count; i++)
                dc_surface_retain(surfaces[i]);

        detach_surfaces_for_stream(context, dc_stream);

        /* release existing surfaces */
        for (i = 0; i < stream_status->surface_count; i++)
                dc_surface_release(stream_status->surfaces[i]);

        for (i = surface_count; i < stream_status->surface_count; i++)
                stream_status->surfaces[i] = NULL;

        stream_status->surface_count = 0;

        if (surface_count == 0)
                return true;

        tail_pipe = NULL;
        for (i = 0; i < surface_count; i++) {
                struct core_surface *surface = DC_SURFACE_TO_CORE(surfaces[i]);
                struct pipe_ctx *free_pipe = acquire_free_pipe_for_stream(
                                &context->res_ctx, dc_stream);

                if (!free_pipe) {
                        stream_status->surfaces[i] = NULL;
                        return false;
                }

                free_pipe->surface = surface;

                if (tail_pipe) {
                        free_pipe->top_pipe = tail_pipe;
                        tail_pipe->bottom_pipe = free_pipe;
                }

                tail_pipe = free_pipe;
        }

        release_free_pipes_for_stream(&context->res_ctx, dc_stream);

        /* assign new surfaces */
        for (i = 0; i < surface_count; i++)
                stream_status->surfaces[i] = surfaces[i];

        stream_status->surface_count = surface_count;

        return true;
}

static bool is_timing_changed(const struct core_stream *cur_stream,
                const struct core_stream *new_stream)
{
        if (cur_stream == NULL)
                return true;

        /* If the sink pointer changed, this is a hotplug and we should do a
         * full hw setting.
         */
        if (cur_stream->sink != new_stream->sink)
                return true;

        /* If the output color space changed, the info frames need to be
         * reprogrammed.
         */
        if (cur_stream->public.output_color_space !=
                        new_stream->public.output_color_space)
                return true;

        return memcmp(
                &cur_stream->public.timing,
                &new_stream->public.timing,
                sizeof(struct dc_crtc_timing)) != 0;
}

static bool are_stream_backends_same(
        const struct core_stream *stream_a, const struct core_stream *stream_b)
{
        if (stream_a == stream_b)
                return true;

        if (stream_a == NULL || stream_b == NULL)
                return false;

        if (is_timing_changed(stream_a, stream_b))
                return false;

        return true;
}

bool is_stream_unchanged(
        const struct core_stream *old_stream, const struct core_stream *stream)
{
        if (old_stream == stream)
                return true;

        if (!are_stream_backends_same(old_stream, stream))
                return false;

        return true;
}

bool resource_validate_attach_surfaces(
                const struct dc_validation_set set[],
                int set_count,
                const struct validate_context *old_context,
                struct validate_context *context)
{
        int i, j;

        for (i = 0; i < set_count; i++) {
                for (j = 0; j < old_context->stream_count; j++)
                        if (is_stream_unchanged(
                                        old_context->streams[j],
                                        context->streams[i])) {
                                if (!resource_attach_surfaces_to_context(
                                                old_context->stream_status[j].surfaces,
                                                old_context->stream_status[j].surface_count,
                                                &context->streams[i]->public,
                                                context))
                                        return false;
                                context->stream_status[i] = old_context->stream_status[j];
                        }
                if (set[i].surface_count != 0)
                        if (!resource_attach_surfaces_to_context(
                                        set[i].surfaces,
                                        set[i].surface_count,
                                        &context->streams[i]->public,
                                        context))
                                return false;
        }

        return true;
}

/* Maximum TMDS single link pixel clock 165MHz */
#define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000

static void set_stream_engine_in_use(
                struct resource_context *res_ctx,
                struct stream_encoder *stream_enc)
{
        int i;

        for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
                if (res_ctx->pool->stream_enc[i] == stream_enc)
                        res_ctx->is_stream_enc_acquired[i] = true;
        }
}

/* TODO: release audio object */
static void set_audio_in_use(
                struct resource_context *res_ctx,
                struct audio *audio)
{
        int i;

        for (i = 0; i < res_ctx->pool->audio_count; i++) {
                if (res_ctx->pool->audios[i] == audio)
                        res_ctx->is_audio_acquired[i] = true;
        }
}

static int acquire_first_free_pipe(
                struct resource_context *res_ctx,
                struct core_stream *stream)
{
        int i;

        for (i = 0; i < res_ctx->pool->pipe_count; i++) {
                if (!res_ctx->pipe_ctx[i].stream) {
                        struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];

                        pipe_ctx->tg = res_ctx->pool->timing_generators[i];
                        pipe_ctx->mi = res_ctx->pool->mis[i];
                        pipe_ctx->ipp = res_ctx->pool->ipps[i];
                        pipe_ctx->xfm = res_ctx->pool->transforms[i];
                        pipe_ctx->opp = res_ctx->pool->opps[i];
                        pipe_ctx->dis_clk = res_ctx->pool->display_clock;
                        pipe_ctx->pipe_idx = i;

                        pipe_ctx->stream = stream;
                        return i;
                }
        }
        return -1;
}

static struct stream_encoder *find_first_free_match_stream_enc_for_link(
                struct resource_context *res_ctx,
                struct core_stream *stream)
{
        int i;
        int j = -1;
        struct core_link *link = stream->sink->link;

        for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
                if (!res_ctx->is_stream_enc_acquired[i] &&
                                        res_ctx->pool->stream_enc[i]) {
                        /* Store the first available encoder for the MST
                         * second-display (daisy chain) use case.
                         */
                        j = i;
                        if (res_ctx->pool->stream_enc[i]->id ==
                                        link->link_enc->preferred_engine)
                                return res_ctx->pool->stream_enc[i];
                }
        }

        /*
         * The preferred engine can already be acquired:
         * 1) by the second MST display in the chain, or
         * 2) by another link whose preferred engine was taken by an MST
         * configuration.
         *
         * If the signal is of DP type and the preferred engine was not found,
         * return the last available encoder.
         *
         * TODO - This is just a patch up and a generic solution is
         * required for non DP connectors.
         */
        if (j >= 0 && dc_is_dp_signal(stream->signal))
                return res_ctx->pool->stream_enc[j];

        return NULL;
}

static struct audio *find_first_free_audio(struct resource_context *res_ctx)
{
        int i;

        for (i = 0; i < res_ctx->pool->audio_count; i++) {
                if (!res_ctx->is_audio_acquired[i])
                        return res_ctx->pool->audios[i];
        }

        return NULL;
}

static void update_stream_signal(struct core_stream *stream)
{
        const struct dc_sink *dc_sink = stream->public.sink;

        if (dc_sink->sink_signal == SIGNAL_TYPE_NONE) {
                stream->signal = stream->sink->link->public.connector_signal;
        } else if (dc_sink->sink_signal == SIGNAL_TYPE_DVI_SINGLE_LINK ||
                        dc_sink->sink_signal == SIGNAL_TYPE_DVI_DUAL_LINK) {
                /* For ASICs that support dual link DVI, adjust the signal type
                 * based on the timing pixel clock: above 165MHz the signal is
                 * dual link, otherwise single link.
                 */
                if (stream->public.timing.pix_clk_khz > TMDS_MAX_PIXEL_CLOCK_IN_KHZ)
                        stream->signal = SIGNAL_TYPE_DVI_DUAL_LINK;
                else
                        stream->signal = SIGNAL_TYPE_DVI_SINGLE_LINK;
        } else {
                stream->signal = dc_sink->sink_signal;
        }
}

bool resource_is_stream_unchanged(
        const struct validate_context *old_context, const struct core_stream *stream)
{
        int i;

        for (i = 0; i < old_context->stream_count; i++) {
                const struct core_stream *old_stream = old_context->streams[i];

                if (are_stream_backends_same(old_stream, stream))
                        return true;
        }

        return false;
}

static void copy_pipe_ctx(
        const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
{
        struct core_surface *surface = to_pipe_ctx->surface;
        struct core_stream *stream = to_pipe_ctx->stream;

        *to_pipe_ctx = *from_pipe_ctx;
        to_pipe_ctx->stream = stream;
        if (surface != NULL)
                to_pipe_ctx->surface = surface;
}

static struct core_stream *find_pll_sharable_stream(
                const struct core_stream *stream_needs_pll,
                struct validate_context *context)
{
        int i;

        for (i = 0; i < context->stream_count; i++) {
                struct core_stream *stream_has_pll = context->streams[i];

                /* We are looking for non dp, non virtual stream */
                if (resource_are_streams_timing_synchronizable(
                        stream_needs_pll, stream_has_pll)
                        && !dc_is_dp_signal(stream_has_pll->signal)
                        && stream_has_pll->sink->link->public.connector_signal
                        != SIGNAL_TYPE_VIRTUAL)
                        return stream_has_pll;
        }

        return NULL;
}

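/*
 * Normalize the pixel clock to the equivalent 24 bpp rate: halve it for
 * 4:2:0 and scale it up for deep color depths. 4:2:2 carries deep color
 * without a clock increase, so it is left untouched.
 */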
static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
{
        uint32_t pix_clk = timing->pix_clk_khz;
        uint32_t normalized_pix_clk = pix_clk;

        if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
                pix_clk /= 2;

        if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
                switch (timing->display_color_depth) {
                case COLOR_DEPTH_888:
                        normalized_pix_clk = pix_clk;
                        break;
                case COLOR_DEPTH_101010:
                        normalized_pix_clk = (pix_clk * 30) / 24;
                        break;
                case COLOR_DEPTH_121212:
                        normalized_pix_clk = (pix_clk * 36) / 24;
                        break;
                case COLOR_DEPTH_161616:
                        normalized_pix_clk = (pix_clk * 48) / 24;
                        break;
                default:
                        ASSERT(0);
                        break;
                }
        }
        return normalized_pix_clk;
}

static void calculate_phy_pix_clks(
                const struct core_dc *dc,
                struct validate_context *context)
{
        int i;

        for (i = 0; i < context->stream_count; i++) {
                struct core_stream *stream = context->streams[i];

                update_stream_signal(stream);

                /* update actual pixel clock on all streams */
                if (dc_is_hdmi_signal(stream->signal))
                        stream->phy_pix_clk = get_norm_pix_clk(
                                &stream->public.timing);
                else
                        stream->phy_pix_clk =
                                stream->public.timing.pix_clk_khz;
        }
}

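/*
 * Two passes over the streams: first re-mark the resources already used by
 * streams that are unchanged from the current context, then acquire a pipe,
 * stream encoder and (if needed) audio for each new or changed stream.
 */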
enum dc_status resource_map_pool_resources(
                const struct core_dc *dc,
                struct validate_context *context)
{
        int i, j;

        calculate_phy_pix_clks(dc, context);

        for (i = 0; i < context->stream_count; i++) {
                struct core_stream *stream = context->streams[i];

                if (!resource_is_stream_unchanged(dc->current_context, stream))
                        continue;

                /* mark resources used for stream that is already active */
                for (j = 0; j < MAX_PIPES; j++) {
                        struct pipe_ctx *pipe_ctx =
                                &context->res_ctx.pipe_ctx[j];
                        const struct pipe_ctx *old_pipe_ctx =
                                &dc->current_context->res_ctx.pipe_ctx[j];

                        if (!are_stream_backends_same(old_pipe_ctx->stream, stream))
                                continue;

                        pipe_ctx->stream = stream;
                        copy_pipe_ctx(old_pipe_ctx, pipe_ctx);

                        /* Split pipe resource, do not acquire back end */
                        if (!pipe_ctx->stream_enc)
                                continue;

                        set_stream_engine_in_use(
                                &context->res_ctx,
                                pipe_ctx->stream_enc);

                        /* Switch to the dp clock source only if there is
                         * no non dp stream that shares the same timing
                         * with the dp stream.
                         */
                        if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
                                !find_pll_sharable_stream(stream, context))
                                pipe_ctx->clock_source =
                                        context->res_ctx.pool->dp_clock_source;

                        resource_reference_clock_source(
                                &context->res_ctx,
                                pipe_ctx->clock_source);

                        set_audio_in_use(&context->res_ctx,
                                         pipe_ctx->audio);
                }
        }

        for (i = 0; i < context->stream_count; i++) {
                struct core_stream *stream = context->streams[i];
                struct pipe_ctx *pipe_ctx = NULL;
                int pipe_idx = -1;

                if (resource_is_stream_unchanged(dc->current_context, stream))
                        continue;

                /* acquire new resources */
                pipe_idx = acquire_first_free_pipe(&context->res_ctx, stream);
                if (pipe_idx < 0)
                        return DC_NO_CONTROLLER_RESOURCE;

                pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];

                pipe_ctx->stream_enc =
                        find_first_free_match_stream_enc_for_link(
                                &context->res_ctx, stream);

                if (!pipe_ctx->stream_enc)
                        return DC_NO_STREAM_ENG_RESOURCE;

                set_stream_engine_in_use(
                        &context->res_ctx,
                        pipe_ctx->stream_enc);

                /* TODO: Add check if ASIC support and EDID audio */
                if (!stream->sink->converter_disable_audio &&
                        dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
                        stream->public.audio_info.mode_count) {
                        pipe_ctx->audio = find_first_free_audio(
                                &context->res_ctx);

                        /*
                         * Audio is assigned in order, first come first served.
                         * Some ASICs have fewer audio resources than pipes.
                         */
                        if (pipe_ctx->audio)
                                set_audio_in_use(
                                        &context->res_ctx,
                                        pipe_ctx->audio);
                }

                context->stream_status[i].primary_otg_inst = pipe_ctx->tg->inst;
        }

        return DC_OK;
}

/* first stream in the context is used to populate the rest */
void validate_guaranteed_copy_streams(
                struct validate_context *context,
                int max_streams)
{
        int i;

        for (i = 1; i < max_streams; i++) {
                context->streams[i] = context->streams[0];

                copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
                              &context->res_ctx.pipe_ctx[i]);
                context->res_ctx.pipe_ctx[i].stream =
                                context->res_ctx.pipe_ctx[0].stream;

                dc_stream_retain(&context->streams[i]->public);
                context->stream_count++;
        }
}

static void patch_gamut_packet_checksum(
                struct encoder_info_packet *gamut_packet)
{
        /* For gamut we recalc checksum */
        if (gamut_packet->valid) {
                uint8_t chk_sum = 0;
                uint8_t *ptr;
                uint8_t i;

                /* start of the Gamut data. */
                ptr = &gamut_packet->sb[3];

                for (i = 0; i <= gamut_packet->sb[1]; i++)
                        chk_sum += ptr[i];

                gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
        }
}

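/*
 * Build the HDMI AVI InfoFrame for the stream: pixel encoding, colorimetry,
 * aspect ratio, quantization range, VIC and bar info, followed by the
 * one-byte-complement checksum over the packet payload.
 */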
1235 static void set_avi_info_frame(
1236                 struct encoder_info_packet *info_packet,
1237                 struct pipe_ctx *pipe_ctx)
1238 {
1239         struct core_stream *stream = pipe_ctx->stream;
1240         enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1241         struct info_frame info_frame = { {0} };
1242         uint32_t pixel_encoding = 0;
1243         enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1244         enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1245         bool itc = false;
1246         uint8_t cn0_cn1 = 0;
1247         uint8_t *check_sum = NULL;
1248         uint8_t byte_index = 0;
1249
1250         color_space = pipe_ctx->stream->public.output_color_space;
1251
1252         /* Initialize header */
1253         info_frame.avi_info_packet.info_packet_hdmi.bits.header.
1254                         info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
1255         /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1256         * not be used in HDMI 2.0 (Section 10.1) */
1257         info_frame.avi_info_packet.info_packet_hdmi.bits.header.version = 2;
1258         info_frame.avi_info_packet.info_packet_hdmi.bits.header.length =
1259                         HDMI_AVI_INFOFRAME_SIZE;
1260
1261         /*
1262          * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1263          * according to HDMI 2.0 spec (Section 10.1)
1264          */
1265
1266         switch (stream->public.timing.pixel_encoding) {
1267         case PIXEL_ENCODING_YCBCR422:
1268                 pixel_encoding = 1;
1269                 break;
1270
1271         case PIXEL_ENCODING_YCBCR444:
1272                 pixel_encoding = 2;
1273                 break;
1274         case PIXEL_ENCODING_YCBCR420:
1275                 pixel_encoding = 3;
1276                 break;
1277
1278         case PIXEL_ENCODING_RGB:
1279         default:
1280                 pixel_encoding = 0;
1281         }
1282
1283         /* Y0_Y1_Y2 : The pixel encoding */
1284         /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1285         info_frame.avi_info_packet.info_packet_hdmi.bits.Y0_Y1_Y2 =
1286                 pixel_encoding;
1287
1288         /* A0 = 1 Active Format Information valid */
1289         info_frame.avi_info_packet.info_packet_hdmi.bits.A0 =
1290                 ACTIVE_FORMAT_VALID;
1291
1292         /* B0, B1 = 3; Bar info data is valid */
1293         info_frame.avi_info_packet.info_packet_hdmi.bits.B0_B1 =
1294                 BAR_INFO_BOTH_VALID;
1295
1296         info_frame.avi_info_packet.info_packet_hdmi.bits.SC0_SC1 =
1297                         PICTURE_SCALING_UNIFORM;
1298
1299         /* S0, S1 : Underscan / Overscan */
1300         /* TODO: un-hardcode scan type */
1301         scan_type = SCANNING_TYPE_UNDERSCAN;
1302         info_frame.avi_info_packet.info_packet_hdmi.bits.S0_S1 = scan_type;
1303
1304         /* C0, C1 : Colorimetry */
1305         if (color_space == COLOR_SPACE_YCBCR709 ||
1306                         color_space == COLOR_SPACE_YCBCR709_LIMITED)
1307                 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1308                                 COLORIMETRY_ITU709;
1309         else if (color_space == COLOR_SPACE_YCBCR601 ||
1310                         color_space == COLOR_SPACE_YCBCR601_LIMITED)
1311                 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1312                                 COLORIMETRY_ITU601;
1313         else {
1314                 if (stream->public.timing.pixel_encoding != PIXEL_ENCODING_RGB)
1315                         BREAK_TO_DEBUGGER();
1316                 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1317                                 COLORIMETRY_NO_DATA;
1318         }
1319         if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1320                         color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1321                         color_space == COLOR_SPACE_2020_YCBCR) {
1322                 info_frame.avi_info_packet.info_packet_hdmi.bits.EC0_EC2 =
1323                                 COLORIMETRYEX_BT2020RGBYCBCR;
1324                 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1325                                 COLORIMETRY_EXTENDED;
1326         } else if (color_space == COLOR_SPACE_ADOBERGB) {
1327                 info_frame.avi_info_packet.info_packet_hdmi.bits.EC0_EC2 =
1328                                 COLORIMETRYEX_ADOBERGB;
1329                 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1330                                 COLORIMETRY_EXTENDED;
1331         }
1332
1333         /* TODO: un-hardcode aspect ratio */
1334         aspect = stream->public.timing.aspect_ratio;
1335
1336         switch (aspect) {
1337         case ASPECT_RATIO_4_3:
1338         case ASPECT_RATIO_16_9:
1339                 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = aspect;
1340                 break;
1341
1342         case ASPECT_RATIO_NO_DATA:
1343         case ASPECT_RATIO_64_27:
1344         case ASPECT_RATIO_256_135:
1345         default:
1346                 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = 0;
1347         }
1348
1349         /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1350         info_frame.avi_info_packet.info_packet_hdmi.bits.R0_R3 =
1351                         ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1352
1353         /* TODO: un-hardcode cn0_cn1 and itc */
1354         cn0_cn1 = 0;
1355         itc = false;
1356
1357         if (itc) {
1358                 info_frame.avi_info_packet.info_packet_hdmi.bits.ITC = 1;
1359                 info_frame.avi_info_packet.info_packet_hdmi.bits.CN0_CN1 =
1360                         cn0_cn1;
1361         }
1362
1363         /* TODO : We should handle YCC quantization */
1364         /* but we do not have matrix calculation */
1365         if (color_space == COLOR_SPACE_SRGB) {
1366                 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1367                                                 RGB_QUANTIZATION_FULL_RANGE;
1368                 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1369                                                 YYC_QUANTIZATION_FULL_RANGE;
1370         } else if (color_space == COLOR_SPACE_SRGB_LIMITED) {
1371                 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1372                                                 RGB_QUANTIZATION_LIMITED_RANGE;
1373                 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1374                                                 YYC_QUANTIZATION_LIMITED_RANGE;
1375         } else {
1376                 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1377                                                 RGB_QUANTIZATION_DEFAULT_RANGE;
1378                 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1379                                                 YYC_QUANTIZATION_LIMITED_RANGE;
1380         }
1381
1382         info_frame.avi_info_packet.info_packet_hdmi.bits.VIC0_VIC7 =
1383                                         stream->public.timing.vic;
1384
1385         /* pixel repetition
1386          * PR0 - PR3 start from 0, whereas the timing's pixel
1387          * repetition count starts from 1 */
1388         info_frame.avi_info_packet.info_packet_hdmi.bits.PR0_PR3 = 0;
1389
1390         /* Bar Info
1391          * barTop:    Line Number of End of Top Bar.
1392          * barBottom: Line Number of Start of Bottom Bar.
1393          * barLeft:   Pixel Number of End of Left Bar.
1394          * barRight:  Pixel Number of Start of Right Bar. */
1395         info_frame.avi_info_packet.info_packet_hdmi.bits.bar_top =
1396                         stream->public.timing.v_border_top;
1397         info_frame.avi_info_packet.info_packet_hdmi.bits.bar_bottom =
1398                 (stream->public.timing.v_total
1399                         - stream->public.timing.v_border_bottom + 1);
1400         info_frame.avi_info_packet.info_packet_hdmi.bits.bar_left =
1401                         stream->public.timing.h_border_left;
1402         info_frame.avi_info_packet.info_packet_hdmi.bits.bar_right =
1403                 (stream->public.timing.h_total
1404                         - stream->public.timing.h_border_right + 1);
1405
1406         /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
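             /* Seed the sum with the header bytes: packet type + payload
              * length + the AVI InfoFrame version (the literal 2 below);
              * the payload bytes are then added in the loop that follows. */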
1407         check_sum =
1408                 &info_frame.
1409                 avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
1410         *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
1411
1412         for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
1413                 *check_sum += info_frame.avi_info_packet.info_packet_hdmi.
1414                                 packet_raw_data.sb[byte_index];
1415
1416         /* complement so that all packet bytes sum to zero (mod 256) */
1417         *check_sum = (uint8_t) (0x100 - *check_sum);
1418
1419         /* Store in hw_path_mode */
1420         info_packet->hb0 =
1421                 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb0;
1422         info_packet->hb1 =
1423                 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb1;
1424         info_packet->hb2 =
1425                 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb2;
1426
1427         for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
1428                                 info_packet_hdmi.packet_raw_data.sb); byte_index++)
1429                 info_packet->sb[byte_index] = info_frame.avi_info_packet.
1430                                 info_packet_hdmi.packet_raw_data.sb[byte_index];
1431
1432         info_packet->valid = true;
1433 }
1434
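     /*
      * Build the HDMI Vendor-Specific InfoFrame (VSIF). It is only emitted
      * when the stream uses a 3D stereo format or an HDMI-VIC extended
      * resolution mode; for all other modes the packet stays invalid.
      */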
1435 static void set_vendor_info_packet(
1436                 struct encoder_info_packet *info_packet,
1437                 struct core_stream *stream)
1438 {
1439         uint32_t length = 0;
1440         bool hdmi_vic_mode = false;
1441         uint8_t checksum = 0;
1442         uint32_t i = 0;
1443         enum dc_timing_3d_format format;
1444
1445         format = stream->public.timing.timing_3d_format;
1446
1447         /* Can be different depending on packet content */
1448         length = 5;
1449
1450         if (stream->public.timing.hdmi_vic != 0
1451                         && stream->public.timing.h_total >= 3840
1452                         && stream->public.timing.v_total >= 2160)
1453                 hdmi_vic_mode = true;
1454
1455         /* According to the HDMI 1.4a CTS, a VSIF should be sent
1456          * for both 3D stereo and HDMI VIC modes.
1457          * For all other modes, no VSIF is sent. */
1458
1459         if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
1460                 return;
1461
1462         /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
1463         info_packet->sb[1] = 0x03;
1464         info_packet->sb[2] = 0x0C;
1465         info_packet->sb[3] = 0x00;
1466
1467         /* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
1468          * The values for HDMI_Video_Format are:
1469          * 0x0 (0b000) - No additional HDMI video format is presented in this
1470          * packet
1471          * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
1472          * parameter follows
1473          * 0x2 (0b010) - 3D format indication present. 3D_Structure and
1474          * potentially 3D_Ext_Data follows
1475          * 0x3..0x7 (0b011..0b111) - reserved for future use */
1476         if (format != TIMING_3D_FORMAT_NONE)
1477                 info_packet->sb[4] = (2 << 5);
1478         else if (hdmi_vic_mode)
1479                 info_packet->sb[4] = (1 << 5);
1480
1481         /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
1482          * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
1483          * The values for 3D_Structure are:
1484          * 0x0 - Frame Packing
1485          * 0x1 - Field Alternative
1486          * 0x2 - Line Alternative
1487          * 0x3 - Side-by-Side (full)
1488          * 0x4 - L + depth
1489          * 0x5 - L + depth + graphics + graphics-depth
1490          * 0x6 - Top-and-Bottom
1491          * 0x7 - Reserved for future use
1492          * 0x8 - Side-by-Side (Half)
1493          * 0x9..0xE - Reserved for future use
1494          * 0xF - Not used */
1495         switch (format) {
1496         case TIMING_3D_FORMAT_HW_FRAME_PACKING:
1497         case TIMING_3D_FORMAT_SW_FRAME_PACKING:
1498                 info_packet->sb[5] = (0x0 << 4);
1499                 break;
1500
1501         case TIMING_3D_FORMAT_SIDE_BY_SIDE:
1502         case TIMING_3D_FORMAT_SBS_SW_PACKED:
1503                 info_packet->sb[5] = (0x8 << 4);
1504                 length = 6;
1505                 break;
1506
1507         case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
1508         case TIMING_3D_FORMAT_TB_SW_PACKED:
1509                 info_packet->sb[5] = (0x6 << 4);
1510                 break;
1511
1512         default:
1513                 break;
1514         }
1515
1516         /* PB5: If PB4 is set to 0x1 (extended resolution format),
1517          * fill PB5 with the correct HDMI VIC code */
1518         if (hdmi_vic_mode)
1519                 info_packet->sb[5] = stream->public.timing.hdmi_vic;
1520
1521         /* Header */
1522         info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
1523         info_packet->hb1 = 0x01; /* Version */
1524
1525         /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
1526         info_packet->hb2 = (uint8_t) (length);
1527
1528         /* Calculate checksum */
1529         checksum = 0;
1530         checksum += info_packet->hb0;
1531         checksum += info_packet->hb1;
1532         checksum += info_packet->hb2;
1533
1534         for (i = 1; i <= length; i++)
1535                 checksum += info_packet->sb[i];
1536
1537         info_packet->sb[0] = (uint8_t) (0x100 - checksum);
1538
1539         info_packet->valid = true;
1540 }
1541
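     /*
      * Build the Source Product Descriptor (SPD) InfoFrame carrying the AMD
      * FreeSync payload. The header differs between HDMI and DP (DP wraps it
      * in a secondary-data packet), but the payload is the same: AMD OUI,
      * capability bits, and the minimum and nominal refresh rates in Hz.
      */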
1542 static void set_spd_info_packet(
1543                 struct encoder_info_packet *info_packet,
1544                 struct core_stream *stream)
1545 {
1546         /* SPD info packet for FreeSync */
1547
1548         unsigned char checksum = 0;
1549         unsigned int idx, payload_size = 0;
1550
1551         /* Do nothing if FreeSync is not supported; otherwise set the
1552          * corresponding capability bits in the info packet below.
1553          */
1554         if (!stream->public.freesync_ctx.supported)
1555                 return;
1556
1557         if (dc_is_hdmi_signal(stream->signal)) {
1558
1559                 /* HEADER */
1560
1561                 /* HB0  = Packet Type = 0x83 (Source Product
1562                  *        Descriptor InfoFrame)
1563                  */
1564                 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
1565
1566                 /* HB1  = Version = 0x01 */
1567                 info_packet->hb1 = 0x01;
1568
1569                 /* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
1570                 info_packet->hb2 = 0x08;
1571
1572                 payload_size = 0x08;
1573
1574         } else if (dc_is_dp_signal(stream->signal)) {
1575
1576                 /* HEADER */
1577
1578                 /* HB0  = Secondary-data Packet ID = 0 - Only non-zero
1579                  *        when used to associate audio related info packets
1580                  */
1581                 info_packet->hb0 = 0x00;
1582
1583                 /* HB1  = Packet Type = 0x83 (Source Product
1584                  *        Descriptor InfoFrame)
1585                  */
1586                 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
1587
1588                 /* HB2  = [Bits 7:0 = Least significant eight bits -
1589                  *        For INFOFRAME, the value must be 1Bh]
1590                  */
1591                 info_packet->hb2 = 0x1B;
1592
1593                 /* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
1594                  *        [Bits 1:0 = Most significant two bits = 0x00]
1595                  */
1596                 info_packet->hb3 = 0x04;
1597
1598                 payload_size = 0x1B;
1599         }
1600
1601         /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
1602         info_packet->sb[1] = 0x1A;
1603
1604         /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
1605         info_packet->sb[2] = 0x00;
1606
1607         /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
1608         info_packet->sb[3] = 0x00;
1609
1610         /* PB4 = Reserved */
1611         info_packet->sb[4] = 0x00;
1612
1613         /* PB5 = Reserved */
1614         info_packet->sb[5] = 0x00;
1615
1616         /* PB6 = [Bits 7:3 = Reserved] */
1617         info_packet->sb[6] = 0x00;
1618
1619         if (stream->public.freesync_ctx.supported)
1620                 /* PB6 = [Bit 0 = FreeSync Supported] */
1621                 info_packet->sb[6] |= 0x01;
1622
1623         if (stream->public.freesync_ctx.enabled)
1624                 /* PB6 = [Bit 1 = FreeSync Enabled] */
1625                 info_packet->sb[6] |= 0x02;
1626
1627         if (stream->public.freesync_ctx.active)
1628                 /* PB6 = [Bit 2 = FreeSync Active] */
1629                 info_packet->sb[6] |= 0x04;
1630
1631         /* PB7 = FreeSync Minimum refresh rate (Hz) */
1632         info_packet->sb[7] = (unsigned char) (stream->public.freesync_ctx.
1633                         min_refresh_in_micro_hz / 1000000);
1634
1635         /* PB8 = FreeSync Maximum refresh rate (Hz)
1636          *
1637          * Note: We do not use the maximum capable refresh rate
1638          * of the panel, because we should never go above the field
1639          * rate of the mode timing set.
1640          */
1641         info_packet->sb[8] = (unsigned char) (stream->public.freesync_ctx.
1642                         nominal_refresh_in_micro_hz / 1000000);
1643
1644         /* PB9 - PB27  = Reserved */
1645         for (idx = 9; idx <= 27; idx++)
1646                 info_packet->sb[idx] = 0x00;
1647
1648         /* Calculate checksum */
1649         checksum += info_packet->hb0;
1650         checksum += info_packet->hb1;
1651         checksum += info_packet->hb2;
1652         checksum += info_packet->hb3;
1653
1654         for (idx = 1; idx <= payload_size; idx++)
1655                 checksum += info_packet->sb[idx];
1656
1657         /* PB0 = Checksum (one byte complement) */
1658         info_packet->sb[0] = (unsigned char) (0x100 - checksum);
1659
1660         info_packet->valid = true;
1661 }
1662
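     /*
      * Build the HDR static metadata packet (the Dynamic Range and Mastering
      * InfoFrame on HDMI, the equivalent SDP on DP) from the surface's
      * hdr_static_ctx: mastering display chromaticities and white point,
      * min/max luminance, MaxCLL and MaxFALL.
      */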
1663 static void set_hdr_static_info_packet(
1664                 struct encoder_info_packet *info_packet,
1665                 struct core_surface *surface,
1666                 struct core_stream *stream)
1667 {
1668         uint16_t i = 0;
1669         enum signal_type signal = stream->signal;
1670         struct dc_hdr_static_metadata hdr_metadata;
1671         uint32_t data;
1672
1673         if (!surface)
1674                 return;
1675
1676         hdr_metadata = surface->public.hdr_static_ctx;
1677
1678         if (!hdr_metadata.is_hdr)
1679                 return;
1680
1681         if (dc_is_hdmi_signal(signal)) {
1682                 info_packet->valid = true;
1683
1684                 info_packet->hb0 = 0x87;
1685                 info_packet->hb1 = 0x01;
1686                 info_packet->hb2 = 0x1A;
1687                 i = 1;
1688         } else if (dc_is_dp_signal(signal)) {
1689                 info_packet->valid = true;
1690
1691                 info_packet->hb0 = 0x00;
1692                 info_packet->hb1 = 0x87;
1693                 info_packet->hb2 = 0x1D;
1694                 info_packet->hb3 = (0x13 << 2);
1695                 i = 2;
1696         }
1697
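             /* First two metadata bytes: EOTF (0x02 = SMPTE ST 2084 / PQ when
              * HDR is flagged) and static metadata descriptor ID (0 = Type 1). */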
1698         data = hdr_metadata.is_hdr;
1699         info_packet->sb[i++] = data ? 0x02 : 0x00;
1700         info_packet->sb[i++] = 0x00;
1701
1702         data = hdr_metadata.chromaticity_green_x / 2;
1703         info_packet->sb[i++] = data & 0xFF;
1704         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1705
1706         data = hdr_metadata.chromaticity_green_y / 2;
1707         info_packet->sb[i++] = data & 0xFF;
1708         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1709
1710         data = hdr_metadata.chromaticity_blue_x / 2;
1711         info_packet->sb[i++] = data & 0xFF;
1712         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1713
1714         data = hdr_metadata.chromaticity_blue_y / 2;
1715         info_packet->sb[i++] = data & 0xFF;
1716         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1717
1718         data = hdr_metadata.chromaticity_red_x / 2;
1719         info_packet->sb[i++] = data & 0xFF;
1720         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1721
1722         data = hdr_metadata.chromaticity_red_y / 2;
1723         info_packet->sb[i++] = data & 0xFF;
1724         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1725
1726         data = hdr_metadata.chromaticity_white_point_x / 2;
1727         info_packet->sb[i++] = data & 0xFF;
1728         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1729
1730         data = hdr_metadata.chromaticity_white_point_y / 2;
1731         info_packet->sb[i++] = data & 0xFF;
1732         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1733
1734         data = hdr_metadata.max_luminance;
1735         info_packet->sb[i++] = data & 0xFF;
1736         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1737
1738         data = hdr_metadata.min_luminance;
1739         info_packet->sb[i++] = data & 0xFF;
1740         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1741
1742         data = hdr_metadata.maximum_content_light_level;
1743         info_packet->sb[i++] = data & 0xFF;
1744         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1745
1746         data = hdr_metadata.maximum_frame_average_light_level;
1747         info_packet->sb[i++] = data & 0xFF;
1748         info_packet->sb[i++] = (data & 0xFF00) >> 8;
1749
1750         if (dc_is_hdmi_signal(signal)) {
1751                 uint32_t checksum = 0;
1752
1753                 checksum += info_packet->hb0;
1754                 checksum += info_packet->hb1;
1755                 checksum += info_packet->hb2;
1756
1757                 for (i = 1; i <= info_packet->hb2; i++)
1758                         checksum += info_packet->sb[i];
1759
1760                 info_packet->sb[0] = 0x100 - checksum;
1761         } else if (dc_is_dp_signal(signal)) {
1762                 info_packet->sb[0] = 0x01;
1763                 info_packet->sb[1] = 0x1A;
1764         }
1765 }
1766
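     /*
      * Build the DP VSC SDP. It is currently only emitted when the link
      * reports PSR support (packet revision 2); 3D stereo and extended
      * pixel encoding/colorimetry handling are still TODO (see below).
      */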
1767 static void set_vsc_info_packet(
1768                 struct encoder_info_packet *info_packet,
1769                 struct core_stream *stream)
1770 {
1771         unsigned int vscPacketRevision = 0;
1772         unsigned int i;
1773
1774         if (stream->sink->link->public.psr_caps.psr_version != 0) {
1775                 vscPacketRevision = 2;
1776         }
1777
1778         /* VSC packet not needed based on the features
1779          * supported by this DP display
1780          */
1781         if (vscPacketRevision == 0)
1782                 return;
1783
1784         if (vscPacketRevision == 0x2) {
1785                 /* Secondary-data Packet ID = 0*/
1786                 info_packet->hb0 = 0x00;
1787                 /* 07h - Packet Type Value indicating Video
1788                  * Stream Configuration packet
1789                  */
1790                 info_packet->hb1 = 0x07;
1791                 /* 02h = VSC SDP supporting 3D stereo and PSR
1792                  * (applies to eDP v1.3 or higher).
1793                  */
1794                 info_packet->hb2 = 0x02;
1795                 /* 08h = VSC packet supporting 3D stereo + PSR
1796                  * (HB2 = 02h).
1797                  */
1798                 info_packet->hb3 = 0x08;
1799
1800                 for (i = 0; i < 28; i++)
1801                         info_packet->sb[i] = 0;
1802
1803                 info_packet->valid = true;
1804         }
1805
1806         /* TODO: stereo 3D support and extended pixel encoding/colorimetry */
1807 }
1808
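     /*
      * Release every surface and stream reference held by the context so it
      * can be freed or rebuilt.
      */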
1809 void resource_validate_ctx_destruct(struct validate_context *context)
1810 {
1811         int i, j;
1812
1813         for (i = 0; i < context->stream_count; i++) {
1814                 for (j = 0; j < context->stream_status[i].surface_count; j++)
1815                         dc_surface_release(
1816                                 context->stream_status[i].surfaces[j]);
1817
1818                 context->stream_status[i].surface_count = 0;
1819                 dc_stream_release(&context->streams[i]->public);
1820                 context->streams[i] = NULL;
1821         }
1822 }
1823
1824 /*
1825  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
1826  * by the src_ctx
1827  */
1828 void resource_validate_ctx_copy_construct(
1829                 const struct validate_context *src_ctx,
1830                 struct validate_context *dst_ctx)
1831 {
1832         int i, j;
1833
1834         *dst_ctx = *src_ctx;
1835
1836         for (i = 0; i < dst_ctx->res_ctx.pool->pipe_count; i++) {
1837                 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
1838
1839                 if (cur_pipe->top_pipe)
1840                         cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
1841
1842                 if (cur_pipe->bottom_pipe)
1843                         cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
1844
1845         }
1846
1847         for (i = 0; i < dst_ctx->stream_count; i++) {
1848                 dc_stream_retain(&dst_ctx->streams[i]->public);
1849                 for (j = 0; j < dst_ctx->stream_status[i].surface_count; j++)
1850                         dc_surface_retain(
1851                                 dst_ctx->stream_status[i].surfaces[j]);
1852         }
1853 }
1854
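     /* Return the first clock source in the pool with no users, or NULL. */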
1855 struct clock_source *dc_resource_find_first_free_pll(
1856                 struct resource_context *res_ctx)
1857 {
1858         int i;
1859
1860         for (i = 0; i < res_ctx->pool->clk_src_count; ++i) {
1861                 if (res_ctx->clock_source_ref_count[i] == 0)
1862                         return res_ctx->pool->clock_sources[i];
1863         }
1864
1865         return NULL;
1866 }
1867
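     /*
      * Rebuild the per-pipe info packets from the current stream and surface
      * state: HDMI gets AVI and vendor packets, DP gets a VSC packet, and
      * both get SPD (FreeSync) and HDR static metadata packets.
      */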
1868 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
1869 {
1870         enum signal_type signal = SIGNAL_TYPE_NONE;
1871         struct encoder_info_frame *info = &pipe_ctx->encoder_info_frame;
1872
1873         /* default all packets to invalid */
1874         info->avi.valid = false;
1875         info->gamut.valid = false;
1876         info->vendor.valid = false;
1877         info->hdrsmd.valid = false;
1878         info->vsc.valid = false;
1879
1880         signal = pipe_ctx->stream->signal;
1881
1882         /* HDMI and DP have different info packets */
1883         if (dc_is_hdmi_signal(signal)) {
1884                 set_avi_info_frame(&info->avi, pipe_ctx);
1885
1886                 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
1887
1888                 set_spd_info_packet(&info->spd, pipe_ctx->stream);
1889
1890                 set_hdr_static_info_packet(&info->hdrsmd,
1891                                 pipe_ctx->surface, pipe_ctx->stream);
1892
1893         } else if (dc_is_dp_signal(signal)) {
1894                 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
1895
1896                 set_spd_info_packet(&info->spd, pipe_ctx->stream);
1897
1898                 set_hdr_static_info_packet(&info->hdrsmd,
1899                                 pipe_ctx->surface, pipe_ctx->stream);
1900         }
1901
1902         patch_gamut_packet_checksum(&info->gamut);
1903 }
1904
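     /*
      * Assign a clock source to each pipe of every new or changed stream.
      * DP and virtual signals use the dedicated DP clock source; other
      * signals try to share an already-used PLL (unless PLL sharing is
      * disabled) and otherwise take the first free PLL.
      */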
1905 enum dc_status resource_map_clock_resources(
1906                 const struct core_dc *dc,
1907                 struct validate_context *context)
1908 {
1909         int i, j;
1910
1911         /* acquire new resources */
1912         for (i = 0; i < context->stream_count; i++) {
1913                 const struct core_stream *stream = context->streams[i];
1914
1915                 if (resource_is_stream_unchanged(dc->current_context, stream))
1916                         continue;
1917
1918                 for (j = 0; j < MAX_PIPES; j++) {
1919                         struct pipe_ctx *pipe_ctx =
1920                                 &context->res_ctx.pipe_ctx[j];
1921
1922                         if (context->res_ctx.pipe_ctx[j].stream != stream)
1923                                 continue;
1924
1925                         if (dc_is_dp_signal(pipe_ctx->stream->signal)
1926                                 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
1927                                 pipe_ctx->clock_source =
1928                                         context->res_ctx.pool->dp_clock_source;
1929                         else {
1930                                 pipe_ctx->clock_source = NULL;
1931
1932                                 if (!dc->public.config.disable_disp_pll_sharing)
1933                                         resource_find_used_clk_src_for_sharing(
1934                                                 &context->res_ctx,
1935                                                 pipe_ctx);
1936
1937                                 if (pipe_ctx->clock_source == NULL)
1938                                         pipe_ctx->clock_source =
1939                                                 dc_resource_find_first_free_pll(&context->res_ctx);
1940                         }
1941
1942                         if (pipe_ctx->clock_source == NULL)
1943                                 return DC_NO_CLOCK_SOURCE_RESOURCE;
1944
1945                         resource_reference_clock_source(
1946                                 &context->res_ctx,
1947                                 pipe_ctx->clock_source);
1948
1949                         /* only one cs per stream regardless of mpo */
1950                         break;
1951                 }
1952         }
1953
1954         return DC_OK;
1955 }
1956
1957 /*
1958  * Note: We need to disable output if clock sources change,
1959  * since bios does optimization and doesn't apply if changing
1960  * PHY when not already disabled.
1961  */
1962 bool pipe_need_reprogram(
1963                 struct pipe_ctx *pipe_ctx_old,
1964                 struct pipe_ctx *pipe_ctx)
1965 {
1966         if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
1967                 return true;
1968
1969         if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
1970                 return true;
1971
1972         if (pipe_ctx_old->audio != pipe_ctx->audio)
1973                 return true;
1974
1975         if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
1976                         && pipe_ctx_old->stream != pipe_ctx->stream)
1977                 return true;
1978
1979         if (pipe_ctx_old->stream_enc != pipe_ctx->stream_enc)
1980                 return true;
1981
1982         if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
1983                 return true;
1984
1986         return false;
1987 }