2 * Copyright 2012-15 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include "dm_services.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "set_mode_types.h"
35 #include "virtual/virtual_stream_encoder.h"
37 #include "dce80/dce80_resource.h"
38 #include "dce100/dce100_resource.h"
39 #include "dce110/dce110_resource.h"
40 #include "dce112/dce112_resource.h"
/*
 * Map a hardware ASIC id (chip family + internal revision) to the DCE
 * display-engine version enum; defaults to DCE_VERSION_UNKNOWN.
 * NOTE(review): this listing is truncated (case labels / break / return
 * lines not visible) — confirm control flow against the full source.
 */
42 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
44 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
45 switch (asic_id.chip_family) {
49 dc_version = DCE_VERSION_8_0;
52 dc_version = DCE_VERSION_11_0;
/* Tonga/Fiji revisions are DCE 10.0. */
56 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
57 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
58 dc_version = DCE_VERSION_10_0;
/* Polaris 10/11/12 revisions are DCE 11.2. */
61 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
62 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
63 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
64 dc_version = DCE_VERSION_11_2;
68 dc_version = DCE_VERSION_UNKNOWN;
/*
 * Construct the resource pool matching the detected DCE version by
 * dispatching to the per-generation constructor (dce80/100/110/112).
 * Only the DCE 11.0 constructor additionally consumes the asic_id.
 */
74 struct resource_pool *dc_create_resource_pool(
76 int num_virtual_links,
77 enum dce_version dc_version,
78 struct hw_asic_id asic_id)
83 return dce80_create_resource_pool(
84 num_virtual_links, dc);
85 case DCE_VERSION_10_0:
86 return dce100_create_resource_pool(
87 num_virtual_links, dc);
88 case DCE_VERSION_11_0:
89 return dce110_create_resource_pool(
90 num_virtual_links, dc, asic_id);
91 case DCE_VERSION_11_2:
92 return dce112_create_resource_pool(
93 num_virtual_links, dc);
/* Tear down dc's resource pool through the pool's own destroy vfunc
 * (which also clears the pool pointer, since its address is passed). */
101 void dc_destroy_resource_pool(struct core_dc *dc)
105 dc->res_pool->funcs->destroy(&dc->res_pool);
/*
 * Derive audio support flags from DCE hardware straps.
 * hdmi_disable == 0 enables native HDMI audio, dongle audio and DP audio;
 * otherwise bit 1 of dc_pinstraps_audio still allows dongle + DP audio.
 * The audio_stream_number fuse selects multi-stream support; unexpected
 * fuse values are reported via DC_ERR.
 * NOTE(review): updates to *num_audio are not visible in this listing.
 */
112 static void update_num_audio(
113 const struct resource_straps *straps,
114 unsigned int *num_audio,
115 struct audio_support *aud_support)
117 if (straps->hdmi_disable == 0) {
118 aud_support->hdmi_audio_native = true;
119 aud_support->hdmi_audio_on_dongle = true;
120 aud_support->dp_audio = true;
122 if (straps->dc_pinstraps_audio & 0x2) {
123 aud_support->hdmi_audio_on_dongle = true;
124 aud_support->dp_audio = true;
126 aud_support->dp_audio = true;
130 switch (straps->audio_stream_number) {
131 case 0: /* multi streams supported */
133 case 1: /* multi streams not supported */
137 DC_ERR("DC: unexpected audio fuse!\n");
/*
 * Populate a resource_pool during pool construction: read DCE straps,
 * create audio endpoints (stopping at the first invalid endpoint),
 * create the hardware stream encoders, append virtual stream encoders
 * for the requested virtual links, then create the hwseq object.
 * Failures are logged via DC_ERR; full error paths are not visible in
 * this truncated listing.
 */
141 bool resource_construct(
142 unsigned int num_virtual_links,
144 struct resource_pool *pool,
145 const struct resource_create_funcs *create_funcs)
147 struct dc_context *ctx = dc->ctx;
148 const struct resource_caps *caps = pool->res_cap;
150 unsigned int num_audio = caps->num_audio;
151 struct resource_straps straps = {0};
153 if (create_funcs->read_dce_straps)
154 create_funcs->read_dce_straps(dc->ctx, &straps);
/* Audio endpoint creation: one per pipe, bounded by the fused count. */
156 pool->audio_count = 0;
157 if (create_funcs->create_audio) {
158 /* find the total number of streams available via the
159 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
160 * registers (one for each pin) starting from pin 1
161 * up to the max number of audio pins.
162 * We stop on the first pin where
163 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
165 update_num_audio(&straps, &num_audio, &pool->audio_support);
166 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
167 struct audio *aud = create_funcs->create_audio(ctx, i);
170 DC_ERR("DC: failed to create audio!\n");
/* Invalid endpoint terminates enumeration (destroy and stop). */
174 if (!aud->funcs->endpoint_valid(aud)) {
175 aud->funcs->destroy(&aud);
179 pool->audios[i] = aud;
/* Hardware stream encoders, one per capability slot. */
184 pool->stream_enc_count = 0;
185 if (create_funcs->create_stream_encoder) {
186 for (i = 0; i < caps->num_stream_encoder; i++) {
187 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
188 if (pool->stream_enc[i] == NULL)
189 DC_ERR("DC: failed to create stream_encoder!\n");
190 pool->stream_enc_count++;
/* Virtual stream encoders appended after the hardware ones. */
194 for (i = 0; i < num_virtual_links; i++) {
195 pool->stream_enc[pool->stream_enc_count] =
196 virtual_stream_encoder_create(
198 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
199 DC_ERR("DC: failed to create stream_encoder!\n");
202 pool->stream_enc_count++;
205 dc->hwseq = create_funcs->create_hwseq(ctx);
/*
 * Drop one reference on a clock source; when its count reaches zero the
 * source is powered down via cs_power_down. The dedicated DP clock
 * source keeps its own reference count. The caller's pointer is cleared.
 */
211 void resource_unreference_clock_source(
212 struct resource_context *res_ctx,
213 struct clock_source **clock_source)
216 for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
217 if (res_ctx->pool->clock_sources[i] != *clock_source)
220 res_ctx->clock_source_ref_count[i]--;
222 if (res_ctx->clock_source_ref_count[i] == 0)
223 (*clock_source)->funcs->cs_power_down(*clock_source);
/* The DP clock source is tracked separately from the pool array. */
228 if (res_ctx->pool->dp_clock_source == *clock_source) {
229 res_ctx->dp_clock_source_ref_count--;
231 if (res_ctx->dp_clock_source_ref_count == 0)
232 (*clock_source)->funcs->cs_power_down(*clock_source);
234 *clock_source = NULL;
/*
 * Take one reference on a clock source: bump the matching pool entry's
 * count, or the dedicated DP clock source's count.
 */
237 void resource_reference_clock_source(
238 struct resource_context *res_ctx,
239 struct clock_source *clock_source)
242 for (i = 0; i < res_ctx->pool->clk_src_count; i++) {
243 if (res_ctx->pool->clock_sources[i] != clock_source)
246 res_ctx->clock_source_ref_count[i]++;
250 if (res_ctx->pool->dp_clock_source == clock_source)
251 res_ctx->dp_clock_source_ref_count++;
/*
 * Two streams can be timing-synchronized only if their h/v totals, h/v
 * addressable sizes and pixel clocks all match; for non-DP signals the
 * phy-level pixel clocks must also match.
 */
254 bool resource_are_streams_timing_synchronizable(
255 const struct core_stream *stream1,
256 const struct core_stream *stream2)
258 if (stream1->public.timing.h_total != stream2->public.timing.h_total)
261 if (stream1->public.timing.v_total != stream2->public.timing.v_total)
264 if (stream1->public.timing.h_addressable
265 != stream2->public.timing.h_addressable)
268 if (stream1->public.timing.v_addressable
269 != stream2->public.timing.v_addressable)
272 if (stream1->public.timing.pix_clk_khz
273 != stream2->public.timing.pix_clk_khz)
/* phy_pix_clk mismatch only matters when neither stream is DP. */
276 if (stream1->phy_pix_clk != stream2->phy_pix_clk
277 && !dc_is_dp_signal(stream1->signal)
278 && !dc_is_dp_signal(stream2->signal))
/*
 * Decide whether 'pipe' may share the clock source already owned by
 * 'pipe_with_clk_src': rejects NULL/virtual/DP owners, HDMI<->DVI
 * mixed pairs, and streams whose timings cannot be synchronized.
 */
284 static bool is_sharable_clk_src(
285 const struct pipe_ctx *pipe_with_clk_src,
286 const struct pipe_ctx *pipe)
288 if (pipe_with_clk_src->clock_source == NULL)
291 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
294 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
297 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
298 && dc_is_dvi_signal(pipe->stream->signal))
301 if (dc_is_hdmi_signal(pipe->stream->signal)
302 && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
305 if (!resource_are_streams_timing_synchronizable(
306 pipe_with_clk_src->stream, pipe->stream))
/*
 * Scan every pipe for an already-in-use clock source that pipe_ctx can
 * legitimately share (per is_sharable_clk_src).
 */
312 struct clock_source *resource_find_used_clk_src_for_sharing(
313 struct resource_context *res_ctx,
314 struct pipe_ctx *pipe_ctx)
318 for (i = 0; i < MAX_PIPES; i++) {
319 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
320 return res_ctx->pipe_ctx[i].clock_source;
/*
 * Translate a public surface_pixel_format into the internal DAL
 * pixel_format. ARGB/ABGR variants collapse into a single internal
 * format; ARGB1555 maps to the RGB565 bucket. Unhandled inputs yield
 * PIXEL_FORMAT_UNKNOWN.
 */
326 static enum pixel_format convert_pixel_format_to_dalsurface(
327 enum surface_pixel_format surface_pixel_format)
329 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
331 switch (surface_pixel_format) {
332 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
333 dal_pixel_format = PIXEL_FORMAT_INDEX8;
335 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
336 dal_pixel_format = PIXEL_FORMAT_RGB565;
338 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
339 dal_pixel_format = PIXEL_FORMAT_RGB565;
341 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
342 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
344 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
345 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
347 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
348 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
350 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
351 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
353 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
354 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
356 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
357 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
358 dal_pixel_format = PIXEL_FORMAT_FP16;
360 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
361 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
362 dal_pixel_format = PIXEL_FORMAT_420BPP12;
364 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
365 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
366 dal_pixel_format = PIXEL_FORMAT_420BPP15;
368 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
370 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
373 return dal_pixel_format;
/* Swap a rect's width and height (used for 90/270-degree rotation).
 * NOTE(review): only one assignment is visible in this listing; the
 * rest of the swap is presumably in lines not shown — verify. */
376 static void rect_swap_helper(struct rect *rect)
381 rect->height = rect->width;
/*
 * Compute pipe_ctx->scl_data.viewport: intersect the stream source rect
 * with the surface clip rect, then map that clip through the surface
 * src->dst scaling ratio into surface-source coordinates. 90/270
 * rotation swaps x/y of every rect first. The viewport is floored at
 * 2x2 so 420/422 chroma planes remain non-empty.
 */
389 static void calculate_viewport(
390 const struct dc_surface *surface,
391 struct pipe_ctx *pipe_ctx)
393 struct rect stream_src = pipe_ctx->stream->public.src;
394 struct rect src = surface->src_rect;
395 struct rect dst = surface->dst_rect;
396 struct rect surface_clip = surface->clip_rect;
397 struct rect clip = {0};
400 if (surface->rotation == ROTATION_ANGLE_90 ||
401 surface->rotation == ROTATION_ANGLE_270) {
402 rect_swap_helper(&src);
403 rect_swap_helper(&dst);
404 rect_swap_helper(&surface_clip);
405 rect_swap_helper(&stream_src);
408 /* The actual clip is an intersection between stream
409 * source and surface clip
411 clip.x = stream_src.x > surface_clip.x ?
412 stream_src.x : surface_clip.x;
414 clip.width = stream_src.x + stream_src.width <
415 surface_clip.x + surface_clip.width ?
416 stream_src.x + stream_src.width - clip.x :
417 surface_clip.x + surface_clip.width - clip.x ;
419 clip.y = stream_src.y > surface_clip.y ?
420 stream_src.y : surface_clip.y;
422 clip.height = stream_src.y + stream_src.height <
423 surface_clip.y + surface_clip.height ?
424 stream_src.y + stream_src.height - clip.y :
425 surface_clip.y + surface_clip.height - clip.y ;
427 /* offset = src.ofs + (clip.ofs - dst.ofs) * scl_ratio
428 * num_pixels = clip.num_pix * scl_ratio
430 pipe_ctx->scl_data.viewport.x = src.x + (clip.x - dst.x) *
431 src.width / dst.width;
432 pipe_ctx->scl_data.viewport.width = clip.width *
433 src.width / dst.width;
435 pipe_ctx->scl_data.viewport.y = src.y + (clip.y - dst.y) *
436 src.height / dst.height;
437 pipe_ctx->scl_data.viewport.height = clip.height *
438 src.height / dst.height;
440 /* Minimum viewport such that 420/422 chroma vp is non 0 */
441 if (pipe_ctx->scl_data.viewport.width < 2)
442 pipe_ctx->scl_data.viewport.width = 2;
443 if (pipe_ctx->scl_data.viewport.height < 2)
444 pipe_ctx->scl_data.viewport.height = 2;
/*
 * Compute scl_data.recout: the surface clip rect scaled by the stream
 * src->dst ratio and offset into the stream destination rect, clamped
 * so it never extends past the stream destination.
 */
447 static void calculate_recout(
448 const struct dc_surface *surface,
449 struct pipe_ctx *pipe_ctx)
451 struct core_stream *stream = pipe_ctx->stream;
452 struct rect clip = surface->clip_rect;
454 pipe_ctx->scl_data.recout.x = stream->public.dst.x;
455 if (stream->public.src.x < clip.x)
456 pipe_ctx->scl_data.recout.x += (clip.x
457 - stream->public.src.x) * stream->public.dst.width
458 / stream->public.src.width;
460 pipe_ctx->scl_data.recout.width = clip.width *
461 stream->public.dst.width / stream->public.src.width;
/* Clamp right edge to the stream destination. */
462 if (pipe_ctx->scl_data.recout.width + pipe_ctx->scl_data.recout.x >
463 stream->public.dst.x + stream->public.dst.width)
464 pipe_ctx->scl_data.recout.width =
465 stream->public.dst.x + stream->public.dst.width
466 - pipe_ctx->scl_data.recout.x;
468 pipe_ctx->scl_data.recout.y = stream->public.dst.y;
469 if (stream->public.src.y < clip.y)
470 pipe_ctx->scl_data.recout.y += (clip.y
471 - stream->public.src.y) * stream->public.dst.height
472 / stream->public.src.height;
474 pipe_ctx->scl_data.recout.height = clip.height *
475 stream->public.dst.height / stream->public.src.height;
/* Clamp bottom edge to the stream destination. */
476 if (pipe_ctx->scl_data.recout.height + pipe_ctx->scl_data.recout.y >
477 stream->public.dst.y + stream->public.dst.height)
478 pipe_ctx->scl_data.recout.height =
479 stream->public.dst.y + stream->public.dst.height
480 - pipe_ctx->scl_data.recout.y;
/*
 * Fill scl_data.ratios: start from the surface src/dst fraction,
 * double horizontally for side-by-side stereo or vertically for
 * top-and-bottom stereo, then multiply in the stream src->dst ratio.
 * Chroma ratios copy luma and are halved for 4:2:0 (420BPP12).
 */
483 static void calculate_scaling_ratios(
484 const struct dc_surface *surface,
485 struct pipe_ctx *pipe_ctx)
487 struct core_stream *stream = pipe_ctx->stream;
488 const uint32_t in_w = stream->public.src.width;
489 const uint32_t in_h = stream->public.src.height;
490 const uint32_t out_w = stream->public.dst.width;
491 const uint32_t out_h = stream->public.dst.height;
493 pipe_ctx->scl_data.ratios.horz = dal_fixed31_32_from_fraction(
494 surface->src_rect.width,
495 surface->dst_rect.width);
496 pipe_ctx->scl_data.ratios.vert = dal_fixed31_32_from_fraction(
497 surface->src_rect.height,
498 surface->dst_rect.height);
500 if (surface->stereo_format == PLANE_STEREO_FORMAT_SIDE_BY_SIDE)
501 pipe_ctx->scl_data.ratios.horz.value *= 2;
502 else if (surface->stereo_format == PLANE_STEREO_FORMAT_TOP_AND_BOTTOM)
503 pipe_ctx->scl_data.ratios.vert.value *= 2;
/* Fold in the stream-level src->dst scaling (64-bit division on the
 * fixed-point raw value). */
505 pipe_ctx->scl_data.ratios.vert.value = div64_s64(
506 pipe_ctx->scl_data.ratios.vert.value * in_h, out_h);
507 pipe_ctx->scl_data.ratios.horz.value = div64_s64(
508 pipe_ctx->scl_data.ratios.horz.value * in_w, out_w);
510 pipe_ctx->scl_data.ratios.horz_c = pipe_ctx->scl_data.ratios.horz;
511 pipe_ctx->scl_data.ratios.vert_c = pipe_ctx->scl_data.ratios.vert;
513 if (pipe_ctx->scl_data.format == PIXEL_FORMAT_420BPP12) {
514 pipe_ctx->scl_data.ratios.horz_c.value /= 2;
515 pipe_ctx->scl_data.ratios.vert_c.value /= 2;
/*
 * Build all scaler programming for one pipe: pixel format, viewport,
 * scaling ratios, recout, line-buffer depth and optimal tap counts.
 * Viewports smaller than 16 pixels in either dimension are rejected.
 * Tries a 30bpp line buffer first; if tap calculation fails, retries
 * with 24bpp. Logs the resulting viewport/dst rects.
 */
519 bool resource_build_scaling_params(
520 const struct dc_surface *surface,
521 struct pipe_ctx *pipe_ctx)
524 struct dc_crtc_timing *timing = &pipe_ctx->stream->public.timing;
525 /* Important: scaling ratio calculation requires pixel format,
526 * lb depth calculation requires recout and taps require scaling ratios.
528 pipe_ctx->scl_data.format = convert_pixel_format_to_dalsurface(surface->format);
530 calculate_viewport(surface, pipe_ctx);
532 if (pipe_ctx->scl_data.viewport.height < 16 || pipe_ctx->scl_data.viewport.width < 16)
535 calculate_scaling_ratios(surface, pipe_ctx);
537 calculate_recout(surface, pipe_ctx);
540 * Setting line buffer pixel depth to 24bpp yields banding
541 * on certain displays, such as the Sharp 4k
543 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
545 pipe_ctx->scl_data.h_active = timing->h_addressable;
546 pipe_ctx->scl_data.v_active = timing->v_addressable;
548 /* Taps calculations */
549 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
550 pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);
553 /* Try 24 bpp linebuffer */
554 pipe_ctx->scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
556 res = pipe_ctx->xfm->funcs->transform_get_optimal_number_of_taps(
557 pipe_ctx->xfm, &pipe_ctx->scl_data, &surface->scaling_quality);
560 dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
561 "%s: Viewport:\nheight:%d width:%d x:%d "
562 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
565 pipe_ctx->scl_data.viewport.height,
566 pipe_ctx->scl_data.viewport.width,
567 pipe_ctx->scl_data.viewport.x,
568 pipe_ctx->scl_data.viewport.y,
569 surface->dst_rect.height,
570 surface->dst_rect.width,
572 surface->dst_rect.y);
/*
 * Run resource_build_scaling_params on every pipe that has both a
 * surface and a stream; return DC_FAIL_SCALING on the first failure.
 */
578 enum dc_status resource_build_scaling_params_for_context(
579 const struct core_dc *dc,
580 struct validate_context *context)
584 for (i = 0; i < MAX_PIPES; i++) {
585 if (context->res_ctx.pipe_ctx[i].surface != NULL &&
586 context->res_ctx.pipe_ctx[i].stream != NULL)
587 if (!resource_build_scaling_params(
588 &context->res_ctx.pipe_ctx[i].surface->public,
589 &context->res_ctx.pipe_ctx[i]))
590 return DC_FAIL_SCALING;
/*
 * For every pipe bound to the given stream, clear its surface and its
 * top/bottom pipe links (undoes the surface chaining).
 */
596 static void detach_surfaces_for_stream(
597 struct validate_context *context,
598 const struct dc_stream *dc_stream)
601 struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);
603 for (i = 0; i < context->res_ctx.pool->pipe_count; i++) {
604 struct pipe_ctx *cur_pipe = &context->res_ctx.pipe_ctx[i];
605 if (cur_pipe->stream == stream) {
606 cur_pipe->surface = NULL;
607 cur_pipe->top_pipe = NULL;
608 cur_pipe->bottom_pipe = NULL;
/*
 * Find an idle pipe to use as a secondary (bottom) pipe, searching
 * from the highest index down; records the chosen pipe's index.
 * Returns NULL when every pipe has a stream.
 */
613 struct pipe_ctx *find_idle_secondary_pipe(struct resource_context *res_ctx)
616 struct pipe_ctx *secondary_pipe = NULL;
619 * search backwards for the second pipe to keep pipe
620 * assignment more consistent
623 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
624 if (res_ctx->pipe_ctx[i].stream == NULL) {
625 secondary_pipe = &res_ctx->pipe_ctx[i];
626 secondary_pipe->pipe_idx = i;
632 return secondary_pipe;
/*
 * Return the head pipe for a stream: the pipe bound to the stream that
 * has no top_pipe (i.e. owns the back end).
 */
635 struct pipe_ctx *resource_get_head_pipe_for_stream(
636 struct resource_context *res_ctx,
637 const struct core_stream *stream)
640 for (i = 0; i < res_ctx->pool->pipe_count; i++) {
641 if (res_ctx->pipe_ctx[i].stream == stream &&
642 !res_ctx->pipe_ctx[i].top_pipe) {
643 return &res_ctx->pipe_ctx[i];
/*
 * Acquire a pipe with no surface for this stream: prefer the head pipe
 * itself, then any pipe already bound to the stream, then fall back to
 * the pool's acquire_idle_pipe_for_layer hook (when provided).
 */
651 * A free_pipe for a stream is defined here as a pipe
652 * that has no surface attached yet
654 static struct pipe_ctx *acquire_free_pipe_for_stream(
655 struct resource_context *res_ctx,
656 const struct dc_stream *dc_stream)
659 struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);
661 struct pipe_ctx *head_pipe = NULL;
663 /* Find head pipe, which has the back end set up*/
665 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
670 if (!head_pipe->surface)
673 /* Re-use pipe already acquired for this stream if available*/
674 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
675 if (res_ctx->pipe_ctx[i].stream == stream &&
676 !res_ctx->pipe_ctx[i].surface) {
677 return &res_ctx->pipe_ctx[i];
682 * At this point we have no re-useable pipe for this stream and we need
683 * to acquire an idle one to satisfy the request
686 if(!res_ctx->pool->funcs->acquire_idle_pipe_for_layer)
689 return res_ctx->pool->funcs->acquire_idle_pipe_for_layer(res_ctx, stream);
/*
 * Unbind the stream from any of its pipes that never received a
 * surface (counterpart of acquire_free_pipe_for_stream).
 */
693 static void release_free_pipes_for_stream(
694 struct resource_context *res_ctx,
695 const struct dc_stream *dc_stream)
698 struct core_stream *stream = DC_STREAM_TO_CORE(dc_stream);
700 for (i = res_ctx->pool->pipe_count - 1; i >= 0; i--) {
701 if (res_ctx->pipe_ctx[i].stream == stream &&
702 !res_ctx->pipe_ctx[i].surface) {
703 res_ctx->pipe_ctx[i].stream = NULL;
/*
 * Bind a new surface set to dc_stream within 'context':
 *  - reject counts above MAX_SURFACE_NUM;
 *  - locate the stream's dc_stream_status entry;
 *  - retain the new surfaces BEFORE releasing the old ones, so a
 *    surface present in both sets survives;
 *  - detach current pipes, then acquire one free pipe per surface and
 *    chain them via top_pipe/bottom_pipe;
 *  - release any leftover free pipes and publish the new surface list.
 */
708 bool resource_attach_surfaces_to_context(
709 const struct dc_surface * const *surfaces,
711 const struct dc_stream *dc_stream,
712 struct validate_context *context)
715 struct pipe_ctx *tail_pipe;
716 struct dc_stream_status *stream_status = NULL;
719 if (surface_count > MAX_SURFACE_NUM) {
720 dm_error("Surface: can not attach %d surfaces! Maximum is: %d\n",
721 surface_count, MAX_SURFACE_NUM);
725 for (i = 0; i < context->stream_count; i++)
726 if (&context->streams[i]->public == dc_stream) {
727 stream_status = &context->stream_status[i];
730 if (stream_status == NULL) {
731 dm_error("Existing stream not found; failed to attach surfaces\n");
735 /* retain new surfaces */
736 for (i = 0; i < surface_count; i++)
737 dc_surface_retain(surfaces[i]);
739 detach_surfaces_for_stream(context, dc_stream);
741 /* release existing surfaces*/
742 for (i = 0; i < stream_status->surface_count; i++)
743 dc_surface_release(stream_status->surfaces[i]);
745 for (i = surface_count; i < stream_status->surface_count; i++)
746 stream_status->surfaces[i] = NULL;
748 stream_status->surface_count = 0;
750 if (surface_count == 0)
754 for (i = 0; i < surface_count; i++) {
755 struct core_surface *surface = DC_SURFACE_TO_CORE(surfaces[i]);
756 struct pipe_ctx *free_pipe = acquire_free_pipe_for_stream(
757 &context->res_ctx, dc_stream);
760 stream_status->surfaces[i] = NULL;
764 free_pipe->surface = surface;
/* Chain this pipe below the previous one (MPO blending order). */
767 free_pipe->top_pipe = tail_pipe;
768 tail_pipe->bottom_pipe = free_pipe;
771 tail_pipe = free_pipe;
774 release_free_pipes_for_stream(&context->res_ctx, dc_stream);
776 /* assign new surfaces*/
777 for (i = 0; i < surface_count; i++)
778 stream_status->surfaces[i] = surfaces[i];
780 stream_status->surface_count = surface_count;
/*
 * A stream's timing is considered changed when: there is no current
 * stream, the sink pointer changed (hotplug), the output color space
 * changed (info frames must be reprogrammed), or the crtc timing
 * structs differ byte-wise.
 */
786 static bool is_timing_changed(const struct core_stream *cur_stream,
787 const struct core_stream *new_stream)
789 if (cur_stream == NULL)
792 /* If sink pointer changed, it means this is a hotplug, we should do
795 if (cur_stream->sink != new_stream->sink)
798 /* If output color space is changed, need to reprogram info frames */
799 if (cur_stream->public.output_color_space !=
800 new_stream->public.output_color_space)
804 &cur_stream->public.timing,
805 &new_stream->public.timing,
806 sizeof(struct dc_crtc_timing)) != 0;
/*
 * Backends match if the pointers are identical; differ if either is
 * NULL (but not both) or the timing changed between them.
 */
809 static bool are_stream_backends_same(
810 const struct core_stream *stream_a, const struct core_stream *stream_b)
812 if (stream_a == stream_b)
815 if (stream_a == NULL || stream_b == NULL)
818 if (is_timing_changed(stream_a, stream_b))
/*
 * A stream is unchanged when it is the same object, or when its
 * backend state matches the old stream's.
 */
824 bool is_stream_unchanged(
825 const struct core_stream *old_stream, const struct core_stream *stream)
827 if (old_stream == stream)
830 if (!are_stream_backends_same(old_stream, stream))
/*
 * For each entry in the validation set: if the stream is unchanged
 * from old_context, carry over its surfaces and stream_status entry;
 * otherwise attach the surfaces supplied by the set entry itself.
 */
836 bool resource_validate_attach_surfaces(
837 const struct dc_validation_set set[],
839 const struct validate_context *old_context,
840 struct validate_context *context)
844 for (i = 0; i < set_count; i++) {
845 for (j = 0; j < old_context->stream_count; j++)
846 if (is_stream_unchanged(
847 old_context->streams[j],
848 context->streams[i])) {
849 if (!resource_attach_surfaces_to_context(
850 old_context->stream_status[j].surfaces,
851 old_context->stream_status[j].surface_count,
852 &context->streams[i]->public,
855 context->stream_status[i] = old_context->stream_status[j];
857 if (set[i].surface_count != 0)
858 if (!resource_attach_surfaces_to_context(
860 set[i].surface_count,
861 &context->streams[i]->public,
870 /* Maximum TMDS single link pixel clock 165MHz */
871 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
/* Mark the pool's entry for the given stream encoder as acquired. */
873 static void set_stream_engine_in_use(
874 struct resource_context *res_ctx,
875 struct stream_encoder *stream_enc)
879 for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
880 if (res_ctx->pool->stream_enc[i] == stream_enc)
881 res_ctx->is_stream_enc_acquired[i] = true;
885 /* TODO: release audio object */
/* Mark the pool's entry for the given audio endpoint as acquired. */
886 static void set_audio_in_use(
887 struct resource_context *res_ctx,
891 for (i = 0; i < res_ctx->pool->audio_count; i++) {
892 if (res_ctx->pool->audios[i] == audio) {
893 res_ctx->is_audio_acquired[i] = true;
/*
 * Claim the first pipe with no stream: wire it to the same-index
 * timing generator, mem-input, ipp, transform and opp, plus the shared
 * display clock, record its index, and bind the stream.
 */
898 static int acquire_first_free_pipe(
899 struct resource_context *res_ctx,
900 struct core_stream *stream)
904 for (i = 0; i < res_ctx->pool->pipe_count; i++) {
905 if (!res_ctx->pipe_ctx[i].stream) {
906 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
908 pipe_ctx->tg = res_ctx->pool->timing_generators[i];
909 pipe_ctx->mi = res_ctx->pool->mis[i];
910 pipe_ctx->ipp = res_ctx->pool->ipps[i];
911 pipe_ctx->xfm = res_ctx->pool->transforms[i];
912 pipe_ctx->opp = res_ctx->pool->opps[i];
913 pipe_ctx->dis_clk = res_ctx->pool->display_clock;
914 pipe_ctx->pipe_idx = i;
916 pipe_ctx->stream = stream;
/*
 * Pick a free stream encoder for the stream's link: prefer the link
 * encoder's preferred engine; for DP signals, fall back to the last
 * free encoder seen (index j) when the preferred one is taken.
 */
923 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
924 struct resource_context *res_ctx,
925 struct core_stream *stream)
929 struct core_link *link = stream->sink->link;
931 for (i = 0; i < res_ctx->pool->stream_enc_count; i++) {
932 if (!res_ctx->is_stream_enc_acquired[i] &&
933 res_ctx->pool->stream_enc[i]) {
934 /* Store first available for MST second display
935 * in daisy chain use case */
937 if (res_ctx->pool->stream_enc[i]->id ==
938 link->link_enc->preferred_engine)
939 return res_ctx->pool->stream_enc[i];
944 * below can happen in cases when stream encoder is acquired:
945 * 1) for second MST display in chain, so preferred engine already
947 * 2) for another link, which preferred engine already acquired by any
950 * If signal is of DP type and preferred engine not found, return last available
952 * TODO - This is just a patch up and a generic solution is
953 * required for non DP connectors.
956 if (j >= 0 && dc_is_dp_signal(stream->signal))
957 return res_ctx->pool->stream_enc[j];
/* Return the first audio endpoint in the pool not yet acquired. */
962 static struct audio *find_first_free_audio(struct resource_context *res_ctx)
965 for (i = 0; i < res_ctx->pool->audio_count; i++) {
966 if (res_ctx->is_audio_acquired[i] == false) {
967 return res_ctx->pool->audios[i];
/*
 * Resolve the stream's effective signal type from its sink:
 * SIGNAL_TYPE_NONE falls back to the connector signal; DVI sinks pick
 * single vs dual link by comparing the pixel clock against the 165 MHz
 * TMDS single-link limit; all other sinks use the sink's signal as-is.
 */
974 static void update_stream_signal(struct core_stream *stream)
976 const struct dc_sink *dc_sink = stream->public.sink;
978 if (dc_sink->sink_signal == SIGNAL_TYPE_NONE)
979 stream->signal = stream->sink->link->public.connector_signal;
980 else if (dc_sink->sink_signal == SIGNAL_TYPE_DVI_SINGLE_LINK ||
981 dc_sink->sink_signal == SIGNAL_TYPE_DVI_DUAL_LINK)
982 /* For asic supports dual link DVI, we should adjust signal type
983 * based on timing pixel clock. If pixel clock more than 165Mhz,
984 * signal is dual link, otherwise, single link.
986 if (stream->public.timing.pix_clk_khz > TMDS_MAX_PIXEL_CLOCK_IN_KHZ)
987 stream->signal = SIGNAL_TYPE_DVI_DUAL_LINK;
989 stream->signal = SIGNAL_TYPE_DVI_SINGLE_LINK;
991 stream->signal = dc_sink->sink_signal;
/*
 * True when any stream in old_context has the same backend state as
 * the given stream.
 */
994 bool resource_is_stream_unchanged(
995 const struct validate_context *old_context, const struct core_stream *stream)
999 for (i = 0; i < old_context->stream_count; i++) {
1000 const struct core_stream *old_stream = old_context->streams[i];
1002 if (are_stream_backends_same(old_stream, stream))
/*
 * Struct-copy from_pipe_ctx into to_pipe_ctx while preserving
 * to_pipe_ctx's own stream pointer and (when non-NULL) surface pointer.
 */
1009 static void copy_pipe_ctx(
1010 const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1012 struct core_surface *surface = to_pipe_ctx->surface;
1013 struct core_stream *stream = to_pipe_ctx->stream;
1015 *to_pipe_ctx = *from_pipe_ctx;
1016 to_pipe_ctx->stream = stream;
1017 if (surface != NULL)
1018 to_pipe_ctx->surface = surface;
/*
 * Find a stream whose PLL the given stream could share: timings must be
 * synchronizable and the candidate must be neither DP nor virtual.
 */
1021 static struct core_stream *find_pll_sharable_stream(
1022 const struct core_stream *stream_needs_pll,
1023 struct validate_context *context)
1027 for (i = 0; i < context->stream_count; i++) {
1028 struct core_stream *stream_has_pll = context->streams[i];
1030 /* We are looking for non dp, non virtual stream */
1031 if (resource_are_streams_timing_synchronizable(
1032 stream_needs_pll, stream_has_pll)
1033 && !dc_is_dp_signal(stream_has_pll->signal)
1034 && stream_has_pll->sink->link->public.connector_signal
1035 != SIGNAL_TYPE_VIRTUAL)
1036 return stream_has_pll;
/*
 * Normalize the pixel clock for deep color: for non-4:2:2 encodings
 * scale by the bits-per-component ratio (30/24, 36/24, 48/24).
 * NOTE(review): the 4:2:0 branch's body is not visible in this
 * truncated listing — verify its handling against the full source.
 */
1043 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1045 uint32_t pix_clk = timing->pix_clk_khz;
1046 uint32_t normalized_pix_clk = pix_clk;
1048 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1050 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1051 switch (timing->display_color_depth) {
1052 case COLOR_DEPTH_888:
1053 normalized_pix_clk = pix_clk;
1055 case COLOR_DEPTH_101010:
1056 normalized_pix_clk = (pix_clk * 30) / 24;
1058 case COLOR_DEPTH_121212:
1059 normalized_pix_clk = (pix_clk * 36) / 24;
1061 case COLOR_DEPTH_161616:
1062 normalized_pix_clk = (pix_clk * 48) / 24;
1069 return normalized_pix_clk;
/*
 * Refresh every stream's effective signal, then set phy_pix_clk:
 * deep-color-normalized for HDMI signals, raw timing pixel clock
 * otherwise.
 */
1072 static void calculate_phy_pix_clks(
1073 const struct core_dc *dc,
1074 struct validate_context *context)
1078 for (i = 0; i < context->stream_count; i++) {
1079 struct core_stream *stream = context->streams[i];
1081 update_stream_signal(stream);
1083 /* update actual pixel clock on all streams */
1084 if (dc_is_hdmi_signal(stream->signal))
1085 stream->phy_pix_clk = get_norm_pix_clk(
1086 &stream->public.timing);
1088 stream->phy_pix_clk =
1089 stream->public.timing.pix_clk_khz;
/*
 * Assign pool resources to every stream in 'context' in two passes:
 *  1) streams unchanged vs dc->current_context keep their existing
 *     pipes — pipe state is copied over, stream encoder / audio /
 *     clock-source usage re-marked, and the dedicated DP clock source
 *     chosen when no non-DP stream shares the same timing;
 *  2) changed streams acquire a free pipe, a stream encoder matched to
 *     their link, and (for audio-capable, non-converter-muted sinks
 *     with audio modes) a free audio endpoint.
 * Error codes: DC_NO_CONTROLLER_RESOURCE when no pipe is free,
 * DC_NO_STREAM_ENG_RESOURCE when no encoder is free.
 */
1093 enum dc_status resource_map_pool_resources(
1094 const struct core_dc *dc,
1095 struct validate_context *context)
1099 calculate_phy_pix_clks(dc, context);
1101 for (i = 0; i < context->stream_count; i++) {
1102 struct core_stream *stream = context->streams[i];
1104 if (!resource_is_stream_unchanged(dc->current_context, stream))
1107 /* mark resources used for stream that is already active */
1108 for (j = 0; j < MAX_PIPES; j++) {
1109 struct pipe_ctx *pipe_ctx =
1110 &context->res_ctx.pipe_ctx[j];
1111 const struct pipe_ctx *old_pipe_ctx =
1112 &dc->current_context->res_ctx.pipe_ctx[j];
1114 if (!are_stream_backends_same(old_pipe_ctx->stream, stream))
1117 pipe_ctx->stream = stream;
1118 copy_pipe_ctx(old_pipe_ctx, pipe_ctx);
1120 /* Split pipe resource, do not acquire back end */
1121 if (!pipe_ctx->stream_enc)
1124 set_stream_engine_in_use(
1126 pipe_ctx->stream_enc);
1128 /* Switch to dp clock source only if there is
1129 * no non dp stream that shares the same timing
1130 * with the dp stream.
1132 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1133 !find_pll_sharable_stream(stream, context))
1134 pipe_ctx->clock_source =
1135 context->res_ctx.pool->dp_clock_source;
1137 resource_reference_clock_source(
1139 pipe_ctx->clock_source);
1141 set_audio_in_use(&context->res_ctx,
/* Second pass: acquire fresh resources for changed streams. */
1146 for (i = 0; i < context->stream_count; i++) {
1147 struct core_stream *stream = context->streams[i];
1148 struct pipe_ctx *pipe_ctx = NULL;
1151 if (resource_is_stream_unchanged(dc->current_context, stream))
1153 /* acquire new resources */
1154 pipe_idx = acquire_first_free_pipe(&context->res_ctx, stream);
1156 return DC_NO_CONTROLLER_RESOURCE;
1159 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1161 pipe_ctx->stream_enc =
1162 find_first_free_match_stream_enc_for_link(
1163 &context->res_ctx, stream);
1165 if (!pipe_ctx->stream_enc)
1166 return DC_NO_STREAM_ENG_RESOURCE;
1168 set_stream_engine_in_use(
1170 pipe_ctx->stream_enc);
1172 /* TODO: Add check if ASIC support and EDID audio */
1173 if (!stream->sink->converter_disable_audio &&
1174 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1175 stream->public.audio_info.mode_count) {
1176 pipe_ctx->audio = find_first_free_audio(
1180 * Audio assigned in order first come first get.
1181 * There are asics which has number of audio
1182 * resources less then number of pipes
1184 if (pipe_ctx->audio)
1190 context->stream_status[i].primary_otg_inst = pipe_ctx->tg->inst;
1196 /* first stream in the context is used to populate the rest */
/*
 * Replicate stream [0] (and its pipe state) into slots 1..max_streams-1,
 * retaining each copy and bumping stream_count — used for guaranteed-
 * mode validation where one stream stands in for all.
 */
1197 void validate_guaranteed_copy_streams(
1198 struct validate_context *context,
1203 for (i = 1; i < max_streams; i++) {
1204 context->streams[i] = context->streams[0];
1206 copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1207 &context->res_ctx.pipe_ctx[i]);
1208 context->res_ctx.pipe_ctx[i].stream =
1209 context->res_ctx.pipe_ctx[0].stream;
1211 dc_stream_retain(&context->streams[i]->public);
1212 context->stream_count++;
/*
 * Recompute the checksum byte (sb[2]) of a valid gamut packet over the
 * payload starting at sb[3], with the loop bound taken from sb[1].
 * NOTE(review): the accumulation statement inside the loop is not
 * visible in this truncated listing — verify against the full source.
 */
1216 static void patch_gamut_packet_checksum(
1217 struct encoder_info_packet *gamut_packet)
1219 /* For gamut we recalc checksum */
1220 if (gamut_packet->valid) {
1221 uint8_t chk_sum = 0;
1225 /*start of the Gamut data. */
1226 ptr = &gamut_packet->sb[3];
1228 for (i = 0; i <= gamut_packet->sb[1]; i++)
1231 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
1235 static void set_avi_info_frame(
1236 struct encoder_info_packet *info_packet,
1237 struct pipe_ctx *pipe_ctx)
1239 struct core_stream *stream = pipe_ctx->stream;
1240 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1241 struct info_frame info_frame = { {0} };
1242 uint32_t pixel_encoding = 0;
1243 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1244 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1246 uint8_t cn0_cn1 = 0;
1247 uint8_t *check_sum = NULL;
1248 uint8_t byte_index = 0;
1250 color_space = pipe_ctx->stream->public.output_color_space;
1252 /* Initialize header */
1253 info_frame.avi_info_packet.info_packet_hdmi.bits.header.
1254 info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
1255 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1256 * not be used in HDMI 2.0 (Section 10.1) */
1257 info_frame.avi_info_packet.info_packet_hdmi.bits.header.version = 2;
1258 info_frame.avi_info_packet.info_packet_hdmi.bits.header.length =
1259 HDMI_AVI_INFOFRAME_SIZE;
1262 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1263 * according to HDMI 2.0 spec (Section 10.1)
1266 switch (stream->public.timing.pixel_encoding) {
1267 case PIXEL_ENCODING_YCBCR422:
1271 case PIXEL_ENCODING_YCBCR444:
1274 case PIXEL_ENCODING_YCBCR420:
1278 case PIXEL_ENCODING_RGB:
1283 /* Y0_Y1_Y2 : The pixel encoding */
1284 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1285 info_frame.avi_info_packet.info_packet_hdmi.bits.Y0_Y1_Y2 =
1288 /* A0 = 1 Active Format Information valid */
1289 info_frame.avi_info_packet.info_packet_hdmi.bits.A0 =
1290 ACTIVE_FORMAT_VALID;
1292 /* B0, B1 = 3; Bar info data is valid */
1293 info_frame.avi_info_packet.info_packet_hdmi.bits.B0_B1 =
1294 BAR_INFO_BOTH_VALID;
1296 info_frame.avi_info_packet.info_packet_hdmi.bits.SC0_SC1 =
1297 PICTURE_SCALING_UNIFORM;
1299 /* S0, S1 : Underscan / Overscan */
1300 /* TODO: un-hardcode scan type */
1301 scan_type = SCANNING_TYPE_UNDERSCAN;
1302 info_frame.avi_info_packet.info_packet_hdmi.bits.S0_S1 = scan_type;
1304 /* C0, C1 : Colorimetry */
1305 if (color_space == COLOR_SPACE_YCBCR709 ||
1306 color_space == COLOR_SPACE_YCBCR709_LIMITED)
1307 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1309 else if (color_space == COLOR_SPACE_YCBCR601 ||
1310 color_space == COLOR_SPACE_YCBCR601_LIMITED)
1311 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1314 if (stream->public.timing.pixel_encoding != PIXEL_ENCODING_RGB)
1315 BREAK_TO_DEBUGGER();
1316 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1317 COLORIMETRY_NO_DATA;
1319 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1320 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1321 color_space == COLOR_SPACE_2020_YCBCR) {
1322 info_frame.avi_info_packet.info_packet_hdmi.bits.EC0_EC2 =
1323 COLORIMETRYEX_BT2020RGBYCBCR;
1324 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1325 COLORIMETRY_EXTENDED;
1326 } else if (color_space == COLOR_SPACE_ADOBERGB) {
1327 info_frame.avi_info_packet.info_packet_hdmi.bits.EC0_EC2 =
1328 COLORIMETRYEX_ADOBERGB;
1329 info_frame.avi_info_packet.info_packet_hdmi.bits.C0_C1 =
1330 COLORIMETRY_EXTENDED;
1333 /* TODO: un-hardcode aspect ratio */
1334 aspect = stream->public.timing.aspect_ratio;
1337 case ASPECT_RATIO_4_3:
1338 case ASPECT_RATIO_16_9:
1339 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = aspect;
1342 case ASPECT_RATIO_NO_DATA:
1343 case ASPECT_RATIO_64_27:
1344 case ASPECT_RATIO_256_135:
1346 info_frame.avi_info_packet.info_packet_hdmi.bits.M0_M1 = 0;
1349 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1350 info_frame.avi_info_packet.info_packet_hdmi.bits.R0_R3 =
1351 ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1353 /* TODO: un-hardcode cn0_cn1 and itc */
1358 info_frame.avi_info_packet.info_packet_hdmi.bits.ITC = 1;
1359 info_frame.avi_info_packet.info_packet_hdmi.bits.CN0_CN1 =
1363 /* TODO : We should handle YCC quantization */
1364 /* but we do not have matrix calculation */
1365 if (color_space == COLOR_SPACE_SRGB) {
1366 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1367 RGB_QUANTIZATION_FULL_RANGE;
1368 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1369 YYC_QUANTIZATION_FULL_RANGE;
1370 } else if (color_space == COLOR_SPACE_SRGB_LIMITED) {
1371 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1372 RGB_QUANTIZATION_LIMITED_RANGE;
1373 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1374 YYC_QUANTIZATION_LIMITED_RANGE;
1376 info_frame.avi_info_packet.info_packet_hdmi.bits.Q0_Q1 =
1377 RGB_QUANTIZATION_DEFAULT_RANGE;
1378 info_frame.avi_info_packet.info_packet_hdmi.bits.YQ0_YQ1 =
1379 YYC_QUANTIZATION_LIMITED_RANGE;
1382 info_frame.avi_info_packet.info_packet_hdmi.bits.VIC0_VIC7 =
1383 stream->public.timing.vic;
1386 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
1387 * repetition start from 1 */
1388 info_frame.avi_info_packet.info_packet_hdmi.bits.PR0_PR3 = 0;
1391 * barTop: Line Number of End of Top Bar.
1392 * barBottom: Line Number of Start of Bottom Bar.
1393 * barLeft: Pixel Number of End of Left Bar.
1394 * barRight: Pixel Number of Start of Right Bar. */
1395 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_top =
1396 stream->public.timing.v_border_top;
1397 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_bottom =
1398 (stream->public.timing.v_border_top
1399 - stream->public.timing.v_border_bottom + 1);
1400 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_left =
1401 stream->public.timing.h_border_left;
1402 info_frame.avi_info_packet.info_packet_hdmi.bits.bar_right =
1403 (stream->public.timing.h_total
1404 - stream->public.timing.h_border_right + 1);
1406 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
1409 avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
1410 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
1412 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
1413 *check_sum += info_frame.avi_info_packet.info_packet_hdmi.
1414 packet_raw_data.sb[byte_index];
1416 /* one byte complement */
1417 *check_sum = (uint8_t) (0x100 - *check_sum);
1419 /* Store in hw_path_mode */
1421 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb0;
1423 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb1;
1425 info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.hb2;
1427 for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
1428 info_packet_hdmi.packet_raw_data.sb); byte_index++)
1429 info_packet->sb[byte_index] = info_frame.avi_info_packet.
1430 info_packet_hdmi.packet_raw_data.sb[byte_index];
1432 info_packet->valid = true;
1435 static void set_vendor_info_packet(
1436 struct encoder_info_packet *info_packet,
1437 struct core_stream *stream)
1439 uint32_t length = 0;
1440 bool hdmi_vic_mode = false;
1441 uint8_t checksum = 0;
1443 enum dc_timing_3d_format format;
1445 format = stream->public.timing.timing_3d_format;
1447 /* Can be different depending on packet content */
1450 if (stream->public.timing.hdmi_vic != 0
1451 && stream->public.timing.h_total >= 3840
1452 && stream->public.timing.v_total >= 2160)
1453 hdmi_vic_mode = true;
1455 /* According to HDMI 1.4a CTS, VSIF should be sent
1456 * for both 3D stereo and HDMI VIC modes.
1457 * For all other modes, there is no VSIF sent. */
1459 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
1462 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
1463 info_packet->sb[1] = 0x03;
1464 info_packet->sb[2] = 0x0C;
1465 info_packet->sb[3] = 0x00;
1467 /*PB4: 5 lower bytes = 0 (reserved). 3 higher bits = HDMI_Video_Format.
1468 * The value for HDMI_Video_Format are:
1469 * 0x0 (0b000) - No additional HDMI video format is presented in this
1471 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
1473 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
1474 * potentially 3D_Ext_Data follows
1475 * 0x3..0x7 (0b011..0b111) - reserved for future use */
1476 if (format != TIMING_3D_FORMAT_NONE)
1477 info_packet->sb[4] = (2 << 5);
1478 else if (hdmi_vic_mode)
1479 info_packet->sb[4] = (1 << 5);
1481 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
1482 * 4 lower bites = 0 (reserved). 4 higher bits = 3D_Structure.
1483 * The value for 3D_Structure are:
1484 * 0x0 - Frame Packing
1485 * 0x1 - Field Alternative
1486 * 0x2 - Line Alternative
1487 * 0x3 - Side-by-Side (full)
1489 * 0x5 - L + depth + graphics + graphics-depth
1490 * 0x6 - Top-and-Bottom
1491 * 0x7 - Reserved for future use
1492 * 0x8 - Side-by-Side (Half)
1493 * 0x9..0xE - Reserved for future use
1496 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
1497 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
1498 info_packet->sb[5] = (0x0 << 4);
1501 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
1502 case TIMING_3D_FORMAT_SBS_SW_PACKED:
1503 info_packet->sb[5] = (0x8 << 4);
1507 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
1508 case TIMING_3D_FORMAT_TB_SW_PACKED:
1509 info_packet->sb[5] = (0x6 << 4);
1516 /*PB5: If PB4 is set to 0x1 (extended resolution format)
1517 * fill PB5 with the correct HDMI VIC code */
1519 info_packet->sb[5] = stream->public.timing.hdmi_vic;
1522 info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
1523 info_packet->hb1 = 0x01; /* Version */
1525 /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
1526 info_packet->hb2 = (uint8_t) (length);
1528 /* Calculate checksum */
1530 checksum += info_packet->hb0;
1531 checksum += info_packet->hb1;
1532 checksum += info_packet->hb2;
1534 for (i = 1; i <= length; i++)
1535 checksum += info_packet->sb[i];
1537 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
1539 info_packet->valid = true;
1542 static void set_spd_info_packet(
1543 struct encoder_info_packet *info_packet,
1544 struct core_stream *stream)
1546 /* SPD info packet for FreeSync */
1548 unsigned char checksum = 0;
1549 unsigned int idx, payload_size = 0;
1551 /* Check if Freesync is supported. Return if false. If true,
1552 * set the corresponding bit in the info packet
1554 if (stream->public.freesync_ctx.supported == false)
1557 if (dc_is_hdmi_signal(stream->signal)) {
1561 /* HB0 = Packet Type = 0x83 (Source Product
1562 * Descriptor InfoFrame)
1564 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
1566 /* HB1 = Version = 0x01 */
1567 info_packet->hb1 = 0x01;
1569 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
1570 info_packet->hb2 = 0x08;
1572 payload_size = 0x08;
1574 } else if (dc_is_dp_signal(stream->signal)) {
1578 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
1579 * when used to associate audio related info packets
1581 info_packet->hb0 = 0x00;
1583 /* HB1 = Packet Type = 0x83 (Source Product
1584 * Descriptor InfoFrame)
1586 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
1588 /* HB2 = [Bits 7:0 = Least significant eight bits -
1589 * For INFOFRAME, the value must be 1Bh]
1591 info_packet->hb2 = 0x1B;
1593 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
1594 * [Bits 1:0 = Most significant two bits = 0x00]
1596 info_packet->hb3 = 0x04;
1598 payload_size = 0x1B;
1601 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
1602 info_packet->sb[1] = 0x1A;
1604 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
1605 info_packet->sb[2] = 0x00;
1607 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
1608 info_packet->sb[3] = 0x00;
1610 /* PB4 = Reserved */
1611 info_packet->sb[4] = 0x00;
1613 /* PB5 = Reserved */
1614 info_packet->sb[5] = 0x00;
1616 /* PB6 = [Bits 7:3 = Reserved] */
1617 info_packet->sb[6] = 0x00;
1619 if (stream->public.freesync_ctx.supported == true)
1620 /* PB6 = [Bit 0 = FreeSync Supported] */
1621 info_packet->sb[6] |= 0x01;
1623 if (stream->public.freesync_ctx.enabled == true)
1624 /* PB6 = [Bit 1 = FreeSync Enabled] */
1625 info_packet->sb[6] |= 0x02;
1627 if (stream->public.freesync_ctx.active == true)
1628 /* PB6 = [Bit 2 = FreeSync Active] */
1629 info_packet->sb[6] |= 0x04;
1631 /* PB7 = FreeSync Minimum refresh rate (Hz) */
1632 info_packet->sb[7] = (unsigned char) (stream->public.freesync_ctx.
1633 min_refresh_in_micro_hz / 1000000);
1635 /* PB8 = FreeSync Maximum refresh rate (Hz)
1637 * Note: We do not use the maximum capable refresh rate
1638 * of the panel, because we should never go above the field
1639 * rate of the mode timing set.
1641 info_packet->sb[8] = (unsigned char) (stream->public.freesync_ctx.
1642 nominal_refresh_in_micro_hz / 1000000);
1644 /* PB9 - PB27 = Reserved */
1645 for (idx = 9; idx <= 27; idx++)
1646 info_packet->sb[idx] = 0x00;
1648 /* Calculate checksum */
1649 checksum += info_packet->hb0;
1650 checksum += info_packet->hb1;
1651 checksum += info_packet->hb2;
1652 checksum += info_packet->hb3;
1654 for (idx = 1; idx <= payload_size; idx++)
1655 checksum += info_packet->sb[idx];
1657 /* PB0 = Checksum (one byte complement) */
1658 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
1660 info_packet->valid = true;
1663 static void set_hdr_static_info_packet(
1664 struct encoder_info_packet *info_packet,
1665 struct core_surface *surface,
1666 struct core_stream *stream)
1669 enum signal_type signal = stream->signal;
1670 struct dc_hdr_static_metadata hdr_metadata;
1676 hdr_metadata = surface->public.hdr_static_ctx;
1678 if (!hdr_metadata.is_hdr)
1681 if (dc_is_hdmi_signal(signal)) {
1682 info_packet->valid = true;
1684 info_packet->hb0 = 0x87;
1685 info_packet->hb1 = 0x01;
1686 info_packet->hb2 = 0x1A;
1688 } else if (dc_is_dp_signal(signal)) {
1689 info_packet->valid = true;
1691 info_packet->hb0 = 0x00;
1692 info_packet->hb1 = 0x87;
1693 info_packet->hb2 = 0x1D;
1694 info_packet->hb3 = (0x13 << 2);
1698 data = hdr_metadata.is_hdr;
1699 info_packet->sb[i++] = data ? 0x02 : 0x00;
1700 info_packet->sb[i++] = 0x00;
1702 data = hdr_metadata.chromaticity_green_x / 2;
1703 info_packet->sb[i++] = data & 0xFF;
1704 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1706 data = hdr_metadata.chromaticity_green_y / 2;
1707 info_packet->sb[i++] = data & 0xFF;
1708 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1710 data = hdr_metadata.chromaticity_blue_x / 2;
1711 info_packet->sb[i++] = data & 0xFF;
1712 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1714 data = hdr_metadata.chromaticity_blue_y / 2;
1715 info_packet->sb[i++] = data & 0xFF;
1716 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1718 data = hdr_metadata.chromaticity_red_x / 2;
1719 info_packet->sb[i++] = data & 0xFF;
1720 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1722 data = hdr_metadata.chromaticity_red_y / 2;
1723 info_packet->sb[i++] = data & 0xFF;
1724 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1726 data = hdr_metadata.chromaticity_white_point_x / 2;
1727 info_packet->sb[i++] = data & 0xFF;
1728 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1730 data = hdr_metadata.chromaticity_white_point_y / 2;
1731 info_packet->sb[i++] = data & 0xFF;
1732 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1734 data = hdr_metadata.max_luminance;
1735 info_packet->sb[i++] = data & 0xFF;
1736 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1738 data = hdr_metadata.min_luminance;
1739 info_packet->sb[i++] = data & 0xFF;
1740 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1742 data = hdr_metadata.maximum_content_light_level;
1743 info_packet->sb[i++] = data & 0xFF;
1744 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1746 data = hdr_metadata.maximum_frame_average_light_level;
1747 info_packet->sb[i++] = data & 0xFF;
1748 info_packet->sb[i++] = (data & 0xFF00) >> 8;
1750 if (dc_is_hdmi_signal(signal)) {
1751 uint32_t checksum = 0;
1753 checksum += info_packet->hb0;
1754 checksum += info_packet->hb1;
1755 checksum += info_packet->hb2;
1757 for (i = 1; i <= info_packet->hb2; i++)
1758 checksum += info_packet->sb[i];
1760 info_packet->sb[0] = 0x100 - checksum;
1761 } else if (dc_is_dp_signal(signal)) {
1762 info_packet->sb[0] = 0x01;
1763 info_packet->sb[1] = 0x1A;
1767 static void set_vsc_info_packet(
1768 struct encoder_info_packet *info_packet,
1769 struct core_stream *stream)
1771 unsigned int vscPacketRevision = 0;
1774 if (stream->sink->link->public.psr_caps.psr_version != 0) {
1775 vscPacketRevision = 2;
1778 /* VSC packet not needed based on the features
1779 * supported by this DP display
1781 if (vscPacketRevision == 0)
1784 if (vscPacketRevision == 0x2) {
1785 /* Secondary-data Packet ID = 0*/
1786 info_packet->hb0 = 0x00;
1787 /* 07h - Packet Type Value indicating Video
1788 * Stream Configuration packet
1790 info_packet->hb1 = 0x07;
1791 /* 02h = VSC SDP supporting 3D stereo and PSR
1792 * (applies to eDP v1.3 or higher).
1794 info_packet->hb2 = 0x02;
1795 /* 08h = VSC packet supporting 3D stereo + PSR
1798 info_packet->hb3 = 0x08;
1800 for (i = 0; i < 28; i++)
1801 info_packet->sb[i] = 0;
1803 info_packet->valid = true;
1806 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
1809 void resource_validate_ctx_destruct(struct validate_context *context)
1813 for (i = 0; i < context->stream_count; i++) {
1814 for (j = 0; j < context->stream_status[i].surface_count; j++)
1816 context->stream_status[i].surfaces[j]);
1818 context->stream_status[i].surface_count = 0;
1819 dc_stream_release(&context->streams[i]->public);
1820 context->streams[i] = NULL;
1825 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
1828 void resource_validate_ctx_copy_construct(
1829 const struct validate_context *src_ctx,
1830 struct validate_context *dst_ctx)
1834 *dst_ctx = *src_ctx;
1836 for (i = 0; i < dst_ctx->res_ctx.pool->pipe_count; i++) {
1837 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
1839 if (cur_pipe->top_pipe)
1840 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
1842 if (cur_pipe->bottom_pipe)
1843 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
1847 for (i = 0; i < dst_ctx->stream_count; i++) {
1848 dc_stream_retain(&dst_ctx->streams[i]->public);
1849 for (j = 0; j < dst_ctx->stream_status[i].surface_count; j++)
1851 dst_ctx->stream_status[i].surfaces[j]);
1855 struct clock_source *dc_resource_find_first_free_pll(
1856 struct resource_context *res_ctx)
1860 for (i = 0; i < res_ctx->pool->clk_src_count; ++i) {
1861 if (res_ctx->clock_source_ref_count[i] == 0)
1862 return res_ctx->pool->clock_sources[i];
1868 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
1870 enum signal_type signal = SIGNAL_TYPE_NONE;
1871 struct encoder_info_frame *info = &pipe_ctx->encoder_info_frame;
1873 /* default all packets to invalid */
1874 info->avi.valid = false;
1875 info->gamut.valid = false;
1876 info->vendor.valid = false;
1877 info->hdrsmd.valid = false;
1878 info->vsc.valid = false;
1880 signal = pipe_ctx->stream->signal;
1882 /* HDMi and DP have different info packets*/
1883 if (dc_is_hdmi_signal(signal)) {
1884 set_avi_info_frame(&info->avi, pipe_ctx);
1886 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
1888 set_spd_info_packet(&info->spd, pipe_ctx->stream);
1890 set_hdr_static_info_packet(&info->hdrsmd,
1891 pipe_ctx->surface, pipe_ctx->stream);
1893 } else if (dc_is_dp_signal(signal)) {
1894 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
1896 set_spd_info_packet(&info->spd, pipe_ctx->stream);
1898 set_hdr_static_info_packet(&info->hdrsmd,
1899 pipe_ctx->surface, pipe_ctx->stream);
1902 patch_gamut_packet_checksum(&info->gamut);
1905 enum dc_status resource_map_clock_resources(
1906 const struct core_dc *dc,
1907 struct validate_context *context)
1911 /* acquire new resources */
1912 for (i = 0; i < context->stream_count; i++) {
1913 const struct core_stream *stream = context->streams[i];
1915 if (resource_is_stream_unchanged(dc->current_context, stream))
1918 for (j = 0; j < MAX_PIPES; j++) {
1919 struct pipe_ctx *pipe_ctx =
1920 &context->res_ctx.pipe_ctx[j];
1922 if (context->res_ctx.pipe_ctx[j].stream != stream)
1925 if (dc_is_dp_signal(pipe_ctx->stream->signal)
1926 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
1927 pipe_ctx->clock_source =
1928 context->res_ctx.pool->dp_clock_source;
1930 pipe_ctx->clock_source = NULL;
1932 if (!dc->public.config.disable_disp_pll_sharing)
1933 resource_find_used_clk_src_for_sharing(
1937 if (pipe_ctx->clock_source == NULL)
1938 pipe_ctx->clock_source =
1939 dc_resource_find_first_free_pll(&context->res_ctx);
1942 if (pipe_ctx->clock_source == NULL)
1943 return DC_NO_CLOCK_SOURCE_RESOURCE;
1945 resource_reference_clock_source(
1947 pipe_ctx->clock_source);
1949 /* only one cs per stream regardless of mpo */
1958 * Note: We need to disable output if clock sources change,
1959 * since bios does optimization and doesn't apply if changing
1960 * PHY when not already disabled.
1962 bool pipe_need_reprogram(
1963 struct pipe_ctx *pipe_ctx_old,
1964 struct pipe_ctx *pipe_ctx)
1966 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
1969 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
1972 if (pipe_ctx_old->audio != pipe_ctx->audio)
1975 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
1976 && pipe_ctx_old->stream != pipe_ctx->stream)
1979 if (pipe_ctx_old->stream_enc != pipe_ctx->stream_enc)
1982 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))