if (intel_dp->is_mst)
intel_dp_mst_prepare_probe(intel_dp);
- drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
+ drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, intel_dp->is_mst);
/* Avoid stale info on the next detect cycle. */
intel_dp->mst_detect = DRM_DP_SST;
drm_dbg_kms(display->drm,
"MST device may have disappeared %d vs %d\n",
- intel_dp->is_mst, intel_dp->mst_mgr.mst_state);
+ intel_dp->is_mst, intel_dp->mst.mgr.mst_state);
intel_dp->is_mst = false;
- drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr, intel_dp->is_mst);
+ drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, intel_dp->is_mst);
}
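The hunks in this patch are one mechanical rename: the MST-related fields of struct intel_dp move into a nested "mst" sub-struct (mst_mgr becomes mst.mgr, active_mst_links becomes mst.active_links, mst_encoders becomes mst.stream_encoders). A minimal sketch of the grouping this implies in the struct definition; the new member names come from the diff, while the exact types, ordering and array bound shown here are assumptions:

struct intel_dp {
	/* ... */
	struct {
		/* DP MST topology manager for this port */
		struct drm_dp_mst_topology_mgr mgr;
		/* one stream encoder per pipe, indexed by enum pipe */
		struct intel_dp_mst_encoder *stream_encoders[I915_MAX_PIPES];
		/* number of currently enabled MST streams on this link */
		int active_links;
	} mst;
	/* ... */
};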
static bool
{
bool handled = false;
- drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst_mgr, esi, ack, &handled);
+ drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst.mgr, esi, ack, &handled);
if (esi[1] & DP_CP_IRQ) {
intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
bool link_ok = true;
bool reprobe_needed = false;
- drm_WARN_ON_ONCE(display->drm, intel_dp->active_mst_links < 0);
+ drm_WARN_ON_ONCE(display->drm, intel_dp->mst.active_links < 0);
for (;;) {
u8 esi[4] = {};
drm_dbg_kms(display->drm, "DPRX ESI: %4ph\n", esi);
- if (intel_dp->active_mst_links > 0 && link_ok &&
+ if (intel_dp->mst.active_links > 0 && link_ok &&
esi[3] & LINK_STATUS_CHANGED) {
if (!intel_dp_mst_link_status(intel_dp))
link_ok = false;
drm_dbg_kms(display->drm, "Failed to ack ESI\n");
if (ack[1] & (DP_DOWN_REP_MSG_RDY | DP_UP_REQ_MSG_RDY))
- drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst_mgr);
+ drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst.mgr);
}
if (!link_ok || intel_dp->link.force_retrain)
/* MST */
for_each_pipe(display, pipe) {
- encoder = &intel_dp->mst_encoders[pipe]->base;
+ encoder = &intel_dp->mst.stream_encoders[pipe]->base;
if (conn_state->best_encoder == &encoder->base)
return true;
}
return ret;
if (intel_dp_mst_source_support(intel_dp)) {
- ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst_mgr);
+ ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst.mgr);
if (ret)
return ret;
}
continue;
if (intel_dp->is_mst)
- drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
+ drm_dp_mst_topology_mgr_suspend(&intel_dp->mst.mgr);
}
}
if (!intel_dp_mst_source_support(intel_dp))
continue;
- ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
- true);
+ ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst.mgr, true);
if (ret) {
intel_dp->is_mst = false;
- drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
- false);
+ drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, false);
}
}
}
fxp_q4_to_frac(bpp_step_x16)));
if (is_mst) {
- mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst_mgr);
+ mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst.mgr);
if (IS_ERR(mst_state))
return PTR_ERR(mst_state);
drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
crtc_state->dp_m_n.tu = remote_tu;
- slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst_mgr,
+ slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst.mgr,
connector->port,
dfixed_trunc(pbn));
} else {
struct drm_connector_state *conn_state)
{
struct intel_display *display = to_intel_display(intel_dp);
- struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
+ struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
struct drm_dp_mst_topology_state *topology_state;
u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;
if (!conn_state->base.crtc)
continue;
- if (&connector->mst_port->mst_mgr != mst_mgr)
+ if (&connector->mst_port->mst.mgr != mst_mgr)
continue;
if (connector->port != parent_port &&
}
return drm_dp_atomic_release_time_slots(&state->base,
- &connector->mst_port->mst_mgr,
+ &connector->mst_port->mst.mgr,
connector->port);
}
enum transcoder trans = old_crtc_state->cpu_transcoder;
drm_dbg_kms(display->drm, "active links %d\n",
- intel_dp->active_mst_links);
+ intel_dp->mst.active_links);
- if (intel_dp->active_mst_links == 1)
+ if (intel_dp->mst.active_links == 1)
intel_dp->link_trained = false;
intel_hdcp_disable(intel_mst->connector);
struct intel_connector *connector =
to_intel_connector(old_conn_state->connector);
struct drm_dp_mst_topology_state *old_mst_state =
- drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst_mgr);
+ drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst.mgr);
struct drm_dp_mst_topology_state *new_mst_state =
- drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
+ drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
const struct drm_dp_mst_atomic_payload *old_payload =
drm_atomic_get_mst_payload_state(old_mst_state, connector->port);
struct drm_dp_mst_atomic_payload *new_payload =
bool last_mst_stream;
int i;
- intel_dp->active_mst_links--;
- last_mst_stream = intel_dp->active_mst_links == 0;
+ intel_dp->mst.active_links--;
+ last_mst_stream = intel_dp->mst.active_links == 0;
drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
!intel_dp_mst_is_master_trans(old_crtc_state));
intel_disable_transcoder(old_crtc_state);
- drm_dp_remove_payload_part1(&intel_dp->mst_mgr, new_mst_state, new_payload);
+ drm_dp_remove_payload_part1(&intel_dp->mst.mgr, new_mst_state, new_payload);
intel_ddi_clear_act_sent(encoder, old_crtc_state);
TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);
intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
- drm_dp_check_act_status(&intel_dp->mst_mgr);
+ drm_dp_check_act_status(&intel_dp->mst.mgr);
- drm_dp_remove_payload_part2(&intel_dp->mst_mgr, new_mst_state,
+ drm_dp_remove_payload_part2(&intel_dp->mst.mgr, new_mst_state,
old_payload, new_payload);
intel_ddi_disable_transcoder_func(old_crtc_state);
* Power down mst path before disabling the port, otherwise we end
* up getting interrupts from the sink upon detecting link loss.
*/
- drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port,
+ drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->port,
false);
/*
old_crtc_state, NULL);
drm_dbg_kms(display->drm, "active links %d\n",
- intel_dp->active_mst_links);
+ intel_dp->mst.active_links);
}
static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
struct intel_dp *intel_dp = to_primary_dp(encoder);
- if (intel_dp->active_mst_links == 0 &&
+ if (intel_dp->mst.active_links == 0 &&
primary_encoder->post_pll_disable)
primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
}
struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
struct intel_dp *intel_dp = to_primary_dp(encoder);
- if (intel_dp->active_mst_links == 0)
+ if (intel_dp->mst.active_links == 0)
primary_encoder->pre_pll_enable(state, primary_encoder,
pipe_config, NULL);
else
crtc_state->port_clock, crtc_state->lane_count))
return;
- drm_dp_mst_topology_queue_probe(&intel_dp->mst_mgr);
+ drm_dp_mst_topology_queue_probe(&intel_dp->mst.mgr);
intel_mst_set_probed_link_params(intel_dp,
crtc_state->port_clock, crtc_state->lane_count);
struct intel_connector *connector =
to_intel_connector(conn_state->connector);
struct drm_dp_mst_topology_state *mst_state =
- drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
+ drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
int ret;
bool first_mst_stream;
*/
connector->encoder = encoder;
intel_mst->connector = connector;
- first_mst_stream = intel_dp->active_mst_links == 0;
+ first_mst_stream = intel_dp->mst.active_links == 0;
drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
!intel_dp_mst_is_master_trans(pipe_config));
drm_dbg_kms(display->drm, "active links %d\n",
- intel_dp->active_mst_links);
+ intel_dp->mst.active_links);
if (first_mst_stream)
intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
- drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, true);
+ drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->port, true);
intel_dp_sink_enable_decompression(state, connector, pipe_config);
intel_mst_reprobe_topology(intel_dp, pipe_config);
}
- intel_dp->active_mst_links++;
+ intel_dp->mst.active_links++;
- ret = drm_dp_add_payload_part1(&intel_dp->mst_mgr, mst_state,
+ ret = drm_dp_add_payload_part1(&intel_dp->mst.mgr, mst_state,
drm_atomic_get_mst_payload_state(mst_state, connector->port));
if (ret < 0)
intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);
struct intel_dp *intel_dp = to_primary_dp(encoder);
struct intel_connector *connector = to_intel_connector(conn_state->connector);
struct drm_dp_mst_topology_state *mst_state =
- drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
+ drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
enum transcoder trans = pipe_config->cpu_transcoder;
- bool first_mst_stream = intel_dp->active_mst_links == 1;
+ bool first_mst_stream = intel_dp->mst.active_links == 1;
struct intel_crtc *pipe_crtc;
int ret, i, min_hblank;
TRANS_DDI_DP_VC_PAYLOAD_ALLOC);
drm_dbg_kms(display->drm, "active links %d\n",
- intel_dp->active_mst_links);
+ intel_dp->mst.active_links);
intel_ddi_wait_for_act_sent(encoder, pipe_config);
- drm_dp_check_act_status(&intel_dp->mst_mgr);
+ drm_dp_check_act_status(&intel_dp->mst.mgr);
if (first_mst_stream)
intel_ddi_wait_for_fec_status(encoder, pipe_config, true);
- ret = drm_dp_add_payload_part2(&intel_dp->mst_mgr,
+ ret = drm_dp_add_payload_part2(&intel_dp->mst.mgr,
drm_atomic_get_mst_payload_state(mst_state,
connector->port));
if (ret < 0)
if (!intel_display_driver_check_access(display))
return drm_edid_connector_add_modes(&connector->base);
- drm_edid = drm_dp_mst_edid_read(&connector->base, &intel_dp->mst_mgr, connector->port);
+ drm_edid = drm_dp_mst_edid_read(&connector->base, &intel_dp->mst.mgr, connector->port);
ret = intel_connector_update_modes(&connector->base, drm_edid);
struct intel_connector *connector = to_intel_connector(_connector);
struct intel_display *display = to_intel_display(connector);
struct intel_dp *intel_dp = connector->mst_port;
- struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
+ struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
struct drm_dp_mst_port *port = connector->port;
const int min_bpp = 18;
int max_dotclk = display->cdclk.max_dotclk_freq;
struct intel_dp *intel_dp = connector->mst_port;
struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);
- return &intel_dp->mst_encoders[crtc->pipe]->base.base;
+ return &intel_dp->mst.stream_encoders[crtc->pipe]->base.base;
}
static int
intel_dp_flush_connector_commits(connector);
- return drm_dp_mst_detect_port(&connector->base, ctx, &intel_dp->mst_mgr,
+ return drm_dp_mst_detect_port(&connector->base, ctx, &intel_dp->mst.mgr,
connector->port);
}
struct drm_dp_mst_port *port,
const char *pathprop)
{
- struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
+ struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
struct intel_display *display = to_intel_display(intel_dp);
struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
struct intel_connector *connector;
for_each_pipe(display, pipe) {
struct drm_encoder *enc =
- &intel_dp->mst_encoders[pipe]->base.base;
+ &intel_dp->mst.stream_encoders[pipe]->base.base;
ret = drm_connector_attach_encoder(&connector->base, enc);
if (ret)
static void
mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
{
- struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
+ struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
}
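The container_of() calls in these hunks now resolve the outer struct intel_dp from a pointer to the nested mst.mgr member. That works because offsetof(), which container_of() is built on, accepts a nested member designator. A minimal self-contained sketch with generic names, not the i915 code:

#include <stddef.h>

struct mgr { int id; };

struct dp {
	int other;
	struct {
		struct mgr mgr;
	} mst;
};

/* Simplified container_of(), as in the kernel but without the type check. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static inline struct dp *dp_from_mgr(struct mgr *mgr)
{
	/* offsetof() takes the nested designator, so mst.mgr works unchanged */
	return container_of(mgr, struct dp, mst.mgr);
}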
enum pipe pipe;
for_each_pipe(display, pipe)
- intel_dp->mst_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
+ intel_dp->mst.stream_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
return true;
}
int
intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port)
{
- return dig_port->dp.active_mst_links;
+ return dig_port->dp.mst.active_links;
}
int
if (DISPLAY_VER(display) < 11 && port == PORT_E)
return 0;
- intel_dp->mst_mgr.cbs = &mst_topology_cbs;
+ intel_dp->mst.mgr.cbs = &mst_topology_cbs;
/* create encoders */
mst_stream_encoders_create(dig_port);
- ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst_mgr, display->drm,
+ ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst.mgr, display->drm,
&intel_dp->aux, 16,
INTEL_NUM_PIPES(display), conn_base_id);
if (ret) {
- intel_dp->mst_mgr.cbs = NULL;
+ intel_dp->mst.mgr.cbs = NULL;
return ret;
}
bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
{
- return intel_dp->mst_mgr.cbs;
+ return intel_dp->mst.mgr.cbs;
}
void
if (!intel_dp_mst_source_support(intel_dp))
return;
- drm_dp_mst_topology_mgr_destroy(&intel_dp->mst_mgr);
+ drm_dp_mst_topology_mgr_destroy(&intel_dp->mst.mgr);
/* encoders will get killed by normal cleanup */
- intel_dp->mst_mgr.cbs = NULL;
+ intel_dp->mst.mgr.cbs = NULL;
}
bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
return 0;
mst_state = drm_atomic_get_mst_topology_state(&state->base,
- &connector->mst_port->mst_mgr);
+ &connector->mst_port->mst.mgr);
if (IS_ERR(mst_state))
return PTR_ERR(mst_state);
if (!intel_dp->is_mst)
return true;
- ret = drm_dp_dpcd_readb(intel_dp->mst_mgr.aux, DP_MSTM_CTRL, &val);
+ ret = drm_dp_dpcd_readb(intel_dp->mst.mgr.aux, DP_MSTM_CTRL, &val);
/* Adjust the expected register value for SST + SideBand. */
if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) {