[PATCH v3 3/3] drm/i915/display: move min_hblank from dp_mst.c to dp.c

Imre Deak imre.deak at intel.com
Tue Apr 22 14:22:53 UTC 2025


On Tue, Apr 22, 2025 at 08:28:31AM +0300, Kandpal, Suraj wrote:
> [...]
>
> > +void intel_dp_compute_min_hblank(int link_bpp_x16,
> > +				 struct intel_crtc_state *crtc_state,
> > +				 struct drm_connector_state *conn_state,
> > +				 bool is_dsc)
> > +{
> >
> > [...]
> > 
> > +
> > +	/* Calculate min Hblank Link Layer Symbol Cycle Count for 8b/10b MST & 128b/132b */
> > +	hactive_sym_cycles = drm_dp_link_data_symbol_cycles(max_lane_count,
> > +							    adjusted_mode->hdisplay,
> > +							    link_bpp_x16,
> > +							    symbol_size,
> > +							    is_mst,
> > +							    dsc_slices);
> > +	htotal_sym_cycles = adjusted_mode->htotal *
> > +			    (hactive_sym_cycles / adjusted_mode->hdisplay);

Here the parentheses around the division should be dropped, so that the
multiplication by adjusted_mode->htotal happens first and the round-down
by adjusted_mode->hdisplay works as expected.
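
I.e. something like the below (just a sketch, reusing the names from the
patch above):

	/*
	 * Do the multiplication first so that the round-down by hdisplay
	 * happens only once, on the full-precision product:
	 *
	 *   htotal * (hactive_sym_cycles / hdisplay)  <- rounds down too early
	 *   htotal * hactive_sym_cycles / hdisplay    <- intended round-down
	 */
	htotal_sym_cycles = adjusted_mode->htotal * hactive_sym_cycles /
			    adjusted_mode->hdisplay;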

> > +
> > +	min_hblank = htotal_sym_cycles - hactive_sym_cycles;
> > +	/* minimum Hblank calculation: https://groups.vesa.org/wg/DP/document/20494 */
> > +	min_hblank = max(min_hblank, min_sym_cycles);
> 
> From the solution I see the way to calculate min hblank as 
> HACT_ML_SYM_CYC_CNT = CEIL(CEIL(HACT_WIDTH / 4) × PIX_BPP / SYMBOL_SIZE) × 4 / PHY_LANE_CNT 
> HBLNK_ML_SYM_CYC_CNT =  CEIL(CEIL(HBLNK_WIDTH / 4) × PIX_BPP / SYMBOL_SIZE) × 4 / PHY_LANE_CNT 
> HTOTAL_ML_SYM_CYC_CNT = HACT_ML_SYM_CYC_CNT+ HBLNK_ML_SYM_CYC_CNT
> EFF_PIX_BPP = HTOTAL_ML_SYM_CYC_CNT × SYMBOL_SIZE × PHY_LANE_CNT / HTOTAL_WIDTH
>  
> Which is similar to how we calculate hactive_sym_cycles, so let's use drm_dp_link_data_symbol_cycles
> and pass htotal - hdisplay to get min hblank; that way we won't need to calculate htotal sym cycles either.

That would be calculating EFF_HBLNK_ML_SYM_CYC_CNT (or the LL version of
this), so a different value than MIN_HBLNK_LL_SYM_CYC_CNT. The latter is
based on a rounded-down htotal_sym_cycles value, see:

MIN_HBLNK_LL_SYM_CYC_CNT_128B132B_DPTX
= MAX((FLOOR(HTOTAL_WIDTH × EFF_PIX_BPP / (4 × SYMBOL_SIZE))
– HACT_LL_SYM_CYC_CNT), 3)

Using the

EFF_PIX_BPP
= HACT_ML_SYM_CYC_CNT × SYMBOL_SIZE × PHY_LANE_CNT / HACT_WIDTH

equation in the standard, this matches the way the patch above calculates
htotal_sym_cycles and then subtracts hactive_sym_cycles from it.
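
To spell that out (a rough sketch only; it glosses over the rounding
steps inside EFF_PIX_BPP and assumes the standard's
HACT_LL_SYM_CYC_CNT = HACT_ML_SYM_CYC_CNT × PHY_LANE_CNT / 4 relation):

FLOOR(HTOTAL_WIDTH × EFF_PIX_BPP / (4 × SYMBOL_SIZE))
= FLOOR(HTOTAL_WIDTH × HACT_ML_SYM_CYC_CNT × PHY_LANE_CNT / (4 × HACT_WIDTH))
= FLOOR(HTOTAL_WIDTH × HACT_LL_SYM_CYC_CNT / HACT_WIDTH)

which is the rounded-down htotal_sym_cycles computed in the patch (once
the parentheses noted above are dropped); hactive_sym_cycles is then
subtracted from it and the MAX() clamp above is applied.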

> > +	/*
> > +	 * adjust the BlankingStart/BlankingEnd framing control from
> > +	 * the calculated value
> > +	 */
> > +	min_hblank = min_hblank - 2;
> > +
> > +	min_hblank = min(10, min_hblank);
> 
> Is this 10 or 0x10? Previously 0x10 was coded in.
> 
> Regards,
> Suraj Kandpal
> 
> > +	crtc_state->min_hblank = min_hblank;
> > +}
> > +
> >  int
> >  intel_dp_compute_config(struct intel_encoder *encoder,
> >  			struct intel_crtc_state *pipe_config,
> > @@ -3202,6 +3263,9 @@ intel_dp_compute_config(struct intel_encoder *encoder,
> >  				       &pipe_config->dp_m_n);
> >  	}
> > 
> > +	intel_dp_compute_min_hblank(link_bpp_x16, pipe_config, conn_state,
> > +				    pipe_config->dsc.compression_enable);
> > +
> >  	/* FIXME: abstract this better */
> >  	if (pipe_config->splitter.enable)
> >  		pipe_config->dp_m_n.data_m *= pipe_config->splitter.link_count;
> > diff --git a/drivers/gpu/drm/i915/display/intel_dp.h b/drivers/gpu/drm/i915/display/intel_dp.h
> > index 9189db4c25946a0f082223ce059c242e80cc32dc..43624aead998a8a330a244bb9c85f026e203171b 100644
> > --- a/drivers/gpu/drm/i915/display/intel_dp.h
> > +++ b/drivers/gpu/drm/i915/display/intel_dp.h
> > @@ -208,5 +208,9 @@ bool intel_dp_has_connector(struct intel_dp *intel_dp,
> >  			    const struct drm_connector_state *conn_state);
> >  int intel_dp_dsc_max_src_input_bpc(struct intel_display *display);
> >  int intel_dp_dsc_min_src_input_bpc(void);
> > +void intel_dp_compute_min_hblank(int link_bpp_x16,
> > +				 struct intel_crtc_state *crtc_state,
> > +				 struct drm_connector_state *conn_state,
> > +				 bool is_dsc);
> > 
> >  #endif /* __INTEL_DP_H__ */
> > diff --git a/drivers/gpu/drm/i915/display/intel_dp_mst.c b/drivers/gpu/drm/i915/display/intel_dp_mst.c
> > index af98a0d0e8376a79ce1ab6ff3c4f6af30f4d3e73..4153afa13c618bb4db6dbcdc6e59faddcbeade6b 100644
> > --- a/drivers/gpu/drm/i915/display/intel_dp_mst.c
> > +++ b/drivers/gpu/drm/i915/display/intel_dp_mst.c
> > @@ -211,26 +211,6 @@ int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
> >  					    num_joined_pipes);
> >  }
> > 
> > -static void intel_dp_mst_compute_min_hblank(struct intel_crtc_state *crtc_state,
> > -					    int bpp_x16)
> > -{
> > -	struct intel_display *display = to_intel_display(crtc_state);
> > -	const struct drm_display_mode *adjusted_mode =
> > -					&crtc_state->hw.adjusted_mode;
> > -	int symbol_size = intel_dp_is_uhbr(crtc_state) ? 32 : 8;
> > -	int hblank;
> > -
> > -	if (DISPLAY_VER(display) < 20)
> > -		return;
> > -
> > -	/* Calculate min Hblank Link Layer Symbol Cycle Count for 8b/10b MST & 128b/132b */
> > -	hblank = DIV_ROUND_UP((DIV_ROUND_UP
> > -			       (adjusted_mode->htotal - adjusted_mode->hdisplay, 4) * bpp_x16),
> > -			      symbol_size);
> > -
> > -	crtc_state->min_hblank = hblank;
> > -}
> > -
> >  int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp,
> >  				   struct intel_crtc_state *crtc_state,
> >  				   struct drm_connector_state *conn_state,
> > @@ -301,12 +281,11 @@ int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp,
> >  		local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
> >  							     false, dsc_slice_count, link_bpp_x16);
> > 
> > -		intel_dp_mst_compute_min_hblank(crtc_state, link_bpp_x16);
> > -
> >  		intel_dp_mst_compute_m_n(crtc_state,
> >  					 local_bw_overhead,
> >  					 link_bpp_x16,
> >  					 &crtc_state->dp_m_n);
> > +		intel_dp_compute_min_hblank(link_bpp_x16, crtc_state, conn_state,
> > +					    dsc);
> > 
> >  		if (is_mst) {
> >  			int remote_bw_overhead;
> > @@ -998,7 +977,6 @@ static void mst_stream_disable(struct intel_atomic_state *state,
> >  	struct intel_dp *intel_dp = to_primary_dp(encoder);
> >  	struct intel_connector *connector =
> >  		to_intel_connector(old_conn_state->connector);
> > -	enum transcoder trans = old_crtc_state->cpu_transcoder;
> > 
> >  	drm_dbg_kms(display->drm, "active links %d\n",
> >  		    intel_dp->mst.active_links);
> > @@ -1009,9 +987,6 @@ static void mst_stream_disable(struct intel_atomic_state *state,
> >  	intel_hdcp_disable(intel_mst->connector);
> > 
> >  	intel_dp_sink_disable_decompression(state, connector, old_crtc_state);
> > -
> > -	if (DISPLAY_VER(display) >= 20)
> > -		intel_de_write(display, DP_MIN_HBLANK_CTL(trans), 0);
> >  }
> > 
> >  static void mst_stream_post_disable(struct intel_atomic_state *state,
> > @@ -1286,7 +1261,7 @@ static void mst_stream_enable(struct intel_atomic_state *state,
> >  	enum transcoder trans = pipe_config->cpu_transcoder;
> >  	bool first_mst_stream = intel_dp->mst.active_links == 1;
> >  	struct intel_crtc *pipe_crtc;
> > -	int ret, i, min_hblank;
> > +	int ret, i;
> > 
> >  	drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);
> > 
> > @@ -1301,29 +1276,6 @@ static void mst_stream_enable(struct intel_atomic_state *state,
> >  			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
> >  	}
> > 
> > -	if (DISPLAY_VER(display) >= 20) {
> > -		/*
> > -		 * adjust the BlankingStart/BlankingEnd framing control from
> > -		 * the calculated value
> > -		 */
> > -		min_hblank = pipe_config->min_hblank - 2;
> > -
> > -		/* Maximum value to be programmed is limited to 0x10 */
> > -		min_hblank = min(0x10, min_hblank);
> > -
> > -		/*
> > -		 * Minimum hblank accepted for 128b/132b would be 5 and for
> > -		 * 8b/10b would be 3 symbol count
> > -		 */
> > -		if (intel_dp_is_uhbr(pipe_config))
> > -			min_hblank = max(min_hblank, 5);
> > -		else
> > -			min_hblank = max(min_hblank, 3);
> > -
> > -		intel_de_write(display, DP_MIN_HBLANK_CTL(trans),
> > -			       min_hblank);
> > -	}
> > -
> >  	enable_bs_jitter_was(pipe_config);
> > 
> >  	intel_ddi_enable_transcoder_func(encoder, pipe_config);
> > 
> > --
> > 2.25.1
> 

