[PATCH 34/87] drm/amd/display: Fix some HUBP programming issues
sunpeng.li at amd.com
Mon Jul 15 21:19:56 UTC 2019
From: Ilya Bakoulin <Ilya.Bakoulin at amd.com>
[Why]
A hubp pointer was being passed to DCN1 functions, which expect the
enclosing structure (recovered via container_of) to be dcn10_hubp,
while the actual enclosing type is dcn20_hubp.
[How]
Copy existing DCN1 functions and alter them slightly for use with
dcn20_hubp.
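For illustration only, here is a minimal, self-contained sketch of the
container_of hazard described in [Why]. The struct bodies and the
hubp1_helper() function are hypothetical placeholders (the real dcn10_hubp
and dcn20_hubp wrap different register, shift and mask tables); only the
TO_DCN10_HUBP/TO_DCN20_HUBP pattern is taken from the driver:

  #include <stddef.h>

  /* container_of, simplified here to keep the sketch self-contained */
  #define container_of(ptr, type, member) \
          ((type *)((char *)(ptr) - offsetof(type, member)))

  struct hubp { int inst; };

  struct dcn10_hubp {                  /* DCN1 wrapper (illustrative body) */
          struct hubp base;
          const void *dcn10_reg_tables;
  };

  struct dcn20_hubp {                  /* DCN2 wrapper (illustrative body) */
          struct hubp base;
          const void *dcn20_reg_tables;
  };

  #define TO_DCN10_HUBP(h) container_of((h), struct dcn10_hubp, base)
  #define TO_DCN20_HUBP(h) container_of((h), struct dcn20_hubp, base)

  /* A DCN1 helper recovers its wrapper like this: */
  static void hubp1_helper(struct hubp *hubp)
  {
          struct dcn10_hubp *hubp1 = TO_DCN10_HUBP(hubp);

          /* If hubp actually lives inside a dcn20_hubp, hubp1 now points
           * at memory laid out as struct dcn20_hubp, so any register
           * table it dereferences is wrong.
           */
          (void)hubp1;
  }

Because the REG_* helpers in these functions resolve register offsets
through that wrapper, calling the hubp1_* variants on a dcn20_hubp-backed
hubp programs the wrong registers, which is why DCN2-local copies using
TO_DCN20_HUBP are added below.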
Signed-off-by: Ilya Bakoulin <Ilya.Bakoulin at amd.com>
Reviewed-by: Charlene Liu <Charlene.Liu at amd.com>
Acked-by: Leo Li <sunpeng.li at amd.com>
---
.../gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c | 682 +++++++++++++++++-
.../gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h | 37 +
2 files changed, 705 insertions(+), 14 deletions(-)
diff --git a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c
index d3f7dd374d50..02e8c0c6a233 100644
--- a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c
+++ b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c
@@ -156,7 +156,85 @@ void hubp2_program_deadline(
{
struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
- hubp1_program_deadline(hubp, dlg_attr, ttu_attr);
+ /* DLG - Per hubp */
+ REG_SET_2(BLANK_OFFSET_0, 0,
+ REFCYC_H_BLANK_END, dlg_attr->refcyc_h_blank_end,
+ DLG_V_BLANK_END, dlg_attr->dlg_vblank_end);
+
+ REG_SET(BLANK_OFFSET_1, 0,
+ MIN_DST_Y_NEXT_START, dlg_attr->min_dst_y_next_start);
+
+ REG_SET(DST_DIMENSIONS, 0,
+ REFCYC_PER_HTOTAL, dlg_attr->refcyc_per_htotal);
+
+ REG_SET_2(DST_AFTER_SCALER, 0,
+ REFCYC_X_AFTER_SCALER, dlg_attr->refcyc_x_after_scaler,
+ DST_Y_AFTER_SCALER, dlg_attr->dst_y_after_scaler);
+
+ REG_SET(REF_FREQ_TO_PIX_FREQ, 0,
+ REF_FREQ_TO_PIX_FREQ, dlg_attr->ref_freq_to_pix_freq);
+
+ /* DLG - Per luma/chroma */
+ REG_SET(VBLANK_PARAMETERS_1, 0,
+ REFCYC_PER_PTE_GROUP_VBLANK_L, dlg_attr->refcyc_per_pte_group_vblank_l);
+
+ if (REG(NOM_PARAMETERS_0))
+ REG_SET(NOM_PARAMETERS_0, 0,
+ DST_Y_PER_PTE_ROW_NOM_L, dlg_attr->dst_y_per_pte_row_nom_l);
+
+ if (REG(NOM_PARAMETERS_1))
+ REG_SET(NOM_PARAMETERS_1, 0,
+ REFCYC_PER_PTE_GROUP_NOM_L, dlg_attr->refcyc_per_pte_group_nom_l);
+
+ REG_SET(NOM_PARAMETERS_4, 0,
+ DST_Y_PER_META_ROW_NOM_L, dlg_attr->dst_y_per_meta_row_nom_l);
+
+ REG_SET(NOM_PARAMETERS_5, 0,
+ REFCYC_PER_META_CHUNK_NOM_L, dlg_attr->refcyc_per_meta_chunk_nom_l);
+
+ REG_SET_2(PER_LINE_DELIVERY, 0,
+ REFCYC_PER_LINE_DELIVERY_L, dlg_attr->refcyc_per_line_delivery_l,
+ REFCYC_PER_LINE_DELIVERY_C, dlg_attr->refcyc_per_line_delivery_c);
+
+ REG_SET(VBLANK_PARAMETERS_2, 0,
+ REFCYC_PER_PTE_GROUP_VBLANK_C, dlg_attr->refcyc_per_pte_group_vblank_c);
+
+ if (REG(NOM_PARAMETERS_2))
+ REG_SET(NOM_PARAMETERS_2, 0,
+ DST_Y_PER_PTE_ROW_NOM_C, dlg_attr->dst_y_per_pte_row_nom_c);
+
+ if (REG(NOM_PARAMETERS_3))
+ REG_SET(NOM_PARAMETERS_3, 0,
+ REFCYC_PER_PTE_GROUP_NOM_C, dlg_attr->refcyc_per_pte_group_nom_c);
+
+ REG_SET(NOM_PARAMETERS_6, 0,
+ DST_Y_PER_META_ROW_NOM_C, dlg_attr->dst_y_per_meta_row_nom_c);
+
+ REG_SET(NOM_PARAMETERS_7, 0,
+ REFCYC_PER_META_CHUNK_NOM_C, dlg_attr->refcyc_per_meta_chunk_nom_c);
+
+ /* TTU - per hubp */
+ REG_SET_2(DCN_TTU_QOS_WM, 0,
+ QoS_LEVEL_LOW_WM, ttu_attr->qos_level_low_wm,
+ QoS_LEVEL_HIGH_WM, ttu_attr->qos_level_high_wm);
+
+ /* TTU - per luma/chroma */
+ /* Assumed surf0 is luma and 1 is chroma */
+
+ REG_SET_3(DCN_SURF0_TTU_CNTL0, 0,
+ REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_l,
+ QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_l,
+ QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_l);
+
+ REG_SET_3(DCN_SURF1_TTU_CNTL0, 0,
+ REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_c,
+ QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_c,
+ QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_c);
+
+ REG_SET_3(DCN_CUR0_TTU_CNTL0, 0,
+ REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_cur0,
+ QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_cur0,
+ QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_cur0);
REG_SET(FLIP_PARAMETERS_1, 0,
REFCYC_PER_PTE_GROUP_FLIP_L, dlg_attr->refcyc_per_pte_group_flip_l);
@@ -184,6 +262,39 @@ void hubp2_vready_at_or_After_vsync(struct hubp *hubp,
REG_UPDATE(DCHUBP_CNTL, HUBP_VREADY_AT_OR_AFTER_VSYNC, value);
}
+void hubp2_program_requestor(
+ struct hubp *hubp,
+ struct _vcs_dpi_display_rq_regs_st *rq_regs)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+
+ REG_UPDATE(HUBPRET_CONTROL,
+ DET_BUF_PLANE1_BASE_ADDRESS, rq_regs->plane1_base_address);
+ REG_SET_4(DCN_EXPANSION_MODE, 0,
+ DRQ_EXPANSION_MODE, rq_regs->drq_expansion_mode,
+ PRQ_EXPANSION_MODE, rq_regs->prq_expansion_mode,
+ MRQ_EXPANSION_MODE, rq_regs->mrq_expansion_mode,
+ CRQ_EXPANSION_MODE, rq_regs->crq_expansion_mode);
+ REG_SET_8(DCHUBP_REQ_SIZE_CONFIG, 0,
+ CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
+ MIN_CHUNK_SIZE, rq_regs->rq_regs_l.min_chunk_size,
+ META_CHUNK_SIZE, rq_regs->rq_regs_l.meta_chunk_size,
+ MIN_META_CHUNK_SIZE, rq_regs->rq_regs_l.min_meta_chunk_size,
+ DPTE_GROUP_SIZE, rq_regs->rq_regs_l.dpte_group_size,
+ MPTE_GROUP_SIZE, rq_regs->rq_regs_l.mpte_group_size,
+ SWATH_HEIGHT, rq_regs->rq_regs_l.swath_height,
+ PTE_ROW_HEIGHT_LINEAR, rq_regs->rq_regs_l.pte_row_height_linear);
+ REG_SET_8(DCHUBP_REQ_SIZE_CONFIG_C, 0,
+ CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
+ MIN_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_chunk_size,
+ META_CHUNK_SIZE_C, rq_regs->rq_regs_c.meta_chunk_size,
+ MIN_META_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_meta_chunk_size,
+ DPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.dpte_group_size,
+ MPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.mpte_group_size,
+ SWATH_HEIGHT_C, rq_regs->rq_regs_c.swath_height,
+ PTE_ROW_HEIGHT_LINEAR_C, rq_regs->rq_regs_c.pte_row_height_linear);
+}
+
static void hubp2_setup(
struct hubp *hubp,
struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
@@ -196,7 +307,7 @@ static void hubp2_setup(
*/
hubp2_vready_at_or_After_vsync(hubp, pipe_dest);
- hubp1_program_requestor(hubp, rq_regs);
+ hubp2_program_requestor(hubp, rq_regs);
hubp2_program_deadline(hubp, dlg_attr, ttu_attr);
}
@@ -283,6 +394,196 @@ static void hubp2_program_tiling(
PIPE_ALIGNED, 0);
}
+void hubp2_program_size(
+ struct hubp *hubp,
+ enum surface_pixel_format format,
+ const union plane_size *plane_size,
+ struct dc_plane_dcc_param *dcc)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ uint32_t pitch, meta_pitch, pitch_c, meta_pitch_c;
+
+ /* Program data and meta surface pitch (calculation from addrlib)
+ * 444 or 420 luma
+ */
+ if (format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN && format < SURFACE_PIXEL_FORMAT_SUBSAMPLE_END) {
+ ASSERT(plane_size->video.chroma_pitch != 0);
+ /* Chroma pitch zero can cause system hang! */
+
+ pitch = plane_size->video.luma_pitch - 1;
+ meta_pitch = dcc->video.meta_pitch_l - 1;
+ pitch_c = plane_size->video.chroma_pitch - 1;
+ meta_pitch_c = dcc->video.meta_pitch_c - 1;
+ } else {
+ pitch = plane_size->grph.surface_pitch - 1;
+ meta_pitch = dcc->grph.meta_pitch - 1;
+ pitch_c = 0;
+ meta_pitch_c = 0;
+ }
+
+ if (!dcc->enable) {
+ meta_pitch = 0;
+ meta_pitch_c = 0;
+ }
+
+ REG_UPDATE_2(DCSURF_SURFACE_PITCH,
+ PITCH, pitch, META_PITCH, meta_pitch);
+
+ if (format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
+ REG_UPDATE_2(DCSURF_SURFACE_PITCH_C,
+ PITCH_C, pitch_c, META_PITCH_C, meta_pitch_c);
+}
+
+void hubp2_program_rotation(
+ struct hubp *hubp,
+ enum dc_rotation_angle rotation,
+ bool horizontal_mirror)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ uint32_t mirror;
+
+
+ if (horizontal_mirror)
+ mirror = 1;
+ else
+ mirror = 0;
+
+ /* Program rotation angle and horz mirror - no mirror */
+ if (rotation == ROTATION_ANGLE_0)
+ REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
+ ROTATION_ANGLE, 0,
+ H_MIRROR_EN, mirror);
+ else if (rotation == ROTATION_ANGLE_90)
+ REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
+ ROTATION_ANGLE, 1,
+ H_MIRROR_EN, mirror);
+ else if (rotation == ROTATION_ANGLE_180)
+ REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
+ ROTATION_ANGLE, 2,
+ H_MIRROR_EN, mirror);
+ else if (rotation == ROTATION_ANGLE_270)
+ REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
+ ROTATION_ANGLE, 3,
+ H_MIRROR_EN, mirror);
+}
+
+void hubp2_dcc_control(struct hubp *hubp, bool enable,
+ bool independent_64b_blks)
+{
+ uint32_t dcc_en = enable ? 1 : 0;
+ uint32_t dcc_ind_64b_blk = independent_64b_blks ? 1 : 0;
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+
+ REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
+ PRIMARY_SURFACE_DCC_EN, dcc_en,
+ PRIMARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk,
+ SECONDARY_SURFACE_DCC_EN, dcc_en,
+ SECONDARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk);
+}
+
+void hubp2_program_pixel_format(
+ struct hubp *hubp,
+ enum surface_pixel_format format)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ uint32_t red_bar = 3;
+ uint32_t blue_bar = 2;
+
+ /* swap for ABGR format */
+ if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888
+ || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010
+ || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS
+ || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
+ red_bar = 2;
+ blue_bar = 3;
+ }
+
+ REG_UPDATE_2(HUBPRET_CONTROL,
+ CROSSBAR_SRC_CB_B, blue_bar,
+ CROSSBAR_SRC_CR_R, red_bar);
+
+ /* Mapping is same as ipp programming (cnvc) */
+
+ switch (format) {
+ case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 1);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 3);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
+ case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 8);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
+ case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
+ case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 10);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 22);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
+ case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F: /* we use crossbar already */
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 24);
+ break;
+
+ case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 65);
+ break;
+ case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 64);
+ break;
+ case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 67);
+ break;
+ case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 66);
+ break;
+ case SURFACE_PIXEL_FORMAT_VIDEO_AYCrCb8888:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 12);
+ break;
+#if defined(CONFIG_DRM_AMD_DC_DCN2_0)
+ case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 112);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 113);
+ break;
+ case SURFACE_PIXEL_FORMAT_VIDEO_ACrYCb2101010:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 114);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 118);
+ break;
+ case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT:
+ REG_UPDATE(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, 119);
+ break;
+#endif
+ default:
+ BREAK_TO_DEBUGGER();
+ break;
+ }
+
+ /* don't see the need to program the xbar in DCN 1.0 */
+}
+
void hubp2_program_surface_config(
struct hubp *hubp,
enum surface_pixel_format format,
@@ -295,11 +596,11 @@ void hubp2_program_surface_config(
{
struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
- hubp1_dcc_control(hubp, dcc->enable, dcc->grph.independent_64b_blks);
+ hubp2_dcc_control(hubp, dcc->enable, dcc->grph.independent_64b_blks);
hubp2_program_tiling(hubp2, tiling_info, format);
- hubp1_program_size(hubp, format, plane_size, dcc);
- hubp1_program_rotation(hubp, rotation, horizontal_mirror);
- hubp1_program_pixel_format(hubp, format);
+ hubp2_program_size(hubp, format, plane_size, dcc);
+ hubp2_program_rotation(hubp, rotation, horizontal_mirror);
+ hubp2_program_pixel_format(hubp, format);
}
enum cursor_lines_per_chunk hubp2_get_lines_per_chunk(
@@ -652,28 +953,381 @@ void hubp2_set_flip_control_surface_gsl(struct hubp *hubp, bool enable)
REG_UPDATE(DCSURF_FLIP_CONTROL2, SURFACE_GSL_ENABLE, enable ? 1 : 0);
}
+bool hubp2_is_flip_pending(struct hubp *hubp)
+{
+ uint32_t flip_pending = 0;
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ struct dc_plane_address earliest_inuse_address;
+
+ REG_GET(DCSURF_FLIP_CONTROL,
+ SURFACE_FLIP_PENDING, &flip_pending);
+
+ REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
+ SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part);
+
+ REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
+ SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part);
+
+ if (flip_pending)
+ return true;
+
+ if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part)
+ return true;
+
+ return false;
+}
+
+void hubp2_set_blank(struct hubp *hubp, bool blank)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ uint32_t blank_en = blank ? 1 : 0;
+
+ REG_UPDATE_2(DCHUBP_CNTL,
+ HUBP_BLANK_EN, blank_en,
+ HUBP_TTU_DISABLE, blank_en);
+
+ if (blank) {
+ uint32_t reg_val = REG_READ(DCHUBP_CNTL);
+
+ if (reg_val) {
+ /* init sequence workaround: in case HUBP is
+ * power gated, this wait would time out.
+ *
+ * we just wrote reg_val to non-0; if it stays 0
+ * it means HUBP is gated
+ */
+ REG_WAIT(DCHUBP_CNTL,
+ HUBP_NO_OUTSTANDING_REQ, 1,
+ 1, 200);
+ }
+
+ hubp->mpcc_id = 0xf;
+ hubp->opp_id = OPP_ID_INVALID;
+ }
+}
+
+void hubp2_cursor_set_position(
+ struct hubp *hubp,
+ const struct dc_cursor_position *pos,
+ const struct dc_cursor_mi_param *param)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ int src_x_offset = pos->x - pos->x_hotspot - param->viewport.x;
+ int src_y_offset = pos->y - pos->y_hotspot - param->viewport.y;
+ int x_hotspot = pos->x_hotspot;
+ int y_hotspot = pos->y_hotspot;
+ uint32_t dst_x_offset;
+ uint32_t cur_en = pos->enable ? 1 : 0;
+
+ /*
+ * Guard against cursor_set_position() being called with invalid
+ * attributes
+ *
+ * TODO: Look at combining cursor_set_position() and
+ * cursor_set_attributes() into cursor_update()
+ */
+ if (hubp->curs_attr.address.quad_part == 0)
+ return;
+
+ if (param->rotation == ROTATION_ANGLE_90 || param->rotation == ROTATION_ANGLE_270) {
+ src_x_offset = pos->y - pos->y_hotspot - param->viewport.x;
+ y_hotspot = pos->x_hotspot;
+ x_hotspot = pos->y_hotspot;
+ }
+
+ if (param->mirror) {
+ x_hotspot = param->viewport.width - x_hotspot;
+ src_x_offset = param->viewport.x + param->viewport.width - src_x_offset;
+ }
+
+ dst_x_offset = (src_x_offset >= 0) ? src_x_offset : 0;
+ dst_x_offset *= param->ref_clk_khz;
+ dst_x_offset /= param->pixel_clk_khz;
+
+ ASSERT(param->h_scale_ratio.value);
+
+ if (param->h_scale_ratio.value)
+ dst_x_offset = dc_fixpt_floor(dc_fixpt_div(
+ dc_fixpt_from_int(dst_x_offset),
+ param->h_scale_ratio));
+
+ if (src_x_offset >= (int)param->viewport.width)
+ cur_en = 0; /* not visible beyond right edge */
+
+ if (src_x_offset + (int)hubp->curs_attr.width <= 0)
+ cur_en = 0; /* not visible beyond left edge */
+
+ if (src_y_offset >= (int)param->viewport.height)
+ cur_en = 0; /* not visible beyond bottom edge */
+
+ if (src_y_offset + (int)hubp->curs_attr.height <= 0)
+ cur_en = 0; /* not visible beyond top edge */
+
+ if (cur_en && REG_READ(CURSOR_SURFACE_ADDRESS) == 0)
+ hubp->funcs->set_cursor_attributes(hubp, &hubp->curs_attr);
+
+ REG_UPDATE(CURSOR_CONTROL,
+ CURSOR_ENABLE, cur_en);
+
+ REG_SET_2(CURSOR_POSITION, 0,
+ CURSOR_X_POSITION, pos->x,
+ CURSOR_Y_POSITION, pos->y);
+
+ REG_SET_2(CURSOR_HOT_SPOT, 0,
+ CURSOR_HOT_SPOT_X, x_hotspot,
+ CURSOR_HOT_SPOT_Y, y_hotspot);
+
+ REG_SET(CURSOR_DST_OFFSET, 0,
+ CURSOR_DST_X_OFFSET, dst_x_offset);
+ /* TODO Handle surface pixel formats other than 4:4:4 */
+}
+
+void hubp2_clk_cntl(struct hubp *hubp, bool enable)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ uint32_t clk_enable = enable ? 1 : 0;
+
+ REG_UPDATE(HUBP_CLK_CNTL, HUBP_CLOCK_ENABLE, clk_enable);
+}
+
+void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+
+ REG_UPDATE(DCHUBP_CNTL, HUBP_VTG_SEL, otg_inst);
+}
+
+void hubp2_clear_underflow(struct hubp *hubp)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+
+ REG_UPDATE(DCHUBP_CNTL, HUBP_UNDERFLOW_CLEAR, 1);
+}
+
+void hubp2_read_state_common(struct hubp *hubp)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ struct dcn_hubp_state *s = &hubp2->state;
+ struct _vcs_dpi_display_dlg_regs_st *dlg_attr = &s->dlg_attr;
+ struct _vcs_dpi_display_ttu_regs_st *ttu_attr = &s->ttu_attr;
+ struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;
+
+ /* Requester */
+ REG_GET(HUBPRET_CONTROL,
+ DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs->plane1_base_address);
+ REG_GET_4(DCN_EXPANSION_MODE,
+ DRQ_EXPANSION_MODE, &rq_regs->drq_expansion_mode,
+ PRQ_EXPANSION_MODE, &rq_regs->prq_expansion_mode,
+ MRQ_EXPANSION_MODE, &rq_regs->mrq_expansion_mode,
+ CRQ_EXPANSION_MODE, &rq_regs->crq_expansion_mode);
+
+ /* DLG - Per hubp */
+ REG_GET_2(BLANK_OFFSET_0,
+ REFCYC_H_BLANK_END, &dlg_attr->refcyc_h_blank_end,
+ DLG_V_BLANK_END, &dlg_attr->dlg_vblank_end);
+
+ REG_GET(BLANK_OFFSET_1,
+ MIN_DST_Y_NEXT_START, &dlg_attr->min_dst_y_next_start);
+
+ REG_GET(DST_DIMENSIONS,
+ REFCYC_PER_HTOTAL, &dlg_attr->refcyc_per_htotal);
+
+ REG_GET_2(DST_AFTER_SCALER,
+ REFCYC_X_AFTER_SCALER, &dlg_attr->refcyc_x_after_scaler,
+ DST_Y_AFTER_SCALER, &dlg_attr->dst_y_after_scaler);
+
+ if (REG(PREFETCH_SETTINS))
+ REG_GET_2(PREFETCH_SETTINS,
+ DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
+ VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);
+ else
+ REG_GET_2(PREFETCH_SETTINGS,
+ DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
+ VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);
+
+ REG_GET_2(VBLANK_PARAMETERS_0,
+ DST_Y_PER_VM_VBLANK, &dlg_attr->dst_y_per_vm_vblank,
+ DST_Y_PER_ROW_VBLANK, &dlg_attr->dst_y_per_row_vblank);
+
+ REG_GET(REF_FREQ_TO_PIX_FREQ,
+ REF_FREQ_TO_PIX_FREQ, &dlg_attr->ref_freq_to_pix_freq);
+
+ /* DLG - Per luma/chroma */
+ REG_GET(VBLANK_PARAMETERS_1,
+ REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr->refcyc_per_pte_group_vblank_l);
+
+ REG_GET(VBLANK_PARAMETERS_3,
+ REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr->refcyc_per_meta_chunk_vblank_l);
+
+ if (REG(NOM_PARAMETERS_0))
+ REG_GET(NOM_PARAMETERS_0,
+ DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr->dst_y_per_pte_row_nom_l);
+
+ if (REG(NOM_PARAMETERS_1))
+ REG_GET(NOM_PARAMETERS_1,
+ REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr->refcyc_per_pte_group_nom_l);
+
+ REG_GET(NOM_PARAMETERS_4,
+ DST_Y_PER_META_ROW_NOM_L, &dlg_attr->dst_y_per_meta_row_nom_l);
+
+ REG_GET(NOM_PARAMETERS_5,
+ REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr->refcyc_per_meta_chunk_nom_l);
+
+ REG_GET_2(PER_LINE_DELIVERY_PRE,
+ REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr->refcyc_per_line_delivery_pre_l,
+ REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr->refcyc_per_line_delivery_pre_c);
+
+ REG_GET_2(PER_LINE_DELIVERY,
+ REFCYC_PER_LINE_DELIVERY_L, &dlg_attr->refcyc_per_line_delivery_l,
+ REFCYC_PER_LINE_DELIVERY_C, &dlg_attr->refcyc_per_line_delivery_c);
+
+ if (REG(PREFETCH_SETTINS_C))
+ REG_GET(PREFETCH_SETTINS_C,
+ VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);
+ else
+ REG_GET(PREFETCH_SETTINGS_C,
+ VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);
+
+ REG_GET(VBLANK_PARAMETERS_2,
+ REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr->refcyc_per_pte_group_vblank_c);
+
+ REG_GET(VBLANK_PARAMETERS_4,
+ REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr->refcyc_per_meta_chunk_vblank_c);
+
+ if (REG(NOM_PARAMETERS_2))
+ REG_GET(NOM_PARAMETERS_2,
+ DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr->dst_y_per_pte_row_nom_c);
+
+ if (REG(NOM_PARAMETERS_3))
+ REG_GET(NOM_PARAMETERS_3,
+ REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr->refcyc_per_pte_group_nom_c);
+
+ REG_GET(NOM_PARAMETERS_6,
+ DST_Y_PER_META_ROW_NOM_C, &dlg_attr->dst_y_per_meta_row_nom_c);
+
+ REG_GET(NOM_PARAMETERS_7,
+ REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr->refcyc_per_meta_chunk_nom_c);
+
+ /* TTU - per hubp */
+ REG_GET_2(DCN_TTU_QOS_WM,
+ QoS_LEVEL_LOW_WM, &ttu_attr->qos_level_low_wm,
+ QoS_LEVEL_HIGH_WM, &ttu_attr->qos_level_high_wm);
+
+ REG_GET_2(DCN_GLOBAL_TTU_CNTL,
+ MIN_TTU_VBLANK, &ttu_attr->min_ttu_vblank,
+ QoS_LEVEL_FLIP, &ttu_attr->qos_level_flip);
+
+ /* TTU - per luma/chroma */
+ /* Assumed surf0 is luma and 1 is chroma */
+
+ REG_GET_3(DCN_SURF0_TTU_CNTL0,
+ REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_l,
+ QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_l,
+ QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_l);
+
+ REG_GET(DCN_SURF0_TTU_CNTL1,
+ REFCYC_PER_REQ_DELIVERY_PRE,
+ &ttu_attr->refcyc_per_req_delivery_pre_l);
+
+ REG_GET_3(DCN_SURF1_TTU_CNTL0,
+ REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_c,
+ QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_c,
+ QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_c);
+
+ REG_GET(DCN_SURF1_TTU_CNTL1,
+ REFCYC_PER_REQ_DELIVERY_PRE,
+ &ttu_attr->refcyc_per_req_delivery_pre_c);
+
+ /* Rest of hubp */
+ REG_GET(DCSURF_SURFACE_CONFIG,
+ SURFACE_PIXEL_FORMAT, &s->pixel_format);
+
+ REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
+ SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &s->inuse_addr_hi);
+
+ REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
+ SURFACE_EARLIEST_INUSE_ADDRESS, &s->inuse_addr_lo);
+
+ REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION,
+ PRI_VIEWPORT_WIDTH, &s->viewport_width,
+ PRI_VIEWPORT_HEIGHT, &s->viewport_height);
+
+ REG_GET_2(DCSURF_SURFACE_CONFIG,
+ ROTATION_ANGLE, &s->rotation_angle,
+ H_MIRROR_EN, &s->h_mirror_en);
+
+ REG_GET(DCSURF_TILING_CONFIG,
+ SW_MODE, &s->sw_mode);
+
+ REG_GET(DCSURF_SURFACE_CONTROL,
+ PRIMARY_SURFACE_DCC_EN, &s->dcc_en);
+
+ REG_GET_3(DCHUBP_CNTL,
+ HUBP_BLANK_EN, &s->blank_en,
+ HUBP_TTU_DISABLE, &s->ttu_disable,
+ HUBP_UNDERFLOW_STATUS, &s->underflow_status);
+
+ REG_GET(DCN_GLOBAL_TTU_CNTL,
+ MIN_TTU_VBLANK, &s->min_ttu_vblank);
+
+ REG_GET_2(DCN_TTU_QOS_WM,
+ QoS_LEVEL_LOW_WM, &s->qos_level_low_wm,
+ QoS_LEVEL_HIGH_WM, &s->qos_level_high_wm);
+
+}
+
+void hubp2_read_state(struct hubp *hubp)
+{
+ struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
+ struct dcn_hubp_state *s = &hubp2->state;
+ struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;
+
+ hubp2_read_state_common(hubp);
+
+ REG_GET_8(DCHUBP_REQ_SIZE_CONFIG,
+ CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
+ MIN_CHUNK_SIZE, &rq_regs->rq_regs_l.min_chunk_size,
+ META_CHUNK_SIZE, &rq_regs->rq_regs_l.meta_chunk_size,
+ MIN_META_CHUNK_SIZE, &rq_regs->rq_regs_l.min_meta_chunk_size,
+ DPTE_GROUP_SIZE, &rq_regs->rq_regs_l.dpte_group_size,
+ MPTE_GROUP_SIZE, &rq_regs->rq_regs_l.mpte_group_size,
+ SWATH_HEIGHT, &rq_regs->rq_regs_l.swath_height,
+ PTE_ROW_HEIGHT_LINEAR, &rq_regs->rq_regs_l.pte_row_height_linear);
+
+ REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C,
+ CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
+ MIN_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_chunk_size,
+ META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.meta_chunk_size,
+ MIN_META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_meta_chunk_size,
+ DPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.dpte_group_size,
+ MPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.mpte_group_size,
+ SWATH_HEIGHT_C, &rq_regs->rq_regs_c.swath_height,
+ PTE_ROW_HEIGHT_LINEAR_C, &rq_regs->rq_regs_c.pte_row_height_linear);
+
+}
+
static struct hubp_funcs dcn20_hubp_funcs = {
.hubp_enable_tripleBuffer = hubp2_enable_triplebuffer,
.hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled,
.hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr,
.hubp_program_surface_config = hubp2_program_surface_config,
- .hubp_is_flip_pending = hubp1_is_flip_pending,
+ .hubp_is_flip_pending = hubp2_is_flip_pending,
.hubp_setup = hubp2_setup,
.hubp_setup_interdependent = hubp2_setup_interdependent,
.hubp_set_vm_system_aperture_settings = hubp2_set_vm_system_aperture_settings,
- .set_blank = hubp1_set_blank,
- .dcc_control = hubp1_dcc_control,
+ .set_blank = hubp2_set_blank,
+ .dcc_control = hubp2_dcc_control,
.hubp_update_dchub = hubp2_update_dchub,
.mem_program_viewport = min_set_viewport,
.set_cursor_attributes = hubp2_cursor_set_attributes,
- .set_cursor_position = hubp1_cursor_set_position,
- .hubp_clk_cntl = hubp1_clk_cntl,
- .hubp_vtg_sel = hubp1_vtg_sel,
+ .set_cursor_position = hubp2_cursor_set_position,
+ .hubp_clk_cntl = hubp2_clk_cntl,
+ .hubp_vtg_sel = hubp2_vtg_sel,
.dmdata_set_attributes = hubp2_dmdata_set_attributes,
.dmdata_load = hubp2_dmdata_load,
.dmdata_status_done = hubp2_dmdata_status_done,
- .hubp_read_state = hubp1_read_state,
- .hubp_clear_underflow = hubp1_clear_underflow,
+ .hubp_read_state = hubp2_read_state,
+ .hubp_clear_underflow = hubp2_clear_underflow,
.hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl,
.hubp_init = hubp1_init,
};
diff --git a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h
index 2c6405a62fc1..c8418235e154 100644
--- a/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h
+++ b/drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h
@@ -267,6 +267,24 @@ bool hubp2_program_surface_flip_and_addr(
const struct dc_plane_address *address,
bool flip_immediate);
+void hubp2_dcc_control(struct hubp *hubp, bool enable,
+ bool independent_64b_blks);
+
+void hubp2_program_size(
+ struct hubp *hubp,
+ enum surface_pixel_format format,
+ const union plane_size *plane_size,
+ struct dc_plane_dcc_param *dcc);
+
+void hubp2_program_rotation(
+ struct hubp *hubp,
+ enum dc_rotation_angle rotation,
+ bool horizontal_mirror);
+
+void hubp2_program_pixel_format(
+ struct hubp *hubp,
+ enum surface_pixel_format format);
+
void hubp2_program_surface_config(
struct hubp *hubp,
enum surface_pixel_format format,
@@ -277,6 +295,25 @@ void hubp2_program_surface_config(
bool horizontal_mirror,
unsigned int compat_level);
+bool hubp2_is_flip_pending(struct hubp *hubp);
+
+void hubp2_set_blank(struct hubp *hubp, bool blank);
+
+void hubp2_cursor_set_position(
+ struct hubp *hubp,
+ const struct dc_cursor_position *pos,
+ const struct dc_cursor_mi_param *param);
+
+void hubp2_clk_cntl(struct hubp *hubp, bool enable);
+
+void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst);
+
+void hubp2_clear_underflow(struct hubp *hubp);
+
+void hubp2_read_state_common(struct hubp *hubp);
+
+void hubp2_read_state(struct hubp *hubp);
+
#endif /* __DC_MEM_INPUT_DCN20_H__ */
--
2.22.0