author	Anatoliy Klymenko <anatoliy.klymenko@amd.com>	2024-04-16 13:31:40 -0700
committer	Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>	2024-04-24 17:51:19 +0300
commit	1836fd5ed98db85f249bf755978c964c2607a25d (patch)
tree	e6ef543fe46d85dfc4cc28089fe39025d90d4624
parent	b0f0469ab662159f182f5af292b71cc5d42468a8 (diff)
drm: xlnx: zynqmp_dpsub: Minimize usage of global flag
Avoid using the global zynqmp_dpsub.dma_enabled flag in the DPSUB layer context. This flag signals whether the driver runs in DRM CRTC or DRM bridge mode, assuming that all display layers share the same live or non-live mode of operation. Using a per-layer mode instead of the global flag will simplify future support of the hybrid scenario.

Remove redundant checks in the DMA request/release contexts, as zynqmp_disp_layer.info is well-defined for all layer types, including the correct number of DMA channels required for each particular layer.

Signed-off-by: Anatoliy Klymenko <anatoliy.klymenko@amd.com>
Reviewed-by: Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
Signed-off-by: Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20240416-dp-live-fmt-v4-5-c7f379b7168e@amd.com
-rw-r--r--	drivers/gpu/drm/xlnx/zynqmp_disp.c	12
1 file changed, 3 insertions(+), 9 deletions(-)
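For illustration only, here is a minimal stand-alone C sketch of the pattern the patch above applies: replacing a driver-global dma_enabled flag with a per-layer mode, so a live (bridge-fed) layer and a non-live (DMA-fed CRTC) layer can coexist. The names below (enum layer_mode, struct layer, layer_disable_*) are simplified stand-ins for this sketch, not the actual zynqmp_disp definitions.

#include <stdbool.h>
#include <stdio.h>

enum layer_mode {
	LAYER_NONLIVE,	/* framebuffer fed through DMA (DRM CRTC path) */
	LAYER_LIVE,	/* live video input (DRM bridge path) */
};

struct layer {
	const char *name;
	enum layer_mode mode;	/* per-layer mode replaces the global flag */
};

/* Before the patch: every layer consulted one driver-global flag. */
static bool dma_enabled = true;

static void layer_disable_old(const struct layer *l)
{
	if (dma_enabled)
		printf("%s: terminate DMA channels\n", l->name);
}

/* After the patch: each layer decides based on its own mode. */
static void layer_disable_new(const struct layer *l)
{
	if (l->mode == LAYER_NONLIVE)
		printf("%s: terminate DMA channels\n", l->name);
}

int main(void)
{
	struct layer video = { .name = "video", .mode = LAYER_NONLIVE };
	struct layer gfx   = { .name = "gfx",   .mode = LAYER_LIVE };

	/* Per-layer modes allow a hybrid setup: one layer live, one DMA-fed. */
	layer_disable_new(&video);	/* DMA-backed, so channels are terminated */
	layer_disable_new(&gfx);	/* live input, nothing to terminate */
	layer_disable_old(&gfx);	/* the old global check would wrongly touch DMA here */

	return 0;
}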
diff --git a/drivers/gpu/drm/xlnx/zynqmp_disp.c b/drivers/gpu/drm/xlnx/zynqmp_disp.c
index 24f1f367b1d3..8cdd74a9b772 100644
--- a/drivers/gpu/drm/xlnx/zynqmp_disp.c
+++ b/drivers/gpu/drm/xlnx/zynqmp_disp.c
@@ -1026,7 +1026,7 @@ void zynqmp_disp_layer_disable(struct zynqmp_disp_layer *layer)
{
unsigned int i;
- if (layer->disp->dpsub->dma_enabled) {
+ if (layer->mode == ZYNQMP_DPSUB_LAYER_NONLIVE) {
for (i = 0; i < layer->drm_fmt->num_planes; i++)
dmaengine_terminate_sync(layer->dmas[i].chan);
}
@@ -1052,7 +1052,7 @@ void zynqmp_disp_layer_set_format(struct zynqmp_disp_layer *layer,
zynqmp_disp_avbuf_set_format(layer->disp, layer, layer->disp_fmt);
- if (!layer->disp->dpsub->dma_enabled)
+ if (layer->mode == ZYNQMP_DPSUB_LAYER_LIVE)
return;
/*
@@ -1090,7 +1090,7 @@ int zynqmp_disp_layer_update(struct zynqmp_disp_layer *layer,
const struct drm_format_info *info = layer->drm_fmt;
unsigned int i;
- if (!layer->disp->dpsub->dma_enabled)
+ if (layer->mode == ZYNQMP_DPSUB_LAYER_LIVE)
return 0;
for (i = 0; i < info->num_planes; i++) {
@@ -1140,9 +1140,6 @@ static void zynqmp_disp_layer_release_dma(struct zynqmp_disp *disp,
{
unsigned int i;
- if (!layer->info || !disp->dpsub->dma_enabled)
- return;
-
for (i = 0; i < layer->info->num_channels; i++) {
struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];
@@ -1183,9 +1180,6 @@ static int zynqmp_disp_layer_request_dma(struct zynqmp_disp *disp,
unsigned int i;
int ret;
- if (!disp->dpsub->dma_enabled)
- return 0;
-
for (i = 0; i < layer->info->num_channels; i++) {
struct zynqmp_disp_layer_dma *dma = &layer->dmas[i];
char dma_channel_name[16];