
Commit b9511c6

Merge tag 'drm-msm-next-2024-03-07' of https://gitlab.freedesktop.org/drm/msm into drm-next
Late updates for v6.9, the main part is CDM (YUV over DP) which was
waiting for drm-misc-next-2024-02-29.

DPU:
- Add support for YUV420 over DP
- Patchset to ease debugging of vblank timeouts
- Small cleanup

Signed-off-by: Dave Airlie <airlied@redhat.com>
From: Rob Clark <robdclark@gmail.com>
Link: https://patchwork.freedesktop.org/patch/msgid/CAF6AEGvedk6OCOZ-NNtGf_pNiGuK9uvWj1MCDZLX9Jo2nHS=Zg@mail.gmail.com
2 parents b0b6739 + 4be445f commit b9511c6

23 files changed: +736, -291 lines changed

drivers/gpu/drm/msm/Makefile

Lines changed: 2 additions & 1 deletion
@@ -127,7 +127,8 @@ msm-$(CONFIG_DRM_MSM_DP) += dp/dp_aux.o \
 	dp/dp_drm.o \
 	dp/dp_link.o \
 	dp/dp_panel.o \
-	dp/dp_audio.o
+	dp/dp_audio.o \
+	dp/dp_utils.o

 msm-$(CONFIG_DRM_FBDEV_EMULATION) += msm_fbdev.o

drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c

Lines changed: 204 additions & 40 deletions
@@ -126,6 +126,8 @@ enum dpu_enc_rc_states {
  * @base: drm_encoder base class for registration with DRM
  * @enc_spinlock: Virtual-Encoder-Wide Spin Lock for IRQ purposes
  * @enabled: True if the encoder is active, protected by enc_lock
+ * @commit_done_timedout: True if there has been a timeout on commit after
+ *			enabling the encoder.
  * @num_phys_encs: Actual number of physical encoders contained.
  * @phys_encs: Container of physical encoders managed.
  * @cur_master: Pointer to the current master in this mode. Optimization
@@ -172,6 +174,7 @@ struct dpu_encoder_virt {
 	spinlock_t enc_spinlock;

 	bool enabled;
+	bool commit_done_timedout;

 	unsigned int num_phys_encs;
 	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
@@ -218,12 +221,59 @@ static u32 dither_matrix[DITHER_MATRIX_SZ] = {
 	15, 7, 13, 5, 3, 11, 1, 9, 12, 4, 14, 6, 0, 8, 2, 10
 };

+u32 dpu_encoder_get_drm_fmt(struct dpu_encoder_phys *phys_enc)
+{
+	struct drm_encoder *drm_enc;
+	struct dpu_encoder_virt *dpu_enc;
+	struct drm_display_info *info;
+	struct drm_display_mode *mode;
+
+	drm_enc = phys_enc->parent;
+	dpu_enc = to_dpu_encoder_virt(drm_enc);
+	info = &dpu_enc->connector->display_info;
+	mode = &phys_enc->cached_mode;
+
+	if (drm_mode_is_420_only(info, mode))
+		return DRM_FORMAT_YUV420;
+
+	return DRM_FORMAT_RGB888;
+}
+
+bool dpu_encoder_needs_periph_flush(struct dpu_encoder_phys *phys_enc)
+{
+	struct drm_encoder *drm_enc;
+	struct dpu_encoder_virt *dpu_enc;
+	struct msm_display_info *disp_info;
+	struct msm_drm_private *priv;
+	struct drm_display_mode *mode;
+
+	drm_enc = phys_enc->parent;
+	dpu_enc = to_dpu_encoder_virt(drm_enc);
+	disp_info = &dpu_enc->disp_info;
+	priv = drm_enc->dev->dev_private;
+	mode = &phys_enc->cached_mode;
+
+	return phys_enc->hw_intf->cap->type == INTF_DP &&
+	       msm_dp_needs_periph_flush(priv->dp[disp_info->h_tile_instance[0]], mode);
+}

 bool dpu_encoder_is_widebus_enabled(const struct drm_encoder *drm_enc)
 {
-	const struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
+	const struct dpu_encoder_virt *dpu_enc;
+	struct msm_drm_private *priv = drm_enc->dev->dev_private;
+	const struct msm_display_info *disp_info;
+	int index;

-	return dpu_enc->wide_bus_en;
+	dpu_enc = to_dpu_encoder_virt(drm_enc);
+	disp_info = &dpu_enc->disp_info;
+	index = disp_info->h_tile_instance[0];
+
+	if (disp_info->intf_type == INTF_DP)
+		return msm_dp_wide_bus_available(priv->dp[index]);
+	else if (disp_info->intf_type == INTF_DSI)
+		return msm_dsi_wide_bus_enabled(priv->dsi[index]);
+
+	return false;
 }

 bool dpu_encoder_is_dsc_enabled(const struct drm_encoder *drm_enc)
@@ -588,6 +638,7 @@ static int dpu_encoder_virt_atomic_check(
 	struct dpu_kms *dpu_kms;
 	struct drm_display_mode *adj_mode;
 	struct msm_display_topology topology;
+	struct msm_display_info *disp_info;
 	struct dpu_global_state *global_state;
 	struct drm_framebuffer *fb;
 	struct drm_dsc_config *dsc;
@@ -603,6 +654,7 @@ static int dpu_encoder_virt_atomic_check(
 	DPU_DEBUG_ENC(dpu_enc, "\n");

 	priv = drm_enc->dev->dev_private;
+	disp_info = &dpu_enc->disp_info;
 	dpu_kms = to_dpu_kms(priv->kms);
 	adj_mode = &crtc_state->adjusted_mode;
 	global_state = dpu_kms_get_global_state(crtc_state->state);
@@ -616,21 +668,24 @@ static int dpu_encoder_virt_atomic_check(
 	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode, crtc_state, dsc);

 	/*
-	 * Use CDM only for writeback at the moment as other interfaces cannot handle it.
-	 * if writeback itself cannot handle cdm for some reason it will fail in its atomic_check()
+	 * Use CDM only for writeback or DP at the moment as other interfaces cannot handle it.
+	 * If writeback itself cannot handle cdm for some reason it will fail in its atomic_check()
 	 * earlier.
 	 */
-	if (dpu_enc->disp_info.intf_type == INTF_WB && conn_state->writeback_job) {
+	if (disp_info->intf_type == INTF_WB && conn_state->writeback_job) {
 		fb = conn_state->writeback_job->fb;

 		if (fb && DPU_FORMAT_IS_YUV(to_dpu_format(msm_framebuffer_format(fb))))
 			topology.needs_cdm = true;
-		if (topology.needs_cdm && !dpu_enc->cur_master->hw_cdm)
-			crtc_state->mode_changed = true;
-		else if (!topology.needs_cdm && dpu_enc->cur_master->hw_cdm)
-			crtc_state->mode_changed = true;
+	} else if (disp_info->intf_type == INTF_DP) {
+		if (msm_dp_is_yuv_420_enabled(priv->dp[disp_info->h_tile_instance[0]], adj_mode))
+			topology.needs_cdm = true;
 	}

+	if (topology.needs_cdm && !dpu_enc->cur_master->hw_cdm)
+		crtc_state->mode_changed = true;
+	else if (!topology.needs_cdm && dpu_enc->cur_master->hw_cdm)
+		crtc_state->mode_changed = true;
 	/*
 	 * Release and Allocate resources on every modeset
 	 * Dont allocate when active is false.
@@ -1102,7 +1157,8 @@ static void dpu_encoder_virt_atomic_mode_set(struct drm_encoder *drm_enc,

 	dpu_enc->dsc_mask = dsc_mask;

-	if (dpu_enc->disp_info.intf_type == INTF_WB && conn_state->writeback_job) {
+	if ((dpu_enc->disp_info.intf_type == INTF_WB && conn_state->writeback_job) ||
+	    dpu_enc->disp_info.intf_type == INTF_DP) {
 		struct dpu_hw_blk *hw_cdm = NULL;

 		dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
@@ -1209,26 +1265,20 @@ static void dpu_encoder_virt_atomic_enable(struct drm_encoder *drm_enc,
 	struct dpu_encoder_virt *dpu_enc = NULL;
 	int ret = 0;
 	struct drm_display_mode *cur_mode = NULL;
-	struct msm_drm_private *priv = drm_enc->dev->dev_private;
-	struct msm_display_info *disp_info;
-	int index;

 	dpu_enc = to_dpu_encoder_virt(drm_enc);
-	disp_info = &dpu_enc->disp_info;
-	index = disp_info->h_tile_instance[0];
-
 	dpu_enc->dsc = dpu_encoder_get_dsc_config(drm_enc);

 	atomic_set(&dpu_enc->frame_done_timeout_cnt, 0);

-	if (disp_info->intf_type == INTF_DP)
-		dpu_enc->wide_bus_en = msm_dp_wide_bus_available(priv->dp[index]);
-	else if (disp_info->intf_type == INTF_DSI)
-		dpu_enc->wide_bus_en = msm_dsi_wide_bus_enabled(priv->dsi[index]);
-
 	mutex_lock(&dpu_enc->enc_lock);
+
+	dpu_enc->commit_done_timedout = false;
+
 	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

+	dpu_enc->wide_bus_en = dpu_encoder_is_widebus_enabled(drm_enc);
+
 	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
 			     cur_mode->vdisplay);

@@ -1282,7 +1332,7 @@ static void dpu_encoder_virt_atomic_disable(struct drm_encoder *drm_enc,
 	trace_dpu_enc_disable(DRMID(drm_enc));

 	/* wait for idle */
-	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
+	dpu_encoder_wait_for_tx_complete(drm_enc);

 	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

@@ -2133,6 +2183,84 @@ void dpu_encoder_helper_phys_cleanup(struct dpu_encoder_phys *phys_enc)
 		ctl->ops.clear_pending_flush(ctl);
 }

+void dpu_encoder_helper_phys_setup_cdm(struct dpu_encoder_phys *phys_enc,
+				       const struct dpu_format *dpu_fmt,
+				       u32 output_type)
+{
+	struct dpu_hw_cdm *hw_cdm;
+	struct dpu_hw_cdm_cfg *cdm_cfg;
+	struct dpu_hw_pingpong *hw_pp;
+	int ret;
+
+	if (!phys_enc)
+		return;
+
+	cdm_cfg = &phys_enc->cdm_cfg;
+	hw_pp = phys_enc->hw_pp;
+	hw_cdm = phys_enc->hw_cdm;
+
+	if (!hw_cdm)
+		return;
+
+	if (!DPU_FORMAT_IS_YUV(dpu_fmt)) {
+		DPU_DEBUG("[enc:%d] cdm_disable fmt:%x\n", DRMID(phys_enc->parent),
+			  dpu_fmt->base.pixel_format);
+		if (hw_cdm->ops.bind_pingpong_blk)
+			hw_cdm->ops.bind_pingpong_blk(hw_cdm, PINGPONG_NONE);
+
+		return;
+	}
+
+	memset(cdm_cfg, 0, sizeof(struct dpu_hw_cdm_cfg));
+
+	cdm_cfg->output_width = phys_enc->cached_mode.hdisplay;
+	cdm_cfg->output_height = phys_enc->cached_mode.vdisplay;
+	cdm_cfg->output_fmt = dpu_fmt;
+	cdm_cfg->output_type = output_type;
+	cdm_cfg->output_bit_depth = DPU_FORMAT_IS_DX(dpu_fmt) ?
+			CDM_CDWN_OUTPUT_10BIT : CDM_CDWN_OUTPUT_8BIT;
+	cdm_cfg->csc_cfg = &dpu_csc10_rgb2yuv_601l;
+
+	/* enable 10 bit logic */
+	switch (cdm_cfg->output_fmt->chroma_sample) {
+	case DPU_CHROMA_RGB:
+		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
+		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
+		break;
+	case DPU_CHROMA_H2V1:
+		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
+		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
+		break;
+	case DPU_CHROMA_420:
+		cdm_cfg->h_cdwn_type = CDM_CDWN_COSITE;
+		cdm_cfg->v_cdwn_type = CDM_CDWN_OFFSITE;
+		break;
+	case DPU_CHROMA_H1V2:
+	default:
+		DPU_ERROR("[enc:%d] unsupported chroma sampling type\n",
+			  DRMID(phys_enc->parent));
+		cdm_cfg->h_cdwn_type = CDM_CDWN_DISABLE;
+		cdm_cfg->v_cdwn_type = CDM_CDWN_DISABLE;
+		break;
+	}
+
+	DPU_DEBUG("[enc:%d] cdm_enable:%d,%d,%X,%d,%d,%d,%d]\n",
+		  DRMID(phys_enc->parent), cdm_cfg->output_width,
+		  cdm_cfg->output_height, cdm_cfg->output_fmt->base.pixel_format,
+		  cdm_cfg->output_type, cdm_cfg->output_bit_depth,
+		  cdm_cfg->h_cdwn_type, cdm_cfg->v_cdwn_type);
+
+	if (hw_cdm->ops.enable) {
+		cdm_cfg->pp_id = hw_pp->idx;
+		ret = hw_cdm->ops.enable(hw_cdm, cdm_cfg);
+		if (ret < 0) {
+			DPU_ERROR("[enc:%d] failed to enable CDM; ret:%d\n",
+				  DRMID(phys_enc->parent), ret);
+			return;
+		}
+	}
+}
+
 #ifdef CONFIG_DEBUG_FS
 static int _dpu_encoder_status_show(struct seq_file *s, void *data)
 {
@@ -2402,10 +2530,18 @@ struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
 	return &dpu_enc->base;
 }

-int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
-	enum msm_event_wait event)
+/**
+ * dpu_encoder_wait_for_commit_done() - Wait for encoder to flush pending state
+ * @drm_enc: encoder pointer
+ *
+ * Wait for hardware to have flushed the current pending changes to hardware at
+ * a vblank or CTL_START. Physical encoders will map this differently depending
+ * on the type: vid mode -> vsync_irq, cmd mode -> CTL_START.
+ *
+ * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
+ */
+int dpu_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
 {
-	int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
 	struct dpu_encoder_virt *dpu_enc = NULL;
 	int i, ret = 0;

@@ -2419,23 +2555,51 @@ int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
 	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
 		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

-		switch (event) {
-		case MSM_ENC_COMMIT_DONE:
-			fn_wait = phys->ops.wait_for_commit_done;
-			break;
-		case MSM_ENC_TX_COMPLETE:
-			fn_wait = phys->ops.wait_for_tx_complete;
-			break;
-		default:
-			DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
-					event);
-			return -EINVAL;
+		if (phys->ops.wait_for_commit_done) {
+			DPU_ATRACE_BEGIN("wait_for_commit_done");
+			ret = phys->ops.wait_for_commit_done(phys);
+			DPU_ATRACE_END("wait_for_commit_done");
+			if (ret == -ETIMEDOUT && !dpu_enc->commit_done_timedout) {
+				dpu_enc->commit_done_timedout = true;
+				msm_disp_snapshot_state(drm_enc->dev);
+			}
+			if (ret)
+				return ret;
 		}
+	}
+
+	return ret;
+}
+
+/**
+ * dpu_encoder_wait_for_tx_complete() - Wait for encoder to transfer pixels to panel
+ * @drm_enc: encoder pointer
+ *
+ * Wait for the hardware to transfer all the pixels to the panel. Physical
+ * encoders will map this differently depending on the type: vid mode -> vsync_irq,
+ * cmd mode -> pp_done.
+ *
+ * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
+ */
+int dpu_encoder_wait_for_tx_complete(struct drm_encoder *drm_enc)
+{
+	struct dpu_encoder_virt *dpu_enc = NULL;
+	int i, ret = 0;
+
+	if (!drm_enc) {
+		DPU_ERROR("invalid encoder\n");
+		return -EINVAL;
+	}
+	dpu_enc = to_dpu_encoder_virt(drm_enc);
+	DPU_DEBUG_ENC(dpu_enc, "\n");
+
+	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
+		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

-		if (fn_wait) {
-			DPU_ATRACE_BEGIN("wait_for_completion_event");
-			ret = fn_wait(phys);
-			DPU_ATRACE_END("wait_for_completion_event");
+		if (phys->ops.wait_for_tx_complete) {
+			DPU_ATRACE_BEGIN("wait_for_tx_complete");
+			ret = phys->ops.wait_for_tx_complete(phys);
+			DPU_ATRACE_END("wait_for_tx_complete");
 			if (ret)
 				return ret;
 		}
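Note: the hunks above split the old dpu_encoder_wait_for_event(drm_enc, event) entry point into two dedicated helpers, dpu_encoder_wait_for_commit_done() and dpu_encoder_wait_for_tx_complete(). Other callers in this series (presumably in dpu_kms.c and dpu_crtc.c, which are among the 23 changed files but not shown in this excerpt) are expected to be converted the same way as dpu_encoder_virt_atomic_disable() above. A minimal caller-side sketch; the wrapping function name is illustrative only, not part of this commit:

/* Hypothetical caller: wait for the committed state to reach the hardware. */
static void example_wait_for_flush(struct drm_encoder *drm_enc)
{
	int ret;

	/* Old style (removed by this series):
	 * ret = dpu_encoder_wait_for_event(drm_enc, MSM_ENC_COMMIT_DONE);
	 */
	ret = dpu_encoder_wait_for_commit_done(drm_enc);
	if (ret && ret != -EWOULDBLOCK)
		DRM_ERROR("wait for commit done returned %d\n", ret);
}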
