drm/amd/display: Refactor suspend/resume of Secure display
[Why]
Once an ROI is set and a suspend/resume cycle occurs, the current flow
does not enable OTG_CRC_CTL again, because CRC configuration is deferred
until the stream is enabled.

[How]
Remove the current suspend/resume functions and implement the logic in
amdgpu_dm_atomic_commit_tail().

Signed-off-by: Wayne Lin <Wayne.Lin@amd.com>
Reviewed-by: Chao-kai Wang <Stylon.Wang@amd.com>
Acked-by: Stylon Wang <stylon.wang@amd.com>
Tested-by: Daniel Wheeler <daniel.wheeler@amd.com>
Signed-off-by: Alex Deucher <alexander.deucher@amd.com>
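For quick reference, the re-arm logic this patch moves into amdgpu_dm_atomic_commit_tail() can be sketched as follows. This is a minimal, self-contained illustration in plain C with simplified stand-in types (the struct definitions below are illustrative only, not the driver's real ones); the field names and the skip_frame_cnt = 2 value mirror the diff further down.

#include <stdbool.h>
#include <stddef.h>

/* Illustrative stand-ins for the driver structures touched by this patch. */
struct crc_window {
	bool activated;       /* secure-display ROI is in use */
	bool update_win;      /* ask the CRC window IRQ path to reprogram the window */
	int  skip_frame_cnt;
};

struct crtc_state {
	int crc_src;                  /* 0 == no valid CRC source */
	struct crc_window window;
};

/*
 * Shape of the new commit-path decision: a valid CRC source always leads to
 * (re)configuring CRC generation, and an active ROI additionally re-arms the
 * CRC read-back work instead of suppressing the configuration as before.
 */
static bool crc_commit_decision(struct crtc_state *s, struct crtc_state **rd_work_crtc)
{
	bool configure_crc = false;

	if (s->crc_src != 0) {
		configure_crc = true;
		if (s->window.activated) {
			s->window.update_win = true;
			s->window.skip_frame_cnt = 2;  /* same value the patch uses */
			*rd_work_crtc = s;             /* stands in for crc_rd_wrk->crtc = crtc */
		}
	}
	return configure_crc;
}

int main(void)
{
	struct crtc_state s = { .crc_src = 1, .window = { .activated = true } };
	struct crtc_state *rd = NULL;

	/* With a valid source and an active ROI, CRC is configured and read-back re-armed. */
	return (crc_commit_decision(&s, &rd) && rd == &s) ? 0 : 1;
}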
--- a/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
+++ b/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
@@ -1987,9 +1987,6 @@ static int dm_suspend(void *handle)
 		return ret;
 	}
 
-#ifdef CONFIG_DRM_AMD_SECURE_DISPLAY
-	amdgpu_dm_crtc_secure_display_suspend(adev);
-#endif
 	WARN_ON(adev->dm.cached_state);
 	adev->dm.cached_state = drm_atomic_helper_suspend(adev_to_drm(adev));
 
@@ -2314,10 +2311,6 @@ static int dm_resume(void *handle)
 
 	dm->cached_state = NULL;
 
-#ifdef CONFIG_DRM_AMD_SECURE_DISPLAY
-	amdgpu_dm_crtc_secure_display_resume(adev);
-#endif
-
 	amdgpu_dm_irq_resume_late(adev);
 
 	amdgpu_dm_smu_write_watermarks_table(adev);
@@ -9004,6 +8997,12 @@ static void amdgpu_dm_atomic_commit_tail(struct drm_atomic_state *state)
 #ifdef CONFIG_DEBUG_FS
 		bool configure_crc = false;
 		enum amdgpu_dm_pipe_crc_source cur_crc_src;
+#if defined(CONFIG_DRM_AMD_SECURE_DISPLAY)
+		struct crc_rd_work *crc_rd_wrk = dm->crc_rd_wrk;
+#endif
+		spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
+		cur_crc_src = acrtc->dm_irq_params.crc_src;
+		spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
 #endif
 		dm_new_crtc_state = to_dm_crtc_state(new_crtc_state);
 
@@ -9020,15 +9019,19 @@ static void amdgpu_dm_atomic_commit_tail(struct drm_atomic_state *state)
 			 * settings for the stream.
 			 */
 			dm_new_crtc_state = to_dm_crtc_state(new_crtc_state);
-			spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
-			cur_crc_src = acrtc->dm_irq_params.crc_src;
-			spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
 
 			if (amdgpu_dm_is_valid_crc_source(cur_crc_src)) {
 				configure_crc = true;
 #if defined(CONFIG_DRM_AMD_SECURE_DISPLAY)
-				if (amdgpu_dm_crc_window_is_activated(crtc))
-					configure_crc = false;
+				if (amdgpu_dm_crc_window_is_activated(crtc)) {
+					spin_lock_irqsave(&adev_to_drm(adev)->event_lock, flags);
+					acrtc->dm_irq_params.crc_window.update_win = true;
+					acrtc->dm_irq_params.crc_window.skip_frame_cnt = 2;
+					spin_lock_irq(&crc_rd_wrk->crc_rd_work_lock);
+					crc_rd_wrk->crtc = crtc;
+					spin_unlock_irq(&crc_rd_wrk->crc_rd_work_lock);
+					spin_unlock_irqrestore(&adev_to_drm(adev)->event_lock, flags);
+				}
 #endif
 			}
 
--- a/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.c
+++ b/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.c
@@ -525,67 +525,6 @@ cleanup:
 	spin_unlock_irqrestore(&drm_dev->event_lock, flags1);
 }
 
-void amdgpu_dm_crtc_secure_display_resume(struct amdgpu_device *adev)
-{
-	struct drm_crtc *crtc;
-	enum amdgpu_dm_pipe_crc_source cur_crc_src;
-	struct crc_rd_work *crc_rd_wrk = adev->dm.crc_rd_wrk;
-	struct crc_window_parm cur_crc_window;
-	struct amdgpu_crtc *acrtc = NULL;
-
-	drm_for_each_crtc(crtc, &adev->ddev) {
-		acrtc = to_amdgpu_crtc(crtc);
-
-		spin_lock_irq(&adev_to_drm(adev)->event_lock);
-		cur_crc_src = acrtc->dm_irq_params.crc_src;
-		cur_crc_window = acrtc->dm_irq_params.crc_window;
-		spin_unlock_irq(&adev_to_drm(adev)->event_lock);
-
-		if (amdgpu_dm_is_valid_crc_source(cur_crc_src)) {
-			amdgpu_dm_crtc_set_crc_source(crtc,
-				pipe_crc_sources[cur_crc_src]);
-			spin_lock_irq(&adev_to_drm(adev)->event_lock);
-			acrtc->dm_irq_params.crc_window = cur_crc_window;
-			if (acrtc->dm_irq_params.crc_window.activated) {
-				acrtc->dm_irq_params.crc_window.update_win = true;
-				acrtc->dm_irq_params.crc_window.skip_frame_cnt = 1;
-				spin_lock_irq(&crc_rd_wrk->crc_rd_work_lock);
-				crc_rd_wrk->crtc = crtc;
-				spin_unlock_irq(&crc_rd_wrk->crc_rd_work_lock);
-			}
-			spin_unlock_irq(&adev_to_drm(adev)->event_lock);
-		}
-	}
-}
-
-void amdgpu_dm_crtc_secure_display_suspend(struct amdgpu_device *adev)
-{
-	struct drm_crtc *crtc;
-	struct crc_window_parm cur_crc_window;
-	enum amdgpu_dm_pipe_crc_source cur_crc_src;
-	struct amdgpu_crtc *acrtc = NULL;
-
-	drm_for_each_crtc(crtc, &adev->ddev) {
-		acrtc = to_amdgpu_crtc(crtc);
-
-		spin_lock_irq(&adev_to_drm(adev)->event_lock);
-		cur_crc_src = acrtc->dm_irq_params.crc_src;
-		cur_crc_window = acrtc->dm_irq_params.crc_window;
-		cur_crc_window.update_win = false;
-		spin_unlock_irq(&adev_to_drm(adev)->event_lock);
-
-		if (amdgpu_dm_is_valid_crc_source(cur_crc_src)) {
-			amdgpu_dm_crtc_set_crc_source(crtc, NULL);
-			spin_lock_irq(&adev_to_drm(adev)->event_lock);
-			/* For resume to set back crc source*/
-			acrtc->dm_irq_params.crc_src = cur_crc_src;
-			acrtc->dm_irq_params.crc_window = cur_crc_window;
-			spin_unlock_irq(&adev_to_drm(adev)->event_lock);
-		}
-	}
-
-}
-
 struct crc_rd_work *amdgpu_dm_crtc_secure_display_create_work(void)
 {
 	struct crc_rd_work *crc_rd_wrk = NULL;
--- a/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.h
+++ b/drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.h
@@ -91,14 +91,10 @@ void amdgpu_dm_crtc_handle_crc_irq(struct drm_crtc *crtc);
 bool amdgpu_dm_crc_window_is_activated(struct drm_crtc *crtc);
 void amdgpu_dm_crtc_handle_crc_window_irq(struct drm_crtc *crtc);
 struct crc_rd_work *amdgpu_dm_crtc_secure_display_create_work(void);
-void amdgpu_dm_crtc_secure_display_resume(struct amdgpu_device *adev);
-void amdgpu_dm_crtc_secure_display_suspend(struct amdgpu_device *adev);
 #else
 #define amdgpu_dm_crc_window_is_activated(x)
 #define amdgpu_dm_crtc_handle_crc_window_irq(x)
 #define amdgpu_dm_crtc_secure_display_create_work()
-#define amdgpu_dm_crtc_secure_display_resume(x)
-#define amdgpu_dm_crtc_secure_display_suspend(x)
 #endif
 
 #endif /* AMD_DAL_DEV_AMDGPU_DM_AMDGPU_DM_CRC_H_ */