Diffstat (limited to 'drivers/gpu/drm/i915/display/intel_psr.c')
-rw-r--r-- | drivers/gpu/drm/i915/display/intel_psr.c | 302
1 file changed, 191 insertions, 111 deletions
diff --git a/drivers/gpu/drm/i915/display/intel_psr.c b/drivers/gpu/drm/i915/display/intel_psr.c
index 430ad4ef7146..01bf304c705f 100644
--- a/drivers/gpu/drm/i915/display/intel_psr.c
+++ b/drivers/gpu/drm/i915/display/intel_psr.c
@@ -28,7 +28,6 @@
 #include <drm/drm_debugfs.h>
 #include <drm/drm_vblank.h>
 
-#include "i915_drv.h"
 #include "i915_reg.h"
 #include "intel_alpm.h"
 #include "intel_atomic.h"
@@ -37,16 +36,19 @@
 #include "intel_ddi.h"
 #include "intel_de.h"
 #include "intel_display_irq.h"
+#include "intel_display_regs.h"
 #include "intel_display_rpm.h"
 #include "intel_display_types.h"
 #include "intel_dmc.h"
 #include "intel_dp.h"
 #include "intel_dp_aux.h"
+#include "intel_dsb.h"
 #include "intel_frontbuffer.h"
 #include "intel_hdmi.h"
 #include "intel_psr.h"
 #include "intel_psr_regs.h"
 #include "intel_snps_phy.h"
+#include "intel_step.h"
 #include "intel_vblank.h"
 #include "intel_vrr.h"
 #include "skl_universal_plane.h"
@@ -232,16 +234,12 @@ bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
 
 static bool psr_global_enabled(struct intel_dp *intel_dp)
 {
-	struct intel_display *display = to_intel_display(intel_dp);
 	struct intel_connector *connector = intel_dp->attached_connector;
 
 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
 	case I915_PSR_DEBUG_DEFAULT:
-		if (display->params.enable_psr == -1)
-			return intel_dp_is_edp(intel_dp) ?
-				connector->panel.vbt.psr.enable :
-				true;
-		return display->params.enable_psr;
+		return intel_dp_is_edp(intel_dp) ?
+			connector->panel.vbt.psr.enable : true;
 	case I915_PSR_DEBUG_DISABLE:
 		return false;
 	default:
@@ -249,39 +247,23 @@ static bool psr_global_enabled(struct intel_dp *intel_dp)
 	}
 }
 
-static bool psr2_global_enabled(struct intel_dp *intel_dp)
+static bool sel_update_global_enabled(struct intel_dp *intel_dp)
 {
-	struct intel_display *display = to_intel_display(intel_dp);
-
 	switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
 	case I915_PSR_DEBUG_DISABLE:
 	case I915_PSR_DEBUG_FORCE_PSR1:
 		return false;
 	default:
-		if (display->params.enable_psr == 1)
-			return false;
 		return true;
 	}
 }
 
-static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
-{
-	struct intel_display *display = to_intel_display(intel_dp);
-
-	if (display->params.enable_psr != -1)
-		return false;
-
-	return true;
-}
-
 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
 
-	if ((display->params.enable_psr != -1) ||
-	    (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
-		return false;
-	return true;
+	return !(intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE) &&
+		display->params.enable_panel_replay;
 }
 
 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
@@ -447,7 +429,6 @@ static void psr_event_print(struct intel_display *display,
 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
-	struct drm_i915_private *dev_priv = to_i915(display->drm);
 	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
 	ktime_t time_ns = ktime_get();
 
@@ -492,7 +473,7 @@ void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
 		intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
 			     0, psr_irq_psr_error_bit_get(intel_dp));
 
-		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
+		queue_work(display->wq.unordered, &intel_dp->psr.work);
 	}
 }
 
@@ -514,12 +495,14 @@ static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
 {
 	u8 su_capability = 0;
 
-	if (intel_dp->psr.sink_panel_replay_su_support)
-		drm_dp_dpcd_readb(&intel_dp->aux,
-				  DP_PANEL_PANEL_REPLAY_CAPABILITY,
-				  &su_capability);
-	else
+	if (intel_dp->psr.sink_panel_replay_su_support) {
+		if (drm_dp_dpcd_read_byte(&intel_dp->aux,
+					  DP_PANEL_REPLAY_CAP_CAPABILITY,
+					  &su_capability) < 0)
+			return 0;
+	} else {
 		su_capability = intel_dp->psr_dpcd[1];
+	}
 
 	return su_capability;
 }
@@ -528,7 +511,7 @@ static unsigned int
 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
 {
 	return intel_dp->psr.sink_panel_replay_su_support ?
-	       DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
+	       DP_PANEL_REPLAY_CAP_X_GRANULARITY :
 	       DP_PSR2_SU_X_GRANULARITY;
 }
 
@@ -536,7 +519,7 @@ static unsigned int
 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
 {
 	return intel_dp->psr.sink_panel_replay_su_support ?
-	       DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
+	       DP_PANEL_REPLAY_CAP_Y_GRANULARITY :
 	       DP_PSR2_SU_Y_GRANULARITY;
 }
 
@@ -600,6 +583,16 @@ exit:
 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
+	int ret;
+
+	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PANEL_REPLAY_CAP_SUPPORT,
+				    &intel_dp->pr_dpcd, sizeof(intel_dp->pr_dpcd));
+	if (ret < 0)
+		return;
+
+	if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
+	      DP_PANEL_REPLAY_SUPPORT))
+		return;
 
 	if (intel_dp_is_edp(intel_dp)) {
 		if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
@@ -608,7 +601,8 @@ static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
 			return;
 		}
 
-		if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
+		if (!(intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
+		      DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
 			drm_dbg_kms(display->drm,
 				    "Panel doesn't support early transport, eDP Panel Replay not possible\n");
 			return;
@@ -617,7 +611,8 @@ static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
 
 	intel_dp->psr.sink_panel_replay_support = true;
 
-	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
+	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
+	    DP_PANEL_REPLAY_SU_SUPPORT)
 		intel_dp->psr.sink_panel_replay_su_support = true;
 
 	drm_dbg_kms(display->drm,
@@ -629,6 +624,15 @@
 static void _psr_init_dpcd(struct intel_dp *intel_dp)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
+	int ret;
+
+	ret = drm_dp_dpcd_read_data(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
+				    sizeof(intel_dp->psr_dpcd));
+	if (ret < 0)
+		return;
+
+	if (!intel_dp->psr_dpcd[0])
+		return;
 
 	drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
 		    intel_dp->psr_dpcd[0]);
@@ -674,16 +678,9 @@
 
 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
 {
-	drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
-			 sizeof(intel_dp->psr_dpcd));
-	drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
-			  &intel_dp->pr_dpcd);
+	_psr_init_dpcd(intel_dp);
 
-	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
-		_panel_replay_init_dpcd(intel_dp);
-
-	if (intel_dp->psr_dpcd[0])
-		_psr_init_dpcd(intel_dp);
+	_panel_replay_init_dpcd(intel_dp);
 
 	if (intel_dp->psr.sink_psr2_support ||
 	    intel_dp->psr.sink_panel_replay_su_support)
@@ -736,9 +733,9 @@ static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay
 		return false;
 
 	return panel_replay ?
-	       intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
-	       intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
-	       psr2_su_region_et_global_enabled(intel_dp);
+	       intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
+	       DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
+	       intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED;
 }
 
 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
@@ -931,7 +928,7 @@ static void hsw_activate_psr1(struct intel_dp *intel_dp)
 	/* Wa_16025596647 */
 	if ((DISPLAY_VER(display) == 20 ||
 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
-	    is_dc5_dc6_blocked(intel_dp))
+	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
 		intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
 								       intel_dp->psr.pipe,
 								       true);
@@ -1021,7 +1018,7 @@ static void hsw_activate_psr2(struct intel_dp *intel_dp)
 	/* Wa_16025596647 */
 	if ((DISPLAY_VER(display) == 20 ||
 	     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
-	    is_dc5_dc6_blocked(intel_dp))
+	    is_dc5_dc6_blocked(intel_dp) && intel_dp->psr.pkg_c_latency_used)
 		idle_frames = 0;
 	else
 		idle_frames = psr_compute_idle_frames(intel_dp);
@@ -1418,7 +1415,7 @@ static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
 	int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
 	int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
 
-	if (!intel_dp->psr.sink_psr2_support)
+	if (!intel_dp->psr.sink_psr2_support || display->params.enable_psr == 1)
 		return false;
 
 	/* JSL and EHL only supports eDP 1.3 */
@@ -1523,7 +1520,7 @@ static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
 		goto unsupported;
 	}
 
-	if (!psr2_global_enabled(intel_dp)) {
+	if (!sel_update_global_enabled(intel_dp)) {
 		drm_dbg_kms(display->drm,
 			    "Selective update disabled by flag\n");
 		goto unsupported;
@@ -1571,7 +1568,13 @@ static bool _psr_compute_config(struct intel_dp *intel_dp,
 	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
 	int entry_setup_frames;
 
-	if (!CAN_PSR(intel_dp))
+	if (!CAN_PSR(intel_dp) || !display->params.enable_psr)
+		return false;
+
+	/*
+	 * Currently PSR doesn't work reliably with VRR enabled.
+	 */
+	if (crtc_state->vrr.enable)
 		return false;
 
 	entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
@@ -1691,12 +1694,6 @@ void intel_psr_compute_config(struct intel_dp *intel_dp,
 		return;
 	}
 
-	/*
-	 * Currently PSR/PR doesn't work reliably with VRR enabled.
-	 */
-	if (crtc_state->vrr.enable)
-		return;
-
 	crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
 								     crtc_state,
 								     conn_state);
@@ -1803,6 +1800,8 @@ static void intel_psr_activate(struct intel_dp *intel_dp)
 
 	drm_WARN_ON(display->drm, intel_dp->psr.active);
 
+	drm_WARN_ON(display->drm, !intel_dp->psr.enabled);
+
 	lockdep_assert_held(&intel_dp->psr.lock);
 
 	/* psr1, psr2 and panel-replay are mutually exclusive.*/
@@ -2022,6 +2021,7 @@ static void intel_psr_enable_locked(struct intel_dp *intel_dp,
 	intel_dp->psr.req_psr2_sdp_prior_scanline =
 		crtc_state->req_psr2_sdp_prior_scanline;
 	intel_dp->psr.active_non_psr_pipes = crtc_state->active_non_psr_pipes;
+	intel_dp->psr.pkg_c_latency_used = crtc_state->pkg_c_latency_used;
 
 	if (!psr_interrupt_error_check(intel_dp))
 		return;
@@ -2098,8 +2098,9 @@ static void intel_psr_exit(struct intel_dp *intel_dp)
 
 		drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
 	} else {
-		if (DISPLAY_VER(display) == 20 ||
-		    IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0))
+		if ((DISPLAY_VER(display) == 20 ||
+		     IS_DISPLAY_VERx100_STEP(display, 3000, STEP_A0, STEP_B0)) &&
+		    intel_dp->psr.pkg_c_latency_used)
 			intel_dmc_start_pkgc_exit_at_start_of_undelayed_vblank(display,
 									       intel_dp->psr.pipe,
 									       false);
@@ -2202,6 +2203,7 @@ static void intel_psr_disable_locked(struct intel_dp *intel_dp)
 	intel_dp->psr.su_region_et_enabled = false;
 	intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
 	intel_dp->psr.active_non_psr_pipes = 0;
+	intel_dp->psr.pkg_c_latency_used = 0;
 }
 
 /**
@@ -2883,6 +2885,26 @@ skip_sel_fetch_set_loop:
 	return 0;
 }
 
+void intel_psr2_panic_force_full_update(struct intel_display *display,
+					struct intel_crtc_state *crtc_state)
+{
+	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
+	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
+	u32 val = man_trk_ctl_enable_bit_get(display);
+
+	/* SF partial frame enable has to be set even on full update */
+	val |= man_trk_ctl_partial_frame_bit_get(display);
+	val |= man_trk_ctl_continuos_full_frame(display);
+
+	/* Directly write the register */
+	intel_de_write_fw(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), val);
+
+	if (!crtc_state->enable_psr2_su_region_et)
+		return;
+
+	intel_de_write_fw(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), 0);
+}
+
 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
 				struct intel_crtc *crtc)
 {
@@ -2978,35 +3000,57 @@ void intel_psr_post_plane_update(struct intel_atomic_state *state,
 	}
 }
 
-static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
+/*
+ * From bspec: Panel Self Refresh (BDW+)
+ * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
+ * exit training time + 1.5 ms of aux channel handshake. 50 ms is
+ * defensive enough to cover everything.
+ */
+#define PSR_IDLE_TIMEOUT_MS 50
+
+static int
+_psr2_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
+				   struct intel_dsb *dsb)
 {
-	struct intel_display *display = to_intel_display(intel_dp);
-	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
+	struct intel_display *display = to_intel_display(new_crtc_state);
+	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
 
 	/*
	 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
	 * As all higher states has bit 4 of PSR2 state set we can just wait for
	 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
	 */
+	if (dsb) {
+		intel_dsb_poll(dsb, EDP_PSR2_STATUS(display, cpu_transcoder),
+			       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 0, 200,
+			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
+		return true;
+	}
+
 	return intel_de_wait_for_clear(display,
 				       EDP_PSR2_STATUS(display, cpu_transcoder),
-				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
+				       EDP_PSR2_STATUS_STATE_DEEP_SLEEP,
+				       PSR_IDLE_TIMEOUT_MS);
 }
 
-static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
+static int
+_psr1_ready_for_pipe_update_locked(const struct intel_crtc_state *new_crtc_state,
+				   struct intel_dsb *dsb)
 {
-	struct intel_display *display = to_intel_display(intel_dp);
-	enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
+	struct intel_display *display = to_intel_display(new_crtc_state);
+	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
+
+	if (dsb) {
+		intel_dsb_poll(dsb, psr_status_reg(display, cpu_transcoder),
+			       EDP_PSR_STATUS_STATE_MASK, 0, 200,
+			       PSR_IDLE_TIMEOUT_MS * 1000 / 200);
+		return true;
+	}
 
-	/*
-	 * From bspec: Panel Self Refresh (BDW+)
-	 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
-	 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
-	 * defensive enough to cover everything.
-	 */
 	return intel_de_wait_for_clear(display,
 				       psr_status_reg(display, cpu_transcoder),
-				       EDP_PSR_STATUS_STATE_MASK, 50);
+				       EDP_PSR_STATUS_STATE_MASK,
+				       PSR_IDLE_TIMEOUT_MS);
 }
 
 /**
@@ -3035,9 +3079,11 @@ void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_stat
 			continue;
 
 		if (intel_dp->psr.sel_update_enabled)
-			ret = _psr2_ready_for_pipe_update_locked(intel_dp);
+			ret = _psr2_ready_for_pipe_update_locked(new_crtc_state,
+								 NULL);
 		else
-			ret = _psr1_ready_for_pipe_update_locked(intel_dp);
+			ret = _psr1_ready_for_pipe_update_locked(new_crtc_state,
+								 NULL);
 
 		if (ret)
 			drm_err(display->drm,
@@ -3045,6 +3091,18 @@
 	}
 }
 
+void intel_psr_wait_for_idle_dsb(struct intel_dsb *dsb,
+				 const struct intel_crtc_state *new_crtc_state)
+{
+	if (!new_crtc_state->has_psr || new_crtc_state->has_panel_replay)
+		return;
+
+	if (new_crtc_state->has_sel_update)
+		_psr2_ready_for_pipe_update_locked(new_crtc_state, dsb);
+	else
+		_psr1_ready_for_pipe_update_locked(new_crtc_state, dsb);
+}
+
 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
@@ -3074,7 +3132,7 @@
 
 	/* After the unlocked wait, verify that PSR is still wanted! */
 	mutex_lock(&intel_dp->psr.lock);
-	return err == 0 && intel_dp->psr.enabled;
+	return err == 0 && intel_dp->psr.enabled && !intel_dp->psr.pause_counter;
 }
 
 static int intel_psr_fastset_force(struct intel_display *display)
@@ -3203,8 +3261,13 @@ static void intel_psr_work(struct work_struct *work)
 	if (!intel_dp->psr.enabled)
 		goto unlock;
 
-	if (READ_ONCE(intel_dp->psr.irq_aux_error))
+	if (READ_ONCE(intel_dp->psr.irq_aux_error)) {
 		intel_psr_handle_irq(intel_dp);
+		goto unlock;
+	}
+
+	if (intel_dp->psr.pause_counter)
+		goto unlock;
 
 	/*
	 * We have to make sure PSR is ready for re-enable
@@ -3250,7 +3313,9 @@ static void intel_psr_configure_full_frame_update(struct intel_dp *intel_dp)
 
 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
 {
-	if (intel_dp->psr.psr2_sel_fetch_enabled) {
+	struct intel_display *display = to_intel_display(intel_dp);
+
+	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
 		if (!intel_dp->psr.psr2_sel_fetch_cff_enabled) {
 			intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
 			intel_psr_configure_full_frame_update(intel_dp);
@@ -3314,7 +3379,6 @@ tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
 		       enum fb_op_origin origin)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
-	struct drm_i915_private *i915 = to_i915(display->drm);
 
 	if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
 	    !intel_dp->psr.active)
@@ -3329,16 +3393,15 @@ tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
 		return;
 
 	tgl_psr2_enable_dc3co(intel_dp);
-	mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
+	mod_delayed_work(display->wq.unordered, &intel_dp->psr.dc3co_work,
 			 intel_dp->psr.dc3co_exit_delay);
 }
 
 static void _psr_flush_handle(struct intel_dp *intel_dp)
 {
 	struct intel_display *display = to_intel_display(intel_dp);
-	struct drm_i915_private *dev_priv = to_i915(display->drm);
 
-	if (intel_dp->psr.psr2_sel_fetch_enabled) {
+	if (DISPLAY_VER(display) < 20 && intel_dp->psr.psr2_sel_fetch_enabled) {
 		if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
 			/* can we turn CFF off? */
 			if (intel_dp->psr.busy_frontbuffer_bits == 0)
@@ -3355,13 +3418,15 @@ static void _psr_flush_handle(struct intel_dp *intel_dp)
 		 * existing SU configuration
 		 */
 		intel_psr_configure_full_frame_update(intel_dp);
-	}
 
-	intel_psr_force_update(intel_dp);
+		intel_psr_force_update(intel_dp);
+	} else {
+		intel_psr_exit(intel_dp);
+	}
 
-	if (!intel_dp->psr.psr2_sel_fetch_enabled && !intel_dp->psr.active &&
+	if ((!intel_dp->psr.psr2_sel_fetch_enabled || DISPLAY_VER(display) >= 20) &&
 	    !intel_dp->psr.busy_frontbuffer_bits)
-		queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
+		queue_work(display->wq.unordered, &intel_dp->psr.work);
 }
 
 /**
@@ -3696,7 +3761,7 @@ static void intel_psr_apply_underrun_on_idle_wa_locked(struct intel_dp *intel_dp
 	struct intel_display *display = to_intel_display(intel_dp);
 	bool dc5_dc6_blocked;
 
-	if (!intel_dp->psr.active)
+	if (!intel_dp->psr.active || !intel_dp->psr.pkg_c_latency_used)
 		return;
 
 	dc5_dc6_blocked = is_dc5_dc6_blocked(intel_dp);
@@ -3721,7 +3786,8 @@ static void psr_dc5_dc6_wa_work(struct work_struct *work)
 
 	mutex_lock(&intel_dp->psr.lock);
 
-	if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled)
+	if (intel_dp->psr.enabled && !intel_dp->psr.panel_replay_enabled &&
+	    !intel_dp->psr.pkg_c_latency_used)
 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
 
 	mutex_unlock(&intel_dp->psr.lock);
@@ -3799,7 +3865,8 @@ void intel_psr_notify_pipe_change(struct intel_atomic_state *state,
 		goto unlock;
 
 	if ((enable && intel_dp->psr.active_non_psr_pipes) ||
-	    (!enable && !intel_dp->psr.active_non_psr_pipes)) {
+	    (!enable && !intel_dp->psr.active_non_psr_pipes) ||
+	    !intel_dp->psr.pkg_c_latency_used) {
 		intel_dp->psr.active_non_psr_pipes = active_non_psr_pipes;
 		goto unlock;
 	}
@@ -3834,7 +3901,7 @@ void intel_psr_notify_vblank_enable_disable(struct intel_display *display,
 			break;
 	}
 
-	if (intel_dp->psr.enabled)
+	if (intel_dp->psr.enabled && intel_dp->psr.pkg_c_latency_used)
 		intel_psr_apply_underrun_on_idle_wa_locked(intel_dp);
 
 	mutex_unlock(&intel_dp->psr.lock);
@@ -3916,7 +3983,8 @@ static void intel_psr_sink_capability(struct intel_dp *intel_dp,
 	seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
 	seq_printf(m, ", Panel Replay Selective Update = %s",
 		   str_yes_no(psr->sink_panel_replay_su_support));
-	if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
+	if (intel_dp->pr_dpcd[INTEL_PR_DPCD_INDEX(DP_PANEL_REPLAY_CAP_SUPPORT)] &
+	    DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
 		seq_printf(m, " (Early Transport)");
 	seq_printf(m, "\n");
 }
@@ -4021,24 +4089,30 @@ static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
 		int frame;
 
 		/*
-		 * Reading all 3 registers before hand to minimize crossing a
-		 * frame boundary between register reads
+		 * PSR2_SU_STATUS register has been tied-off since DG2/ADL-P
+		 * (it returns zeros only) and it has been removed on Xe2_LPD.
 		 */
-		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
-			val = intel_de_read(display,
-					    PSR2_SU_STATUS(display, cpu_transcoder, frame));
-			su_frames_val[frame / 3] = val;
-		}
+		if (DISPLAY_VER(display) < 13) {
+			/*
+			 * Reading all 3 registers before hand to minimize crossing a
+			 * frame boundary between register reads
+			 */
+			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
+				val = intel_de_read(display,
+						    PSR2_SU_STATUS(display, cpu_transcoder, frame));
+				su_frames_val[frame / 3] = val;
+			}
 
-		seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
+			seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
 
-		for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
-			u32 su_blocks;
+			for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
+				u32 su_blocks;
 
-			su_blocks = su_frames_val[frame / 3] &
-				    PSR2_SU_STATUS_MASK(frame);
-			su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
-			seq_printf(m, "%d\t%d\n", frame, su_blocks);
+				su_blocks = su_frames_val[frame / 3] &
+					    PSR2_SU_STATUS_MASK(frame);
+				su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
+				seq_printf(m, "%d\t%d\n", frame, su_blocks);
+			}
 		}
 	}
 
 	seq_printf(m, "PSR2 selective fetch: %s\n",
@@ -4123,12 +4197,12 @@ DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
 
 void intel_psr_debugfs_register(struct intel_display *display)
 {
-	struct drm_minor *minor = display->drm->primary;
+	struct dentry *debugfs_root = display->drm->debugfs_root;
 
-	debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
+	debugfs_create_file("i915_edp_psr_debug", 0644, debugfs_root,
 			    display, &i915_edp_psr_debug_fops);
 
-	debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
+	debugfs_create_file("i915_edp_psr_status", 0444, debugfs_root,
 			    display, &i915_edp_psr_status_fops);
 }
 
@@ -4234,3 +4308,9 @@ bool intel_psr_needs_alpm(struct intel_dp *intel_dp, const struct intel_crtc_sta
 	return intel_dp_is_edp(intel_dp) && (crtc_state->has_sel_update ||
 					     crtc_state->has_panel_replay);
 }
+
+bool intel_psr_needs_alpm_aux_less(struct intel_dp *intel_dp,
+				   const struct intel_crtc_state *crtc_state)
+{
+	return intel_dp_is_edp(intel_dp) && crtc_state->has_panel_replay;
+}
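
Note on the DSB-based idle wait introduced above: intel_dsb_poll() is given a 200 us poll interval and PSR_IDLE_TIMEOUT_MS * 1000 / 200 iterations, so the DSB path budgets the same 50 ms as the CPU-side intel_de_wait_for_clear() call it mirrors. Below is a minimal stand-alone sketch of that arithmetic only; PSR_POLL_INTERVAL_US is a name introduced here for illustration and is not part of the patch.

/* Worked example (illustrative only): PSR idle-wait poll budget. */
#include <stdio.h>

#define PSR_IDLE_TIMEOUT_MS	50	/* same 50 ms budget as intel_de_wait_for_clear() */
#define PSR_POLL_INTERVAL_US	200	/* poll interval passed to intel_dsb_poll() */

int main(void)
{
	/* 50 ms * 1000 us/ms / 200 us per poll = 250 poll iterations */
	int iterations = PSR_IDLE_TIMEOUT_MS * 1000 / PSR_POLL_INTERVAL_US;

	printf("DSB polls the status register %d times, %d us apart (%d ms total)\n",
	       iterations, PSR_POLL_INTERVAL_US,
	       iterations * PSR_POLL_INTERVAL_US / 1000);
	return 0;
}

Keeping the timeout in one #define (PSR_IDLE_TIMEOUT_MS) lets the CPU wait and the DSB poll stay in sync if the budget ever changes.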