Diffstat (limited to 'drivers/gpu/drm/i915/display/intel_psr.c')
 drivers/gpu/drm/i915/display/intel_psr.c | 441 +++++++++++++++++++----------
 1 file changed, 285 insertions(+), 156 deletions(-)
diff --git a/drivers/gpu/drm/i915/display/intel_psr.c b/drivers/gpu/drm/i915/display/intel_psr.c
index 3bfb720560c2..6a9f322d3fca 100644
--- a/drivers/gpu/drm/i915/display/intel_psr.c
+++ b/drivers/gpu/drm/i915/display/intel_psr.c
@@ -76,7 +76,7 @@ static bool intel_psr2_enabled(struct drm_i915_private *dev_priv,
const struct intel_crtc_state *crtc_state)
{
/* Cannot enable DSC and PSR2 simultaneously */
- WARN_ON(crtc_state->dsc_params.compression_enable &&
+ WARN_ON(crtc_state->dsc.compression_enable &&
crtc_state->has_psr2);
switch (dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
@@ -88,48 +88,35 @@ static bool intel_psr2_enabled(struct drm_i915_private *dev_priv,
}
}
-static int edp_psr_shift(enum transcoder cpu_transcoder)
+static void psr_irq_control(struct drm_i915_private *dev_priv)
{
- switch (cpu_transcoder) {
- case TRANSCODER_A:
- return EDP_PSR_TRANSCODER_A_SHIFT;
- case TRANSCODER_B:
- return EDP_PSR_TRANSCODER_B_SHIFT;
- case TRANSCODER_C:
- return EDP_PSR_TRANSCODER_C_SHIFT;
- default:
- MISSING_CASE(cpu_transcoder);
- /* fallthrough */
- case TRANSCODER_EDP:
- return EDP_PSR_TRANSCODER_EDP_SHIFT;
- }
-}
-
-void intel_psr_irq_control(struct drm_i915_private *dev_priv, u32 debug)
-{
- u32 debug_mask, mask;
- enum transcoder cpu_transcoder;
- u32 transcoders = BIT(TRANSCODER_EDP);
+ enum transcoder trans_shift;
+ u32 mask, val;
+ i915_reg_t imr_reg;
- if (INTEL_GEN(dev_priv) >= 8)
- transcoders |= BIT(TRANSCODER_A) |
- BIT(TRANSCODER_B) |
- BIT(TRANSCODER_C);
-
- debug_mask = 0;
- mask = 0;
- for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
- int shift = edp_psr_shift(cpu_transcoder);
-
- mask |= EDP_PSR_ERROR(shift);
- debug_mask |= EDP_PSR_POST_EXIT(shift) |
- EDP_PSR_PRE_ENTRY(shift);
+ /*
+ * gen12+ has one set of PSR registers per transcoder, relative to the
+ * transcoder, all using the same bit definitions: handle it like
+ * TRANSCODER_EDP to force a 0 shift in the bit definitions.
+ */
+ if (INTEL_GEN(dev_priv) >= 12) {
+ trans_shift = 0;
+ imr_reg = TRANS_PSR_IMR(dev_priv->psr.transcoder);
+ } else {
+ trans_shift = dev_priv->psr.transcoder;
+ imr_reg = EDP_PSR_IMR;
}
- if (debug & I915_PSR_DEBUG_IRQ)
- mask |= debug_mask;
+ mask = EDP_PSR_ERROR(trans_shift);
+ if (dev_priv->psr.debug & I915_PSR_DEBUG_IRQ)
+ mask |= EDP_PSR_POST_EXIT(trans_shift) |
+ EDP_PSR_PRE_ENTRY(trans_shift);
- I915_WRITE(EDP_PSR_IMR, ~mask);
+ /* Warning: it is masking/setting reserved bits too */
+ val = I915_READ(imr_reg);
+ val &= ~EDP_PSR_TRANS_MASK(trans_shift);
+ val |= ~mask;
+ I915_WRITE(imr_reg, val);
}
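For illustration only (not part of the patch): a minimal standalone sketch of the read-modify-write above, using hypothetical bit positions (the real EDP_PSR_ERROR()/EDP_PSR_TRANS_MASK() definitions live in i915_reg.h). The transcoder's field in IMR is cleared, then the complement of the wanted mask is OR-ed back in, which unmasks exactly the selected events while, as the comment warns, also setting bits outside the field.

#include <stdio.h>
#include <inttypes.h>

/* Hypothetical bit layout, for illustration only. */
#define PSR_PRE_ENTRY(shift)  (0x1u << ((shift) * 8))
#define PSR_POST_EXIT(shift)  (0x2u << ((shift) * 8))
#define PSR_ERROR(shift)      (0x4u << ((shift) * 8))
#define PSR_TRANS_MASK(shift) (0x7u << ((shift) * 8))

int main(void)
{
	unsigned int shift = 0;            /* gen12+ style: always 0 */
	uint32_t mask = PSR_ERROR(shift);  /* bits we want unmasked */
	uint32_t imr = 0xffffffffu;        /* pretend everything is masked */

	imr &= ~PSR_TRANS_MASK(shift);     /* clear this transcoder's field */
	imr |= ~mask;                      /* also sets bits outside the field */
	printf("IMR = 0x%08" PRIx32 "\n", imr);
	return 0;
}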
static void psr_event_print(u32 val, bool psr2_enabled)
@@ -171,60 +158,58 @@ static void psr_event_print(u32 val, bool psr2_enabled)
void intel_psr_irq_handler(struct drm_i915_private *dev_priv, u32 psr_iir)
{
- u32 transcoders = BIT(TRANSCODER_EDP);
- enum transcoder cpu_transcoder;
+ enum transcoder cpu_transcoder = dev_priv->psr.transcoder;
+ enum transcoder trans_shift;
+ i915_reg_t imr_reg;
ktime_t time_ns = ktime_get();
- u32 mask = 0;
- if (INTEL_GEN(dev_priv) >= 8)
- transcoders |= BIT(TRANSCODER_A) |
- BIT(TRANSCODER_B) |
- BIT(TRANSCODER_C);
+ if (INTEL_GEN(dev_priv) >= 12) {
+ trans_shift = 0;
+ imr_reg = TRANS_PSR_IMR(dev_priv->psr.transcoder);
+ } else {
+ trans_shift = dev_priv->psr.transcoder;
+ imr_reg = EDP_PSR_IMR;
+ }
- for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
- int shift = edp_psr_shift(cpu_transcoder);
+ if (psr_iir & EDP_PSR_PRE_ENTRY(trans_shift)) {
+ dev_priv->psr.last_entry_attempt = time_ns;
+ DRM_DEBUG_KMS("[transcoder %s] PSR entry attempt in 2 vblanks\n",
+ transcoder_name(cpu_transcoder));
+ }
- if (psr_iir & EDP_PSR_ERROR(shift)) {
- DRM_WARN("[transcoder %s] PSR aux error\n",
- transcoder_name(cpu_transcoder));
+ if (psr_iir & EDP_PSR_POST_EXIT(trans_shift)) {
+ dev_priv->psr.last_exit = time_ns;
+ DRM_DEBUG_KMS("[transcoder %s] PSR exit completed\n",
+ transcoder_name(cpu_transcoder));
- dev_priv->psr.irq_aux_error = true;
+ if (INTEL_GEN(dev_priv) >= 9) {
+ u32 val = I915_READ(PSR_EVENT(cpu_transcoder));
+ bool psr2_enabled = dev_priv->psr.psr2_enabled;
- /*
- * If this interruption is not masked it will keep
- * interrupting so fast that it prevents the scheduled
- * work to run.
- * Also after a PSR error, we don't want to arm PSR
- * again so we don't care about unmask the interruption
- * or unset irq_aux_error.
- */
- mask |= EDP_PSR_ERROR(shift);
- }
-
- if (psr_iir & EDP_PSR_PRE_ENTRY(shift)) {
- dev_priv->psr.last_entry_attempt = time_ns;
- DRM_DEBUG_KMS("[transcoder %s] PSR entry attempt in 2 vblanks\n",
- transcoder_name(cpu_transcoder));
+ I915_WRITE(PSR_EVENT(cpu_transcoder), val);
+ psr_event_print(val, psr2_enabled);
}
+ }
- if (psr_iir & EDP_PSR_POST_EXIT(shift)) {
- dev_priv->psr.last_exit = time_ns;
- DRM_DEBUG_KMS("[transcoder %s] PSR exit completed\n",
- transcoder_name(cpu_transcoder));
+ if (psr_iir & EDP_PSR_ERROR(trans_shift)) {
+ u32 val;
- if (INTEL_GEN(dev_priv) >= 9) {
- u32 val = I915_READ(PSR_EVENT(cpu_transcoder));
- bool psr2_enabled = dev_priv->psr.psr2_enabled;
+ DRM_WARN("[transcoder %s] PSR aux error\n",
+ transcoder_name(cpu_transcoder));
- I915_WRITE(PSR_EVENT(cpu_transcoder), val);
- psr_event_print(val, psr2_enabled);
- }
- }
- }
+ dev_priv->psr.irq_aux_error = true;
- if (mask) {
- mask |= I915_READ(EDP_PSR_IMR);
- I915_WRITE(EDP_PSR_IMR, mask);
+ /*
+ * If this interruption is not masked it will keep
+ * interrupting so fast that it prevents the scheduled
+ * work from running.
+ * Also, after a PSR error we don't want to arm PSR
+ * again, so we don't care about unmasking the interrupt
+ * or unsetting irq_aux_error.
+ */
+ val = I915_READ(imr_reg);
+ val |= EDP_PSR_ERROR(trans_shift);
+ I915_WRITE(imr_reg, val);
schedule_work(&dev_priv->psr.work);
}
@@ -283,6 +268,11 @@ void intel_psr_init_dpcd(struct intel_dp *intel_dp)
struct drm_i915_private *dev_priv =
to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
+ if (dev_priv->psr.dp) {
+ DRM_WARN("More than one eDP panel found, PSR support should be extended\n");
+ return;
+ }
+
drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
sizeof(intel_dp->psr_dpcd));
@@ -305,7 +295,6 @@ void intel_psr_init_dpcd(struct intel_dp *intel_dp)
dev_priv->psr.sink_sync_latency =
intel_dp_get_sink_sync_latency(intel_dp);
- WARN_ON(dev_priv->psr.dp);
dev_priv->psr.dp = intel_dp;
if (INTEL_GEN(dev_priv) >= 9 &&
@@ -390,7 +379,7 @@ static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
BUILD_BUG_ON(sizeof(aux_msg) > 20);
for (i = 0; i < sizeof(aux_msg); i += 4)
- I915_WRITE(EDP_PSR_AUX_DATA(i >> 2),
+ I915_WRITE(EDP_PSR_AUX_DATA(dev_priv->psr.transcoder, i >> 2),
intel_dp_pack_aux(&aux_msg[i], sizeof(aux_msg) - i));
aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
@@ -401,7 +390,7 @@ static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
/* Select only valid bits for SRD_AUX_CTL */
aux_ctl &= psr_aux_mask;
- I915_WRITE(EDP_PSR_AUX_CTL, aux_ctl);
+ I915_WRITE(EDP_PSR_AUX_CTL(dev_priv->psr.transcoder), aux_ctl);
}
static void intel_psr_enable_sink(struct intel_dp *intel_dp)
@@ -491,8 +480,9 @@ static void hsw_activate_psr1(struct intel_dp *intel_dp)
if (INTEL_GEN(dev_priv) >= 8)
val |= EDP_PSR_CRC_ENABLE;
- val |= I915_READ(EDP_PSR_CTL) & EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK;
- I915_WRITE(EDP_PSR_CTL, val);
+ val |= (I915_READ(EDP_PSR_CTL(dev_priv->psr.transcoder)) &
+ EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK);
+ I915_WRITE(EDP_PSR_CTL(dev_priv->psr.transcoder), val);
}
static void hsw_activate_psr2(struct intel_dp *intel_dp)
@@ -528,9 +518,87 @@ static void hsw_activate_psr2(struct intel_dp *intel_dp)
* PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec is
* recommending keep this bit unset while PSR2 is enabled.
*/
- I915_WRITE(EDP_PSR_CTL, 0);
+ I915_WRITE(EDP_PSR_CTL(dev_priv->psr.transcoder), 0);
+
+ I915_WRITE(EDP_PSR2_CTL(dev_priv->psr.transcoder), val);
+}
- I915_WRITE(EDP_PSR2_CTL, val);
+static bool
+transcoder_has_psr2(struct drm_i915_private *dev_priv, enum transcoder trans)
+{
+ if (INTEL_GEN(dev_priv) < 9)
+ return false;
+ else if (INTEL_GEN(dev_priv) >= 12)
+ return trans == TRANSCODER_A;
+ else
+ return trans == TRANSCODER_EDP;
+}
+
+static u32 intel_get_frame_time_us(const struct intel_crtc_state *cstate)
+{
+ if (!cstate || !cstate->base.active)
+ return 0;
+
+ return DIV_ROUND_UP(1000 * 1000,
+ drm_mode_vrefresh(&cstate->base.adjusted_mode));
+}
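For illustration only (not part of the patch): assuming a 60 Hz adjusted mode, the helper above evaluates to DIV_ROUND_UP(1000 * 1000, 60) = 16667 us per frame; intel_psr_enable_locked() stores this value as dc3co_exit_delay.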
+
+static void psr2_program_idle_frames(struct drm_i915_private *dev_priv,
+ u32 idle_frames)
+{
+ u32 val;
+
+ idle_frames <<= EDP_PSR2_IDLE_FRAME_SHIFT;
+ val = I915_READ(EDP_PSR2_CTL(dev_priv->psr.transcoder));
+ val &= ~EDP_PSR2_IDLE_FRAME_MASK;
+ val |= idle_frames;
+ I915_WRITE(EDP_PSR2_CTL(dev_priv->psr.transcoder), val);
+}
+
+static void tgl_psr2_enable_dc3co(struct drm_i915_private *dev_priv)
+{
+ psr2_program_idle_frames(dev_priv, 0);
+ intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO);
+}
+
+static void tgl_psr2_disable_dc3co(struct drm_i915_private *dev_priv)
+{
+ int idle_frames;
+
+ intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
+ /*
+ * Restore the PSR2 idle frame count; use 6 as the minimum to cover all
+ * known cases, including the off-by-one issue that HW has in some cases.
+ */
+ idle_frames = max(6, dev_priv->vbt.psr.idle_frames);
+ idle_frames = max(idle_frames, dev_priv->psr.sink_sync_latency + 1);
+ psr2_program_idle_frames(dev_priv, idle_frames);
+}
+
+static void tgl_dc5_idle_thread(struct work_struct *work)
+{
+ struct drm_i915_private *dev_priv =
+ container_of(work, typeof(*dev_priv), psr.idle_work.work);
+
+ mutex_lock(&dev_priv->psr.lock);
+ /* If delayed work is pending, it is not idle */
+ if (delayed_work_pending(&dev_priv->psr.idle_work))
+ goto unlock;
+
+ DRM_DEBUG_KMS("DC5/6 idle thread\n");
+ tgl_psr2_disable_dc3co(dev_priv);
+unlock:
+ mutex_unlock(&dev_priv->psr.lock);
+}
+
+static void tgl_disallow_dc3co_on_psr2_exit(struct drm_i915_private *dev_priv)
+{
+ if (!dev_priv->psr.dc3co_enabled)
+ return;
+
+ cancel_delayed_work(&dev_priv->psr.idle_work);
+ /* Before PSR2 exit, disallow DC3CO */
+ tgl_psr2_disable_dc3co(dev_priv);
}
static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
@@ -544,17 +612,26 @@ static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
if (!dev_priv->psr.sink_psr2_support)
return false;
+ if (!transcoder_has_psr2(dev_priv, crtc_state->cpu_transcoder)) {
+ DRM_DEBUG_KMS("PSR2 not supported in transcoder %s\n",
+ transcoder_name(crtc_state->cpu_transcoder));
+ return false;
+ }
+
/*
* DSC and PSR2 cannot be enabled simultaneously. If a requested
* resolution requires DSC to be enabled, priority is given to DSC
* over PSR2.
*/
- if (crtc_state->dsc_params.compression_enable) {
+ if (crtc_state->dsc.compression_enable) {
DRM_DEBUG_KMS("PSR2 cannot be enabled since DSC is enabled\n");
return false;
}
- if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
+ if (INTEL_GEN(dev_priv) >= 12) {
+ psr_max_h = 5120;
+ psr_max_v = 3200;
+ } else if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
psr_max_h = 4096;
psr_max_v = 2304;
} else if (IS_GEN(dev_priv, 9)) {
@@ -606,10 +683,9 @@ void intel_psr_compute_config(struct intel_dp *intel_dp,
/*
* HSW spec explicitly says PSR is tied to port A.
- * BDW+ platforms with DDI implementation of PSR have different
- * PSR registers per transcoder and we only implement transcoder EDP
- * ones. Since by Display design transcoder EDP is tied to port A
- * we can safely escape based on the port A.
+ * BDW+ platforms have an instance of the PSR registers per transcoder,
+ * but for now the driver only supports one instance of PSR, so let's
+ * keep it hardcoded to PORT_A.
*/
if (dig_port->base.port != PORT_A) {
DRM_DEBUG_KMS("PSR condition failed: Port not supported\n");
@@ -648,9 +724,10 @@ static void intel_psr_activate(struct intel_dp *intel_dp)
{
struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
- if (INTEL_GEN(dev_priv) >= 9)
- WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
- WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
+ if (transcoder_has_psr2(dev_priv, dev_priv->psr.transcoder))
+ WARN_ON(I915_READ(EDP_PSR2_CTL(dev_priv->psr.transcoder)) & EDP_PSR2_ENABLE);
+
+ WARN_ON(I915_READ(EDP_PSR_CTL(dev_priv->psr.transcoder)) & EDP_PSR_ENABLE);
WARN_ON(dev_priv->psr.active);
lockdep_assert_held(&dev_priv->psr.lock);
@@ -663,25 +740,6 @@ static void intel_psr_activate(struct intel_dp *intel_dp)
dev_priv->psr.active = true;
}
-static i915_reg_t gen9_chicken_trans_reg(struct drm_i915_private *dev_priv,
- enum transcoder cpu_transcoder)
-{
- static const i915_reg_t regs[] = {
- [TRANSCODER_A] = CHICKEN_TRANS_A,
- [TRANSCODER_B] = CHICKEN_TRANS_B,
- [TRANSCODER_C] = CHICKEN_TRANS_C,
- [TRANSCODER_EDP] = CHICKEN_TRANS_EDP,
- };
-
- WARN_ON(INTEL_GEN(dev_priv) < 9);
-
- if (WARN_ON(cpu_transcoder >= ARRAY_SIZE(regs) ||
- !regs[cpu_transcoder].reg))
- cpu_transcoder = TRANSCODER_A;
-
- return regs[cpu_transcoder];
-}
-
static void intel_psr_enable_source(struct intel_dp *intel_dp,
const struct intel_crtc_state *crtc_state)
{
@@ -697,8 +755,7 @@ static void intel_psr_enable_source(struct intel_dp *intel_dp,
if (dev_priv->psr.psr2_enabled && (IS_GEN(dev_priv, 9) &&
!IS_GEMINILAKE(dev_priv))) {
- i915_reg_t reg = gen9_chicken_trans_reg(dev_priv,
- cpu_transcoder);
+ i915_reg_t reg = CHICKEN_TRANS(cpu_transcoder);
u32 chicken = I915_READ(reg);
chicken |= PSR2_VSC_ENABLE_PROG_HEADER |
@@ -720,19 +777,46 @@ static void intel_psr_enable_source(struct intel_dp *intel_dp,
if (INTEL_GEN(dev_priv) < 11)
mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
- I915_WRITE(EDP_PSR_DEBUG, mask);
+ I915_WRITE(EDP_PSR_DEBUG(dev_priv->psr.transcoder), mask);
+
+ psr_irq_control(dev_priv);
}
static void intel_psr_enable_locked(struct drm_i915_private *dev_priv,
const struct intel_crtc_state *crtc_state)
{
struct intel_dp *intel_dp = dev_priv->psr.dp;
+ u32 val;
WARN_ON(dev_priv->psr.enabled);
dev_priv->psr.psr2_enabled = intel_psr2_enabled(dev_priv, crtc_state);
dev_priv->psr.busy_frontbuffer_bits = 0;
dev_priv->psr.pipe = to_intel_crtc(crtc_state->base.crtc)->pipe;
+ dev_priv->psr.dc3co_enabled = !!crtc_state->dc3co_exitline;
+ dev_priv->psr.dc3co_exit_delay = intel_get_frame_time_us(crtc_state);
+ dev_priv->psr.transcoder = crtc_state->cpu_transcoder;
+
+ /*
+ * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
+ * will still keep the error set even after the reset done in the
+ * irq_preinstall and irq_uninstall hooks.
+ * Enabling PSR in this situation causes the screen to freeze the
+ * first time the PSR HW tries to activate, so let's keep PSR disabled
+ * to avoid any rendering problems.
+ */
+ if (INTEL_GEN(dev_priv) >= 12) {
+ val = I915_READ(TRANS_PSR_IIR(dev_priv->psr.transcoder));
+ val &= EDP_PSR_ERROR(0);
+ } else {
+ val = I915_READ(EDP_PSR_IIR);
+ val &= EDP_PSR_ERROR(dev_priv->psr.transcoder);
+ }
+ if (val) {
+ dev_priv->psr.sink_not_reliable = true;
+ DRM_DEBUG_KMS("PSR interruption error set, not enabling PSR\n");
+ return;
+ }
DRM_DEBUG_KMS("Enabling PSR%s\n",
dev_priv->psr.psr2_enabled ? "2" : "1");
@@ -782,20 +866,28 @@ static void intel_psr_exit(struct drm_i915_private *dev_priv)
u32 val;
if (!dev_priv->psr.active) {
- if (INTEL_GEN(dev_priv) >= 9)
- WARN_ON(I915_READ(EDP_PSR2_CTL) & EDP_PSR2_ENABLE);
- WARN_ON(I915_READ(EDP_PSR_CTL) & EDP_PSR_ENABLE);
+ if (transcoder_has_psr2(dev_priv, dev_priv->psr.transcoder)) {
+ val = I915_READ(EDP_PSR2_CTL(dev_priv->psr.transcoder));
+ WARN_ON(val & EDP_PSR2_ENABLE);
+ }
+
+ val = I915_READ(EDP_PSR_CTL(dev_priv->psr.transcoder));
+ WARN_ON(val & EDP_PSR_ENABLE);
+
return;
}
if (dev_priv->psr.psr2_enabled) {
- val = I915_READ(EDP_PSR2_CTL);
+ tgl_disallow_dc3co_on_psr2_exit(dev_priv);
+ val = I915_READ(EDP_PSR2_CTL(dev_priv->psr.transcoder));
WARN_ON(!(val & EDP_PSR2_ENABLE));
- I915_WRITE(EDP_PSR2_CTL, val & ~EDP_PSR2_ENABLE);
+ val &= ~EDP_PSR2_ENABLE;
+ I915_WRITE(EDP_PSR2_CTL(dev_priv->psr.transcoder), val);
} else {
- val = I915_READ(EDP_PSR_CTL);
+ val = I915_READ(EDP_PSR_CTL(dev_priv->psr.transcoder));
WARN_ON(!(val & EDP_PSR_ENABLE));
- I915_WRITE(EDP_PSR_CTL, val & ~EDP_PSR_ENABLE);
+ val &= ~EDP_PSR_ENABLE;
+ I915_WRITE(EDP_PSR_CTL(dev_priv->psr.transcoder), val);
}
dev_priv->psr.active = false;
}
@@ -817,10 +909,10 @@ static void intel_psr_disable_locked(struct intel_dp *intel_dp)
intel_psr_exit(dev_priv);
if (dev_priv->psr.psr2_enabled) {
- psr_status = EDP_PSR2_STATUS;
+ psr_status = EDP_PSR2_STATUS(dev_priv->psr.transcoder);
psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
} else {
- psr_status = EDP_PSR_STATUS;
+ psr_status = EDP_PSR_STATUS(dev_priv->psr.transcoder);
psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
}
@@ -859,6 +951,7 @@ void intel_psr_disable(struct intel_dp *intel_dp,
mutex_unlock(&dev_priv->psr.lock);
cancel_work_sync(&dev_priv->psr.work);
+ cancel_delayed_work_sync(&dev_priv->psr.idle_work);
}
static void psr_force_hw_tracking_exit(struct drm_i915_private *dev_priv)
@@ -963,7 +1056,8 @@ int intel_psr_wait_for_idle(const struct intel_crtc_state *new_crtc_state,
* defensive enough to cover everything.
*/
- return __intel_wait_for_register(&dev_priv->uncore, EDP_PSR_STATUS,
+ return __intel_wait_for_register(&dev_priv->uncore,
+ EDP_PSR_STATUS(dev_priv->psr.transcoder),
EDP_PSR_STATUS_STATE_MASK,
EDP_PSR_STATUS_STATE_IDLE, 2, 50,
out_value);
@@ -979,10 +1073,10 @@ static bool __psr_wait_for_idle_locked(struct drm_i915_private *dev_priv)
return false;
if (dev_priv->psr.psr2_enabled) {
- reg = EDP_PSR2_STATUS;
+ reg = EDP_PSR2_STATUS(dev_priv->psr.transcoder);
mask = EDP_PSR2_STATUS_STATE_MASK;
} else {
- reg = EDP_PSR_STATUS;
+ reg = EDP_PSR_STATUS(dev_priv->psr.transcoder);
mask = EDP_PSR_STATUS_STATE_MASK;
}
@@ -1067,7 +1161,13 @@ int intel_psr_debug_set(struct drm_i915_private *dev_priv, u64 val)
old_mode = dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK;
dev_priv->psr.debug = val;
- intel_psr_irq_control(dev_priv, dev_priv->psr.debug);
+
+ /*
+ * Do it right away if it's already enabled, otherwise it will be done
+ * when enabling the source.
+ */
+ if (dev_priv->psr.enabled)
+ psr_irq_control(dev_priv);
mutex_unlock(&dev_priv->psr.lock);
@@ -1159,6 +1259,44 @@ void intel_psr_invalidate(struct drm_i915_private *dev_priv,
mutex_unlock(&dev_priv->psr.lock);
}
+/*
+ * Once we completely rely on PSR2 S/W tracking in the future,
+ * intel_psr_flush() will invalidate and flush the PSR for the
+ * ORIGIN_FLIP event as well, so tgl_dc3co_flush() will need to be
+ * changed accordingly.
+ */
+static void
+tgl_dc3co_flush(struct drm_i915_private *dev_priv,
+ unsigned int frontbuffer_bits, enum fb_op_origin origin)
+{
+ u32 delay;
+
+ mutex_lock(&dev_priv->psr.lock);
+
+ if (!dev_priv->psr.dc3co_enabled)
+ goto unlock;
+
+ if (!dev_priv->psr.psr2_enabled || !dev_priv->psr.active)
+ goto unlock;
+
+ /*
+ * Every frontbuffer flush from a flip event modifies the delay of the
+ * delayed work; when the delayed work finally gets to run, it means
+ * the display has been idle.
+ */
+ if (!(frontbuffer_bits &
+ INTEL_FRONTBUFFER_ALL_MASK(dev_priv->psr.pipe)))
+ goto unlock;
+
+ tgl_psr2_enable_dc3co(dev_priv);
+ /* DC5/DC6 required idle frames = 6 */
+ delay = 6 * dev_priv->psr.dc3co_exit_delay;
+ mod_delayed_work(system_wq, &dev_priv->psr.idle_work,
+ usecs_to_jiffies(delay));
+
+unlock:
+ mutex_unlock(&dev_priv->psr.lock);
+}
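For illustration only (not part of the patch): a minimal, self-contained sketch of the delay scheduled above, assuming a 60 Hz panel and a local stand-in for the kernel's DIV_ROUND_UP() macro; the driver multiplies the stored dc3co_exit_delay by the 6 required idle frames before passing it to mod_delayed_work() via usecs_to_jiffies().

#include <stdio.h>

/* Local stand-in for the kernel's DIV_ROUND_UP() macro. */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Frame time in microseconds, as in intel_get_frame_time_us(). */
static unsigned int frame_time_us(unsigned int vrefresh)
{
	return DIV_ROUND_UP(1000 * 1000, vrefresh);
}

int main(void)
{
	unsigned int vrefresh = 60;	/* assumed panel refresh rate */

	/* DC5/DC6 required idle frames = 6, so ~100 ms at 60 Hz. */
	printf("DC3CO exit delay: %u us\n", 6 * frame_time_us(vrefresh));
	return 0;
}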
+
/**
* intel_psr_flush - Flush PSR
* @dev_priv: i915 device
@@ -1178,8 +1316,10 @@ void intel_psr_flush(struct drm_i915_private *dev_priv,
if (!CAN_PSR(dev_priv))
return;
- if (origin == ORIGIN_FLIP)
+ if (origin == ORIGIN_FLIP) {
+ tgl_dc3co_flush(dev_priv, frontbuffer_bits, origin);
return;
+ }
mutex_lock(&dev_priv->psr.lock);
if (!dev_priv->psr.enabled) {
@@ -1208,45 +1348,34 @@ void intel_psr_flush(struct drm_i915_private *dev_priv,
*/
void intel_psr_init(struct drm_i915_private *dev_priv)
{
- u32 val;
-
if (!HAS_PSR(dev_priv))
return;
- dev_priv->psr_mmio_base = IS_HASWELL(dev_priv) ?
- HSW_EDP_PSR_BASE : BDW_EDP_PSR_BASE;
-
if (!dev_priv->psr.sink_support)
return;
+ if (IS_HASWELL(dev_priv))
+ /*
+ * HSW doesn't have its PSR registers in the same space as the
+ * transcoders, so set this to a value that, when subtracted from a
+ * register in transcoder space, results in the right offset for HSW.
+ */
+ dev_priv->hsw_psr_mmio_adjust = _SRD_CTL_EDP - _HSW_EDP_PSR_BASE;
+
if (i915_modparams.enable_psr == -1)
if (INTEL_GEN(dev_priv) < 9 || !dev_priv->vbt.psr.enable)
i915_modparams.enable_psr = 0;
- /*
- * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
- * will still keep the error set even after the reset done in the
- * irq_preinstall and irq_uninstall hooks.
- * And enabling in this situation cause the screen to freeze in the
- * first time that PSR HW tries to activate so lets keep PSR disabled
- * to avoid any rendering problems.
- */
- val = I915_READ(EDP_PSR_IIR);
- val &= EDP_PSR_ERROR(edp_psr_shift(TRANSCODER_EDP));
- if (val) {
- DRM_DEBUG_KMS("PSR interruption error set\n");
- dev_priv->psr.sink_not_reliable = true;
- }
-
/* Set link_standby x link_off defaults */
if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
/* HSW and BDW require workarounds that we don't implement. */
dev_priv->psr.link_standby = false;
- else
- /* For new platforms let's respect VBT back again */
+ else if (INTEL_GEN(dev_priv) < 12)
+ /* For newer platforms up to TGL, let's respect the VBT again */
dev_priv->psr.link_standby = dev_priv->vbt.psr.full_link;
INIT_WORK(&dev_priv->psr.work, intel_psr_work);
+ INIT_DELAYED_WORK(&dev_priv->psr.idle_work, tgl_dc5_idle_thread);
mutex_init(&dev_priv->psr.lock);
}
@@ -1288,7 +1417,7 @@ void intel_psr_short_pulse(struct intel_dp *intel_dp)
if (val & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
DRM_DEBUG_KMS("PSR VSC SDP uncorrectable error, disabling PSR\n");
if (val & DP_PSR_LINK_CRC_ERROR)
- DRM_ERROR("PSR Link CRC error, disabling PSR\n");
+ DRM_DEBUG_KMS("PSR Link CRC error, disabling PSR\n");
if (val & ~errors)
DRM_ERROR("PSR_ERROR_STATUS unhandled errors %x\n",