/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <linux/module.h>
#include <linux/pm_runtime.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_plane_helper.h>

#include "display/intel_atomic.h"
#include "display/intel_bw.h"
#include "display/intel_display_types.h"
#include "display/intel_fbc.h"
#include "display/intel_sprite.h"

#include "gt/intel_llc.h"

#include "i915_drv.h"
#include "i915_fixed.h"
#include "i915_irq.h"
#include "i915_trace.h"
#include "intel_pm.h"
#include "intel_sideband.h"
#include "../../../platform/x86/intel_ips.h"

/* Stores plane specific WM parameters */
struct skl_wm_params {
	bool x_tiled, y_tiled;
	bool rc_surface;
	bool is_planar;
	u32 width;
	u8 cpp;
	u32 plane_pixel_rate;
	u32 y_min_scanlines;
	u32 plane_bytes_per_line;
	uint_fixed_16_16_t plane_blocks_per_line;
	uint_fixed_16_16_t y_tile_minimum;
	u32 linetime_us;
	u32 dbuf_block_size;
};

/* used in computing the new watermarks state */
struct intel_wm_config {
	unsigned int num_pipes_active;
	bool sprites_enabled;
	bool sprites_scaled;
};

static void gen9_init_clock_gating(struct drm_i915_private *dev_priv)
{
	if (HAS_LLC(dev_priv)) {
		/*
		 * WaCompressedResourceDisplayNewHashMode:skl,kbl
		 * Display WA #0390: skl,kbl
		 *
		 * Must match Sampler, Pixel Back End, and Media. See
		 * WaCompressedResourceSamplerPbeMediaNewHashMode.
		 */
		I915_WRITE(CHICKEN_PAR1_1,
			   I915_READ(CHICKEN_PAR1_1) |
			   SKL_DE_COMPRESSED_HASH_MODE);
	}

	/* See Bspec note for PSR2_CTL bit 31, Wa#828:skl,bxt,kbl,cfl */
	I915_WRITE(CHICKEN_PAR1_1,
		   I915_READ(CHICKEN_PAR1_1) | SKL_EDP_PSR_FIX_RDWRAP);

	/* WaEnableChickenDCPR:skl,bxt,kbl,glk,cfl */
	I915_WRITE(GEN8_CHICKEN_DCPR_1,
		   I915_READ(GEN8_CHICKEN_DCPR_1) | MASK_WAKEMEM);

	/* WaFbcTurnOffFbcWatermark:skl,bxt,kbl,cfl */
	/* WaFbcWakeMemOn:skl,bxt,kbl,glk,cfl */
	I915_WRITE(DISP_ARB_CTL, I915_READ(DISP_ARB_CTL) |
		   DISP_FBC_WM_DIS |
		   DISP_FBC_MEMORY_WAKE);

	/* WaFbcHighMemBwCorruptionAvoidance:skl,bxt,kbl,cfl */
	I915_WRITE(ILK_DPFC_CHICKEN, I915_READ(ILK_DPFC_CHICKEN) |
		   ILK_DPFC_DISABLE_DUMMY0);

	if (IS_SKYLAKE(dev_priv)) {
		/* WaDisableDopClockGating */
		I915_WRITE(GEN7_MISCCPCTL, I915_READ(GEN7_MISCCPCTL)
			   & ~GEN7_DOP_CLOCK_GATE_ENABLE);
	}
}

static void bxt_init_clock_gating(struct drm_i915_private *dev_priv)
{
	gen9_init_clock_gating(dev_priv);

	/* WaDisableSDEUnitClockGating:bxt */
	I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) |
		   GEN8_SDEUNIT_CLOCK_GATE_DISABLE);

	/*
	 * FIXME:
	 * GEN8_HDCUNIT_CLOCK_GATE_DISABLE_HDCREQ applies on 3x6 GT SKUs only.
	 */
	I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) |
		   GEN8_HDCUNIT_CLOCK_GATE_DISABLE_HDCREQ);

	/*
	 * Wa: Backlight PWM may stop in the asserted state, causing backlight
	 * to stay fully on.
	 */
	I915_WRITE(GEN9_CLKGATE_DIS_0, I915_READ(GEN9_CLKGATE_DIS_0) |
		   PWM1_GATING_DIS | PWM2_GATING_DIS);

	/*
	 * Lower the display internal timeout.
	 * This is needed to avoid any hard hangs when DSI port PLL
	 * is off and an MMIO access is attempted by any privileged
	 * application, using batch buffers or any other means.
	 */
	I915_WRITE(RM_TIMEOUT, MMIO_TIMEOUT_US(950));
}

static void glk_init_clock_gating(struct drm_i915_private *dev_priv)
{
	gen9_init_clock_gating(dev_priv);

	/*
	 * WaDisablePWMClockGating:glk
	 * Backlight PWM may stop in the asserted state, causing backlight
	 * to stay fully on.
	 */
	I915_WRITE(GEN9_CLKGATE_DIS_0, I915_READ(GEN9_CLKGATE_DIS_0) |
		   PWM1_GATING_DIS | PWM2_GATING_DIS);
}

static void pnv_get_mem_freq(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = I915_READ(CLKCFG);

	switch (tmp & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_533:
		dev_priv->fsb_freq = 533; /* 133*4 */
		break;
	case CLKCFG_FSB_800:
		dev_priv->fsb_freq = 800; /* 200*4 */
		break;
	case CLKCFG_FSB_667:
		dev_priv->fsb_freq = 667; /* 167*4 */
		break;
	case CLKCFG_FSB_400:
		dev_priv->fsb_freq = 400; /* 100*4 */
		break;
	}

	switch (tmp & CLKCFG_MEM_MASK) {
	case CLKCFG_MEM_533:
		dev_priv->mem_freq = 533;
		break;
	case CLKCFG_MEM_667:
		dev_priv->mem_freq = 667;
		break;
	case CLKCFG_MEM_800:
		dev_priv->mem_freq = 800;
		break;
	}

	/* detect pineview DDR3 setting */
	tmp = I915_READ(CSHRDDR3CTL);
	dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
}

static void ilk_get_mem_freq(struct drm_i915_private *dev_priv)
{
	u16 ddrpll, csipll;

	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
	csipll = intel_uncore_read16(&dev_priv->uncore, CSIPLL0);

	switch (ddrpll & 0xff) {
	case 0xc:
		dev_priv->mem_freq = 800;
		break;
	case 0x10:
		dev_priv->mem_freq = 1066;
		break;
	case 0x14:
		dev_priv->mem_freq = 1333;
		break;
	case 0x18:
		dev_priv->mem_freq = 1600;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
			ddrpll & 0xff);
		dev_priv->mem_freq = 0;
		break;
	}

	switch (csipll & 0x3ff) {
	case 0x00c:
		dev_priv->fsb_freq = 3200;
		break;
	case 0x00e:
		dev_priv->fsb_freq = 3733;
		break;
	case 0x010:
		dev_priv->fsb_freq = 4266;
		break;
	case 0x012:
		dev_priv->fsb_freq = 4800;
		break;
	case 0x014:
		dev_priv->fsb_freq = 5333;
		break;
	case 0x016:
		dev_priv->fsb_freq = 5866;
		break;
	case 0x018:
		dev_priv->fsb_freq = 6400;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n",
			csipll & 0x3ff);
		dev_priv->fsb_freq = 0;
		break;
	}
}

static const struct cxsr_latency cxsr_latency_table[] = {
	{1, 0, 800, 400, 3382, 33382, 3983, 33983},    /* DDR2-400 SC */
	{1, 0, 800, 667, 3354, 33354, 3807, 33807},    /* DDR2-667 SC */
	{1, 0, 800, 800, 3347, 33347, 3763, 33763},    /* DDR2-800 SC */
	{1, 1, 800, 667, 6420, 36420, 6873, 36873},    /* DDR3-667 SC */
	{1, 1, 800, 800, 5902, 35902, 6318, 36318},    /* DDR3-800 SC */

	{1, 0, 667, 400, 3400, 33400, 4021, 34021},    /* DDR2-400 SC */
	{1, 0, 667, 667, 3372, 33372, 3845, 33845},    /* DDR2-667 SC */
	{1, 0, 667, 800, 3386, 33386, 3822, 33822},    /* DDR2-800 SC */
	{1, 1, 667, 667, 6438, 36438, 6911, 36911},    /* DDR3-667 SC */
	{1, 1, 667, 800, 5941, 35941, 6377, 36377},    /* DDR3-800 SC */

	{1, 0, 400, 400, 3472, 33472, 4173, 34173},    /* DDR2-400 SC */
	{1, 0, 400, 667, 3443, 33443, 3996, 33996},    /* DDR2-667 SC */
	{1, 0, 400, 800, 3430, 33430, 3946, 33946},    /* DDR2-800 SC */
	{1, 1, 400, 667, 6509, 36509, 7062, 37062},    /* DDR3-667 SC */
	{1, 1, 400, 800, 5985, 35985, 6501, 36501},    /* DDR3-800 SC */

	{0, 0, 800, 400, 3438, 33438, 4065, 34065},    /* DDR2-400 SC */
	{0, 0, 800, 667, 3410, 33410, 3889, 33889},    /* DDR2-667 SC */
	{0, 0, 800, 800, 3403, 33403, 3845, 33845},    /* DDR2-800 SC */
	{0, 1, 800, 667, 6476, 36476, 6955, 36955},    /* DDR3-667 SC */
	{0, 1, 800, 800, 5958, 35958, 6400, 36400},    /* DDR3-800 SC */

	{0, 0, 667, 400, 3456, 33456, 4103, 34106},    /* DDR2-400 SC */
	{0, 0, 667, 667, 3428, 33428, 3927, 33927},    /* DDR2-667 SC */
	{0, 0, 667, 800, 3443, 33443, 3905, 33905},    /* DDR2-800 SC */
	{0, 1, 667, 667, 6494, 36494, 6993, 36993},    /* DDR3-667 SC */
	{0, 1, 667, 800, 5998, 35998, 6460, 36460},    /* DDR3-800 SC */

	{0, 0, 400, 400, 3528, 33528, 4255, 34255},    /* DDR2-400 SC */
	{0, 0, 400, 667, 3500, 33500, 4079, 34079},    /* DDR2-667 SC */
	{0, 0, 400, 800, 3487, 33487, 4029, 34029},    /* DDR2-800 SC */
	{0, 1, 400, 667, 6566, 36566, 7145, 37145},    /* DDR3-667 SC */
	{0, 1, 400, 800, 6042, 36042, 6584, 36584},    /* DDR3-800 SC */
};

static const struct cxsr_latency *intel_get_cxsr_latency(bool is_desktop,
							 bool is_ddr3,
							 int fsb,
							 int mem)
{
	const struct cxsr_latency *latency;
	int i;

	if (fsb == 0 || mem == 0)
		return NULL;

	for (i = 0; i < ARRAY_SIZE(cxsr_latency_table); i++) {
		latency = &cxsr_latency_table[i];
		if (is_desktop == latency->is_desktop &&
		    is_ddr3 == latency->is_ddr3 &&
		    fsb == latency->fsb_freq && mem == latency->mem_freq)
			return latency;
	}

	DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n");

	return NULL;
}

static void chv_set_memory_dvfs(struct drm_i915_private *dev_priv, bool enable)
{
	u32 val;

	vlv_punit_get(dev_priv);

	val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2);
	if (enable)
		val &= ~FORCE_DDR_HIGH_FREQ;
	else
		val |= FORCE_DDR_HIGH_FREQ;
	val &= ~FORCE_DDR_LOW_FREQ;
	val |= FORCE_DDR_FREQ_REQ_ACK;
	vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val);

	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) &
		      FORCE_DDR_FREQ_REQ_ACK) == 0, 3))
		drm_err(&dev_priv->drm,
			"timed out waiting for Punit DDR DVFS request\n");

	vlv_punit_put(dev_priv);
}

static void chv_set_memory_pm5(struct drm_i915_private *dev_priv, bool enable)
{
	u32 val;

	vlv_punit_get(dev_priv);

	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
	if (enable)
		val |= DSP_MAXFIFO_PM5_ENABLE;
	else
		val &= ~DSP_MAXFIFO_PM5_ENABLE;
	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);

	vlv_punit_put(dev_priv);
}

#define FW_WM(value, plane) \
	(((value) << DSPFW_ ## plane ## _SHIFT) & DSPFW_ ## plane ## _MASK)

static bool _intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
{
	bool was_enabled;
	u32 val;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		was_enabled = I915_READ(FW_BLC_SELF_VLV) & FW_CSPWRDWNEN;
		I915_WRITE(FW_BLC_SELF_VLV, enable ? FW_CSPWRDWNEN : 0);
		POSTING_READ(FW_BLC_SELF_VLV);
	} else if (IS_G4X(dev_priv) || IS_I965GM(dev_priv)) {
		was_enabled = I915_READ(FW_BLC_SELF) & FW_BLC_SELF_EN;
		I915_WRITE(FW_BLC_SELF, enable ? FW_BLC_SELF_EN : 0);
		POSTING_READ(FW_BLC_SELF);
	} else if (IS_PINEVIEW(dev_priv)) {
		val = I915_READ(DSPFW3);
		was_enabled = val & PINEVIEW_SELF_REFRESH_EN;
		if (enable)
			val |= PINEVIEW_SELF_REFRESH_EN;
		else
			val &= ~PINEVIEW_SELF_REFRESH_EN;
		I915_WRITE(DSPFW3, val);
		POSTING_READ(DSPFW3);
	} else if (IS_I945G(dev_priv) || IS_I945GM(dev_priv)) {
		was_enabled = I915_READ(FW_BLC_SELF) & FW_BLC_SELF_EN;
		val = enable ? _MASKED_BIT_ENABLE(FW_BLC_SELF_EN) :
			       _MASKED_BIT_DISABLE(FW_BLC_SELF_EN);
		I915_WRITE(FW_BLC_SELF, val);
		POSTING_READ(FW_BLC_SELF);
	} else if (IS_I915GM(dev_priv)) {
		/*
		 * FIXME can't find a bit like this for 915G, and
		 * yet it does have the related watermark in
		 * FW_BLC_SELF. What's going on?
		 */
		was_enabled = I915_READ(INSTPM) & INSTPM_SELF_EN;
		val = enable ? _MASKED_BIT_ENABLE(INSTPM_SELF_EN) :
			       _MASKED_BIT_DISABLE(INSTPM_SELF_EN);
		I915_WRITE(INSTPM, val);
		POSTING_READ(INSTPM);
	} else {
		return false;
	}

	trace_intel_memory_cxsr(dev_priv, was_enabled, enable);

	drm_dbg_kms(&dev_priv->drm, "memory self-refresh is %s (was %s)\n",
		    enableddisabled(enable),
		    enableddisabled(was_enabled));

	return was_enabled;
}

/**
 * intel_set_memory_cxsr - Configure CxSR state
 * @dev_priv: i915 device
 * @enable: Allow vs. disallow CxSR
 *
 * Allow or disallow the system to enter a special CxSR
 * (C-state self refresh) state. What typically happens in CxSR mode
 * is that several display FIFOs may get combined into a single larger
 * FIFO for a particular plane (so called max FIFO mode) to allow the
 * system to defer memory fetches longer, and the memory will enter
 * self refresh.
 *
 * Note that enabling CxSR does not guarantee that the system enters
 * this special mode, nor does it guarantee that the system stays
 * in that mode once entered. So this just allows/disallows the system
 * to autonomously utilize the CxSR mode. Other factors such as core
 * C-states will affect when/if the system actually enters/exits the
 * CxSR mode.
 *
 * Note that on VLV/CHV this actually only controls the max FIFO mode,
 * and the system is free to enter/exit memory self refresh at any time
 * even when the use of CxSR has been disallowed.
 *
 * While the system is actually in the CxSR/max FIFO mode, some plane
 * control registers will not get latched on vblank. Thus in order to
 * guarantee the system will respond to changes in the plane registers
 * we must always disallow CxSR prior to making changes to those registers.
 * Unfortunately the system will re-evaluate the CxSR conditions at
 * frame start which happens after vblank start (which is when the plane
 * registers would get latched), so we can't proceed with the plane update
 * during the same frame where we disallowed CxSR.
 *
 * Certain platforms also have a deeper HPLL SR mode. Fortunately the
 * HPLL SR mode depends on CxSR itself, so we don't have to hand hold
 * the hardware w.r.t. HPLL SR when writing to plane registers.
 * Disallowing just CxSR is sufficient.
 */
bool intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
{
	bool ret;

	mutex_lock(&dev_priv->wm.wm_mutex);
	ret = _intel_set_memory_cxsr(dev_priv, enable);
	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		dev_priv->wm.vlv.cxsr = enable;
	else if (IS_G4X(dev_priv))
		dev_priv->wm.g4x.cxsr = enable;
	mutex_unlock(&dev_priv->wm.wm_mutex);

	return ret;
}

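/*
 * Illustrative sketch only, not copied from an actual caller: per the
 * comment above, code that touches plane registers while CxSR may be
 * active would roughly disallow CxSR first, wait a vblank so the change
 * takes effect, do the update, and then re-allow CxSR:
 *
 *	if (intel_set_memory_cxsr(dev_priv, false))
 *		intel_wait_for_vblank(dev_priv, crtc->pipe);
 *	... write the plane registers ...
 *	intel_set_memory_cxsr(dev_priv, true);
 */
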
/*
 * Latency for FIFO fetches is dependent on several factors:
 *  - memory configuration (speed, channels)
 *  - chipset
 *  - current MCH state
 * It can be fairly high in some situations, so here we assume a fairly
 * pessimal value. It's a tradeoff between extra memory fetches (if we
 * set this value too high, the FIFO will fetch frequently to stay full)
 * and power consumption (set it too low to save power and we might see
 * FIFO underruns and display "flicker").
 *
 * A value of 5us seems to be a good balance; safe for very low end
 * platforms but not overly aggressive on lower latency configs.
 */
static const int pessimal_latency_ns = 5000;

#define VLV_FIFO_START(dsparb, dsparb2, lo_shift, hi_shift) \
	((((dsparb) >> (lo_shift)) & 0xff) | ((((dsparb2) >> (hi_shift)) & 0x1) << 8))

static void vlv_get_fifo_size(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct vlv_fifo_state *fifo_state = &crtc_state->wm.vlv.fifo_state;
	enum pipe pipe = crtc->pipe;
	int sprite0_start, sprite1_start;
	u32 dsparb, dsparb2, dsparb3;

	switch (pipe) {
	case PIPE_A:
		dsparb = I915_READ(DSPARB);
		dsparb2 = I915_READ(DSPARB2);
		sprite0_start = VLV_FIFO_START(dsparb, dsparb2, 0, 0);
		sprite1_start = VLV_FIFO_START(dsparb, dsparb2, 8, 4);
		break;
	case PIPE_B:
		dsparb = I915_READ(DSPARB);
		dsparb2 = I915_READ(DSPARB2);
		sprite0_start = VLV_FIFO_START(dsparb, dsparb2, 16, 8);
		sprite1_start = VLV_FIFO_START(dsparb, dsparb2, 24, 12);
		break;
	case PIPE_C:
		dsparb2 = I915_READ(DSPARB2);
		dsparb3 = I915_READ(DSPARB3);
		sprite0_start = VLV_FIFO_START(dsparb3, dsparb2, 0, 16);
		sprite1_start = VLV_FIFO_START(dsparb3, dsparb2, 8, 20);
		break;
	default:
		MISSING_CASE(pipe);
		return;
	}

	fifo_state->plane[PLANE_PRIMARY] = sprite0_start;
	fifo_state->plane[PLANE_SPRITE0] = sprite1_start - sprite0_start;
	fifo_state->plane[PLANE_SPRITE1] = 511 - sprite1_start;
	fifo_state->plane[PLANE_CURSOR] = 63;
}

static int i9xx_get_fifo_size(struct drm_i915_private *dev_priv,
			      enum i9xx_plane_id i9xx_plane)
{
	u32 dsparb = I915_READ(DSPARB);
	int size;

	size = dsparb & 0x7f;
	if (i9xx_plane == PLANE_B)
		size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size;

	drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
		    dsparb, plane_name(i9xx_plane), size);

	return size;
}

static int i830_get_fifo_size(struct drm_i915_private *dev_priv,
			      enum i9xx_plane_id i9xx_plane)
{
	u32 dsparb = I915_READ(DSPARB);
	int size;

	size = dsparb & 0x1ff;
	if (i9xx_plane == PLANE_B)
		size = ((dsparb >> DSPARB_BEND_SHIFT) & 0x1ff) - size;
	size >>= 1; /* Convert to cachelines */

	drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
		    dsparb, plane_name(i9xx_plane), size);

	return size;
}

static int i845_get_fifo_size(struct drm_i915_private *dev_priv,
			      enum i9xx_plane_id i9xx_plane)
{
	u32 dsparb = I915_READ(DSPARB);
	int size;

	size = dsparb & 0x7f;
	size >>= 2; /* Convert to cachelines */

	drm_dbg_kms(&dev_priv->drm, "FIFO size - (0x%08x) %c: %d\n",
		    dsparb, plane_name(i9xx_plane), size);

	return size;
}

/* Pineview has different values for various configs */
static const struct intel_watermark_params pnv_display_wm = {
	.fifo_size = PINEVIEW_DISPLAY_FIFO,
	.max_wm = PINEVIEW_MAX_WM,
	.default_wm = PINEVIEW_DFT_WM,
	.guard_size = PINEVIEW_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params pnv_display_hplloff_wm = {
	.fifo_size = PINEVIEW_DISPLAY_FIFO,
	.max_wm = PINEVIEW_MAX_WM,
	.default_wm = PINEVIEW_DFT_HPLLOFF_WM,
	.guard_size = PINEVIEW_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params pnv_cursor_wm = {
	.fifo_size = PINEVIEW_CURSOR_FIFO,
	.max_wm = PINEVIEW_CURSOR_MAX_WM,
	.default_wm = PINEVIEW_CURSOR_DFT_WM,
	.guard_size = PINEVIEW_CURSOR_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params pnv_cursor_hplloff_wm = {
	.fifo_size = PINEVIEW_CURSOR_FIFO,
	.max_wm = PINEVIEW_CURSOR_MAX_WM,
	.default_wm = PINEVIEW_CURSOR_DFT_WM,
	.guard_size = PINEVIEW_CURSOR_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i965_cursor_wm_info = {
	.fifo_size = I965_CURSOR_FIFO,
	.max_wm = I965_CURSOR_MAX_WM,
	.default_wm = I965_CURSOR_DFT_WM,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i945_wm_info = {
	.fifo_size = I945_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i915_wm_info = {
	.fifo_size = I915_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i830_a_wm_info = {
	.fifo_size = I855GM_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i830_bc_wm_info = {
	.fifo_size = I855GM_FIFO_SIZE,
	.max_wm = I915_MAX_WM/2,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};

static const struct intel_watermark_params i845_wm_info = {
	.fifo_size = I830_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};

/**
 * intel_wm_method1 - Method 1 / "small buffer" watermark formula
 * @pixel_rate: Pipe pixel rate in kHz
 * @cpp: Plane bytes per pixel
 * @latency: Memory wakeup latency in 0.1us units
 *
 * Compute the watermark using the method 1 or "small buffer"
 * formula. The caller may additionally add extra cachelines
 * to account for TLB misses and clock crossings.
 *
 * This method is concerned with the short term drain rate
 * of the FIFO, ie. it does not account for blanking periods
 * which would effectively reduce the average drain rate across
 * a longer period. The name "small" refers to the fact the
 * FIFO is relatively small compared to the amount of data
 * fetched.
 *
 * The FIFO level vs. time graph might look something like:
 *
 *   |\   |\
 *   | \  | \
 * __---__---__ (- plane active, _ blanking)
 *   -> time
 *
 * or perhaps like this:
 *
 *   |\|\  |\|\
 * __----__----__ (- plane active, _ blanking)
 *   -> time
 *
 * Returns:
 * The watermark in bytes
 */
static unsigned int intel_wm_method1(unsigned int pixel_rate,
				     unsigned int cpp,
				     unsigned int latency)
{
	u64 ret;

	ret = mul_u32_u32(pixel_rate, cpp * latency);
	ret = DIV_ROUND_UP_ULL(ret, 10000);

	return ret;
}

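/*
 * Worked example for method 1 (illustrative numbers only, not from Bspec):
 * a 148500 kHz pixel rate, 4 bytes per pixel and a 5us wakeup latency
 * (latency = 50 in 0.1us units) drain 148500 * 4 * 50 / 10000 = 2970 bytes
 * while the memory wakes up, so that is the raw method 1 watermark before
 * the caller adds any cacheline adjustments.
 */
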
/**
 * intel_wm_method2 - Method 2 / "large buffer" watermark formula
 * @pixel_rate: Pipe pixel rate in kHz
 * @htotal: Pipe horizontal total
 * @width: Plane width in pixels
 * @cpp: Plane bytes per pixel
 * @latency: Memory wakeup latency in 0.1us units
 *
 * Compute the watermark using the method 2 or "large buffer"
 * formula. The caller may additionally add extra cachelines
 * to account for TLB misses and clock crossings.
 *
 * This method is concerned with the long term drain rate
 * of the FIFO, ie. it does account for blanking periods
 * which effectively reduce the average drain rate across
 * a longer period. The name "large" refers to the fact the
 * FIFO is relatively large compared to the amount of data
 * fetched.
 *
 * The FIFO level vs. time graph might look something like:
 *
 *    |\___       |\___
 *    |    \___   |    \___
 *    |        \  |        \
 * __ --__--__--__--__--__--__ (- plane active, _ blanking)
 *    -> time
 *
 * Returns:
 * The watermark in bytes
 */
static unsigned int intel_wm_method2(unsigned int pixel_rate,
				     unsigned int htotal,
				     unsigned int width,
				     unsigned int cpp,
				     unsigned int latency)
{
	unsigned int ret;

	/*
	 * FIXME remove once all users are computing
	 * watermarks in the correct place.
	 */
	if (WARN_ON_ONCE(htotal == 0))
		htotal = 1;

	ret = (latency * pixel_rate) / (htotal * 10000);
	ret = (ret + 1) * width * cpp;

	return ret;
}

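/*
 * Worked example for method 2 (illustrative numbers only, not from Bspec):
 * with a 148500 kHz pixel rate, htotal = 2200, a 1920 pixel wide plane at
 * 4 bytes per pixel and latency = 50 (5us), (50 * 148500) / (2200 * 10000)
 * truncates to 0 complete lines, so the formula charges
 * (0 + 1) * 1920 * 4 = 7680 bytes, i.e. one full line worth of data.
 */
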
/**
 * intel_calculate_wm - calculate watermark level
 * @pixel_rate: pixel clock
 * @wm: chip FIFO params
 * @fifo_size: size of the FIFO buffer
 * @cpp: bytes per pixel
 * @latency_ns: memory latency for the platform
 *
 * Calculate the watermark level (the level at which the display plane will
 * start fetching from memory again). Each chip has a different display
 * FIFO size and allocation, so the caller needs to figure that out and pass
 * in the correct intel_watermark_params structure.
 *
 * As the pixel clock runs, the FIFO will be drained at a rate that depends
 * on the pixel size. When it reaches the watermark level, it'll start
 * fetching FIFO line sized chunks from memory until the FIFO fills
 * past the watermark point. If the FIFO drains completely, a FIFO underrun
 * will occur, and a display engine hang could result.
 */
static unsigned int intel_calculate_wm(int pixel_rate,
				       const struct intel_watermark_params *wm,
				       int fifo_size, int cpp,
				       unsigned int latency_ns)
{
	int entries, wm_size;

	/*
	 * Note: we need to make sure we don't overflow for various clock &
	 * latency values.
	 * clocks go from a few thousand to several hundred thousand.
	 * latency is usually a few thousand
	 */
	entries = intel_wm_method1(pixel_rate, cpp,
				   latency_ns / 100);
	entries = DIV_ROUND_UP(entries, wm->cacheline_size) +
		wm->guard_size;
	DRM_DEBUG_KMS("FIFO entries required for mode: %d\n", entries);

	wm_size = fifo_size - entries;
	DRM_DEBUG_KMS("FIFO watermark level: %d\n", wm_size);

	/* Don't promote wm_size to unsigned... */
	if (wm_size > wm->max_wm)
		wm_size = wm->max_wm;
	if (wm_size <= 0)
		wm_size = wm->default_wm;

	/*
	 * Bspec seems to indicate that the value shouldn't be lower than
	 * 'burst size + 1'. Certainly 830 is quite unhappy with low values.
	 * Lets go for 8 which is the burst size since certain platforms
	 * already use a hardcoded 8 (which is what the spec says should be
	 * done).
	 */
	if (wm_size <= 8)
		wm_size = 8;

	return wm_size;
}

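/*
 * Worked example (illustrative numbers only, not actual platform values):
 * a 148500 kHz clock, 4 bytes per pixel and latency_ns = 5000 give
 * 2970 bytes by method 1. With a 64 byte cacheline and a guard size of 2
 * that is DIV_ROUND_UP(2970, 64) + 2 = 49 FIFO entries, so a 512 entry
 * FIFO would yield a watermark level of 512 - 49 = 463, subject to the
 * max/default/burst-size clamping the function applies.
 */
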
static bool is_disabling(int old, int new, int threshold)
{
	return old >= threshold && new < threshold;
}

static bool is_enabling(int old, int new, int threshold)
{
	return old < threshold && new >= threshold;
}

static int intel_wm_num_levels(struct drm_i915_private *dev_priv)
{
	return dev_priv->wm.max_level + 1;
}

static bool intel_wm_plane_visible(const struct intel_crtc_state *crtc_state,
				   const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);

	/* FIXME check the 'enable' instead */
	if (!crtc_state->hw.active)
		return false;

	/*
	 * Treat cursor with fb as always visible since cursor updates
	 * can happen faster than the vrefresh rate, and the current
	 * watermark code doesn't handle that correctly. Cursor updates
	 * which set/clear the fb or change the cursor size are going
	 * to get throttled by intel_legacy_cursor_update() to work
	 * around this problem with the watermark code.
	 */
	if (plane->id == PLANE_CURSOR)
		return plane_state->hw.fb != NULL;
	else
		return plane_state->uapi.visible;
}

static bool intel_crtc_active(struct intel_crtc *crtc)
{
	/* Be paranoid as we can arrive here with only partial
	 * state retrieved from the hardware during setup.
	 *
	 * We can ditch the adjusted_mode.crtc_clock check as soon
	 * as Haswell has gained clock readout/fastboot support.
	 *
	 * We can ditch the crtc->primary->state->fb check as soon as we can
	 * properly reconstruct framebuffers.
	 *
	 * FIXME: The intel_crtc->active here should be switched to
	 * crtc->state->active once we have proper CRTC states wired up
	 * for atomic.
	 */
	return crtc->active && crtc->base.primary->state->fb &&
		crtc->config->hw.adjusted_mode.crtc_clock;
}

static struct intel_crtc *single_enabled_crtc(struct drm_i915_private *dev_priv)
{
	struct intel_crtc *crtc, *enabled = NULL;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		if (intel_crtc_active(crtc)) {
			if (enabled)
				return NULL;
			enabled = crtc;
		}
	}

	return enabled;
}

static void pnv_update_wm(struct intel_crtc *unused_crtc)
{
	struct drm_i915_private *dev_priv = to_i915(unused_crtc->base.dev);
	struct intel_crtc *crtc;
	const struct cxsr_latency *latency;
	u32 reg;
	unsigned int wm;

	latency = intel_get_cxsr_latency(!IS_MOBILE(dev_priv),
					 dev_priv->is_ddr3,
					 dev_priv->fsb_freq,
					 dev_priv->mem_freq);
	if (!latency) {
		drm_dbg_kms(&dev_priv->drm,
			    "Unknown FSB/MEM found, disable CxSR\n");
		intel_set_memory_cxsr(dev_priv, false);
		return;
	}

	crtc = single_enabled_crtc(dev_priv);
	if (crtc) {
		const struct drm_display_mode *adjusted_mode =
			&crtc->config->hw.adjusted_mode;
		const struct drm_framebuffer *fb =
			crtc->base.primary->state->fb;
		int cpp = fb->format->cpp[0];
		int clock = adjusted_mode->crtc_clock;

		/* Display SR */
		wm = intel_calculate_wm(clock, &pnv_display_wm,
					pnv_display_wm.fifo_size,
					cpp, latency->display_sr);
		reg = I915_READ(DSPFW1);
		reg &= ~DSPFW_SR_MASK;
		reg |= FW_WM(wm, SR);
		I915_WRITE(DSPFW1, reg);
		drm_dbg_kms(&dev_priv->drm, "DSPFW1 register is %x\n", reg);

		/* cursor SR */
		wm = intel_calculate_wm(clock, &pnv_cursor_wm,
					pnv_display_wm.fifo_size,
					4, latency->cursor_sr);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_CURSOR_SR_MASK;
		reg |= FW_WM(wm, CURSOR_SR);
		I915_WRITE(DSPFW3, reg);

		/* Display HPLL off SR */
		wm = intel_calculate_wm(clock, &pnv_display_hplloff_wm,
					pnv_display_hplloff_wm.fifo_size,
					cpp, latency->display_hpll_disable);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_HPLL_SR_MASK;
		reg |= FW_WM(wm, HPLL_SR);
		I915_WRITE(DSPFW3, reg);

		/* cursor HPLL off SR */
		wm = intel_calculate_wm(clock, &pnv_cursor_hplloff_wm,
					pnv_display_hplloff_wm.fifo_size,
					4, latency->cursor_hpll_disable);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_HPLL_CURSOR_MASK;
		reg |= FW_WM(wm, HPLL_CURSOR);
		I915_WRITE(DSPFW3, reg);
		drm_dbg_kms(&dev_priv->drm, "DSPFW3 register is %x\n", reg);

		intel_set_memory_cxsr(dev_priv, true);
	} else {
		intel_set_memory_cxsr(dev_priv, false);
	}
}

Ville Syrjälä | 0f95ff8 | 2017-04-21 21:14:26 +0300 | [diff] [blame] | 948 | /* |
| 949 | * Documentation says: |
| 950 | * "If the line size is small, the TLB fetches can get in the way of the |
| 951 | * data fetches, causing some lag in the pixel data return which is not |
| 952 | * accounted for in the above formulas. The following adjustment only |
| 953 | * needs to be applied if eight whole lines fit in the buffer at once. |
| 954 | * The WM is adjusted upwards by the difference between the FIFO size |
| 955 | * and the size of 8 whole lines. This adjustment is always performed |
| 956 | * in the actual pixel depth regardless of whether FBC is enabled or not." |
| 957 | */ |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 958 | static unsigned int g4x_tlb_miss_wa(int fifo_size, int width, int cpp) |
Ville Syrjälä | 0f95ff8 | 2017-04-21 21:14:26 +0300 | [diff] [blame] | 959 | { |
| 960 | int tlb_miss = fifo_size * 64 - width * cpp * 8; |
| 961 | |
| 962 | return max(0, tlb_miss); |
| 963 | } |
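| | /* |
| | * Illustrative arithmetic for g4x_tlb_miss_wa() above (example numbers, |
| | * not taken from the spec): the primary plane's SR FIFO of 511 cachelines |
| | * holds 511 * 64 = 32704 bytes. An 800 pixel wide plane at 4 bytes per |
| | * pixel needs 800 * 4 * 8 = 25600 bytes for eight whole lines, so they |
| | * fit and the watermark is bumped by 32704 - 25600 = 7104 bytes. At 1920 |
| | * pixels eight lines need 61440 bytes, do not fit, and no adjustment is |
| | * made. |
| | */ |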
| 964 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 965 | static void g4x_write_wm_values(struct drm_i915_private *dev_priv, |
| 966 | const struct g4x_wm_values *wm) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 967 | { |
Ville Syrjälä | e93329a | 2017-04-21 21:14:31 +0300 | [diff] [blame] | 968 | enum pipe pipe; |
| 969 | |
| 970 | for_each_pipe(dev_priv, pipe) |
| 971 | trace_g4x_wm(intel_get_crtc_for_pipe(dev_priv, pipe), wm); |
| 972 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 973 | I915_WRITE(DSPFW1, |
| 974 | FW_WM(wm->sr.plane, SR) | |
| 975 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_CURSOR], CURSORB) | |
| 976 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_PRIMARY], PLANEB) | |
| 977 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_PRIMARY], PLANEA)); |
| 978 | I915_WRITE(DSPFW2, |
| 979 | (wm->fbc_en ? DSPFW_FBC_SR_EN : 0) | |
| 980 | FW_WM(wm->sr.fbc, FBC_SR) | |
| 981 | FW_WM(wm->hpll.fbc, FBC_HPLL_SR) | |
| 982 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_SPRITE0], SPRITEB) | |
| 983 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_CURSOR], CURSORA) | |
| 984 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_SPRITE0], SPRITEA)); |
| 985 | I915_WRITE(DSPFW3, |
| 986 | (wm->hpll_en ? DSPFW_HPLL_SR_EN : 0) | |
| 987 | FW_WM(wm->sr.cursor, CURSOR_SR) | |
| 988 | FW_WM(wm->hpll.cursor, HPLL_CURSOR) | |
| 989 | FW_WM(wm->hpll.plane, HPLL_SR)); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 990 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 991 | POSTING_READ(DSPFW1); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 992 | } |
| 993 | |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 994 | #define FW_WM_VLV(value, plane) \ |
| 995 | (((value) << DSPFW_ ## plane ## _SHIFT) & DSPFW_ ## plane ## _MASK_VLV) |
| 996 | |
Ville Syrjälä | 50f4cae | 2016-11-28 19:37:15 +0200 | [diff] [blame] | 997 | static void vlv_write_wm_values(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 0018fda | 2015-03-05 21:19:45 +0200 | [diff] [blame] | 998 | const struct vlv_wm_values *wm) |
| 999 | { |
Ville Syrjälä | 50f4cae | 2016-11-28 19:37:15 +0200 | [diff] [blame] | 1000 | enum pipe pipe; |
Ville Syrjälä | 0018fda | 2015-03-05 21:19:45 +0200 | [diff] [blame] | 1001 | |
Ville Syrjälä | 50f4cae | 2016-11-28 19:37:15 +0200 | [diff] [blame] | 1002 | for_each_pipe(dev_priv, pipe) { |
Ville Syrjälä | c137d66 | 2017-03-02 19:15:06 +0200 | [diff] [blame] | 1003 | trace_vlv_wm(intel_get_crtc_for_pipe(dev_priv, pipe), wm); |
| 1004 | |
Ville Syrjälä | 50f4cae | 2016-11-28 19:37:15 +0200 | [diff] [blame] | 1005 | I915_WRITE(VLV_DDL(pipe), |
| 1006 | (wm->ddl[pipe].plane[PLANE_CURSOR] << DDL_CURSOR_SHIFT) | |
| 1007 | (wm->ddl[pipe].plane[PLANE_SPRITE1] << DDL_SPRITE_SHIFT(1)) | |
| 1008 | (wm->ddl[pipe].plane[PLANE_SPRITE0] << DDL_SPRITE_SHIFT(0)) | |
| 1009 | (wm->ddl[pipe].plane[PLANE_PRIMARY] << DDL_PLANE_SHIFT)); |
| 1010 | } |
Ville Syrjälä | 0018fda | 2015-03-05 21:19:45 +0200 | [diff] [blame] | 1011 | |
Ville Syrjälä | 6fe6a7f | 2016-11-28 19:37:14 +0200 | [diff] [blame] | 1012 | /* |
| 1013 | * Zero the (unused) WM1 watermarks, and also clear all the |
| 1014 | * high order bits so that there are no out of bounds values |
| 1015 | * present in the registers during the reprogramming. |
| 1016 | */ |
| 1017 | I915_WRITE(DSPHOWM, 0); |
| 1018 | I915_WRITE(DSPHOWM1, 0); |
| 1019 | I915_WRITE(DSPFW4, 0); |
| 1020 | I915_WRITE(DSPFW5, 0); |
| 1021 | I915_WRITE(DSPFW6, 0); |
| 1022 | |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1023 | I915_WRITE(DSPFW1, |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 1024 | FW_WM(wm->sr.plane, SR) | |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1025 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_CURSOR], CURSORB) | |
| 1026 | FW_WM_VLV(wm->pipe[PIPE_B].plane[PLANE_PRIMARY], PLANEB) | |
| 1027 | FW_WM_VLV(wm->pipe[PIPE_A].plane[PLANE_PRIMARY], PLANEA)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1028 | I915_WRITE(DSPFW2, |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1029 | FW_WM_VLV(wm->pipe[PIPE_A].plane[PLANE_SPRITE1], SPRITEB) | |
| 1030 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_CURSOR], CURSORA) | |
| 1031 | FW_WM_VLV(wm->pipe[PIPE_A].plane[PLANE_SPRITE0], SPRITEA)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1032 | I915_WRITE(DSPFW3, |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 1033 | FW_WM(wm->sr.cursor, CURSOR_SR)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1034 | |
| 1035 | if (IS_CHERRYVIEW(dev_priv)) { |
| 1036 | I915_WRITE(DSPFW7_CHV, |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1037 | FW_WM_VLV(wm->pipe[PIPE_B].plane[PLANE_SPRITE1], SPRITED) | |
| 1038 | FW_WM_VLV(wm->pipe[PIPE_B].plane[PLANE_SPRITE0], SPRITEC)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1039 | I915_WRITE(DSPFW8_CHV, |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1040 | FW_WM_VLV(wm->pipe[PIPE_C].plane[PLANE_SPRITE1], SPRITEF) | |
| 1041 | FW_WM_VLV(wm->pipe[PIPE_C].plane[PLANE_SPRITE0], SPRITEE)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1042 | I915_WRITE(DSPFW9_CHV, |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1043 | FW_WM_VLV(wm->pipe[PIPE_C].plane[PLANE_PRIMARY], PLANEC) | |
| 1044 | FW_WM(wm->pipe[PIPE_C].plane[PLANE_CURSOR], CURSORC)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1045 | I915_WRITE(DSPHOWM, |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 1046 | FW_WM(wm->sr.plane >> 9, SR_HI) | |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1047 | FW_WM(wm->pipe[PIPE_C].plane[PLANE_SPRITE1] >> 8, SPRITEF_HI) | |
| 1048 | FW_WM(wm->pipe[PIPE_C].plane[PLANE_SPRITE0] >> 8, SPRITEE_HI) | |
| 1049 | FW_WM(wm->pipe[PIPE_C].plane[PLANE_PRIMARY] >> 8, PLANEC_HI) | |
| 1050 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_SPRITE1] >> 8, SPRITED_HI) | |
| 1051 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_SPRITE0] >> 8, SPRITEC_HI) | |
| 1052 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_PRIMARY] >> 8, PLANEB_HI) | |
| 1053 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_SPRITE1] >> 8, SPRITEB_HI) | |
| 1054 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_SPRITE0] >> 8, SPRITEA_HI) | |
| 1055 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_PRIMARY] >> 8, PLANEA_HI)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1056 | } else { |
| 1057 | I915_WRITE(DSPFW7, |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1058 | FW_WM_VLV(wm->pipe[PIPE_B].plane[PLANE_SPRITE1], SPRITED) | |
| 1059 | FW_WM_VLV(wm->pipe[PIPE_B].plane[PLANE_SPRITE0], SPRITEC)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1060 | I915_WRITE(DSPHOWM, |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 1061 | FW_WM(wm->sr.plane >> 9, SR_HI) | |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 1062 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_SPRITE1] >> 8, SPRITED_HI) | |
| 1063 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_SPRITE0] >> 8, SPRITEC_HI) | |
| 1064 | FW_WM(wm->pipe[PIPE_B].plane[PLANE_PRIMARY] >> 8, PLANEB_HI) | |
| 1065 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_SPRITE1] >> 8, SPRITEB_HI) | |
| 1066 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_SPRITE0] >> 8, SPRITEA_HI) | |
| 1067 | FW_WM(wm->pipe[PIPE_A].plane[PLANE_PRIMARY] >> 8, PLANEA_HI)); |
Ville Syrjälä | ae80152 | 2015-03-05 21:19:49 +0200 | [diff] [blame] | 1068 | } |
| 1069 | |
| 1070 | POSTING_READ(DSPFW1); |
Ville Syrjälä | 0018fda | 2015-03-05 21:19:45 +0200 | [diff] [blame] | 1071 | } |
| 1072 | |
Ville Syrjälä | 1566597 | 2015-03-10 16:16:28 +0200 | [diff] [blame] | 1073 | #undef FW_WM_VLV |
| 1074 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1075 | static void g4x_setup_wm_latency(struct drm_i915_private *dev_priv) |
| 1076 | { |
| 1077 | /* all latencies in usec */ |
| 1078 | dev_priv->wm.pri_latency[G4X_WM_LEVEL_NORMAL] = 5; |
| 1079 | dev_priv->wm.pri_latency[G4X_WM_LEVEL_SR] = 12; |
Ville Syrjälä | 79d9430 | 2017-04-21 21:14:30 +0300 | [diff] [blame] | 1080 | dev_priv->wm.pri_latency[G4X_WM_LEVEL_HPLL] = 35; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1081 | |
Ville Syrjälä | 79d9430 | 2017-04-21 21:14:30 +0300 | [diff] [blame] | 1082 | dev_priv->wm.max_level = G4X_WM_LEVEL_HPLL; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1083 | } |
| 1084 | |
| 1085 | static int g4x_plane_fifo_size(enum plane_id plane_id, int level) |
| 1086 | { |
| 1087 | /* |
| 1088 | * DSPCNTR[13] supposedly controls whether the |
| 1089 | * primary plane can use the FIFO space otherwise |
| 1090 | * reserved for the sprite plane. It's not 100% clear |
| 1091 | * what the actual FIFO size is, but it looks like we |
| 1092 | * can happily set both primary and sprite watermarks |
| 1093 | * up to 127 cachelines. So that would seem to mean |
| 1094 | * that either DSPCNTR[13] doesn't do anything, or that |
| 1095 | * the total FIFO is >= 256 cachelines in size. Either |
| 1096 | * way, we don't seem to have to worry about this |
| 1097 | * repartitioning as the maximum watermark value the |
| 1098 | * register can hold for each plane is lower than the |
| 1099 | * minimum FIFO size. |
| 1100 | */ |
| 1101 | switch (plane_id) { |
| 1102 | case PLANE_CURSOR: |
| 1103 | return 63; |
| 1104 | case PLANE_PRIMARY: |
| 1105 | return level == G4X_WM_LEVEL_NORMAL ? 127 : 511; |
| 1106 | case PLANE_SPRITE0: |
| 1107 | return level == G4X_WM_LEVEL_NORMAL ? 127 : 0; |
| 1108 | default: |
| 1109 | MISSING_CASE(plane_id); |
| 1110 | return 0; |
| 1111 | } |
| 1112 | } |
| 1113 | |
| 1114 | static int g4x_fbc_fifo_size(int level) |
| 1115 | { |
| 1116 | switch (level) { |
| 1117 | case G4X_WM_LEVEL_SR: |
| 1118 | return 7; |
| 1119 | case G4X_WM_LEVEL_HPLL: |
| 1120 | return 15; |
| 1121 | default: |
| 1122 | MISSING_CASE(level); |
| 1123 | return 0; |
| 1124 | } |
| 1125 | } |
| 1126 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 1127 | static u16 g4x_compute_wm(const struct intel_crtc_state *crtc_state, |
| 1128 | const struct intel_plane_state *plane_state, |
| 1129 | int level) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1130 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 1131 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1132 | struct drm_i915_private *dev_priv = to_i915(plane->base.dev); |
| 1133 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 1134 | &crtc_state->hw.adjusted_mode; |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 1135 | unsigned int latency = dev_priv->wm.pri_latency[level] * 10; |
| 1136 | unsigned int clock, htotal, cpp, width, wm; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1137 | |
| 1138 | if (latency == 0) |
| 1139 | return USHRT_MAX; |
| 1140 | |
| 1141 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
| 1142 | return 0; |
| 1143 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 1144 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | d56e823 | 2019-07-03 23:08:22 +0300 | [diff] [blame] | 1145 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1146 | /* |
| 1147 | * Not 100% sure which way ELK should go here as the |
| 1148 | * spec only says CL/CTG should assume 32bpp and BW |
| 1149 | * doesn't need to. But as these things followed the |
| 1150 | * mobile vs. desktop lines on gen3 as well, let's |
| 1151 | * assume ELK doesn't need this. |
| 1152 | * |
| 1153 | * The spec also fails to list such a restriction for |
| 1154 | * the HPLL watermark, which seems a little strange. |
| 1155 | * Let's use 32bpp for the HPLL watermark as well. |
| 1156 | */ |
| 1157 | if (IS_GM45(dev_priv) && plane->id == PLANE_PRIMARY && |
| 1158 | level != G4X_WM_LEVEL_NORMAL) |
Ville Syrjälä | d56e823 | 2019-07-03 23:08:22 +0300 | [diff] [blame] | 1159 | cpp = max(cpp, 4u); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1160 | |
| 1161 | clock = adjusted_mode->crtc_clock; |
| 1162 | htotal = adjusted_mode->crtc_htotal; |
| 1163 | |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 1164 | width = drm_rect_width(&plane_state->uapi.dst); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1165 | |
| 1166 | if (plane->id == PLANE_CURSOR) { |
| 1167 | wm = intel_wm_method2(clock, htotal, width, cpp, latency); |
| 1168 | } else if (plane->id == PLANE_PRIMARY && |
| 1169 | level == G4X_WM_LEVEL_NORMAL) { |
| 1170 | wm = intel_wm_method1(clock, cpp, latency); |
| 1171 | } else { |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 1172 | unsigned int small, large; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1173 | |
| 1174 | small = intel_wm_method1(clock, cpp, latency); |
| 1175 | large = intel_wm_method2(clock, htotal, width, cpp, latency); |
| 1176 | |
| 1177 | wm = min(small, large); |
| 1178 | } |
| 1179 | |
| 1180 | wm += g4x_tlb_miss_wa(g4x_plane_fifo_size(plane->id, level), |
| 1181 | width, cpp); |
| 1182 | |
| 1183 | wm = DIV_ROUND_UP(wm, 64) + 2; |
| 1184 | |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 1185 | return min_t(unsigned int, wm, USHRT_MAX); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1186 | } |
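| | /* |
| | * Reading g4x_compute_wm() above: cursors always use method 2, the |
| | * primary plane at the NORMAL level always uses method 1, and every |
| | * other case takes the smaller of the two. The byte result (plus the |
| | * TLB miss bump) is then converted to 64 byte cachelines with two |
| | * extra cachelines of headroom; e.g. an illustrative 6000 byte |
| | * requirement becomes DIV_ROUND_UP(6000, 64) + 2 = 94 + 2 = 96. |
| | */ |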
| 1187 | |
| 1188 | static bool g4x_raw_plane_wm_set(struct intel_crtc_state *crtc_state, |
| 1189 | int level, enum plane_id plane_id, u16 value) |
| 1190 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1191 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1192 | bool dirty = false; |
| 1193 | |
| 1194 | for (; level < intel_wm_num_levels(dev_priv); level++) { |
| 1195 | struct g4x_pipe_wm *raw = &crtc_state->wm.g4x.raw[level]; |
| 1196 | |
| 1197 | dirty |= raw->plane[plane_id] != value; |
| 1198 | raw->plane[plane_id] = value; |
| 1199 | } |
| 1200 | |
| 1201 | return dirty; |
| 1202 | } |
| 1203 | |
| 1204 | static bool g4x_raw_fbc_wm_set(struct intel_crtc_state *crtc_state, |
| 1205 | int level, u16 value) |
| 1206 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1207 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1208 | bool dirty = false; |
| 1209 | |
| 1210 | /* NORMAL level doesn't have an FBC watermark */ |
| 1211 | level = max(level, G4X_WM_LEVEL_SR); |
| 1212 | |
| 1213 | for (; level < intel_wm_num_levels(dev_priv); level++) { |
| 1214 | struct g4x_pipe_wm *raw = &crtc_state->wm.g4x.raw[level]; |
| 1215 | |
| 1216 | dirty |= raw->fbc != value; |
| 1217 | raw->fbc = value; |
| 1218 | } |
| 1219 | |
| 1220 | return dirty; |
| 1221 | } |
| 1222 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 1223 | static u32 ilk_compute_fbc_wm(const struct intel_crtc_state *crtc_state, |
| 1224 | const struct intel_plane_state *plane_state, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 1225 | u32 pri_val); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1226 | |
| 1227 | static bool g4x_raw_plane_wm_compute(struct intel_crtc_state *crtc_state, |
| 1228 | const struct intel_plane_state *plane_state) |
| 1229 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 1230 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 1231 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1232 | int num_levels = intel_wm_num_levels(to_i915(plane->base.dev)); |
| 1233 | enum plane_id plane_id = plane->id; |
| 1234 | bool dirty = false; |
| 1235 | int level; |
| 1236 | |
| 1237 | if (!intel_wm_plane_visible(crtc_state, plane_state)) { |
| 1238 | dirty |= g4x_raw_plane_wm_set(crtc_state, 0, plane_id, 0); |
| 1239 | if (plane_id == PLANE_PRIMARY) |
| 1240 | dirty |= g4x_raw_fbc_wm_set(crtc_state, 0, 0); |
| 1241 | goto out; |
| 1242 | } |
| 1243 | |
| 1244 | for (level = 0; level < num_levels; level++) { |
| 1245 | struct g4x_pipe_wm *raw = &crtc_state->wm.g4x.raw[level]; |
| 1246 | int wm, max_wm; |
| 1247 | |
| 1248 | wm = g4x_compute_wm(crtc_state, plane_state, level); |
| 1249 | max_wm = g4x_plane_fifo_size(plane_id, level); |
| 1250 | |
| 1251 | if (wm > max_wm) |
| 1252 | break; |
| 1253 | |
| 1254 | dirty |= raw->plane[plane_id] != wm; |
| 1255 | raw->plane[plane_id] = wm; |
| 1256 | |
| 1257 | if (plane_id != PLANE_PRIMARY || |
| 1258 | level == G4X_WM_LEVEL_NORMAL) |
| 1259 | continue; |
| 1260 | |
| 1261 | wm = ilk_compute_fbc_wm(crtc_state, plane_state, |
| 1262 | raw->plane[plane_id]); |
| 1263 | max_wm = g4x_fbc_fifo_size(level); |
| 1264 | |
| 1265 | /* |
| 1266 | * FBC wm is not mandatory as we |
| 1267 | * can always just disable its use. |
| 1268 | */ |
| 1269 | if (wm > max_wm) |
| 1270 | wm = USHRT_MAX; |
| 1271 | |
| 1272 | dirty |= raw->fbc != wm; |
| 1273 | raw->fbc = wm; |
| 1274 | } |
| 1275 | |
| 1276 | /* mark watermarks as invalid */ |
| 1277 | dirty |= g4x_raw_plane_wm_set(crtc_state, level, plane_id, USHRT_MAX); |
| 1278 | |
| 1279 | if (plane_id == PLANE_PRIMARY) |
| 1280 | dirty |= g4x_raw_fbc_wm_set(crtc_state, level, USHRT_MAX); |
| 1281 | |
| 1282 | out: |
| 1283 | if (dirty) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 1284 | drm_dbg_kms(&dev_priv->drm, |
| 1285 | "%s watermarks: normal=%d, SR=%d, HPLL=%d\n", |
| 1286 | plane->base.name, |
| 1287 | crtc_state->wm.g4x.raw[G4X_WM_LEVEL_NORMAL].plane[plane_id], |
| 1288 | crtc_state->wm.g4x.raw[G4X_WM_LEVEL_SR].plane[plane_id], |
| 1289 | crtc_state->wm.g4x.raw[G4X_WM_LEVEL_HPLL].plane[plane_id]); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1290 | |
| 1291 | if (plane_id == PLANE_PRIMARY) |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 1292 | drm_dbg_kms(&dev_priv->drm, |
| 1293 | "FBC watermarks: SR=%d, HPLL=%d\n", |
| 1294 | crtc_state->wm.g4x.raw[G4X_WM_LEVEL_SR].fbc, |
| 1295 | crtc_state->wm.g4x.raw[G4X_WM_LEVEL_HPLL].fbc); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1296 | } |
| 1297 | |
| 1298 | return dirty; |
| 1299 | } |
| 1300 | |
| 1301 | static bool g4x_raw_plane_wm_is_valid(const struct intel_crtc_state *crtc_state, |
| 1302 | enum plane_id plane_id, int level) |
| 1303 | { |
| 1304 | const struct g4x_pipe_wm *raw = &crtc_state->wm.g4x.raw[level]; |
| 1305 | |
| 1306 | return raw->plane[plane_id] <= g4x_plane_fifo_size(plane_id, level); |
| 1307 | } |
| 1308 | |
| 1309 | static bool g4x_raw_crtc_wm_is_valid(const struct intel_crtc_state *crtc_state, |
| 1310 | int level) |
| 1311 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1312 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1313 | |
| 1314 | if (level > dev_priv->wm.max_level) |
| 1315 | return false; |
| 1316 | |
| 1317 | return g4x_raw_plane_wm_is_valid(crtc_state, PLANE_PRIMARY, level) && |
| 1318 | g4x_raw_plane_wm_is_valid(crtc_state, PLANE_SPRITE0, level) && |
| 1319 | g4x_raw_plane_wm_is_valid(crtc_state, PLANE_CURSOR, level); |
| 1320 | } |
| 1321 | |
| 1322 | /* mark all levels starting from 'level' as invalid */ |
| 1323 | static void g4x_invalidate_wms(struct intel_crtc *crtc, |
| 1324 | struct g4x_wm_state *wm_state, int level) |
| 1325 | { |
| 1326 | if (level <= G4X_WM_LEVEL_NORMAL) { |
| 1327 | enum plane_id plane_id; |
| 1328 | |
| 1329 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 1330 | wm_state->wm.plane[plane_id] = USHRT_MAX; |
| 1331 | } |
| 1332 | |
| 1333 | if (level <= G4X_WM_LEVEL_SR) { |
| 1334 | wm_state->cxsr = false; |
| 1335 | wm_state->sr.cursor = USHRT_MAX; |
| 1336 | wm_state->sr.plane = USHRT_MAX; |
| 1337 | wm_state->sr.fbc = USHRT_MAX; |
| 1338 | } |
| 1339 | |
| 1340 | if (level <= G4X_WM_LEVEL_HPLL) { |
| 1341 | wm_state->hpll_en = false; |
| 1342 | wm_state->hpll.cursor = USHRT_MAX; |
| 1343 | wm_state->hpll.plane = USHRT_MAX; |
| 1344 | wm_state->hpll.fbc = USHRT_MAX; |
| 1345 | } |
| 1346 | } |
| 1347 | |
| 1348 | static int g4x_compute_pipe_wm(struct intel_crtc_state *crtc_state) |
| 1349 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1350 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1351 | struct intel_atomic_state *state = |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1352 | to_intel_atomic_state(crtc_state->uapi.state); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1353 | struct g4x_wm_state *wm_state = &crtc_state->wm.g4x.optimal; |
Ville Syrjälä | 0b14d96 | 2019-08-21 20:30:33 +0300 | [diff] [blame] | 1354 | int num_active_planes = hweight8(crtc_state->active_planes & |
| 1355 | ~BIT(PLANE_CURSOR)); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1356 | const struct g4x_pipe_wm *raw; |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1357 | const struct intel_plane_state *old_plane_state; |
| 1358 | const struct intel_plane_state *new_plane_state; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1359 | struct intel_plane *plane; |
| 1360 | enum plane_id plane_id; |
| 1361 | int i, level; |
| 1362 | unsigned int dirty = 0; |
| 1363 | |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1364 | for_each_oldnew_intel_plane_in_state(state, plane, |
| 1365 | old_plane_state, |
| 1366 | new_plane_state, i) { |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 1367 | if (new_plane_state->hw.crtc != &crtc->base && |
| 1368 | old_plane_state->hw.crtc != &crtc->base) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1369 | continue; |
| 1370 | |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1371 | if (g4x_raw_plane_wm_compute(crtc_state, new_plane_state)) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1372 | dirty |= BIT(plane->id); |
| 1373 | } |
| 1374 | |
| 1375 | if (!dirty) |
| 1376 | return 0; |
| 1377 | |
| 1378 | level = G4X_WM_LEVEL_NORMAL; |
| 1379 | if (!g4x_raw_crtc_wm_is_valid(crtc_state, level)) |
| 1380 | goto out; |
| 1381 | |
| 1382 | raw = &crtc_state->wm.g4x.raw[level]; |
| 1383 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 1384 | wm_state->wm.plane[plane_id] = raw->plane[plane_id]; |
| 1385 | |
| 1386 | level = G4X_WM_LEVEL_SR; |
| 1387 | |
| 1388 | if (!g4x_raw_crtc_wm_is_valid(crtc_state, level)) |
| 1389 | goto out; |
| 1390 | |
| 1391 | raw = &crtc_state->wm.g4x.raw[level]; |
| 1392 | wm_state->sr.plane = raw->plane[PLANE_PRIMARY]; |
| 1393 | wm_state->sr.cursor = raw->plane[PLANE_CURSOR]; |
| 1394 | wm_state->sr.fbc = raw->fbc; |
| 1395 | |
| 1396 | wm_state->cxsr = num_active_planes == BIT(PLANE_PRIMARY); |
| 1397 | |
| 1398 | level = G4X_WM_LEVEL_HPLL; |
| 1399 | |
| 1400 | if (!g4x_raw_crtc_wm_is_valid(crtc_state, level)) |
| 1401 | goto out; |
| 1402 | |
| 1403 | raw = &crtc_state->wm.g4x.raw[level]; |
| 1404 | wm_state->hpll.plane = raw->plane[PLANE_PRIMARY]; |
| 1405 | wm_state->hpll.cursor = raw->plane[PLANE_CURSOR]; |
| 1406 | wm_state->hpll.fbc = raw->fbc; |
| 1407 | |
| 1408 | wm_state->hpll_en = wm_state->cxsr; |
| 1409 | |
| 1410 | level++; |
| 1411 | |
| 1412 | out: |
| 1413 | if (level == G4X_WM_LEVEL_NORMAL) |
| 1414 | return -EINVAL; |
| 1415 | |
| 1416 | /* invalidate the higher levels */ |
| 1417 | g4x_invalidate_wms(crtc, wm_state, level); |
| 1418 | |
| 1419 | /* |
| 1420 | 	 * Determine if the FBC watermark(s) can be used. If |
| 1421 | 	 * this isn't the case we prefer to disable the FBC |
| 1422 | 	 * watermark(s) rather than disable the SR/HPLL |
| 1423 | * level(s) entirely. |
| 1424 | */ |
| 1425 | wm_state->fbc_en = level > G4X_WM_LEVEL_NORMAL; |
| 1426 | |
| 1427 | if (level >= G4X_WM_LEVEL_SR && |
| 1428 | wm_state->sr.fbc > g4x_fbc_fifo_size(G4X_WM_LEVEL_SR)) |
| 1429 | wm_state->fbc_en = false; |
| 1430 | else if (level >= G4X_WM_LEVEL_HPLL && |
| 1431 | wm_state->hpll.fbc > g4x_fbc_fifo_size(G4X_WM_LEVEL_HPLL)) |
| 1432 | wm_state->fbc_en = false; |
| 1433 | |
| 1434 | return 0; |
| 1435 | } |
| 1436 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 1437 | static int g4x_compute_intermediate_wm(struct intel_crtc_state *new_crtc_state) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1438 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1439 | struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc); |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1440 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1441 | struct g4x_wm_state *intermediate = &new_crtc_state->wm.g4x.intermediate; |
| 1442 | const struct g4x_wm_state *optimal = &new_crtc_state->wm.g4x.optimal; |
| 1443 | struct intel_atomic_state *intel_state = |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1444 | to_intel_atomic_state(new_crtc_state->uapi.state); |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1445 | const struct intel_crtc_state *old_crtc_state = |
| 1446 | intel_atomic_get_old_crtc_state(intel_state, crtc); |
| 1447 | const struct g4x_wm_state *active = &old_crtc_state->wm.g4x.optimal; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1448 | enum plane_id plane_id; |
| 1449 | |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1450 | if (!new_crtc_state->hw.active || drm_atomic_crtc_needs_modeset(&new_crtc_state->uapi)) { |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1451 | *intermediate = *optimal; |
| 1452 | |
| 1453 | intermediate->cxsr = false; |
| 1454 | intermediate->hpll_en = false; |
| 1455 | goto out; |
| 1456 | } |
| 1457 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1458 | intermediate->cxsr = optimal->cxsr && active->cxsr && |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1459 | !new_crtc_state->disable_cxsr; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1460 | intermediate->hpll_en = optimal->hpll_en && active->hpll_en && |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1461 | !new_crtc_state->disable_cxsr; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1462 | intermediate->fbc_en = optimal->fbc_en && active->fbc_en; |
| 1463 | |
| 1464 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 1465 | intermediate->wm.plane[plane_id] = |
| 1466 | max(optimal->wm.plane[plane_id], |
| 1467 | active->wm.plane[plane_id]); |
| 1468 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1469 | drm_WARN_ON(&dev_priv->drm, intermediate->wm.plane[plane_id] > |
| 1470 | g4x_plane_fifo_size(plane_id, G4X_WM_LEVEL_NORMAL)); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1471 | } |
| 1472 | |
| 1473 | intermediate->sr.plane = max(optimal->sr.plane, |
| 1474 | active->sr.plane); |
| 1475 | intermediate->sr.cursor = max(optimal->sr.cursor, |
| 1476 | active->sr.cursor); |
| 1477 | intermediate->sr.fbc = max(optimal->sr.fbc, |
| 1478 | active->sr.fbc); |
| 1479 | |
| 1480 | intermediate->hpll.plane = max(optimal->hpll.plane, |
| 1481 | active->hpll.plane); |
| 1482 | intermediate->hpll.cursor = max(optimal->hpll.cursor, |
| 1483 | active->hpll.cursor); |
| 1484 | intermediate->hpll.fbc = max(optimal->hpll.fbc, |
| 1485 | active->hpll.fbc); |
| 1486 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1487 | drm_WARN_ON(&dev_priv->drm, |
| 1488 | (intermediate->sr.plane > |
| 1489 | g4x_plane_fifo_size(PLANE_PRIMARY, G4X_WM_LEVEL_SR) || |
| 1490 | intermediate->sr.cursor > |
| 1491 | g4x_plane_fifo_size(PLANE_CURSOR, G4X_WM_LEVEL_SR)) && |
| 1492 | intermediate->cxsr); |
| 1493 | drm_WARN_ON(&dev_priv->drm, |
| 1494 | (intermediate->sr.plane > |
| 1495 | g4x_plane_fifo_size(PLANE_PRIMARY, G4X_WM_LEVEL_HPLL) || |
| 1496 | intermediate->sr.cursor > |
| 1497 | g4x_plane_fifo_size(PLANE_CURSOR, G4X_WM_LEVEL_HPLL)) && |
| 1498 | intermediate->hpll_en); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1499 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1500 | drm_WARN_ON(&dev_priv->drm, |
| 1501 | 		    intermediate->sr.fbc > g4x_fbc_fifo_size(G4X_WM_LEVEL_SR) && |
| 1502 | intermediate->fbc_en && intermediate->cxsr); |
| 1503 | drm_WARN_ON(&dev_priv->drm, |
| 1504 | 		    intermediate->hpll.fbc > g4x_fbc_fifo_size(G4X_WM_LEVEL_HPLL) && |
| 1505 | intermediate->fbc_en && intermediate->hpll_en); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1506 | |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1507 | out: |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1508 | /* |
| 1509 | 	 * If our intermediate WMs are identical to the final WMs, then we can |
| 1510 | 	 * omit the post-vblank programming; only update if they differ. |
| 1511 | */ |
| 1512 | if (memcmp(intermediate, optimal, sizeof(*intermediate)) != 0) |
Maarten Lankhorst | 248c243 | 2017-11-15 17:31:57 +0100 | [diff] [blame] | 1513 | new_crtc_state->wm.need_postvbl_update = true; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1514 | |
| 1515 | return 0; |
| 1516 | } |
| 1517 | |
| 1518 | static void g4x_merge_wm(struct drm_i915_private *dev_priv, |
| 1519 | struct g4x_wm_values *wm) |
| 1520 | { |
| 1521 | struct intel_crtc *crtc; |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 1522 | int num_active_pipes = 0; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1523 | |
| 1524 | wm->cxsr = true; |
| 1525 | wm->hpll_en = true; |
| 1526 | wm->fbc_en = true; |
| 1527 | |
| 1528 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
| 1529 | const struct g4x_wm_state *wm_state = &crtc->wm.active.g4x; |
| 1530 | |
| 1531 | if (!crtc->active) |
| 1532 | continue; |
| 1533 | |
| 1534 | if (!wm_state->cxsr) |
| 1535 | wm->cxsr = false; |
| 1536 | if (!wm_state->hpll_en) |
| 1537 | wm->hpll_en = false; |
| 1538 | if (!wm_state->fbc_en) |
| 1539 | wm->fbc_en = false; |
| 1540 | |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 1541 | num_active_pipes++; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1542 | } |
| 1543 | |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 1544 | if (num_active_pipes != 1) { |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1545 | wm->cxsr = false; |
| 1546 | wm->hpll_en = false; |
| 1547 | wm->fbc_en = false; |
| 1548 | } |
| 1549 | |
| 1550 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
| 1551 | const struct g4x_wm_state *wm_state = &crtc->wm.active.g4x; |
| 1552 | enum pipe pipe = crtc->pipe; |
| 1553 | |
| 1554 | wm->pipe[pipe] = wm_state->wm; |
| 1555 | if (crtc->active && wm->cxsr) |
| 1556 | wm->sr = wm_state->sr; |
| 1557 | if (crtc->active && wm->hpll_en) |
| 1558 | wm->hpll = wm_state->hpll; |
| 1559 | } |
| 1560 | } |
| 1561 | |
| 1562 | static void g4x_program_watermarks(struct drm_i915_private *dev_priv) |
| 1563 | { |
| 1564 | struct g4x_wm_values *old_wm = &dev_priv->wm.g4x; |
| 1565 | struct g4x_wm_values new_wm = {}; |
| 1566 | |
| 1567 | g4x_merge_wm(dev_priv, &new_wm); |
| 1568 | |
| 1569 | if (memcmp(old_wm, &new_wm, sizeof(new_wm)) == 0) |
| 1570 | return; |
| 1571 | |
| 1572 | if (is_disabling(old_wm->cxsr, new_wm.cxsr, true)) |
| 1573 | _intel_set_memory_cxsr(dev_priv, false); |
| 1574 | |
| 1575 | g4x_write_wm_values(dev_priv, &new_wm); |
| 1576 | |
| 1577 | if (is_enabling(old_wm->cxsr, new_wm.cxsr, true)) |
| 1578 | _intel_set_memory_cxsr(dev_priv, true); |
| 1579 | |
| 1580 | *old_wm = new_wm; |
| 1581 | } |
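| | /* |
| | * Note the ordering in g4x_program_watermarks() above: when cxsr is being |
| | * disabled it is turned off before the new values are written, and when it |
| | * is being enabled it is turned on only afterwards, so there is never a |
| | * window where self refresh runs with watermarks that do not support it |
| | * (an inference from the code, not a statement from the spec). |
| | */ |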
| 1582 | |
| 1583 | static void g4x_initial_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1584 | struct intel_crtc *crtc) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1585 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1586 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 1587 | const struct intel_crtc_state *crtc_state = |
| 1588 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1589 | |
| 1590 | mutex_lock(&dev_priv->wm.wm_mutex); |
| 1591 | crtc->wm.active.g4x = crtc_state->wm.g4x.intermediate; |
| 1592 | g4x_program_watermarks(dev_priv); |
| 1593 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 1594 | } |
| 1595 | |
| 1596 | static void g4x_optimize_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1597 | struct intel_crtc *crtc) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1598 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1599 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 1600 | const struct intel_crtc_state *crtc_state = |
| 1601 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1602 | |
| 1603 | if (!crtc_state->wm.need_postvbl_update) |
| 1604 | return; |
| 1605 | |
| 1606 | mutex_lock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | 88016a9 | 2019-07-01 19:05:45 +0300 | [diff] [blame] | 1607 | crtc->wm.active.g4x = crtc_state->wm.g4x.optimal; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 1608 | g4x_program_watermarks(dev_priv); |
| 1609 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 1610 | } |
| 1611 | |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1612 | /* latency must be in 0.1us units. */ |
| 1613 | static unsigned int vlv_wm_method2(unsigned int pixel_rate, |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 1614 | unsigned int htotal, |
| 1615 | unsigned int width, |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 1616 | unsigned int cpp, |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1617 | unsigned int latency) |
| 1618 | { |
| 1619 | unsigned int ret; |
| 1620 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 1621 | ret = intel_wm_method2(pixel_rate, htotal, |
| 1622 | width, cpp, latency); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1623 | ret = DIV_ROUND_UP(ret, 64); |
| 1624 | |
| 1625 | return ret; |
| 1626 | } |
| 1627 | |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 1628 | static void vlv_setup_wm_latency(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1629 | { |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1630 | /* all latencies in usec */ |
| 1631 | dev_priv->wm.pri_latency[VLV_WM_LEVEL_PM2] = 3; |
| 1632 | |
Ville Syrjälä | 58590c1 | 2015-09-08 21:05:12 +0300 | [diff] [blame] | 1633 | dev_priv->wm.max_level = VLV_WM_LEVEL_PM2; |
| 1634 | |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1635 | if (IS_CHERRYVIEW(dev_priv)) { |
| 1636 | dev_priv->wm.pri_latency[VLV_WM_LEVEL_PM5] = 12; |
| 1637 | dev_priv->wm.pri_latency[VLV_WM_LEVEL_DDR_DVFS] = 33; |
Ville Syrjälä | 58590c1 | 2015-09-08 21:05:12 +0300 | [diff] [blame] | 1638 | |
| 1639 | dev_priv->wm.max_level = VLV_WM_LEVEL_DDR_DVFS; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1640 | } |
| 1641 | } |
| 1642 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 1643 | static u16 vlv_compute_wm_level(const struct intel_crtc_state *crtc_state, |
| 1644 | const struct intel_plane_state *plane_state, |
| 1645 | int level) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1646 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 1647 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1648 | struct drm_i915_private *dev_priv = to_i915(plane->base.dev); |
Ville Syrjälä | e339d67 | 2016-11-28 19:37:17 +0200 | [diff] [blame] | 1649 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 1650 | &crtc_state->hw.adjusted_mode; |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 1651 | unsigned int clock, htotal, cpp, width, wm; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1652 | |
| 1653 | if (dev_priv->wm.pri_latency[level] == 0) |
| 1654 | return USHRT_MAX; |
| 1655 | |
Ville Syrjälä | a07102f | 2017-03-03 17:19:27 +0200 | [diff] [blame] | 1656 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1657 | return 0; |
| 1658 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 1659 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | e339d67 | 2016-11-28 19:37:17 +0200 | [diff] [blame] | 1660 | clock = adjusted_mode->crtc_clock; |
| 1661 | htotal = adjusted_mode->crtc_htotal; |
| 1662 | width = crtc_state->pipe_src_w; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1663 | |
Ville Syrjälä | 709f3fc | 2017-03-03 17:19:26 +0200 | [diff] [blame] | 1664 | if (plane->id == PLANE_CURSOR) { |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1665 | /* |
| 1666 | * FIXME the formula gives values that are |
| 1667 | * too big for the cursor FIFO, and hence we |
| 1668 | * would never be able to use cursors. For |
| 1669 | * now just hardcode the watermark. |
| 1670 | */ |
| 1671 | wm = 63; |
| 1672 | } else { |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 1673 | wm = vlv_wm_method2(clock, htotal, width, cpp, |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1674 | dev_priv->wm.pri_latency[level] * 10); |
| 1675 | } |
| 1676 | |
Chris Wilson | 1a1f128 | 2017-11-07 14:03:38 +0000 | [diff] [blame] | 1677 | return min_t(unsigned int, wm, USHRT_MAX); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1678 | } |
| 1679 | |
Ville Syrjälä | 1a10ae6 | 2017-03-02 19:15:03 +0200 | [diff] [blame] | 1680 | static bool vlv_need_sprite0_fifo_workaround(unsigned int active_planes) |
| 1681 | { |
| 1682 | return (active_planes & (BIT(PLANE_SPRITE0) | |
| 1683 | BIT(PLANE_SPRITE1))) == BIT(PLANE_SPRITE1); |
| 1684 | } |
| 1685 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1686 | static int vlv_compute_fifo(struct intel_crtc_state *crtc_state) |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1687 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1688 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1689 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 1690 | const struct g4x_pipe_wm *raw = |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1691 | &crtc_state->wm.vlv.raw[VLV_WM_LEVEL_PM2]; |
Ville Syrjälä | 814e7f0 | 2017-03-02 19:14:55 +0200 | [diff] [blame] | 1692 | struct vlv_fifo_state *fifo_state = &crtc_state->wm.vlv.fifo_state; |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1693 | unsigned int active_planes = crtc_state->active_planes & ~BIT(PLANE_CURSOR); |
Ville Syrjälä | 0b14d96 | 2019-08-21 20:30:33 +0300 | [diff] [blame] | 1694 | int num_active_planes = hweight8(active_planes); |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1695 | const int fifo_size = 511; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1696 | int fifo_extra, fifo_left = fifo_size; |
Ville Syrjälä | 1a10ae6 | 2017-03-02 19:15:03 +0200 | [diff] [blame] | 1697 | int sprite0_fifo_extra = 0; |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1698 | unsigned int total_rate; |
| 1699 | enum plane_id plane_id; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1700 | |
Ville Syrjälä | 1a10ae6 | 2017-03-02 19:15:03 +0200 | [diff] [blame] | 1701 | /* |
| 1702 | * When enabling sprite0 after sprite1 has already been enabled |
| 1703 | * we tend to get an underrun unless sprite0 already has some |
| 1704 | 	 * FIFO space allocated. Hence we always allocate at least one |
| 1705 | * cacheline for sprite0 whenever sprite1 is enabled. |
| 1706 | * |
| 1707 | * All other plane enable sequences appear immune to this problem. |
| 1708 | */ |
| 1709 | if (vlv_need_sprite0_fifo_workaround(active_planes)) |
| 1710 | sprite0_fifo_extra = 1; |
| 1711 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1712 | total_rate = raw->plane[PLANE_PRIMARY] + |
| 1713 | raw->plane[PLANE_SPRITE0] + |
Ville Syrjälä | 1a10ae6 | 2017-03-02 19:15:03 +0200 | [diff] [blame] | 1714 | raw->plane[PLANE_SPRITE1] + |
| 1715 | sprite0_fifo_extra; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1716 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1717 | if (total_rate > fifo_size) |
| 1718 | return -EINVAL; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1719 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1720 | if (total_rate == 0) |
| 1721 | total_rate = 1; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1722 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1723 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1724 | unsigned int rate; |
| 1725 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1726 | if ((active_planes & BIT(plane_id)) == 0) { |
| 1727 | fifo_state->plane[plane_id] = 0; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1728 | continue; |
| 1729 | } |
| 1730 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1731 | rate = raw->plane[plane_id]; |
| 1732 | fifo_state->plane[plane_id] = fifo_size * rate / total_rate; |
| 1733 | fifo_left -= fifo_state->plane[plane_id]; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1734 | } |
| 1735 | |
Ville Syrjälä | 1a10ae6 | 2017-03-02 19:15:03 +0200 | [diff] [blame] | 1736 | fifo_state->plane[PLANE_SPRITE0] += sprite0_fifo_extra; |
| 1737 | fifo_left -= sprite0_fifo_extra; |
| 1738 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1739 | fifo_state->plane[PLANE_CURSOR] = 63; |
| 1740 | |
| 1741 | fifo_extra = DIV_ROUND_UP(fifo_left, num_active_planes ?: 1); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1742 | |
| 1743 | /* spread the remainder evenly */ |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1744 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1745 | int plane_extra; |
| 1746 | |
| 1747 | if (fifo_left == 0) |
| 1748 | break; |
| 1749 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1750 | if ((active_planes & BIT(plane_id)) == 0) |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1751 | continue; |
| 1752 | |
| 1753 | plane_extra = min(fifo_extra, fifo_left); |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1754 | fifo_state->plane[plane_id] += plane_extra; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1755 | fifo_left -= plane_extra; |
| 1756 | } |
| 1757 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1758 | drm_WARN_ON(&dev_priv->drm, active_planes != 0 && fifo_left != 0); |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1759 | |
| 1760 | /* give it all to the first plane if none are active */ |
| 1761 | if (active_planes == 0) { |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 1762 | drm_WARN_ON(&dev_priv->drm, fifo_left != fifo_size); |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1763 | fifo_state->plane[PLANE_PRIMARY] = fifo_left; |
| 1764 | } |
| 1765 | |
| 1766 | return 0; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1767 | } |
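| | /* |
| | * Worked example for vlv_compute_fifo() above (illustrative numbers): |
| | * with PM2 raw watermarks of 120 (primary) and 60 (sprite0), sprite1 |
| | * off and no sprite0 workaround, total_rate = 180 and the 511 entry |
| | * FIFO splits as 511 * 120 / 180 = 340 and 511 * 60 / 180 = 170. The |
| | * single leftover entry is handed out by the remainder loop (primary |
| | * becomes 341), and the cursor keeps its fixed 63 entries. |
| | */ |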
| 1768 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1769 | /* mark all levels starting from 'level' as invalid */ |
| 1770 | static void vlv_invalidate_wms(struct intel_crtc *crtc, |
| 1771 | struct vlv_wm_state *wm_state, int level) |
| 1772 | { |
| 1773 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 1774 | |
Ville Syrjälä | 6d5019b | 2017-04-21 21:14:20 +0300 | [diff] [blame] | 1775 | for (; level < intel_wm_num_levels(dev_priv); level++) { |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1776 | enum plane_id plane_id; |
| 1777 | |
| 1778 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 1779 | wm_state->wm[level].plane[plane_id] = USHRT_MAX; |
| 1780 | |
| 1781 | wm_state->sr[level].cursor = USHRT_MAX; |
| 1782 | wm_state->sr[level].plane = USHRT_MAX; |
| 1783 | } |
| 1784 | } |
| 1785 | |
Ville Syrjälä | 26cca0e | 2016-11-28 19:37:09 +0200 | [diff] [blame] | 1786 | static u16 vlv_invert_wm_value(u16 wm, u16 fifo_size) |
| 1787 | { |
| 1788 | if (wm > fifo_size) |
| 1789 | return USHRT_MAX; |
| 1790 | else |
| 1791 | return fifo_size - wm; |
| 1792 | } |
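| | /* |
| | * For example (illustrative numbers): a raw watermark of 40 cachelines |
| | * inside a 170 cacheline FIFO allocation inverts to 170 - 40 = 130, |
| | * while a value larger than its allocation saturates to USHRT_MAX |
| | * instead of going negative. |
| | */ |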
| 1793 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1794 | /* |
| 1795 |  * Starting from 'level', set that level and all higher |
| 1796 |  * levels to 'value' in the "raw" watermarks. |
| 1797 | */ |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1798 | static bool vlv_raw_plane_wm_set(struct intel_crtc_state *crtc_state, |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1799 | int level, enum plane_id plane_id, u16 value) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1800 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1801 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 6d5019b | 2017-04-21 21:14:20 +0300 | [diff] [blame] | 1802 | int num_levels = intel_wm_num_levels(dev_priv); |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1803 | bool dirty = false; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1804 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1805 | for (; level < num_levels; level++) { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 1806 | struct g4x_pipe_wm *raw = &crtc_state->wm.vlv.raw[level]; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1807 | |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1808 | dirty |= raw->plane[plane_id] != value; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1809 | raw->plane[plane_id] = value; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1810 | } |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1811 | |
| 1812 | return dirty; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1813 | } |
| 1814 | |
Ville Syrjälä | 77d14ee | 2017-04-21 21:14:18 +0300 | [diff] [blame] | 1815 | static bool vlv_raw_plane_wm_compute(struct intel_crtc_state *crtc_state, |
| 1816 | const struct intel_plane_state *plane_state) |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1817 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 1818 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 1819 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1820 | enum plane_id plane_id = plane->id; |
Ville Syrjälä | 6d5019b | 2017-04-21 21:14:20 +0300 | [diff] [blame] | 1821 | int num_levels = intel_wm_num_levels(to_i915(plane->base.dev)); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1822 | int level; |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1823 | bool dirty = false; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1824 | |
Ville Syrjälä | a07102f | 2017-03-03 17:19:27 +0200 | [diff] [blame] | 1825 | if (!intel_wm_plane_visible(crtc_state, plane_state)) { |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1826 | dirty |= vlv_raw_plane_wm_set(crtc_state, 0, plane_id, 0); |
| 1827 | goto out; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1828 | } |
| 1829 | |
| 1830 | for (level = 0; level < num_levels; level++) { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 1831 | struct g4x_pipe_wm *raw = &crtc_state->wm.vlv.raw[level]; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1832 | int wm = vlv_compute_wm_level(crtc_state, plane_state, level); |
| 1833 | int max_wm = plane_id == PLANE_CURSOR ? 63 : 511; |
| 1834 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1835 | if (wm > max_wm) |
| 1836 | break; |
| 1837 | |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1838 | dirty |= raw->plane[plane_id] != wm; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1839 | raw->plane[plane_id] = wm; |
| 1840 | } |
| 1841 | |
| 1842 | /* mark all higher levels as invalid */ |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1843 | dirty |= vlv_raw_plane_wm_set(crtc_state, level, plane_id, USHRT_MAX); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1844 | |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1845 | out: |
| 1846 | if (dirty) |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 1847 | drm_dbg_kms(&dev_priv->drm, |
| 1848 | "%s watermarks: PM2=%d, PM5=%d, DDR DVFS=%d\n", |
| 1849 | plane->base.name, |
| 1850 | crtc_state->wm.vlv.raw[VLV_WM_LEVEL_PM2].plane[plane_id], |
| 1851 | crtc_state->wm.vlv.raw[VLV_WM_LEVEL_PM5].plane[plane_id], |
| 1852 | crtc_state->wm.vlv.raw[VLV_WM_LEVEL_DDR_DVFS].plane[plane_id]); |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1853 | |
| 1854 | return dirty; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1855 | } |
| 1856 | |
Ville Syrjälä | 77d14ee | 2017-04-21 21:14:18 +0300 | [diff] [blame] | 1857 | static bool vlv_raw_plane_wm_is_valid(const struct intel_crtc_state *crtc_state, |
| 1858 | enum plane_id plane_id, int level) |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1859 | { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 1860 | const struct g4x_pipe_wm *raw = |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1861 | &crtc_state->wm.vlv.raw[level]; |
| 1862 | const struct vlv_fifo_state *fifo_state = |
| 1863 | &crtc_state->wm.vlv.fifo_state; |
| 1864 | |
| 1865 | return raw->plane[plane_id] <= fifo_state->plane[plane_id]; |
| 1866 | } |
| 1867 | |
Ville Syrjälä | 77d14ee | 2017-04-21 21:14:18 +0300 | [diff] [blame] | 1868 | static bool vlv_raw_crtc_wm_is_valid(const struct intel_crtc_state *crtc_state, int level) |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1869 | { |
Ville Syrjälä | 77d14ee | 2017-04-21 21:14:18 +0300 | [diff] [blame] | 1870 | return vlv_raw_plane_wm_is_valid(crtc_state, PLANE_PRIMARY, level) && |
| 1871 | vlv_raw_plane_wm_is_valid(crtc_state, PLANE_SPRITE0, level) && |
| 1872 | vlv_raw_plane_wm_is_valid(crtc_state, PLANE_SPRITE1, level) && |
| 1873 | vlv_raw_plane_wm_is_valid(crtc_state, PLANE_CURSOR, level); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1874 | } |
| 1875 | |
| 1876 | static int vlv_compute_pipe_wm(struct intel_crtc_state *crtc_state) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1877 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1878 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Ville Syrjälä | 7c951c0 | 2016-11-28 19:37:10 +0200 | [diff] [blame] | 1879 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1880 | struct intel_atomic_state *state = |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1881 | to_intel_atomic_state(crtc_state->uapi.state); |
Ville Syrjälä | 855c79f | 2017-03-02 19:14:54 +0200 | [diff] [blame] | 1882 | struct vlv_wm_state *wm_state = &crtc_state->wm.vlv.optimal; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1883 | const struct vlv_fifo_state *fifo_state = |
| 1884 | &crtc_state->wm.vlv.fifo_state; |
Ville Syrjälä | 0b14d96 | 2019-08-21 20:30:33 +0300 | [diff] [blame] | 1885 | int num_active_planes = hweight8(crtc_state->active_planes & |
| 1886 | ~BIT(PLANE_CURSOR)); |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 1887 | bool needs_modeset = drm_atomic_crtc_needs_modeset(&crtc_state->uapi); |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1888 | const struct intel_plane_state *old_plane_state; |
| 1889 | const struct intel_plane_state *new_plane_state; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1890 | struct intel_plane *plane; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1891 | enum plane_id plane_id; |
| 1892 | int level, ret, i; |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1893 | unsigned int dirty = 0; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1894 | |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1895 | for_each_oldnew_intel_plane_in_state(state, plane, |
| 1896 | old_plane_state, |
| 1897 | new_plane_state, i) { |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 1898 | if (new_plane_state->hw.crtc != &crtc->base && |
| 1899 | old_plane_state->hw.crtc != &crtc->base) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1900 | continue; |
| 1901 | |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1902 | if (vlv_raw_plane_wm_compute(crtc_state, new_plane_state)) |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1903 | dirty |= BIT(plane->id); |
| 1904 | } |
| 1905 | |
| 1906 | /* |
| 1907 | * DSPARB registers may have been reset due to the |
| 1908 | * power well being turned off. Make sure we restore |
| 1909 | * them to a consistent state even if no primary/sprite |
| 1910 | * planes are initially active. |
| 1911 | */ |
| 1912 | if (needs_modeset) |
| 1913 | crtc_state->fifo_changed = true; |
| 1914 | |
| 1915 | if (!dirty) |
| 1916 | return 0; |
| 1917 | |
| 1918 | /* cursor changes don't warrant a FIFO recompute */ |
| 1919 | if (dirty & ~BIT(PLANE_CURSOR)) { |
| 1920 | const struct intel_crtc_state *old_crtc_state = |
Ville Syrjälä | 7b510451 | 2017-08-23 18:22:22 +0300 | [diff] [blame] | 1921 | intel_atomic_get_old_crtc_state(state, crtc); |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1922 | const struct vlv_fifo_state *old_fifo_state = |
| 1923 | &old_crtc_state->wm.vlv.fifo_state; |
| 1924 | |
| 1925 | ret = vlv_compute_fifo(crtc_state); |
| 1926 | if (ret) |
| 1927 | return ret; |
| 1928 | |
| 1929 | if (needs_modeset || |
| 1930 | memcmp(old_fifo_state, fifo_state, |
| 1931 | sizeof(*fifo_state)) != 0) |
| 1932 | crtc_state->fifo_changed = true; |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1933 | } |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1934 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1935 | /* initially allow all levels */ |
Ville Syrjälä | 6d5019b | 2017-04-21 21:14:20 +0300 | [diff] [blame] | 1936 | wm_state->num_levels = intel_wm_num_levels(dev_priv); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1937 | /* |
| 1938 | * Note that enabling cxsr with no primary/sprite planes |
| 1939 | * enabled can wedge the pipe. Hence we only allow cxsr |
| 1940 | * with exactly one enabled primary/sprite plane. |
| 1941 | */ |
Ville Syrjälä | 5eeb798 | 2017-03-02 19:15:00 +0200 | [diff] [blame] | 1942 | wm_state->cxsr = crtc->pipe != PIPE_C && num_active_planes == 1; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1943 | |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1944 | for (level = 0; level < wm_state->num_levels; level++) { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 1945 | const struct g4x_pipe_wm *raw = &crtc_state->wm.vlv.raw[level]; |
Jani Nikula | 2497787 | 2019-09-11 12:26:08 +0300 | [diff] [blame] | 1946 | const int sr_fifo_size = INTEL_NUM_PIPES(dev_priv) * 512 - 1; |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1947 | |
Ville Syrjälä | 77d14ee | 2017-04-21 21:14:18 +0300 | [diff] [blame] | 1948 | if (!vlv_raw_crtc_wm_is_valid(crtc_state, level)) |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1949 | break; |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1950 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1951 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 1952 | wm_state->wm[level].plane[plane_id] = |
| 1953 | vlv_invert_wm_value(raw->plane[plane_id], |
| 1954 | fifo_state->plane[plane_id]); |
| 1955 | } |
| 1956 | |
| 1957 | wm_state->sr[level].plane = |
| 1958 | vlv_invert_wm_value(max3(raw->plane[PLANE_PRIMARY], |
Ville Syrjälä | 5012e60 | 2017-03-02 19:14:56 +0200 | [diff] [blame] | 1959 | raw->plane[PLANE_SPRITE0], |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1960 | raw->plane[PLANE_SPRITE1]), |
| 1961 | sr_fifo_size); |
| 1962 | |
| 1963 | wm_state->sr[level].cursor = |
| 1964 | vlv_invert_wm_value(raw->plane[PLANE_CURSOR], |
| 1965 | 63); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1966 | } |
| 1967 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1968 | if (level == 0) |
| 1969 | return -EINVAL; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1970 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1971 | /* limit to only levels we can actually handle */ |
| 1972 | wm_state->num_levels = level; |
| 1973 | |
| 1974 | /* invalidate the higher levels */ |
| 1975 | vlv_invalidate_wms(crtc, wm_state, level); |
| 1976 | |
| 1977 | return 0; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 1978 | } |
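/*
 * Worked example (illustrative only, the numbers are made up): with
 * INTEL_NUM_PIPES() == 2 the single-pipe SR FIFO above is
 * 2 * 512 - 1 = 1023 entries (1535 on three-pipe CHV), while the cursor
 * is always limited to 63 entries. Assuming vlv_invert_wm_value(),
 * defined earlier in this file, simply returns "fifo_size - wm" for
 * values that fit, a raw primary watermark of 96 with a 256 entry FIFO
 * slice becomes a register value of 256 - 96 = 160. The loop stops at
 * the first level whose raw values no longer fit their FIFO slices, and
 * vlv_invalidate_wms() then marks all higher levels unusable.
 */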
| 1979 | |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1980 | #define VLV_FIFO(plane, value) \ |
| 1981 | (((value) << DSPARB_ ## plane ## _SHIFT_VLV) & DSPARB_ ## plane ## _MASK_VLV) |
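/*
 * For illustration, VLV_FIFO(SPRITEB, sprite1_start) expands to
 *
 *   ((sprite1_start << DSPARB_SPRITEB_SHIFT_VLV) & DSPARB_SPRITEB_MASK_VLV)
 *
 * i.e. it shifts the low byte of a FIFO split point into the matching
 * field of DSPARB/DSPARB2/DSPARB3; the "_HI" variants used below carry
 * bit 8 of the same split point (the value passed in is ">> 8").
 */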
| 1982 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 1983 | static void vlv_atomic_update_fifo(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1984 | struct intel_crtc *crtc) |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1985 | { |
Ville Syrjälä | f07d43d | 2017-03-02 19:14:52 +0200 | [diff] [blame] | 1986 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 1987 | struct intel_uncore *uncore = &dev_priv->uncore; |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 1988 | const struct intel_crtc_state *crtc_state = |
| 1989 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | 814e7f0 | 2017-03-02 19:14:55 +0200 | [diff] [blame] | 1990 | const struct vlv_fifo_state *fifo_state = |
| 1991 | &crtc_state->wm.vlv.fifo_state; |
Ville Syrjälä | f07d43d | 2017-03-02 19:14:52 +0200 | [diff] [blame] | 1992 | int sprite0_start, sprite1_start, fifo_size; |
Kees Cook | 2713eb4 | 2020-02-20 16:05:17 -0800 | [diff] [blame] | 1993 | u32 dsparb, dsparb2, dsparb3; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 1994 | |
Ville Syrjälä | 236c48e | 2017-03-02 19:14:58 +0200 | [diff] [blame] | 1995 | if (!crtc_state->fifo_changed) |
| 1996 | return; |
| 1997 | |
Ville Syrjälä | f07d43d | 2017-03-02 19:14:52 +0200 | [diff] [blame] | 1998 | sprite0_start = fifo_state->plane[PLANE_PRIMARY]; |
| 1999 | sprite1_start = fifo_state->plane[PLANE_SPRITE0] + sprite0_start; |
| 2000 | fifo_size = fifo_state->plane[PLANE_SPRITE1] + sprite1_start; |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2001 | |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 2002 | drm_WARN_ON(&dev_priv->drm, fifo_state->plane[PLANE_CURSOR] != 63); |
| 2003 | drm_WARN_ON(&dev_priv->drm, fifo_size != 511); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2004 | |
Ville Syrjälä | c137d66 | 2017-03-02 19:15:06 +0200 | [diff] [blame] | 2005 | trace_vlv_fifo_size(crtc, sprite0_start, sprite1_start, fifo_size); |
| 2006 | |
Ville Syrjälä | 44e921d | 2017-03-09 17:44:34 +0200 | [diff] [blame] | 2007 | /* |
| 2008 | * uncore.lock serves a double purpose here. It allows us to |
| 2009 | 	 * use the cheaper intel_uncore_*_fw() accessors, and |
| 2010 | * it protects the DSPARB registers from getting clobbered by |
| 2011 | * parallel updates from multiple pipes. |
| 2012 | * |
| 2013 | * intel_pipe_update_start() has already disabled interrupts |
| 2014 | * for us, so a plain spin_lock() is sufficient here. |
| 2015 | */ |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2016 | spin_lock(&uncore->lock); |
Ville Syrjälä | 467a14d | 2016-12-05 16:13:28 +0200 | [diff] [blame] | 2017 | |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2018 | switch (crtc->pipe) { |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2019 | case PIPE_A: |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2020 | dsparb = intel_uncore_read_fw(uncore, DSPARB); |
| 2021 | dsparb2 = intel_uncore_read_fw(uncore, DSPARB2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2022 | |
| 2023 | dsparb &= ~(VLV_FIFO(SPRITEA, 0xff) | |
| 2024 | VLV_FIFO(SPRITEB, 0xff)); |
| 2025 | dsparb |= (VLV_FIFO(SPRITEA, sprite0_start) | |
| 2026 | VLV_FIFO(SPRITEB, sprite1_start)); |
| 2027 | |
| 2028 | dsparb2 &= ~(VLV_FIFO(SPRITEA_HI, 0x1) | |
| 2029 | VLV_FIFO(SPRITEB_HI, 0x1)); |
| 2030 | dsparb2 |= (VLV_FIFO(SPRITEA_HI, sprite0_start >> 8) | |
| 2031 | VLV_FIFO(SPRITEB_HI, sprite1_start >> 8)); |
| 2032 | |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2033 | intel_uncore_write_fw(uncore, DSPARB, dsparb); |
| 2034 | intel_uncore_write_fw(uncore, DSPARB2, dsparb2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2035 | break; |
| 2036 | case PIPE_B: |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2037 | dsparb = intel_uncore_read_fw(uncore, DSPARB); |
| 2038 | dsparb2 = intel_uncore_read_fw(uncore, DSPARB2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2039 | |
| 2040 | dsparb &= ~(VLV_FIFO(SPRITEC, 0xff) | |
| 2041 | VLV_FIFO(SPRITED, 0xff)); |
| 2042 | dsparb |= (VLV_FIFO(SPRITEC, sprite0_start) | |
| 2043 | VLV_FIFO(SPRITED, sprite1_start)); |
| 2044 | |
| 2045 | dsparb2 &= ~(VLV_FIFO(SPRITEC_HI, 0xff) | |
| 2046 | VLV_FIFO(SPRITED_HI, 0xff)); |
| 2047 | dsparb2 |= (VLV_FIFO(SPRITEC_HI, sprite0_start >> 8) | |
| 2048 | VLV_FIFO(SPRITED_HI, sprite1_start >> 8)); |
| 2049 | |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2050 | intel_uncore_write_fw(uncore, DSPARB, dsparb); |
| 2051 | intel_uncore_write_fw(uncore, DSPARB2, dsparb2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2052 | break; |
| 2053 | case PIPE_C: |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2054 | dsparb3 = intel_uncore_read_fw(uncore, DSPARB3); |
| 2055 | dsparb2 = intel_uncore_read_fw(uncore, DSPARB2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2056 | |
| 2057 | dsparb3 &= ~(VLV_FIFO(SPRITEE, 0xff) | |
| 2058 | VLV_FIFO(SPRITEF, 0xff)); |
| 2059 | dsparb3 |= (VLV_FIFO(SPRITEE, sprite0_start) | |
| 2060 | VLV_FIFO(SPRITEF, sprite1_start)); |
| 2061 | |
| 2062 | dsparb2 &= ~(VLV_FIFO(SPRITEE_HI, 0xff) | |
| 2063 | VLV_FIFO(SPRITEF_HI, 0xff)); |
| 2064 | dsparb2 |= (VLV_FIFO(SPRITEE_HI, sprite0_start >> 8) | |
| 2065 | VLV_FIFO(SPRITEF_HI, sprite1_start >> 8)); |
| 2066 | |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2067 | intel_uncore_write_fw(uncore, DSPARB3, dsparb3); |
| 2068 | intel_uncore_write_fw(uncore, DSPARB2, dsparb2); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2069 | break; |
| 2070 | default: |
| 2071 | break; |
| 2072 | } |
Ville Syrjälä | 467a14d | 2016-12-05 16:13:28 +0200 | [diff] [blame] | 2073 | |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2074 | intel_uncore_posting_read_fw(uncore, DSPARB); |
Ville Syrjälä | 467a14d | 2016-12-05 16:13:28 +0200 | [diff] [blame] | 2075 | |
Tvrtko Ursulin | e33a4be | 2019-06-11 11:45:44 +0100 | [diff] [blame] | 2076 | spin_unlock(&uncore->lock); |
Ville Syrjälä | 54f1b6e | 2015-06-24 22:00:05 +0300 | [diff] [blame] | 2077 | } |
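/*
 * Worked example (illustrative, made-up split): with a primary slice of
 * 256 entries, sprite0 of 128 and sprite1 of 127, the code above computes
 * sprite0_start = 256, sprite1_start = 384 and fifo_size = 511, which is
 * exactly what the drm_WARN_ON() checks expect (511 plane entries plus
 * the fixed 63 entry cursor FIFO). The low byte of each split point lands
 * in DSPARB/DSPARB3 and bit 8 in DSPARB2 via the VLV_FIFO() fields.
 */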
| 2078 | |
| 2079 | #undef VLV_FIFO |
| 2080 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2081 | static int vlv_compute_intermediate_wm(struct intel_crtc_state *new_crtc_state) |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2082 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 2083 | struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc); |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2084 | struct vlv_wm_state *intermediate = &new_crtc_state->wm.vlv.intermediate; |
| 2085 | const struct vlv_wm_state *optimal = &new_crtc_state->wm.vlv.optimal; |
| 2086 | struct intel_atomic_state *intel_state = |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 2087 | to_intel_atomic_state(new_crtc_state->uapi.state); |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2088 | const struct intel_crtc_state *old_crtc_state = |
| 2089 | intel_atomic_get_old_crtc_state(intel_state, crtc); |
| 2090 | const struct vlv_wm_state *active = &old_crtc_state->wm.vlv.optimal; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2091 | int level; |
| 2092 | |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 2093 | if (!new_crtc_state->hw.active || drm_atomic_crtc_needs_modeset(&new_crtc_state->uapi)) { |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2094 | *intermediate = *optimal; |
| 2095 | |
| 2096 | intermediate->cxsr = false; |
| 2097 | goto out; |
| 2098 | } |
| 2099 | |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2100 | intermediate->num_levels = min(optimal->num_levels, active->num_levels); |
Ville Syrjälä | 5eeb798 | 2017-03-02 19:15:00 +0200 | [diff] [blame] | 2101 | intermediate->cxsr = optimal->cxsr && active->cxsr && |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2102 | !new_crtc_state->disable_cxsr; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2103 | |
| 2104 | for (level = 0; level < intermediate->num_levels; level++) { |
| 2105 | enum plane_id plane_id; |
| 2106 | |
| 2107 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 2108 | intermediate->wm[level].plane[plane_id] = |
| 2109 | min(optimal->wm[level].plane[plane_id], |
| 2110 | active->wm[level].plane[plane_id]); |
| 2111 | } |
| 2112 | |
| 2113 | intermediate->sr[level].plane = min(optimal->sr[level].plane, |
| 2114 | active->sr[level].plane); |
| 2115 | intermediate->sr[level].cursor = min(optimal->sr[level].cursor, |
| 2116 | active->sr[level].cursor); |
| 2117 | } |
| 2118 | |
| 2119 | vlv_invalidate_wms(crtc, intermediate, level); |
| 2120 | |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2121 | out: |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2122 | /* |
| 2123 | 	 * If our intermediate watermarks are identical to the final ones, we can |
| 2124 | 	 * omit the post-vblank programming; only update if they differ. |
| 2125 | */ |
Ville Syrjälä | 5eeb798 | 2017-03-02 19:15:00 +0200 | [diff] [blame] | 2126 | if (memcmp(intermediate, optimal, sizeof(*intermediate)) != 0) |
Maarten Lankhorst | 5b9489c | 2017-11-15 17:31:56 +0100 | [diff] [blame] | 2127 | new_crtc_state->wm.need_postvbl_update = true; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2128 | |
| 2129 | return 0; |
| 2130 | } |
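/*
 * Sketch of the merge above (illustrative): for every level and plane the
 * intermediate value is min(optimal, active), e.g. min(160, 120) = 120.
 * Since these are the inverted values (FIFO slice size minus the raw
 * requirement, see vlv_invert_wm_value() earlier in this file), the
 * smaller of the two is the stricter watermark, so the value programmed
 * while the plane update is still in flight remains valid for both the
 * old and the new configuration.
 */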
| 2131 | |
Ville Syrjälä | 7c951c0 | 2016-11-28 19:37:10 +0200 | [diff] [blame] | 2132 | static void vlv_merge_wm(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2133 | struct vlv_wm_values *wm) |
| 2134 | { |
| 2135 | struct intel_crtc *crtc; |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 2136 | int num_active_pipes = 0; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2137 | |
Ville Syrjälä | 7c951c0 | 2016-11-28 19:37:10 +0200 | [diff] [blame] | 2138 | wm->level = dev_priv->wm.max_level; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2139 | wm->cxsr = true; |
| 2140 | |
Ville Syrjälä | 7c951c0 | 2016-11-28 19:37:10 +0200 | [diff] [blame] | 2141 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Ville Syrjälä | 7eb4941 | 2017-03-02 19:14:53 +0200 | [diff] [blame] | 2142 | const struct vlv_wm_state *wm_state = &crtc->wm.active.vlv; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2143 | |
| 2144 | if (!crtc->active) |
| 2145 | continue; |
| 2146 | |
| 2147 | if (!wm_state->cxsr) |
| 2148 | wm->cxsr = false; |
| 2149 | |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 2150 | num_active_pipes++; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2151 | wm->level = min_t(int, wm->level, wm_state->num_levels - 1); |
| 2152 | } |
| 2153 | |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 2154 | if (num_active_pipes != 1) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2155 | wm->cxsr = false; |
| 2156 | |
Ville Syrjälä | c08e913 | 2019-08-21 20:30:32 +0300 | [diff] [blame] | 2157 | if (num_active_pipes > 1) |
Ville Syrjälä | 6f9c784 | 2015-06-24 22:00:08 +0300 | [diff] [blame] | 2158 | wm->level = VLV_WM_LEVEL_PM2; |
| 2159 | |
Ville Syrjälä | 7c951c0 | 2016-11-28 19:37:10 +0200 | [diff] [blame] | 2160 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Ville Syrjälä | 7eb4941 | 2017-03-02 19:14:53 +0200 | [diff] [blame] | 2161 | const struct vlv_wm_state *wm_state = &crtc->wm.active.vlv; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2162 | enum pipe pipe = crtc->pipe; |
| 2163 | |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2164 | wm->pipe[pipe] = wm_state->wm[wm->level]; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2165 | if (crtc->active && wm->cxsr) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2166 | wm->sr = wm_state->sr[wm->level]; |
| 2167 | |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 2168 | wm->ddl[pipe].plane[PLANE_PRIMARY] = DDL_PRECISION_HIGH | 2; |
| 2169 | wm->ddl[pipe].plane[PLANE_SPRITE0] = DDL_PRECISION_HIGH | 2; |
| 2170 | wm->ddl[pipe].plane[PLANE_SPRITE1] = DDL_PRECISION_HIGH | 2; |
| 2171 | wm->ddl[pipe].plane[PLANE_CURSOR] = DDL_PRECISION_HIGH | 2; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2172 | } |
| 2173 | } |
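/*
 * Note on the merge above: cxsr is only left enabled when exactly one
 * pipe is active (and every active crtc allows it), and with more than
 * one active pipe the global level is capped at VLV_WM_LEVEL_PM2, i.e.
 * the deeper PM5/DDR DVFS levels are effectively single-pipe only.
 */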
| 2174 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2175 | static void vlv_program_watermarks(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2176 | { |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2177 | struct vlv_wm_values *old_wm = &dev_priv->wm.vlv; |
| 2178 | struct vlv_wm_values new_wm = {}; |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2179 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2180 | vlv_merge_wm(dev_priv, &new_wm); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2181 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2182 | if (memcmp(old_wm, &new_wm, sizeof(new_wm)) == 0) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2183 | return; |
| 2184 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2185 | if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS)) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2186 | chv_set_memory_dvfs(dev_priv, false); |
| 2187 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2188 | if (is_disabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5)) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2189 | chv_set_memory_pm5(dev_priv, false); |
| 2190 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2191 | if (is_disabling(old_wm->cxsr, new_wm.cxsr, true)) |
Ville Syrjälä | 3d90e64 | 2016-11-28 19:37:11 +0200 | [diff] [blame] | 2192 | _intel_set_memory_cxsr(dev_priv, false); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2193 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2194 | vlv_write_wm_values(dev_priv, &new_wm); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2195 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2196 | if (is_enabling(old_wm->cxsr, new_wm.cxsr, true)) |
Ville Syrjälä | 3d90e64 | 2016-11-28 19:37:11 +0200 | [diff] [blame] | 2197 | _intel_set_memory_cxsr(dev_priv, true); |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2198 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2199 | if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_PM5)) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2200 | chv_set_memory_pm5(dev_priv, true); |
| 2201 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2202 | if (is_enabling(old_wm->level, new_wm.level, VLV_WM_LEVEL_DDR_DVFS)) |
Ville Syrjälä | 262cd2e | 2015-06-24 22:00:04 +0300 | [diff] [blame] | 2203 | chv_set_memory_dvfs(dev_priv, true); |
| 2204 | |
Ville Syrjälä | fa292a4 | 2016-11-28 19:37:16 +0200 | [diff] [blame] | 2205 | *old_wm = new_wm; |
Ville Syrjälä | 3c2777f | 2014-06-26 17:03:06 +0300 | [diff] [blame] | 2206 | } |
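/*
 * Note on the ordering above: transitions that lose a power-saving state
 * (DDR DVFS, PM5, cxsr) are applied before the new watermarks are
 * written, and transitions that gain one are applied after. For example,
 * when dropping from DDR_DVFS to PM2, chv_set_memory_dvfs(false) runs
 * before vlv_write_wm_values(); when going the other way it runs after,
 * so the deeper states are never active with watermarks that do not
 * support them.
 */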
| 2207 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2208 | static void vlv_initial_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 2209 | struct intel_crtc *crtc) |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2210 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 2211 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 2212 | const struct intel_crtc_state *crtc_state = |
| 2213 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2214 | |
| 2215 | mutex_lock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2216 | crtc->wm.active.vlv = crtc_state->wm.vlv.intermediate; |
| 2217 | vlv_program_watermarks(dev_priv); |
| 2218 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 2219 | } |
| 2220 | |
| 2221 | static void vlv_optimize_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 2222 | struct intel_crtc *crtc) |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2223 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 2224 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 2225 | const struct intel_crtc_state *crtc_state = |
| 2226 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 2227 | |
| 2228 | if (!crtc_state->wm.need_postvbl_update) |
| 2229 | return; |
| 2230 | |
| 2231 | mutex_lock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | 88016a9 | 2019-07-01 19:05:45 +0300 | [diff] [blame] | 2232 | crtc->wm.active.vlv = crtc_state->wm.vlv.optimal; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 2233 | vlv_program_watermarks(dev_priv); |
| 2234 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 2235 | } |
| 2236 | |
Ville Syrjälä | 432081b | 2016-10-31 22:37:03 +0200 | [diff] [blame] | 2237 | static void i965_update_wm(struct intel_crtc *unused_crtc) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2238 | { |
Ville Syrjälä | ffc7a76 | 2016-10-31 22:37:21 +0200 | [diff] [blame] | 2239 | struct drm_i915_private *dev_priv = to_i915(unused_crtc->base.dev); |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2240 | struct intel_crtc *crtc; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2241 | int srwm = 1; |
| 2242 | int cursor_sr = 16; |
Imre Deak | 9858425 | 2014-06-13 14:54:20 +0300 | [diff] [blame] | 2243 | bool cxsr_enabled; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2244 | |
| 2245 | /* Calc sr entries for one plane configs */ |
Ville Syrjälä | ffc7a76 | 2016-10-31 22:37:21 +0200 | [diff] [blame] | 2246 | crtc = single_enabled_crtc(dev_priv); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2247 | if (crtc) { |
| 2248 | /* self-refresh has much higher latency */ |
| 2249 | static const int sr_latency_ns = 12000; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2250 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2251 | &crtc->config->hw.adjusted_mode; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2252 | const struct drm_framebuffer *fb = |
| 2253 | crtc->base.primary->state->fb; |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2254 | int clock = adjusted_mode->crtc_clock; |
Jesse Barnes | fec8cba | 2013-11-27 11:10:26 -0800 | [diff] [blame] | 2255 | int htotal = adjusted_mode->crtc_htotal; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2256 | int hdisplay = crtc->config->pipe_src_w; |
Ville Syrjälä | 353c859 | 2016-12-14 23:30:57 +0200 | [diff] [blame] | 2257 | int cpp = fb->format->cpp[0]; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2258 | int entries; |
| 2259 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2260 | entries = intel_wm_method2(clock, htotal, |
| 2261 | hdisplay, cpp, sr_latency_ns / 100); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2262 | entries = DIV_ROUND_UP(entries, I915_FIFO_LINE_SIZE); |
| 2263 | srwm = I965_FIFO_SIZE - entries; |
| 2264 | if (srwm < 0) |
| 2265 | srwm = 1; |
| 2266 | srwm &= 0x1ff; |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2267 | drm_dbg_kms(&dev_priv->drm, |
| 2268 | "self-refresh entries: %d, wm: %d\n", |
| 2269 | entries, srwm); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2270 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2271 | entries = intel_wm_method2(clock, htotal, |
| 2272 | crtc->base.cursor->state->crtc_w, 4, |
| 2273 | sr_latency_ns / 100); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2274 | entries = DIV_ROUND_UP(entries, |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2275 | i965_cursor_wm_info.cacheline_size) + |
| 2276 | i965_cursor_wm_info.guard_size; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2277 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2278 | cursor_sr = i965_cursor_wm_info.fifo_size - entries; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2279 | if (cursor_sr > i965_cursor_wm_info.max_wm) |
| 2280 | cursor_sr = i965_cursor_wm_info.max_wm; |
| 2281 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2282 | drm_dbg_kms(&dev_priv->drm, |
| 2283 | "self-refresh watermark: display plane %d " |
| 2284 | "cursor %d\n", srwm, cursor_sr); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2285 | |
Imre Deak | 9858425 | 2014-06-13 14:54:20 +0300 | [diff] [blame] | 2286 | cxsr_enabled = true; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2287 | } else { |
Imre Deak | 9858425 | 2014-06-13 14:54:20 +0300 | [diff] [blame] | 2288 | cxsr_enabled = false; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2289 | /* Turn off self refresh if both pipes are enabled */ |
Imre Deak | 5209b1f | 2014-07-01 12:36:17 +0300 | [diff] [blame] | 2290 | intel_set_memory_cxsr(dev_priv, false); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2291 | } |
| 2292 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2293 | drm_dbg_kms(&dev_priv->drm, |
| 2294 | "Setting FIFO watermarks - A: 8, B: 8, C: 8, SR %d\n", |
| 2295 | srwm); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2296 | |
| 2297 | /* 965 has limitations... */ |
Ville Syrjälä | f499896 | 2015-03-10 17:02:21 +0200 | [diff] [blame] | 2298 | I915_WRITE(DSPFW1, FW_WM(srwm, SR) | |
| 2299 | FW_WM(8, CURSORB) | |
| 2300 | FW_WM(8, PLANEB) | |
| 2301 | FW_WM(8, PLANEA)); |
| 2302 | I915_WRITE(DSPFW2, FW_WM(8, CURSORA) | |
| 2303 | FW_WM(8, PLANEC_OLD)); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2304 | /* update cursor SR watermark */ |
Ville Syrjälä | f499896 | 2015-03-10 17:02:21 +0200 | [diff] [blame] | 2305 | I915_WRITE(DSPFW3, FW_WM(cursor_sr, CURSOR_SR)); |
Imre Deak | 9858425 | 2014-06-13 14:54:20 +0300 | [diff] [blame] | 2306 | |
| 2307 | if (cxsr_enabled) |
| 2308 | intel_set_memory_cxsr(dev_priv, true); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2309 | } |
| 2310 | |
Ville Syrjälä | f499896 | 2015-03-10 17:02:21 +0200 | [diff] [blame] | 2311 | #undef FW_WM |
| 2312 | |
Ville Syrjälä | 432081b | 2016-10-31 22:37:03 +0200 | [diff] [blame] | 2313 | static void i9xx_update_wm(struct intel_crtc *unused_crtc) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2314 | { |
Ville Syrjälä | ffc7a76 | 2016-10-31 22:37:21 +0200 | [diff] [blame] | 2315 | struct drm_i915_private *dev_priv = to_i915(unused_crtc->base.dev); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2316 | const struct intel_watermark_params *wm_info; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2317 | u32 fwater_lo; |
| 2318 | u32 fwater_hi; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2319 | int cwm, srwm = 1; |
| 2320 | int fifo_size; |
| 2321 | int planea_wm, planeb_wm; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2322 | struct intel_crtc *crtc, *enabled = NULL; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2323 | |
Ville Syrjälä | a9097be | 2016-10-31 22:37:20 +0200 | [diff] [blame] | 2324 | if (IS_I945GM(dev_priv)) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2325 | wm_info = &i945_wm_info; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2326 | else if (!IS_GEN(dev_priv, 2)) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2327 | wm_info = &i915_wm_info; |
| 2328 | else |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2329 | wm_info = &i830_a_wm_info; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2330 | |
Ville Syrjälä | bdaf843 | 2017-11-17 21:19:11 +0200 | [diff] [blame] | 2331 | fifo_size = dev_priv->display.get_fifo_size(dev_priv, PLANE_A); |
| 2332 | crtc = intel_get_crtc_for_plane(dev_priv, PLANE_A); |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2333 | if (intel_crtc_active(crtc)) { |
| 2334 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2335 | &crtc->config->hw.adjusted_mode; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2336 | const struct drm_framebuffer *fb = |
| 2337 | crtc->base.primary->state->fb; |
| 2338 | int cpp; |
| 2339 | |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2340 | if (IS_GEN(dev_priv, 2)) |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2341 | cpp = 4; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2342 | else |
Ville Syrjälä | 353c859 | 2016-12-14 23:30:57 +0200 | [diff] [blame] | 2343 | cpp = fb->format->cpp[0]; |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2344 | |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2345 | planea_wm = intel_calculate_wm(adjusted_mode->crtc_clock, |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2346 | wm_info, fifo_size, cpp, |
Chris Wilson | 5aef600 | 2014-09-03 11:56:07 +0100 | [diff] [blame] | 2347 | pessimal_latency_ns); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2348 | enabled = crtc; |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2349 | } else { |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2350 | planea_wm = fifo_size - wm_info->guard_size; |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2351 | if (planea_wm > (long)wm_info->max_wm) |
| 2352 | planea_wm = wm_info->max_wm; |
| 2353 | } |
| 2354 | |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2355 | if (IS_GEN(dev_priv, 2)) |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2356 | wm_info = &i830_bc_wm_info; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2357 | |
Ville Syrjälä | bdaf843 | 2017-11-17 21:19:11 +0200 | [diff] [blame] | 2358 | fifo_size = dev_priv->display.get_fifo_size(dev_priv, PLANE_B); |
| 2359 | crtc = intel_get_crtc_for_plane(dev_priv, PLANE_B); |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2360 | if (intel_crtc_active(crtc)) { |
| 2361 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2362 | &crtc->config->hw.adjusted_mode; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2363 | const struct drm_framebuffer *fb = |
| 2364 | crtc->base.primary->state->fb; |
| 2365 | int cpp; |
| 2366 | |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2367 | if (IS_GEN(dev_priv, 2)) |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2368 | cpp = 4; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2369 | else |
Ville Syrjälä | 353c859 | 2016-12-14 23:30:57 +0200 | [diff] [blame] | 2370 | cpp = fb->format->cpp[0]; |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2371 | |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2372 | planeb_wm = intel_calculate_wm(adjusted_mode->crtc_clock, |
Chris Wilson | b9e0bda | 2012-10-22 12:32:15 +0100 | [diff] [blame] | 2373 | wm_info, fifo_size, cpp, |
Chris Wilson | 5aef600 | 2014-09-03 11:56:07 +0100 | [diff] [blame] | 2374 | pessimal_latency_ns); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2375 | if (enabled == NULL) |
| 2376 | enabled = crtc; |
| 2377 | else |
| 2378 | enabled = NULL; |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2379 | } else { |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2380 | planeb_wm = fifo_size - wm_info->guard_size; |
Ville Syrjälä | 9d53910 | 2014-08-15 01:21:53 +0300 | [diff] [blame] | 2381 | if (planeb_wm > (long)wm_info->max_wm) |
| 2382 | planeb_wm = wm_info->max_wm; |
| 2383 | } |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2384 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2385 | drm_dbg_kms(&dev_priv->drm, |
| 2386 | "FIFO watermarks - A: %d, B: %d\n", planea_wm, planeb_wm); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2387 | |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 2388 | if (IS_I915GM(dev_priv) && enabled) { |
Matt Roper | 2ff8fde | 2014-07-08 07:50:07 -0700 | [diff] [blame] | 2389 | struct drm_i915_gem_object *obj; |
Daniel Vetter | 2ab1bc9 | 2014-04-07 08:54:21 +0200 | [diff] [blame] | 2390 | |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2391 | obj = intel_fb_obj(enabled->base.primary->state->fb); |
Daniel Vetter | 2ab1bc9 | 2014-04-07 08:54:21 +0200 | [diff] [blame] | 2392 | |
| 2393 | /* self-refresh seems busted with untiled */ |
Chris Wilson | 3e510a8 | 2016-08-05 10:14:23 +0100 | [diff] [blame] | 2394 | if (!i915_gem_object_is_tiled(obj)) |
Daniel Vetter | 2ab1bc9 | 2014-04-07 08:54:21 +0200 | [diff] [blame] | 2395 | enabled = NULL; |
| 2396 | } |
| 2397 | |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2398 | /* |
| 2399 | * Overlay gets an aggressive default since video jitter is bad. |
| 2400 | */ |
| 2401 | cwm = 2; |
| 2402 | |
| 2403 | /* Play safe and disable self-refresh before adjusting watermarks. */ |
Imre Deak | 5209b1f | 2014-07-01 12:36:17 +0300 | [diff] [blame] | 2404 | intel_set_memory_cxsr(dev_priv, false); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2405 | |
| 2406 | /* Calc sr entries for one plane configs */ |
Ville Syrjälä | 03427fc | 2016-10-31 22:37:18 +0200 | [diff] [blame] | 2407 | if (HAS_FW_BLC(dev_priv) && enabled) { |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2408 | /* self-refresh has much higher latency */ |
| 2409 | static const int sr_latency_ns = 6000; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2410 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2411 | &enabled->config->hw.adjusted_mode; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2412 | const struct drm_framebuffer *fb = |
| 2413 | enabled->base.primary->state->fb; |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2414 | int clock = adjusted_mode->crtc_clock; |
Jesse Barnes | fec8cba | 2013-11-27 11:10:26 -0800 | [diff] [blame] | 2415 | int htotal = adjusted_mode->crtc_htotal; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2416 | int hdisplay = enabled->config->pipe_src_w; |
| 2417 | int cpp; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2418 | int entries; |
| 2419 | |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 2420 | if (IS_I915GM(dev_priv) || IS_I945GM(dev_priv)) |
Ville Syrjälä | 2d1b505 | 2016-07-29 17:57:01 +0300 | [diff] [blame] | 2421 | cpp = 4; |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2422 | else |
Ville Syrjälä | 353c859 | 2016-12-14 23:30:57 +0200 | [diff] [blame] | 2423 | cpp = fb->format->cpp[0]; |
Ville Syrjälä | 2d1b505 | 2016-07-29 17:57:01 +0300 | [diff] [blame] | 2424 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2425 | entries = intel_wm_method2(clock, htotal, hdisplay, cpp, |
| 2426 | sr_latency_ns / 100); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2427 | entries = DIV_ROUND_UP(entries, wm_info->cacheline_size); |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2428 | drm_dbg_kms(&dev_priv->drm, |
| 2429 | "self-refresh entries: %d\n", entries); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2430 | srwm = wm_info->fifo_size - entries; |
| 2431 | if (srwm < 0) |
| 2432 | srwm = 1; |
| 2433 | |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 2434 | if (IS_I945G(dev_priv) || IS_I945GM(dev_priv)) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2435 | I915_WRITE(FW_BLC_SELF, |
| 2436 | FW_BLC_SELF_FIFO_MASK | (srwm & 0xff)); |
Ville Syrjälä | acb9135 | 2016-07-29 17:57:02 +0300 | [diff] [blame] | 2437 | else |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2438 | I915_WRITE(FW_BLC_SELF, srwm & 0x3f); |
| 2439 | } |
| 2440 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2441 | drm_dbg_kms(&dev_priv->drm, |
| 2442 | "Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n", |
| 2443 | planea_wm, planeb_wm, cwm, srwm); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2444 | |
| 2445 | fwater_lo = ((planeb_wm & 0x3f) << 16) | (planea_wm & 0x3f); |
| 2446 | fwater_hi = (cwm & 0x1f); |
| 2447 | |
| 2448 | /* Set request length to 8 cachelines per fetch */ |
| 2449 | fwater_lo = fwater_lo | (1 << 24) | (1 << 8); |
| 2450 | fwater_hi = fwater_hi | (1 << 8); |
| 2451 | |
| 2452 | I915_WRITE(FW_BLC, fwater_lo); |
| 2453 | I915_WRITE(FW_BLC2, fwater_hi); |
| 2454 | |
Imre Deak | 5209b1f | 2014-07-01 12:36:17 +0300 | [diff] [blame] | 2455 | if (enabled) |
| 2456 | intel_set_memory_cxsr(dev_priv, true); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2457 | } |
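/*
 * Worked example of the FW_BLC packing above (illustrative values):
 * planea_wm = 38 (0x26), planeb_wm = 20 (0x14), cwm = 2 gives
 *
 *   fwater_lo = (0x14 << 16) | 0x26     = 0x00140026
 *   fwater_lo |= (1 << 24) | (1 << 8)   -> 0x01140126
 *   fwater_hi = 0x2 | (1 << 8)          = 0x00000102
 *
 * i.e. plane A in bits 5:0, plane B in bits 21:16, the cursor watermark
 * in FW_BLC2, and the request-length bits set for 8 cachelines per fetch.
 */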
| 2458 | |
Ville Syrjälä | 432081b | 2016-10-31 22:37:03 +0200 | [diff] [blame] | 2459 | static void i845_update_wm(struct intel_crtc *unused_crtc) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2460 | { |
Ville Syrjälä | ffc7a76 | 2016-10-31 22:37:21 +0200 | [diff] [blame] | 2461 | struct drm_i915_private *dev_priv = to_i915(unused_crtc->base.dev); |
Ville Syrjälä | efc2611 | 2016-10-31 22:37:04 +0200 | [diff] [blame] | 2462 | struct intel_crtc *crtc; |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2463 | const struct drm_display_mode *adjusted_mode; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2464 | u32 fwater_lo; |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2465 | int planea_wm; |
| 2466 | |
Ville Syrjälä | ffc7a76 | 2016-10-31 22:37:21 +0200 | [diff] [blame] | 2467 | crtc = single_enabled_crtc(dev_priv); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2468 | if (crtc == NULL) |
| 2469 | return; |
| 2470 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2471 | adjusted_mode = &crtc->config->hw.adjusted_mode; |
Damien Lespiau | 241bfc3 | 2013-09-25 16:45:37 +0100 | [diff] [blame] | 2472 | planea_wm = intel_calculate_wm(adjusted_mode->crtc_clock, |
Daniel Vetter | feb56b9 | 2013-12-14 20:38:30 -0200 | [diff] [blame] | 2473 | &i845_wm_info, |
Ville Syrjälä | bdaf843 | 2017-11-17 21:19:11 +0200 | [diff] [blame] | 2474 | dev_priv->display.get_fifo_size(dev_priv, PLANE_A), |
Chris Wilson | 5aef600 | 2014-09-03 11:56:07 +0100 | [diff] [blame] | 2475 | 4, pessimal_latency_ns); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2476 | fwater_lo = I915_READ(FW_BLC) & ~0xfff; |
| 2477 | fwater_lo |= (3<<8) | planea_wm; |
| 2478 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2479 | drm_dbg_kms(&dev_priv->drm, |
| 2480 | "Setting FIFO watermarks - A: %d\n", planea_wm); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 2481 | |
| 2482 | I915_WRITE(FW_BLC, fwater_lo); |
| 2483 | } |
| 2484 | |
Ville Syrjälä | 3712646 | 2013-08-01 16:18:55 +0300 | [diff] [blame] | 2485 | /* latency must be in 0.1us units. */ |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2486 | static unsigned int ilk_wm_method1(unsigned int pixel_rate, |
| 2487 | unsigned int cpp, |
| 2488 | unsigned int latency) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2489 | { |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2490 | unsigned int ret; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2491 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2492 | ret = intel_wm_method1(pixel_rate, cpp, latency); |
| 2493 | ret = DIV_ROUND_UP(ret, 64) + 2; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2494 | |
| 2495 | return ret; |
| 2496 | } |
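/*
 * Worked example (illustrative, assuming intel_wm_method1(), defined
 * earlier in this file, returns the bytes fetched during the latency
 * window, i.e. pixel_rate[kHz] * cpp * latency[0.1us] / 10000):
 *
 *   pixel_rate = 148500, cpp = 4, latency = 120 (12 us)
 *   -> 148500 * 4 * 120 / 10000 = 7128 bytes
 *   -> DIV_ROUND_UP(7128, 64) + 2 = 112 + 2 = 114 FIFO entries
 */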
| 2497 | |
Ville Syrjälä | 3712646 | 2013-08-01 16:18:55 +0300 | [diff] [blame] | 2498 | /* latency must be in 0.1us units. */ |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2499 | static unsigned int ilk_wm_method2(unsigned int pixel_rate, |
| 2500 | unsigned int htotal, |
| 2501 | unsigned int width, |
| 2502 | unsigned int cpp, |
| 2503 | unsigned int latency) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2504 | { |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2505 | unsigned int ret; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2506 | |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2507 | ret = intel_wm_method2(pixel_rate, htotal, |
| 2508 | width, cpp, latency); |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2509 | ret = DIV_ROUND_UP(ret, 64) + 2; |
Ville Syrjälä | baf69ca | 2017-04-21 21:14:27 +0300 | [diff] [blame] | 2510 | |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2511 | return ret; |
| 2512 | } |
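/*
 * Worked example (illustrative, assuming intel_wm_method2(), defined
 * earlier in this file, rounds the latency up to whole scanlines and
 * multiplies by the plane's bytes per line):
 *
 *   pixel_rate = 148500, htotal = 2200, width = 1920, cpp = 4,
 *   latency = 120 (12 us)
 *   -> lines = 120 * 148500 / (2200 * 10000) + 1 = 1
 *   -> bytes = 1 * 1920 * 4 = 7680
 *   -> DIV_ROUND_UP(7680, 64) + 2 = 120 + 2 = 122 FIFO entries
 */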
| 2513 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2514 | static u32 ilk_wm_fbc(u32 pri_val, u32 horiz_pixels, u8 cpp) |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2515 | { |
Matt Roper | 1512688 | 2015-12-03 11:37:40 -0800 | [diff] [blame] | 2516 | /* |
| 2517 | * Neither of these should be possible since this function shouldn't be |
| 2518 | * called if the CRTC is off or the plane is invisible. But let's be |
| 2519 | * extra paranoid to avoid a potential divide-by-zero if we screw up |
| 2520 | * elsewhere in the driver. |
| 2521 | */ |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 2522 | if (WARN_ON(!cpp)) |
Matt Roper | 1512688 | 2015-12-03 11:37:40 -0800 | [diff] [blame] | 2523 | return 0; |
| 2524 | if (WARN_ON(!horiz_pixels)) |
| 2525 | return 0; |
| 2526 | |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 2527 | return DIV_ROUND_UP(pri_val * 64, horiz_pixels * cpp) + 2; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2528 | } |
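/*
 * Worked example (illustrative): with pri_val = 114, horiz_pixels = 1920
 * and cpp = 4, the FBC watermark is
 *
 *   DIV_ROUND_UP(114 * 64, 1920 * 4) + 2 = DIV_ROUND_UP(7296, 7680) + 2 = 3
 *
 * cachelines, which comfortably fits the 15/31 entry register limits
 * reported by ilk_fbc_wm_reg_max() below.
 */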
| 2529 | |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 2530 | struct ilk_wm_maximums { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2531 | u16 pri; |
| 2532 | u16 spr; |
| 2533 | u16 cur; |
| 2534 | u16 fbc; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2535 | }; |
| 2536 | |
Ville Syrjälä | 3712646 | 2013-08-01 16:18:55 +0300 | [diff] [blame] | 2537 | /* |
| 2538 | * For both WM_PIPE and WM_LP. |
| 2539 | * mem_value must be in 0.1us units. |
| 2540 | */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2541 | static u32 ilk_compute_pri_wm(const struct intel_crtc_state *crtc_state, |
| 2542 | const struct intel_plane_state *plane_state, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2543 | u32 mem_value, bool is_lp) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2544 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2545 | u32 method1, method2; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2546 | int cpp; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2547 | |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 2548 | if (mem_value == 0) |
| 2549 | return U32_MAX; |
| 2550 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2551 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2552 | return 0; |
| 2553 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 2554 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2555 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2556 | method1 = ilk_wm_method1(crtc_state->pixel_rate, cpp, mem_value); |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2557 | |
| 2558 | if (!is_lp) |
| 2559 | return method1; |
| 2560 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2561 | method2 = ilk_wm_method2(crtc_state->pixel_rate, |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2562 | crtc_state->hw.adjusted_mode.crtc_htotal, |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 2563 | drm_rect_width(&plane_state->uapi.dst), |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 2564 | cpp, mem_value); |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2565 | |
| 2566 | return min(method1, method2); |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2567 | } |
| 2568 | |
Ville Syrjälä | 3712646 | 2013-08-01 16:18:55 +0300 | [diff] [blame] | 2569 | /* |
| 2570 | * For both WM_PIPE and WM_LP. |
| 2571 | * mem_value must be in 0.1us units. |
| 2572 | */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2573 | static u32 ilk_compute_spr_wm(const struct intel_crtc_state *crtc_state, |
| 2574 | const struct intel_plane_state *plane_state, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2575 | u32 mem_value) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2576 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2577 | u32 method1, method2; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2578 | int cpp; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2579 | |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 2580 | if (mem_value == 0) |
| 2581 | return U32_MAX; |
| 2582 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2583 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2584 | return 0; |
| 2585 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 2586 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2587 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2588 | method1 = ilk_wm_method1(crtc_state->pixel_rate, cpp, mem_value); |
| 2589 | method2 = ilk_wm_method2(crtc_state->pixel_rate, |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2590 | crtc_state->hw.adjusted_mode.crtc_htotal, |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 2591 | drm_rect_width(&plane_state->uapi.dst), |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 2592 | cpp, mem_value); |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2593 | return min(method1, method2); |
| 2594 | } |
| 2595 | |
Ville Syrjälä | 3712646 | 2013-08-01 16:18:55 +0300 | [diff] [blame] | 2596 | /* |
| 2597 | * For both WM_PIPE and WM_LP. |
| 2598 | * mem_value must be in 0.1us units. |
| 2599 | */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2600 | static u32 ilk_compute_cur_wm(const struct intel_crtc_state *crtc_state, |
| 2601 | const struct intel_plane_state *plane_state, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2602 | u32 mem_value) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2603 | { |
Ville Syrjälä | a5509ab | 2017-02-17 17:01:59 +0200 | [diff] [blame] | 2604 | int cpp; |
Matt Roper | 43d59ed | 2015-09-24 15:53:07 -0700 | [diff] [blame] | 2605 | |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 2606 | if (mem_value == 0) |
| 2607 | return U32_MAX; |
| 2608 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2609 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2610 | return 0; |
| 2611 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 2612 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | a5509ab | 2017-02-17 17:01:59 +0200 | [diff] [blame] | 2613 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2614 | return ilk_wm_method2(crtc_state->pixel_rate, |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 2615 | crtc_state->hw.adjusted_mode.crtc_htotal, |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 2616 | drm_rect_width(&plane_state->uapi.dst), |
Maarten Lankhorst | 3a61276 | 2019-10-04 13:34:54 +0200 | [diff] [blame] | 2617 | cpp, mem_value); |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 2618 | } |
| 2619 | |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2620 | /* Only for WM_LP. */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2621 | static u32 ilk_compute_fbc_wm(const struct intel_crtc_state *crtc_state, |
| 2622 | const struct intel_plane_state *plane_state, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2623 | u32 pri_val) |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2624 | { |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2625 | int cpp; |
Matt Roper | 43d59ed | 2015-09-24 15:53:07 -0700 | [diff] [blame] | 2626 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2627 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2628 | return 0; |
| 2629 | |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 2630 | cpp = plane_state->hw.fb->format->cpp[0]; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 2631 | |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 2632 | return ilk_wm_fbc(pri_val, drm_rect_width(&plane_state->uapi.dst), |
| 2633 | cpp); |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 2634 | } |
| 2635 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2636 | static unsigned int |
| 2637 | ilk_display_fifo_size(const struct drm_i915_private *dev_priv) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2638 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2639 | if (INTEL_GEN(dev_priv) >= 8) |
Ville Syrjälä | 416f472 | 2013-11-02 21:07:46 -0700 | [diff] [blame] | 2640 | return 3072; |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2641 | else if (INTEL_GEN(dev_priv) >= 7) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2642 | return 768; |
| 2643 | else |
| 2644 | return 512; |
| 2645 | } |
| 2646 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2647 | static unsigned int |
| 2648 | ilk_plane_wm_reg_max(const struct drm_i915_private *dev_priv, |
| 2649 | int level, bool is_sprite) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2650 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2651 | if (INTEL_GEN(dev_priv) >= 8) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2652 | /* BDW primary/sprite plane watermarks */ |
| 2653 | return level == 0 ? 255 : 2047; |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2654 | else if (INTEL_GEN(dev_priv) >= 7) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2655 | /* IVB/HSW primary/sprite plane watermarks */ |
| 2656 | return level == 0 ? 127 : 1023; |
| 2657 | else if (!is_sprite) |
| 2658 | /* ILK/SNB primary plane watermarks */ |
| 2659 | return level == 0 ? 127 : 511; |
| 2660 | else |
| 2661 | /* ILK/SNB sprite plane watermarks */ |
| 2662 | return level == 0 ? 63 : 255; |
| 2663 | } |
| 2664 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2665 | static unsigned int |
| 2666 | ilk_cursor_wm_reg_max(const struct drm_i915_private *dev_priv, int level) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2667 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2668 | if (INTEL_GEN(dev_priv) >= 7) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2669 | return level == 0 ? 63 : 255; |
| 2670 | else |
| 2671 | return level == 0 ? 31 : 63; |
| 2672 | } |
| 2673 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2674 | static unsigned int ilk_fbc_wm_reg_max(const struct drm_i915_private *dev_priv) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2675 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2676 | if (INTEL_GEN(dev_priv) >= 8) |
Ville Syrjälä | 4e97508 | 2014-03-07 18:32:11 +0200 | [diff] [blame] | 2677 | return 31; |
| 2678 | else |
| 2679 | return 15; |
| 2680 | } |
| 2681 | |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2682 | /* Calculate the maximum primary/sprite plane watermark */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2683 | static unsigned int ilk_plane_wm_max(const struct drm_i915_private *dev_priv, |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2684 | int level, |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2685 | const struct intel_wm_config *config, |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2686 | enum intel_ddb_partitioning ddb_partitioning, |
| 2687 | bool is_sprite) |
| 2688 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2689 | unsigned int fifo_size = ilk_display_fifo_size(dev_priv); |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2690 | |
| 2691 | /* if sprites aren't enabled, sprites get nothing */ |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2692 | if (is_sprite && !config->sprites_enabled) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2693 | return 0; |
| 2694 | |
| 2695 | /* HSW allows LP1+ watermarks even with multiple pipes */ |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2696 | if (level == 0 || config->num_pipes_active > 1) { |
Jani Nikula | 2497787 | 2019-09-11 12:26:08 +0300 | [diff] [blame] | 2697 | fifo_size /= INTEL_NUM_PIPES(dev_priv); |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2698 | |
| 2699 | /* |
| 2700 |  * For some reason the non-self-refresh |
| 2701 |  * FIFO size is only half of the |
| 2702 |  * self-refresh FIFO size on ILK/SNB. |
| 2703 | */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2704 | if (INTEL_GEN(dev_priv) <= 6) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2705 | fifo_size /= 2; |
| 2706 | } |
| 2707 | |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2708 | if (config->sprites_enabled) { |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2709 | /* level 0 is always calculated with 1:1 split */ |
| 2710 | if (level > 0 && ddb_partitioning == INTEL_DDB_PART_5_6) { |
| 2711 | if (is_sprite) |
| 2712 | fifo_size *= 5; |
| 2713 | fifo_size /= 6; |
| 2714 | } else { |
| 2715 | fifo_size /= 2; |
| 2716 | } |
| 2717 | } |
| 2718 | |
| 2719 | /* clamp to max that the registers can hold */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2720 | return min(fifo_size, ilk_plane_wm_reg_max(dev_priv, level, is_sprite)); |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2721 | } |
| 2722 | |
| 2723 | /* Calculate the maximum cursor plane watermark */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2724 | static unsigned int ilk_cursor_wm_max(const struct drm_i915_private *dev_priv, |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2725 | int level, |
| 2726 | const struct intel_wm_config *config) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2727 | { |
| 2728 | /* HSW LP1+ watermarks w/ multiple pipes */ |
Ville Syrjälä | 240264f | 2013-08-07 13:29:12 +0300 | [diff] [blame] | 2729 | if (level > 0 && config->num_pipes_active > 1) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2730 | return 64; |
| 2731 | |
| 2732 | /* otherwise just report max that registers can hold */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2733 | return ilk_cursor_wm_reg_max(dev_priv, level); |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2734 | } |
| 2735 | |
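/*
 * Fill in the config-dependent maximums for all watermark classes at
 * the given level. ilk_compute_wm_reg_maximums() below is the variant
 * that ignores the pipe/sprite configuration and returns the pure
 * register limits, used when validating the LP1+ levels of a single
 * pipe.
 */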
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2736 | static void ilk_compute_wm_maximums(const struct drm_i915_private *dev_priv, |
Ville Syrjälä | 34982fe | 2013-10-09 19:18:09 +0300 | [diff] [blame] | 2737 | int level, |
| 2738 | const struct intel_wm_config *config, |
| 2739 | enum intel_ddb_partitioning ddb_partitioning, |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 2740 | struct ilk_wm_maximums *max) |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2741 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 2742 | max->pri = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, false); |
| 2743 | max->spr = ilk_plane_wm_max(dev_priv, level, config, ddb_partitioning, true); |
| 2744 | max->cur = ilk_cursor_wm_max(dev_priv, level, config); |
| 2745 | max->fbc = ilk_fbc_wm_reg_max(dev_priv); |
Ville Syrjälä | 158ae64 | 2013-08-07 13:28:19 +0300 | [diff] [blame] | 2746 | } |
| 2747 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2748 | static void ilk_compute_wm_reg_maximums(const struct drm_i915_private *dev_priv, |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 2749 | int level, |
| 2750 | struct ilk_wm_maximums *max) |
| 2751 | { |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 2752 | max->pri = ilk_plane_wm_reg_max(dev_priv, level, false); |
| 2753 | max->spr = ilk_plane_wm_reg_max(dev_priv, level, true); |
| 2754 | max->cur = ilk_cursor_wm_reg_max(dev_priv, level); |
| 2755 | max->fbc = ilk_fbc_wm_reg_max(dev_priv); |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 2756 | } |
| 2757 | |
Ville Syrjälä | d939565 | 2013-10-09 19:18:10 +0300 | [diff] [blame] | 2758 | static bool ilk_validate_wm_level(int level, |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 2759 | const struct ilk_wm_maximums *max, |
Ville Syrjälä | d939565 | 2013-10-09 19:18:10 +0300 | [diff] [blame] | 2760 | struct intel_wm_level *result) |
Ville Syrjälä | a9786a1 | 2013-08-07 13:24:47 +0300 | [diff] [blame] | 2761 | { |
| 2762 | bool ret; |
| 2763 | |
| 2764 | /* already determined to be invalid? */ |
| 2765 | if (!result->enable) |
| 2766 | return false; |
| 2767 | |
| 2768 | result->enable = result->pri_val <= max->pri && |
| 2769 | result->spr_val <= max->spr && |
| 2770 | result->cur_val <= max->cur; |
| 2771 | |
| 2772 | ret = result->enable; |
| 2773 | |
| 2774 | /* |
| 2775 | * HACK until we can pre-compute everything, |
| 2776 | * and thus fail gracefully if LP0 watermarks |
| 2777 | * are exceeded... |
| 2778 | */ |
| 2779 | if (level == 0 && !result->enable) { |
| 2780 | if (result->pri_val > max->pri) |
| 2781 | DRM_DEBUG_KMS("Primary WM%d too large %u (max %u)\n", |
| 2782 | level, result->pri_val, max->pri); |
| 2783 | if (result->spr_val > max->spr) |
| 2784 | DRM_DEBUG_KMS("Sprite WM%d too large %u (max %u)\n", |
| 2785 | level, result->spr_val, max->spr); |
| 2786 | if (result->cur_val > max->cur) |
| 2787 | DRM_DEBUG_KMS("Cursor WM%d too large %u (max %u)\n", |
| 2788 | level, result->cur_val, max->cur); |
| 2789 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2790 | result->pri_val = min_t(u32, result->pri_val, max->pri); |
| 2791 | result->spr_val = min_t(u32, result->spr_val, max->spr); |
| 2792 | result->cur_val = min_t(u32, result->cur_val, max->cur); |
Ville Syrjälä | a9786a1 | 2013-08-07 13:24:47 +0300 | [diff] [blame] | 2793 | result->enable = true; |
| 2794 | } |
| 2795 | |
Ville Syrjälä | a9786a1 | 2013-08-07 13:24:47 +0300 | [diff] [blame] | 2796 | return ret; |
| 2797 | } |
| 2798 | |
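/*
 * Compute the raw watermark values of one level for a pipe from its
 * plane states. Latencies are handled in 0.1us units here: the WM0
 * entries already use that unit, while the WM1+ entries are stored in
 * 0.5us units and are therefore multiplied by 5 first.
 */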
Damien Lespiau | d34ff9c | 2014-01-06 19:17:23 +0000 | [diff] [blame] | 2799 | static void ilk_compute_wm_level(const struct drm_i915_private *dev_priv, |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 2800 | const struct intel_crtc *crtc, |
Ville Syrjälä | 6f5ddd1 | 2013-08-06 22:24:02 +0300 | [diff] [blame] | 2801 | int level, |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2802 | struct intel_crtc_state *crtc_state, |
Maarten Lankhorst | 28283f4 | 2017-10-19 17:13:40 +0200 | [diff] [blame] | 2803 | const struct intel_plane_state *pristate, |
| 2804 | const struct intel_plane_state *sprstate, |
| 2805 | const struct intel_plane_state *curstate, |
Ville Syrjälä | 1fd527c | 2013-08-06 22:24:05 +0300 | [diff] [blame] | 2806 | struct intel_wm_level *result) |
Ville Syrjälä | 6f5ddd1 | 2013-08-06 22:24:02 +0300 | [diff] [blame] | 2807 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2808 | u16 pri_latency = dev_priv->wm.pri_latency[level]; |
| 2809 | u16 spr_latency = dev_priv->wm.spr_latency[level]; |
| 2810 | u16 cur_latency = dev_priv->wm.cur_latency[level]; |
Ville Syrjälä | 6f5ddd1 | 2013-08-06 22:24:02 +0300 | [diff] [blame] | 2811 | |
| 2812 | /* WM1+ latency values stored in 0.5us units */ |
| 2813 | if (level > 0) { |
| 2814 | pri_latency *= 5; |
| 2815 | spr_latency *= 5; |
| 2816 | cur_latency *= 5; |
| 2817 | } |
| 2818 | |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 2819 | if (pristate) { |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2820 | result->pri_val = ilk_compute_pri_wm(crtc_state, pristate, |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 2821 | pri_latency, level); |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2822 | result->fbc_val = ilk_compute_fbc_wm(crtc_state, pristate, result->pri_val); |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 2823 | } |
| 2824 | |
| 2825 | if (sprstate) |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2826 | result->spr_val = ilk_compute_spr_wm(crtc_state, sprstate, spr_latency); |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 2827 | |
| 2828 | if (curstate) |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 2829 | result->cur_val = ilk_compute_cur_wm(crtc_state, curstate, cur_latency); |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 2830 | |
Ville Syrjälä | 6f5ddd1 | 2013-08-06 22:24:02 +0300 | [diff] [blame] | 2831 | result->enable = true; |
| 2832 | } |
| 2833 | |
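/*
 * Read the raw memory latency values that the watermark code works
 * from: via the GEN9_PCODE_READ_MEM_LATENCY mailbox (two reads of four
 * levels each) on gen9+, from the 64-bit MCH_SSKPD on HSW/BDW, from
 * the 32-bit MCH_SSKPD fields on SNB/IVB, and from MLTR_ILK plus a
 * fixed 700ns LP0 value on ILK.
 */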
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 2834 | static void intel_read_wm_latency(struct drm_i915_private *dev_priv, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2835 | u16 wm[8]) |
Ville Syrjälä | 12b134d | 2013-07-05 11:57:21 +0300 | [diff] [blame] | 2836 | { |
Tvrtko Ursulin | 1cea02d | 2019-06-10 13:06:07 +0100 | [diff] [blame] | 2837 | struct intel_uncore *uncore = &dev_priv->uncore; |
| 2838 | |
Paulo Zanoni | 50682ee | 2017-08-09 13:52:43 -0700 | [diff] [blame] | 2839 | if (INTEL_GEN(dev_priv) >= 9) { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2840 | u32 val; |
Vandana Kannan | 4f94738 | 2014-11-04 17:06:47 +0000 | [diff] [blame] | 2841 | int ret, i; |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 2842 | int level, max_level = ilk_wm_max_level(dev_priv); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2843 | |
| 2844 | /* read the first set of memory latencies[0:3] */ |
| 2845 | val = 0; /* data0 to be programmed to 0 for first set */ |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2846 | ret = sandybridge_pcode_read(dev_priv, |
| 2847 | GEN9_PCODE_READ_MEM_LATENCY, |
Ville Syrjälä | d284d51 | 2019-05-21 19:40:24 +0300 | [diff] [blame] | 2848 | &val, NULL); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2849 | |
| 2850 | if (ret) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2851 | drm_err(&dev_priv->drm, |
| 2852 | "SKL Mailbox read error = %d\n", ret); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2853 | return; |
| 2854 | } |
| 2855 | |
| 2856 | wm[0] = val & GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2857 | wm[1] = (val >> GEN9_MEM_LATENCY_LEVEL_1_5_SHIFT) & |
| 2858 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2859 | wm[2] = (val >> GEN9_MEM_LATENCY_LEVEL_2_6_SHIFT) & |
| 2860 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2861 | wm[3] = (val >> GEN9_MEM_LATENCY_LEVEL_3_7_SHIFT) & |
| 2862 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2863 | |
| 2864 | /* read the second set of memory latencies[4:7] */ |
| 2865 | val = 1; /* data0 to be programmed to 1 for second set */ |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2866 | ret = sandybridge_pcode_read(dev_priv, |
| 2867 | GEN9_PCODE_READ_MEM_LATENCY, |
Ville Syrjälä | d284d51 | 2019-05-21 19:40:24 +0300 | [diff] [blame] | 2868 | &val, NULL); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2869 | if (ret) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2870 | drm_err(&dev_priv->drm, |
| 2871 | "SKL Mailbox read error = %d\n", ret); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2872 | return; |
| 2873 | } |
| 2874 | |
| 2875 | wm[4] = val & GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2876 | wm[5] = (val >> GEN9_MEM_LATENCY_LEVEL_1_5_SHIFT) & |
| 2877 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2878 | wm[6] = (val >> GEN9_MEM_LATENCY_LEVEL_2_6_SHIFT) & |
| 2879 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2880 | wm[7] = (val >> GEN9_MEM_LATENCY_LEVEL_3_7_SHIFT) & |
| 2881 | GEN9_MEM_LATENCY_LEVEL_MASK; |
| 2882 | |
Vandana Kannan | 367294b | 2014-11-04 17:06:46 +0000 | [diff] [blame] | 2883 | /* |
Paulo Zanoni | 0727e40 | 2016-09-22 18:00:30 -0300 | [diff] [blame] | 2884 | * If a level n (n >= 1) has a 0us latency, all levels m (m >= n) |
| 2885 | * need to be disabled. We make sure to sanitize the values out |
| 2886 | * of the punit to satisfy this requirement. |
| 2887 | */ |
| 2888 | for (level = 1; level <= max_level; level++) { |
| 2889 | if (wm[level] == 0) { |
| 2890 | for (i = level + 1; i <= max_level; i++) |
| 2891 | wm[i] = 0; |
| 2892 | break; |
| 2893 | } |
| 2894 | } |
| 2895 | |
| 2896 | /* |
Paulo Zanoni | 50682ee | 2017-08-09 13:52:43 -0700 | [diff] [blame] | 2897 | * WaWmMemoryReadLatency:skl+,glk |
Damien Lespiau | 6f97235 | 2015-02-09 19:33:07 +0000 | [diff] [blame] | 2898 | * |
Vandana Kannan | 367294b | 2014-11-04 17:06:46 +0000 | [diff] [blame] | 2899 | * punit doesn't take into account the read latency so we need |
Paulo Zanoni | 0727e40 | 2016-09-22 18:00:30 -0300 | [diff] [blame] | 2900 | * to add 2us to the various latency levels we retrieve from the |
| 2901 |  * punit when level 0 response data is 0us. |
Vandana Kannan | 367294b | 2014-11-04 17:06:46 +0000 | [diff] [blame] | 2902 | */ |
Paulo Zanoni | 0727e40 | 2016-09-22 18:00:30 -0300 | [diff] [blame] | 2903 | if (wm[0] == 0) { |
| 2904 | wm[0] += 2; |
| 2905 | for (level = 1; level <= max_level; level++) { |
| 2906 | if (wm[level] == 0) |
| 2907 | break; |
Vandana Kannan | 367294b | 2014-11-04 17:06:46 +0000 | [diff] [blame] | 2908 | wm[level] += 2; |
Vandana Kannan | 4f94738 | 2014-11-04 17:06:47 +0000 | [diff] [blame] | 2909 | } |
Paulo Zanoni | 0727e40 | 2016-09-22 18:00:30 -0300 | [diff] [blame] | 2910 | } |
| 2911 | |
Mahesh Kumar | 86b5928 | 2018-08-31 16:39:42 +0530 | [diff] [blame] | 2912 | /* |
| 2913 | * WA Level-0 adjustment for 16GB DIMMs: SKL+ |
| 2914 |  * If we could not read the DIMM info, a 16GB DIMM is |
| 2915 |  * assumed, so this WA still applies and any potential |
| 2916 |  * underrun is avoided. |
| 2917 | */ |
Ville Syrjälä | 5d6f36b | 2018-10-23 21:21:02 +0300 | [diff] [blame] | 2918 | if (dev_priv->dram_info.is_16gb_dimm) |
Mahesh Kumar | 86b5928 | 2018-08-31 16:39:42 +0530 | [diff] [blame] | 2919 | wm[0] += 1; |
| 2920 | |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 2921 | } else if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) { |
Tvrtko Ursulin | 1cea02d | 2019-06-10 13:06:07 +0100 | [diff] [blame] | 2922 | u64 sskpd = intel_uncore_read64(uncore, MCH_SSKPD); |
Ville Syrjälä | 12b134d | 2013-07-05 11:57:21 +0300 | [diff] [blame] | 2923 | |
| 2924 | wm[0] = (sskpd >> 56) & 0xFF; |
| 2925 | if (wm[0] == 0) |
| 2926 | wm[0] = sskpd & 0xF; |
Ville Syrjälä | e5d5019 | 2013-07-05 11:57:22 +0300 | [diff] [blame] | 2927 | wm[1] = (sskpd >> 4) & 0xFF; |
| 2928 | wm[2] = (sskpd >> 12) & 0xFF; |
| 2929 | wm[3] = (sskpd >> 20) & 0x1FF; |
| 2930 | wm[4] = (sskpd >> 32) & 0x1FF; |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 2931 | } else if (INTEL_GEN(dev_priv) >= 6) { |
Tvrtko Ursulin | 1cea02d | 2019-06-10 13:06:07 +0100 | [diff] [blame] | 2932 | u32 sskpd = intel_uncore_read(uncore, MCH_SSKPD); |
Ville Syrjälä | 63cf9a1 | 2013-07-05 11:57:23 +0300 | [diff] [blame] | 2933 | |
| 2934 | wm[0] = (sskpd >> SSKPD_WM0_SHIFT) & SSKPD_WM_MASK; |
| 2935 | wm[1] = (sskpd >> SSKPD_WM1_SHIFT) & SSKPD_WM_MASK; |
| 2936 | wm[2] = (sskpd >> SSKPD_WM2_SHIFT) & SSKPD_WM_MASK; |
| 2937 | wm[3] = (sskpd >> SSKPD_WM3_SHIFT) & SSKPD_WM_MASK; |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 2938 | } else if (INTEL_GEN(dev_priv) >= 5) { |
Tvrtko Ursulin | 1cea02d | 2019-06-10 13:06:07 +0100 | [diff] [blame] | 2939 | u32 mltr = intel_uncore_read(uncore, MLTR_ILK); |
Ville Syrjälä | 3a88d0a | 2013-08-01 16:18:49 +0300 | [diff] [blame] | 2940 | |
| 2941 | /* ILK primary LP0 latency is 700 ns */ |
| 2942 | wm[0] = 7; |
| 2943 | wm[1] = (mltr >> MLTR_WM1_SHIFT) & ILK_SRLT_MASK; |
| 2944 | wm[2] = (mltr >> MLTR_WM2_SHIFT) & ILK_SRLT_MASK; |
Paulo Zanoni | 50682ee | 2017-08-09 13:52:43 -0700 | [diff] [blame] | 2945 | } else { |
| 2946 | MISSING_CASE(INTEL_DEVID(dev_priv)); |
Ville Syrjälä | 12b134d | 2013-07-05 11:57:21 +0300 | [diff] [blame] | 2947 | } |
| 2948 | } |
| 2949 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 2950 | static void intel_fixup_spr_wm_latency(struct drm_i915_private *dev_priv, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2951 | u16 wm[5]) |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 2952 | { |
| 2953 | /* ILK sprite LP0 latency is 1300 ns */ |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2954 | if (IS_GEN(dev_priv, 5)) |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 2955 | wm[0] = 13; |
| 2956 | } |
| 2957 | |
Tvrtko Ursulin | fd6b8f4 | 2016-10-14 10:13:06 +0100 | [diff] [blame] | 2958 | static void intel_fixup_cur_wm_latency(struct drm_i915_private *dev_priv, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2959 | u16 wm[5]) |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 2960 | { |
| 2961 | /* ILK cursor LP0 latency is 1300 ns */ |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 2962 | if (IS_GEN(dev_priv, 5)) |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 2963 | wm[0] = 13; |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 2964 | } |
| 2965 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 2966 | int ilk_wm_max_level(const struct drm_i915_private *dev_priv) |
Ville Syrjälä | ad0d6dc | 2013-08-30 14:30:25 +0300 | [diff] [blame] | 2967 | { |
| 2968 | /* how many WM levels are we expecting */ |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 2969 | if (INTEL_GEN(dev_priv) >= 9) |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2970 | return 7; |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 2971 | else if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) |
Ville Syrjälä | ad0d6dc | 2013-08-30 14:30:25 +0300 | [diff] [blame] | 2972 | return 4; |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 2973 | else if (INTEL_GEN(dev_priv) >= 6) |
Ville Syrjälä | ad0d6dc | 2013-08-30 14:30:25 +0300 | [diff] [blame] | 2974 | return 3; |
| 2975 | else |
| 2976 | return 2; |
| 2977 | } |
Daniel Vetter | 7526ed7 | 2014-09-29 15:07:19 +0200 | [diff] [blame] | 2978 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 2979 | static void intel_print_wm_latency(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 2980 | const char *name, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 2981 | const u16 wm[8]) |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 2982 | { |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 2983 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 2984 | |
| 2985 | for (level = 0; level <= max_level; level++) { |
| 2986 | unsigned int latency = wm[level]; |
| 2987 | |
| 2988 | if (latency == 0) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 2989 | drm_dbg_kms(&dev_priv->drm, |
| 2990 | "%s WM%d latency not provided\n", |
| 2991 | name, level); |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 2992 | continue; |
| 2993 | } |
| 2994 | |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 2995 | /* |
| 2996 |  * - latencies are in us on gen9+. |
| 2997 | * - before then, WM1+ latency values are in 0.5us units |
| 2998 | */ |
Paulo Zanoni | dfc267a | 2017-08-09 13:52:46 -0700 | [diff] [blame] | 2999 | if (INTEL_GEN(dev_priv) >= 9) |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 3000 | latency *= 10; |
| 3001 | else if (level > 0) |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 3002 | latency *= 5; |
| 3003 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3004 | drm_dbg_kms(&dev_priv->drm, |
| 3005 | "%s WM%d latency %u (%u.%u usec)\n", name, level, |
| 3006 | wm[level], latency / 10, latency % 10); |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 3007 | } |
| 3008 | } |
| 3009 | |
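/*
 * Raise every latency value to at least 'min' (in 0.1us units). The
 * WM1+ entries are stored in 0.5us units, hence the
 * DIV_ROUND_UP(min, 5) below. Returns true if anything had to be
 * changed.
 */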
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3010 | static bool ilk_increase_wm_latency(struct drm_i915_private *dev_priv, |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 3011 | u16 wm[5], u16 min) |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3012 | { |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3013 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3014 | |
| 3015 | if (wm[0] >= min) |
| 3016 | return false; |
| 3017 | |
| 3018 | wm[0] = max(wm[0], min); |
| 3019 | for (level = 1; level <= max_level; level++) |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 3020 | wm[level] = max_t(u16, wm[level], DIV_ROUND_UP(min, 5)); |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3021 | |
| 3022 | return true; |
| 3023 | } |
| 3024 | |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3025 | static void snb_wm_latency_quirk(struct drm_i915_private *dev_priv) |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3026 | { |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3027 | bool changed; |
| 3028 | |
| 3029 | /* |
| 3030 | * The BIOS provided WM memory latency values are often |
| 3031 | * inadequate for high resolution displays. Adjust them. |
| 3032 | */ |
| 3033 | changed = ilk_increase_wm_latency(dev_priv, dev_priv->wm.pri_latency, 12) | |
| 3034 | ilk_increase_wm_latency(dev_priv, dev_priv->wm.spr_latency, 12) | |
| 3035 | ilk_increase_wm_latency(dev_priv, dev_priv->wm.cur_latency, 12); |
| 3036 | |
| 3037 | if (!changed) |
| 3038 | return; |
| 3039 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3040 | drm_dbg_kms(&dev_priv->drm, |
| 3041 | "WM latency values increased to avoid potential underruns\n"); |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3042 | intel_print_wm_latency(dev_priv, "Primary", dev_priv->wm.pri_latency); |
| 3043 | intel_print_wm_latency(dev_priv, "Sprite", dev_priv->wm.spr_latency); |
| 3044 | intel_print_wm_latency(dev_priv, "Cursor", dev_priv->wm.cur_latency); |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3045 | } |
| 3046 | |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 3047 | static void snb_wm_lp3_irq_quirk(struct drm_i915_private *dev_priv) |
| 3048 | { |
| 3049 | /* |
| 3050 | * On some SNB machines (Thinkpad X220 Tablet at least) |
| 3051 | * LP3 usage can cause vblank interrupts to be lost. |
| 3052 | * The DEIIR bit will go high but it looks like the CPU |
| 3053 | * never gets interrupted. |
| 3054 | * |
| 3055 |  * It's not clear whether other interrupt sources could |
| 3056 | * be affected or if this is somehow limited to vblank |
| 3057 | * interrupts only. To play it safe we disable LP3 |
| 3058 | * watermarks entirely. |
| 3059 | */ |
| 3060 | if (dev_priv->wm.pri_latency[3] == 0 && |
| 3061 | dev_priv->wm.spr_latency[3] == 0 && |
| 3062 | dev_priv->wm.cur_latency[3] == 0) |
| 3063 | return; |
| 3064 | |
| 3065 | dev_priv->wm.pri_latency[3] = 0; |
| 3066 | dev_priv->wm.spr_latency[3] = 0; |
| 3067 | dev_priv->wm.cur_latency[3] = 0; |
| 3068 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3069 | drm_dbg_kms(&dev_priv->drm, |
| 3070 | "LP3 watermarks disabled due to potential for lost interrupts\n"); |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 3071 | intel_print_wm_latency(dev_priv, "Primary", dev_priv->wm.pri_latency); |
| 3072 | intel_print_wm_latency(dev_priv, "Sprite", dev_priv->wm.spr_latency); |
| 3073 | intel_print_wm_latency(dev_priv, "Cursor", dev_priv->wm.cur_latency); |
| 3074 | } |
| 3075 | |
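/*
 * One-time setup of the ILK-style latency tables: the primary plane
 * latencies are read from the hardware, copied to the sprite and
 * cursor tables, fixed up for the ILK LP0 special cases and, on SNB,
 * run through the two quirks above.
 */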
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3076 | static void ilk_setup_wm_latency(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 3077 | { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3078 | intel_read_wm_latency(dev_priv, dev_priv->wm.pri_latency); |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 3079 | |
| 3080 | memcpy(dev_priv->wm.spr_latency, dev_priv->wm.pri_latency, |
| 3081 | sizeof(dev_priv->wm.pri_latency)); |
| 3082 | memcpy(dev_priv->wm.cur_latency, dev_priv->wm.pri_latency, |
| 3083 | sizeof(dev_priv->wm.pri_latency)); |
| 3084 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3085 | intel_fixup_spr_wm_latency(dev_priv, dev_priv->wm.spr_latency); |
Tvrtko Ursulin | fd6b8f4 | 2016-10-14 10:13:06 +0100 | [diff] [blame] | 3086 | intel_fixup_cur_wm_latency(dev_priv, dev_priv->wm.cur_latency); |
Ville Syrjälä | 26ec971 | 2013-08-01 16:18:52 +0300 | [diff] [blame] | 3087 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3088 | intel_print_wm_latency(dev_priv, "Primary", dev_priv->wm.pri_latency); |
| 3089 | intel_print_wm_latency(dev_priv, "Sprite", dev_priv->wm.spr_latency); |
| 3090 | intel_print_wm_latency(dev_priv, "Cursor", dev_priv->wm.cur_latency); |
Ville Syrjälä | e95a2f7 | 2014-05-08 15:09:19 +0300 | [diff] [blame] | 3091 | |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 3092 | if (IS_GEN(dev_priv, 6)) { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3093 | snb_wm_latency_quirk(dev_priv); |
Ville Syrjälä | 03981c6 | 2018-11-14 19:34:40 +0200 | [diff] [blame] | 3094 | snb_wm_lp3_irq_quirk(dev_priv); |
| 3095 | } |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 3096 | } |
| 3097 | |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3098 | static void skl_setup_wm_latency(struct drm_i915_private *dev_priv) |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 3099 | { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 3100 | intel_read_wm_latency(dev_priv, dev_priv->wm.skl_latency); |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3101 | intel_print_wm_latency(dev_priv, "Gen9 Plane", dev_priv->wm.skl_latency); |
Pradeep Bhat | 2af30a5 | 2014-11-04 17:06:38 +0000 | [diff] [blame] | 3102 | } |
| 3103 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3104 | static bool ilk_validate_pipe_wm(const struct drm_i915_private *dev_priv, |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3105 | struct intel_pipe_wm *pipe_wm) |
| 3106 | { |
| 3107 | /* LP0 watermark maximums depend on this pipe alone */ |
| 3108 | const struct intel_wm_config config = { |
| 3109 | .num_pipes_active = 1, |
| 3110 | .sprites_enabled = pipe_wm->sprites_enabled, |
| 3111 | .sprites_scaled = pipe_wm->sprites_scaled, |
| 3112 | }; |
| 3113 | struct ilk_wm_maximums max; |
| 3114 | |
| 3115 | /* LP0 watermarks always use 1/2 DDB partitioning */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3116 | ilk_compute_wm_maximums(dev_priv, 0, &config, INTEL_DDB_PART_1_2, &max); |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3117 | |
| 3118 | /* At least LP0 must be valid */ |
| 3119 | if (!ilk_validate_wm_level(0, &max, &pipe_wm->wm[0])) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3120 | drm_dbg_kms(&dev_priv->drm, "LP0 watermark invalid\n"); |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3121 | return false; |
| 3122 | } |
| 3123 | |
| 3124 | return true; |
| 3125 | } |
| 3126 | |
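/*
 * ILK-style watermarks are computed in stages: ilk_compute_pipe_wm()
 * below works out the optimal per-pipe values,
 * ilk_compute_intermediate_wm() derives values that are safe both
 * before and after the vblank, and the per-pipe results are later
 * merged and turned into register values by ilk_wm_merge(),
 * ilk_compute_wm_results() and ilk_write_wm_values().
 */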
Matt Roper | 261a27d | 2015-10-08 15:28:25 -0700 | [diff] [blame] | 3127 | /* Compute new watermarks for the pipe */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 3128 | static int ilk_compute_pipe_wm(struct intel_crtc_state *crtc_state) |
Matt Roper | 261a27d | 2015-10-08 15:28:25 -0700 | [diff] [blame] | 3129 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 3130 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 3131 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Matt Roper | 86c8bbb | 2015-09-24 15:53:16 -0700 | [diff] [blame] | 3132 | struct intel_pipe_wm *pipe_wm; |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 3133 | struct intel_plane *plane; |
| 3134 | const struct intel_plane_state *plane_state; |
Maarten Lankhorst | 28283f4 | 2017-10-19 17:13:40 +0200 | [diff] [blame] | 3135 | const struct intel_plane_state *pristate = NULL; |
| 3136 | const struct intel_plane_state *sprstate = NULL; |
| 3137 | const struct intel_plane_state *curstate = NULL; |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3138 | int level, max_level = ilk_wm_max_level(dev_priv), usable_level; |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3139 | struct ilk_wm_maximums max; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3140 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 3141 | pipe_wm = &crtc_state->wm.ilk.optimal; |
Matt Roper | 86c8bbb | 2015-09-24 15:53:16 -0700 | [diff] [blame] | 3142 | |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 3143 | intel_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state) { |
| 3144 | if (plane->base.type == DRM_PLANE_TYPE_PRIMARY) |
| 3145 | pristate = plane_state; |
| 3146 | else if (plane->base.type == DRM_PLANE_TYPE_OVERLAY) |
| 3147 | sprstate = plane_state; |
| 3148 | else if (plane->base.type == DRM_PLANE_TYPE_CURSOR) |
| 3149 | curstate = plane_state; |
Matt Roper | 43d59ed | 2015-09-24 15:53:07 -0700 | [diff] [blame] | 3150 | } |
| 3151 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 3152 | pipe_wm->pipe_enabled = crtc_state->hw.active; |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 3153 | if (sprstate) { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 3154 | pipe_wm->sprites_enabled = sprstate->uapi.visible; |
| 3155 | pipe_wm->sprites_scaled = sprstate->uapi.visible && |
| 3156 | (drm_rect_width(&sprstate->uapi.dst) != drm_rect_width(&sprstate->uapi.src) >> 16 || |
| 3157 | drm_rect_height(&sprstate->uapi.dst) != drm_rect_height(&sprstate->uapi.src) >> 16); |
Maarten Lankhorst | e3bddde | 2016-03-01 11:07:22 +0100 | [diff] [blame] | 3158 | } |
| 3159 | |
Maarten Lankhorst | d81f04c | 2016-03-02 12:38:06 +0100 | [diff] [blame] | 3160 | usable_level = max_level; |
| 3161 | |
Ville Syrjälä | 7b39a0b | 2013-12-05 15:51:30 +0200 | [diff] [blame] | 3162 | /* ILK/SNB: LP2+ watermarks only w/o sprites */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3163 | if (INTEL_GEN(dev_priv) <= 6 && pipe_wm->sprites_enabled) |
Maarten Lankhorst | d81f04c | 2016-03-02 12:38:06 +0100 | [diff] [blame] | 3164 | usable_level = 1; |
Ville Syrjälä | 7b39a0b | 2013-12-05 15:51:30 +0200 | [diff] [blame] | 3165 | |
| 3166 | /* ILK/SNB/IVB: LP1+ watermarks only w/o scaling */ |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3167 | if (pipe_wm->sprites_scaled) |
Maarten Lankhorst | d81f04c | 2016-03-02 12:38:06 +0100 | [diff] [blame] | 3168 | usable_level = 0; |
Ville Syrjälä | 7b39a0b | 2013-12-05 15:51:30 +0200 | [diff] [blame] | 3169 | |
Maarten Lankhorst | 71f0a62 | 2016-03-08 10:57:16 +0100 | [diff] [blame] | 3170 | memset(&pipe_wm->wm, 0, sizeof(pipe_wm->wm)); |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 3171 | ilk_compute_wm_level(dev_priv, crtc, 0, crtc_state, |
Maarten Lankhorst | 28283f4 | 2017-10-19 17:13:40 +0200 | [diff] [blame] | 3172 | pristate, sprstate, curstate, &pipe_wm->wm[0]); |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3173 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3174 | if (!ilk_validate_pipe_wm(dev_priv, pipe_wm)) |
Maarten Lankhorst | 1a426d6 | 2016-03-02 12:36:03 +0100 | [diff] [blame] | 3175 | return -EINVAL; |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 3176 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3177 | ilk_compute_wm_reg_maximums(dev_priv, 1, &max); |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 3178 | |
Maarten Lankhorst | 28283f4 | 2017-10-19 17:13:40 +0200 | [diff] [blame] | 3179 | for (level = 1; level <= usable_level; level++) { |
| 3180 | struct intel_wm_level *wm = &pipe_wm->wm[level]; |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 3181 | |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 3182 | ilk_compute_wm_level(dev_priv, crtc, level, crtc_state, |
Maarten Lankhorst | d81f04c | 2016-03-02 12:38:06 +0100 | [diff] [blame] | 3183 | pristate, sprstate, curstate, wm); |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 3184 | |
| 3185 | /* |
| 3186 | * Disable any watermark level that exceeds the |
| 3187 | * register maximums since such watermarks are |
| 3188 | * always invalid. |
| 3189 | */ |
Maarten Lankhorst | 28283f4 | 2017-10-19 17:13:40 +0200 | [diff] [blame] | 3190 | if (!ilk_validate_wm_level(level, &max, wm)) { |
| 3191 | memset(wm, 0, sizeof(*wm)); |
| 3192 | break; |
| 3193 | } |
Ville Syrjälä | a3cb404 | 2014-04-28 15:44:56 +0300 | [diff] [blame] | 3194 | } |
| 3195 | |
Matt Roper | 86c8bbb | 2015-09-24 15:53:16 -0700 | [diff] [blame] | 3196 | return 0; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3197 | } |
| 3198 | |
| 3199 | /* |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3200 | * Build a set of 'intermediate' watermark values that satisfy both the old |
| 3201 | * state and the new state. These can be programmed to the hardware |
| 3202 | * immediately. |
| 3203 | */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3204 | static int ilk_compute_intermediate_wm(struct intel_crtc_state *newstate) |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3205 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 3206 | struct intel_crtc *intel_crtc = to_intel_crtc(newstate->uapi.crtc); |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3207 | struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev); |
Matt Roper | e8f1f02 | 2016-05-12 07:05:55 -0700 | [diff] [blame] | 3208 | struct intel_pipe_wm *a = &newstate->wm.ilk.intermediate; |
Maarten Lankhorst | b6b178a | 2017-10-19 17:13:41 +0200 | [diff] [blame] | 3209 | struct intel_atomic_state *intel_state = |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 3210 | to_intel_atomic_state(newstate->uapi.state); |
Maarten Lankhorst | b6b178a | 2017-10-19 17:13:41 +0200 | [diff] [blame] | 3211 | const struct intel_crtc_state *oldstate = |
| 3212 | intel_atomic_get_old_crtc_state(intel_state, intel_crtc); |
| 3213 | const struct intel_pipe_wm *b = &oldstate->wm.ilk.optimal; |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3214 | int level, max_level = ilk_wm_max_level(dev_priv); |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3215 | |
| 3216 | /* |
| 3217 | * Start with the final, target watermarks, then combine with the |
| 3218 | * currently active watermarks to get values that are safe both before |
| 3219 | * and after the vblank. |
| 3220 | */ |
Matt Roper | e8f1f02 | 2016-05-12 07:05:55 -0700 | [diff] [blame] | 3221 | *a = newstate->wm.ilk.optimal; |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 3222 | if (!newstate->hw.active || drm_atomic_crtc_needs_modeset(&newstate->uapi) || |
Ville Syrjälä | f255c62 | 2018-11-08 17:10:13 +0200 | [diff] [blame] | 3223 | intel_state->skip_intermediate_wm) |
Maarten Lankhorst | b6b178a | 2017-10-19 17:13:41 +0200 | [diff] [blame] | 3224 | return 0; |
| 3225 | |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3226 | a->pipe_enabled |= b->pipe_enabled; |
| 3227 | a->sprites_enabled |= b->sprites_enabled; |
| 3228 | a->sprites_scaled |= b->sprites_scaled; |
| 3229 | |
| 3230 | for (level = 0; level <= max_level; level++) { |
| 3231 | struct intel_wm_level *a_wm = &a->wm[level]; |
| 3232 | const struct intel_wm_level *b_wm = &b->wm[level]; |
| 3233 | |
| 3234 | a_wm->enable &= b_wm->enable; |
| 3235 | a_wm->pri_val = max(a_wm->pri_val, b_wm->pri_val); |
| 3236 | a_wm->spr_val = max(a_wm->spr_val, b_wm->spr_val); |
| 3237 | a_wm->cur_val = max(a_wm->cur_val, b_wm->cur_val); |
| 3238 | a_wm->fbc_val = max(a_wm->fbc_val, b_wm->fbc_val); |
| 3239 | } |
| 3240 | |
| 3241 | /* |
| 3242 | * We need to make sure that these merged watermark values are |
| 3243 | * actually a valid configuration themselves. If they're not, |
| 3244 | * there's no safe way to transition from the old state to |
| 3245 | * the new state, so we need to fail the atomic transaction. |
| 3246 | */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3247 | if (!ilk_validate_pipe_wm(dev_priv, a)) |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3248 | return -EINVAL; |
| 3249 | |
| 3250 | /* |
| 3251 | * If our intermediate WM are identical to the final WM, then we can |
| 3252 | * omit the post-vblank programming; only update if it's different. |
| 3253 | */ |
Ville Syrjälä | 5eeb798 | 2017-03-02 19:15:00 +0200 | [diff] [blame] | 3254 | if (memcmp(a, &newstate->wm.ilk.optimal, sizeof(*a)) != 0) |
| 3255 | newstate->wm.need_postvbl_update = true; |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3256 | |
| 3257 | return 0; |
| 3258 | } |
| 3259 | |
| 3260 | /* |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3261 | * Merge the watermarks from all active pipes for a specific level. |
| 3262 | */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3263 | static void ilk_merge_wm_level(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3264 | int level, |
| 3265 | struct intel_wm_level *ret_wm) |
| 3266 | { |
| 3267 | const struct intel_crtc *intel_crtc; |
| 3268 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3269 | ret_wm->enable = true; |
| 3270 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3271 | for_each_intel_crtc(&dev_priv->drm, intel_crtc) { |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 3272 | const struct intel_pipe_wm *active = &intel_crtc->wm.active.ilk; |
Ville Syrjälä | fe392ef | 2014-03-07 18:32:10 +0200 | [diff] [blame] | 3273 | const struct intel_wm_level *wm = &active->wm[level]; |
| 3274 | |
| 3275 | if (!active->pipe_enabled) |
| 3276 | continue; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3277 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3278 | /* |
| 3279 | * The watermark values may have been used in the past, |
| 3280 | * so we must maintain them in the registers for some |
| 3281 | * time even if the level is now disabled. |
| 3282 | */ |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3283 | if (!wm->enable) |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3284 | ret_wm->enable = false; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3285 | |
| 3286 | ret_wm->pri_val = max(ret_wm->pri_val, wm->pri_val); |
| 3287 | ret_wm->spr_val = max(ret_wm->spr_val, wm->spr_val); |
| 3288 | ret_wm->cur_val = max(ret_wm->cur_val, wm->cur_val); |
| 3289 | ret_wm->fbc_val = max(ret_wm->fbc_val, wm->fbc_val); |
| 3290 | } |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3291 | } |
| 3292 | |
| 3293 | /* |
| 3294 | * Merge all low power watermarks for all active pipes. |
| 3295 | */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3296 | static void ilk_wm_merge(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 0ba22e2 | 2013-12-05 15:51:34 +0200 | [diff] [blame] | 3297 | const struct intel_wm_config *config, |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3298 | const struct ilk_wm_maximums *max, |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3299 | struct intel_pipe_wm *merged) |
| 3300 | { |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3301 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3302 | int last_enabled_level = max_level; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3303 | |
Ville Syrjälä | 0ba22e2 | 2013-12-05 15:51:34 +0200 | [diff] [blame] | 3304 | /* ILK/SNB/IVB: LP1+ watermarks only w/ single pipe */ |
Tvrtko Ursulin | fd6b8f4 | 2016-10-14 10:13:06 +0100 | [diff] [blame] | 3305 | if ((INTEL_GEN(dev_priv) <= 6 || IS_IVYBRIDGE(dev_priv)) && |
Ville Syrjälä | 0ba22e2 | 2013-12-05 15:51:34 +0200 | [diff] [blame] | 3306 | config->num_pipes_active > 1) |
Ville Syrjälä | 1204d5b | 2016-04-01 21:53:18 +0300 | [diff] [blame] | 3307 | last_enabled_level = 0; |
Ville Syrjälä | 0ba22e2 | 2013-12-05 15:51:34 +0200 | [diff] [blame] | 3308 | |
Ville Syrjälä | 6c8b6c2 | 2013-12-05 15:51:35 +0200 | [diff] [blame] | 3309 | /* ILK: FBC WM must be disabled always */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3310 | merged->fbc_wm_enabled = INTEL_GEN(dev_priv) >= 6; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3311 | |
| 3312 | /* merge each WM1+ level */ |
| 3313 | for (level = 1; level <= max_level; level++) { |
| 3314 | struct intel_wm_level *wm = &merged->wm[level]; |
| 3315 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3316 | ilk_merge_wm_level(dev_priv, level, wm); |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3317 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3318 | if (level > last_enabled_level) |
| 3319 | wm->enable = false; |
| 3320 | else if (!ilk_validate_wm_level(level, max, wm)) |
| 3321 | /* make sure all following levels get disabled */ |
| 3322 | last_enabled_level = level - 1; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3323 | |
| 3324 | /* |
| 3325 | * The spec says it is preferred to disable |
| 3326 | * FBC WMs instead of disabling a WM level. |
| 3327 | */ |
| 3328 | if (wm->fbc_val > max->fbc) { |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3329 | if (wm->enable) |
| 3330 | merged->fbc_wm_enabled = false; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3331 | wm->fbc_val = 0; |
| 3332 | } |
| 3333 | } |
Ville Syrjälä | 6c8b6c2 | 2013-12-05 15:51:35 +0200 | [diff] [blame] | 3334 | |
| 3335 | /* ILK: LP2+ must be disabled when FBC WM is disabled but FBC enabled */ |
| 3336 | /* |
| 3337 | * FIXME this is racy. FBC might get enabled later. |
| 3338 | * What we should check here is whether FBC can be |
| 3339 | * enabled sometime later. |
| 3340 | */ |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 3341 | if (IS_GEN(dev_priv, 5) && !merged->fbc_wm_enabled && |
Paulo Zanoni | 0e631ad | 2015-10-14 17:45:36 -0300 | [diff] [blame] | 3342 | intel_fbc_is_active(dev_priv)) { |
Ville Syrjälä | 6c8b6c2 | 2013-12-05 15:51:35 +0200 | [diff] [blame] | 3343 | for (level = 2; level <= max_level; level++) { |
| 3344 | struct intel_wm_level *wm = &merged->wm[level]; |
| 3345 | |
| 3346 | wm->enable = false; |
| 3347 | } |
| 3348 | } |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3349 | } |
| 3350 | |
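/*
 * Map an LP register index (1-3) to a watermark level. With five
 * levels (HSW/BDW) and wm[4] enabled, LP2/LP3 carry levels 3/4 and
 * level 2 is skipped; otherwise the LP index maps 1:1 to the level.
 */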
Ville Syrjälä | b380ca3 | 2013-10-09 19:18:01 +0300 | [diff] [blame] | 3351 | static int ilk_wm_lp_to_level(int wm_lp, const struct intel_pipe_wm *pipe_wm) |
| 3352 | { |
| 3353 | /* LP1,LP2,LP3 levels are either 1,2,3 or 1,3,4 */ |
| 3354 | return wm_lp + (wm_lp >= 2 && pipe_wm->wm[4].enable); |
| 3355 | } |
| 3356 | |
Ville Syrjälä | a68d68e | 2013-12-05 15:51:29 +0200 | [diff] [blame] | 3357 | /* The value we need to program into the WM_LPx latency field */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3358 | static unsigned int ilk_wm_lp_latency(struct drm_i915_private *dev_priv, |
| 3359 | int level) |
Ville Syrjälä | a68d68e | 2013-12-05 15:51:29 +0200 | [diff] [blame] | 3360 | { |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 3361 | if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) |
Ville Syrjälä | a68d68e | 2013-12-05 15:51:29 +0200 | [diff] [blame] | 3362 | return 2 * level; |
| 3363 | else |
| 3364 | return dev_priv->wm.pri_latency[level]; |
| 3365 | } |
| 3366 | |
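/*
 * Pack the merged watermarks into the raw register words cached in
 * struct ilk_wm_values: one WM_LPx word (plus a separate sprite word)
 * per LP level, and one WM0 word per pipe.
 */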
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3367 | static void ilk_compute_wm_results(struct drm_i915_private *dev_priv, |
Ville Syrjälä | 0362c78 | 2013-10-09 19:17:57 +0300 | [diff] [blame] | 3368 | const struct intel_pipe_wm *merged, |
Ville Syrjälä | 609cede | 2013-10-09 19:18:03 +0300 | [diff] [blame] | 3369 | enum intel_ddb_partitioning partitioning, |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3370 | struct ilk_wm_values *results) |
Paulo Zanoni | 1011d8c | 2013-05-09 16:55:50 -0300 | [diff] [blame] | 3371 | { |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3372 | struct intel_crtc *intel_crtc; |
| 3373 | int level, wm_lp; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3374 | |
Ville Syrjälä | 0362c78 | 2013-10-09 19:17:57 +0300 | [diff] [blame] | 3375 | results->enable_fbc_wm = merged->fbc_wm_enabled; |
Ville Syrjälä | 609cede | 2013-10-09 19:18:03 +0300 | [diff] [blame] | 3376 | results->partitioning = partitioning; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3377 | |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3378 | /* LP1+ register values */ |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3379 | for (wm_lp = 1; wm_lp <= 3; wm_lp++) { |
Ville Syrjälä | 1fd527c | 2013-08-06 22:24:05 +0300 | [diff] [blame] | 3380 | const struct intel_wm_level *r; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3381 | |
Ville Syrjälä | b380ca3 | 2013-10-09 19:18:01 +0300 | [diff] [blame] | 3382 | level = ilk_wm_lp_to_level(wm_lp, merged); |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3383 | |
Ville Syrjälä | 0362c78 | 2013-10-09 19:17:57 +0300 | [diff] [blame] | 3384 | r = &merged->wm[level]; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3385 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3386 | /* |
| 3387 | * Maintain the watermark values even if the level is |
| 3388 | * disabled. Doing otherwise could cause underruns. |
| 3389 | */ |
| 3390 | results->wm_lp[wm_lp - 1] = |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3391 | (ilk_wm_lp_latency(dev_priv, level) << WM1_LP_LATENCY_SHIFT) | |
Ville Syrjälä | 416f472 | 2013-11-02 21:07:46 -0700 | [diff] [blame] | 3392 | (r->pri_val << WM1_LP_SR_SHIFT) | |
| 3393 | r->cur_val; |
| 3394 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3395 | if (r->enable) |
| 3396 | results->wm_lp[wm_lp - 1] |= WM1_LP_SR_EN; |
| 3397 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3398 | if (INTEL_GEN(dev_priv) >= 8) |
Ville Syrjälä | 416f472 | 2013-11-02 21:07:46 -0700 | [diff] [blame] | 3399 | results->wm_lp[wm_lp - 1] |= |
| 3400 | r->fbc_val << WM1_LP_FBC_SHIFT_BDW; |
| 3401 | else |
| 3402 | results->wm_lp[wm_lp - 1] |= |
| 3403 | r->fbc_val << WM1_LP_FBC_SHIFT; |
| 3404 | |
Ville Syrjälä | d52fea5 | 2014-04-28 15:44:57 +0300 | [diff] [blame] | 3405 | /* |
| 3406 | * Always set WM1S_LP_EN when spr_val != 0, even if the |
| 3407 | * level is disabled. Doing otherwise could cause underruns. |
| 3408 | */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3409 | if (INTEL_GEN(dev_priv) <= 6 && r->spr_val) { |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 3410 | drm_WARN_ON(&dev_priv->drm, wm_lp != 1); |
Ville Syrjälä | 6cef2b8a | 2013-12-05 15:51:32 +0200 | [diff] [blame] | 3411 | results->wm_lp_spr[wm_lp - 1] = WM1S_LP_EN | r->spr_val; |
| 3412 | } else |
| 3413 | results->wm_lp_spr[wm_lp - 1] = r->spr_val; |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3414 | } |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3415 | |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3416 | /* LP0 register values */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3417 | for_each_intel_crtc(&dev_priv->drm, intel_crtc) { |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3418 | enum pipe pipe = intel_crtc->pipe; |
Ville Syrjälä | 0560b0c | 2020-01-20 19:47:11 +0200 | [diff] [blame] | 3419 | const struct intel_pipe_wm *pipe_wm = &intel_crtc->wm.active.ilk; |
| 3420 | const struct intel_wm_level *r = &pipe_wm->wm[0]; |
Paulo Zanoni | 1011d8c | 2013-05-09 16:55:50 -0300 | [diff] [blame] | 3421 | |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 3422 | if (drm_WARN_ON(&dev_priv->drm, !r->enable)) |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3423 | continue; |
Ville Syrjälä | 0b2ae6d | 2013-10-09 19:17:55 +0300 | [diff] [blame] | 3424 | |
| 3425 | results->wm_pipe[pipe] = |
| 3426 | (r->pri_val << WM0_PIPE_PLANE_SHIFT) | |
| 3427 | (r->spr_val << WM0_PIPE_SPRITE_SHIFT) | |
| 3428 | r->cur_val; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3429 | } |
| 3430 | } |
| 3431 | |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3432 | /* Find the result with the highest level enabled. If both have the same |
| 3433 |  * highest level, prefer the one with FBC watermarks enabled, else prefer r1. */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3434 | static struct intel_pipe_wm * |
| 3435 | ilk_find_best_result(struct drm_i915_private *dev_priv, |
| 3436 | struct intel_pipe_wm *r1, |
| 3437 | struct intel_pipe_wm *r2) |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3438 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 3439 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 3440 | int level1 = 0, level2 = 0; |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3441 | |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 3442 | for (level = 1; level <= max_level; level++) { |
| 3443 | if (r1->wm[level].enable) |
| 3444 | level1 = level; |
| 3445 | if (r2->wm[level].enable) |
| 3446 | level2 = level; |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3447 | } |
| 3448 | |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 3449 | if (level1 == level2) { |
| 3450 | if (r2->fbc_wm_enabled && !r1->fbc_wm_enabled) |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3451 | return r2; |
| 3452 | else |
| 3453 | return r1; |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 3454 | } else if (level1 > level2) { |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 3455 | return r1; |
| 3456 | } else { |
| 3457 | return r2; |
| 3458 | } |
| 3459 | } |
| 3460 | |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3461 | /* dirty bits used to track which watermarks need changes */ |
| 3462 | #define WM_DIRTY_PIPE(pipe) (1 << (pipe)) |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3463 | #define WM_DIRTY_LP(wm_lp) (1 << (15 + (wm_lp))) |
| 3464 | #define WM_DIRTY_LP_ALL (WM_DIRTY_LP(1) | WM_DIRTY_LP(2) | WM_DIRTY_LP(3)) |
| 3465 | #define WM_DIRTY_FBC (1 << 24) |
| 3466 | #define WM_DIRTY_DDB (1 << 25) |
| 3467 | |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 3468 | static unsigned int ilk_compute_wm_dirty(struct drm_i915_private *dev_priv, |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3469 | const struct ilk_wm_values *old, |
| 3470 | const struct ilk_wm_values *new) |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3471 | { |
| 3472 | unsigned int dirty = 0; |
| 3473 | enum pipe pipe; |
| 3474 | int wm_lp; |
| 3475 | |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 3476 | for_each_pipe(dev_priv, pipe) { |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3477 | if (old->wm_pipe[pipe] != new->wm_pipe[pipe]) { |
| 3478 | dirty |= WM_DIRTY_PIPE(pipe); |
| 3479 | /* Must disable LP1+ watermarks too */ |
| 3480 | dirty |= WM_DIRTY_LP_ALL; |
| 3481 | } |
| 3482 | } |
| 3483 | |
| 3484 | if (old->enable_fbc_wm != new->enable_fbc_wm) { |
| 3485 | dirty |= WM_DIRTY_FBC; |
| 3486 | /* Must disable LP1+ watermarks too */ |
| 3487 | dirty |= WM_DIRTY_LP_ALL; |
| 3488 | } |
| 3489 | |
| 3490 | if (old->partitioning != new->partitioning) { |
| 3491 | dirty |= WM_DIRTY_DDB; |
| 3492 | /* Must disable LP1+ watermarks too */ |
| 3493 | dirty |= WM_DIRTY_LP_ALL; |
| 3494 | } |
| 3495 | |
| 3496 | /* LP1+ watermarks already deemed dirty, no need to continue */ |
| 3497 | if (dirty & WM_DIRTY_LP_ALL) |
| 3498 | return dirty; |
| 3499 | |
| 3500 | /* Find the lowest numbered LP1+ watermark in need of an update... */ |
| 3501 | for (wm_lp = 1; wm_lp <= 3; wm_lp++) { |
| 3502 | if (old->wm_lp[wm_lp - 1] != new->wm_lp[wm_lp - 1] || |
| 3503 | old->wm_lp_spr[wm_lp - 1] != new->wm_lp_spr[wm_lp - 1]) |
| 3504 | break; |
| 3505 | } |
| 3506 | |
| 3507 | /* ...and mark it and all higher numbered LP1+ watermarks as dirty */ |
| 3508 | for (; wm_lp <= 3; wm_lp++) |
| 3509 | dirty |= WM_DIRTY_LP(wm_lp); |
| 3510 | |
| 3511 | return dirty; |
| 3512 | } |
| 3513 | |
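/*
 * Clear the enable bit of any dirty LP1+ watermark before new values
 * are written; everything else (including WM1S_LP_EN) is left alone to
 * avoid underruns. Returns true if any register was touched.
 */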
Ville Syrjälä | 8553c18 | 2013-12-05 15:51:39 +0200 | [diff] [blame] | 3514 | static bool _ilk_disable_lp_wm(struct drm_i915_private *dev_priv, |
| 3515 | unsigned int dirty) |
| 3516 | { |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3517 | struct ilk_wm_values *previous = &dev_priv->wm.hw; |
Ville Syrjälä | 8553c18 | 2013-12-05 15:51:39 +0200 | [diff] [blame] | 3518 | bool changed = false; |
| 3519 | |
| 3520 | if (dirty & WM_DIRTY_LP(3) && previous->wm_lp[2] & WM1_LP_SR_EN) { |
| 3521 | previous->wm_lp[2] &= ~WM1_LP_SR_EN; |
| 3522 | I915_WRITE(WM3_LP_ILK, previous->wm_lp[2]); |
| 3523 | changed = true; |
| 3524 | } |
| 3525 | if (dirty & WM_DIRTY_LP(2) && previous->wm_lp[1] & WM1_LP_SR_EN) { |
| 3526 | previous->wm_lp[1] &= ~WM1_LP_SR_EN; |
| 3527 | I915_WRITE(WM2_LP_ILK, previous->wm_lp[1]); |
| 3528 | changed = true; |
| 3529 | } |
| 3530 | if (dirty & WM_DIRTY_LP(1) && previous->wm_lp[0] & WM1_LP_SR_EN) { |
| 3531 | previous->wm_lp[0] &= ~WM1_LP_SR_EN; |
| 3532 | I915_WRITE(WM1_LP_ILK, previous->wm_lp[0]); |
| 3533 | changed = true; |
| 3534 | } |
| 3535 | |
| 3536 | /* |
| 3537 | * Don't touch WM1S_LP_EN here. |
| 3538 | * Doing so could cause underruns. |
| 3539 | */ |
| 3540 | |
| 3541 | return changed; |
| 3542 | } |
| 3543 | |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3544 | /* |
| 3545 | * The spec says we shouldn't write when we don't need to, because every write |
| 3546 | * causes WMs to be re-evaluated, expending some power. |
| 3547 | */ |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3548 | static void ilk_write_wm_values(struct drm_i915_private *dev_priv, |
| 3549 | struct ilk_wm_values *results) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3550 | { |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 3551 | struct ilk_wm_values *previous = &dev_priv->wm.hw; |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3552 | unsigned int dirty; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 3553 | u32 val; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3554 | |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 3555 | dirty = ilk_compute_wm_dirty(dev_priv, previous, results); |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3556 | if (!dirty) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3557 | return; |
| 3558 | |
Ville Syrjälä | 8553c18 | 2013-12-05 15:51:39 +0200 | [diff] [blame] | 3559 | _ilk_disable_lp_wm(dev_priv, dirty); |
Ville Syrjälä | 6cef2b8a | 2013-12-05 15:51:32 +0200 | [diff] [blame] | 3560 | |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3561 | if (dirty & WM_DIRTY_PIPE(PIPE_A)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3562 | I915_WRITE(WM0_PIPEA_ILK, results->wm_pipe[0]); |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3563 | if (dirty & WM_DIRTY_PIPE(PIPE_B)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3564 | I915_WRITE(WM0_PIPEB_ILK, results->wm_pipe[1]); |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3565 | if (dirty & WM_DIRTY_PIPE(PIPE_C)) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3566 | I915_WRITE(WM0_PIPEC_IVB, results->wm_pipe[2]); |
| 3567 | |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3568 | if (dirty & WM_DIRTY_DDB) { |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 3569 | if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) { |
Ville Syrjälä | ac9545f | 2013-12-05 15:51:28 +0200 | [diff] [blame] | 3570 | val = I915_READ(WM_MISC); |
| 3571 | if (results->partitioning == INTEL_DDB_PART_1_2) |
| 3572 | val &= ~WM_MISC_DATA_PARTITION_5_6; |
| 3573 | else |
| 3574 | val |= WM_MISC_DATA_PARTITION_5_6; |
| 3575 | I915_WRITE(WM_MISC, val); |
| 3576 | } else { |
| 3577 | val = I915_READ(DISP_ARB_CTL2); |
| 3578 | if (results->partitioning == INTEL_DDB_PART_1_2) |
| 3579 | val &= ~DISP_DATA_PARTITION_5_6; |
| 3580 | else |
| 3581 | val |= DISP_DATA_PARTITION_5_6; |
| 3582 | I915_WRITE(DISP_ARB_CTL2, val); |
| 3583 | } |
Paulo Zanoni | 1011d8c | 2013-05-09 16:55:50 -0300 | [diff] [blame] | 3584 | } |
| 3585 | |
Ville Syrjälä | 49a687c | 2013-10-11 19:39:52 +0300 | [diff] [blame] | 3586 | if (dirty & WM_DIRTY_FBC) { |
Paulo Zanoni | cca32e9 | 2013-05-31 11:45:06 -0300 | [diff] [blame] | 3587 | val = I915_READ(DISP_ARB_CTL); |
| 3588 | if (results->enable_fbc_wm) |
| 3589 | val &= ~DISP_FBC_WM_DIS; |
| 3590 | else |
| 3591 | val |= DISP_FBC_WM_DIS; |
| 3592 | I915_WRITE(DISP_ARB_CTL, val); |
| 3593 | } |
| 3594 | |
Imre Deak | 954911e | 2013-12-17 14:46:34 +0200 | [diff] [blame] | 3595 | if (dirty & WM_DIRTY_LP(1) && |
| 3596 | previous->wm_lp_spr[0] != results->wm_lp_spr[0]) |
| 3597 | I915_WRITE(WM1S_LP_ILK, results->wm_lp_spr[0]); |
| 3598 | |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 3599 | if (INTEL_GEN(dev_priv) >= 7) { |
Ville Syrjälä | 6cef2b8a | 2013-12-05 15:51:32 +0200 | [diff] [blame] | 3600 | if (dirty & WM_DIRTY_LP(2) && previous->wm_lp_spr[1] != results->wm_lp_spr[1]) |
| 3601 | I915_WRITE(WM2S_LP_IVB, results->wm_lp_spr[1]); |
| 3602 | if (dirty & WM_DIRTY_LP(3) && previous->wm_lp_spr[2] != results->wm_lp_spr[2]) |
| 3603 | I915_WRITE(WM3S_LP_IVB, results->wm_lp_spr[2]); |
| 3604 | } |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3605 | |
Ville Syrjälä | facd619b | 2013-12-05 15:51:33 +0200 | [diff] [blame] | 3606 | if (dirty & WM_DIRTY_LP(1) && previous->wm_lp[0] != results->wm_lp[0]) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3607 | I915_WRITE(WM1_LP_ILK, results->wm_lp[0]); |
Ville Syrjälä | facd619b | 2013-12-05 15:51:33 +0200 | [diff] [blame] | 3608 | if (dirty & WM_DIRTY_LP(2) && previous->wm_lp[1] != results->wm_lp[1]) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3609 | I915_WRITE(WM2_LP_ILK, results->wm_lp[1]); |
Ville Syrjälä | facd619b | 2013-12-05 15:51:33 +0200 | [diff] [blame] | 3610 | if (dirty & WM_DIRTY_LP(3) && previous->wm_lp[2] != results->wm_lp[2]) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3611 | I915_WRITE(WM3_LP_ILK, results->wm_lp[2]); |
Ville Syrjälä | 609cede | 2013-10-09 19:18:03 +0300 | [diff] [blame] | 3612 | |
| 3613 | dev_priv->wm.hw = *results; |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 3614 | } |
| 3615 | |
Ville Syrjälä | 60aca57 | 2019-11-27 21:05:51 +0200 | [diff] [blame] | 3616 | bool ilk_disable_lp_wm(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 8553c18 | 2013-12-05 15:51:39 +0200 | [diff] [blame] | 3617 | { |
Ville Syrjälä | 8553c18 | 2013-12-05 15:51:39 +0200 | [diff] [blame] | 3618 | return _ilk_disable_lp_wm(dev_priv, WM_DIRTY_LP_ALL); |
| 3619 | } |
| 3620 | |
Stanislav Lisovskiy | 0f0f9ae | 2020-02-03 01:06:29 +0200 | [diff] [blame] | 3621 | u8 intel_enabled_dbuf_slices_mask(struct drm_i915_private *dev_priv) |
Mahesh Kumar | 74bd800 | 2018-04-26 19:55:15 +0530 | [diff] [blame] | 3622 | { |
Stanislav Lisovskiy | 0f0f9ae | 2020-02-03 01:06:29 +0200 | [diff] [blame] | 3623 | int i; |
| 3624 | int max_slices = INTEL_INFO(dev_priv)->num_supported_dbuf_slices; |
| 3625 | u8 enabled_slices_mask = 0; |
Mahesh Kumar | 74bd800 | 2018-04-26 19:55:15 +0530 | [diff] [blame] | 3626 | |
Stanislav Lisovskiy | 0f0f9ae | 2020-02-03 01:06:29 +0200 | [diff] [blame] | 3627 | for (i = 0; i < max_slices; i++) { |
| 3628 | if (I915_READ(DBUF_CTL_S(i)) & DBUF_POWER_STATE) |
| 3629 | enabled_slices_mask |= BIT(i); |
| 3630 | } |
Mahesh Kumar | 74bd800 | 2018-04-26 19:55:15 +0530 | [diff] [blame] | 3631 | |
Stanislav Lisovskiy | 0f0f9ae | 2020-02-03 01:06:29 +0200 | [diff] [blame] | 3632 | return enabled_slices_mask; |
Mahesh Kumar | 74bd800 | 2018-04-26 19:55:15 +0530 | [diff] [blame] | 3633 | } |
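| | /*
| |  * Example for intel_enabled_dbuf_slices_mask() above (illustrative): on a
| |  * platform with two supported DBuf slices, if only DBUF_CTL_S(0) reports
| |  * DBUF_POWER_STATE the function returns BIT(0) == 0x1; with both slices
| |  * powered it returns 0x3.
| |  */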
| 3634 | |
Matt Roper | 024c904 | 2015-09-24 15:53:11 -0700 | [diff] [blame] | 3635 | /* |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3636 | * FIXME: We still don't have the proper code to detect if we need to apply the WA, |
| 3637 | * so assume we'll always need it in order to avoid underruns. |
| 3638 | */ |
Ville Syrjälä | 60e983f | 2018-12-21 19:14:33 +0200 | [diff] [blame] | 3639 | static bool skl_needs_memory_bw_wa(struct drm_i915_private *dev_priv) |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3640 | { |
Ville Syrjälä | 60e983f | 2018-12-21 19:14:33 +0200 | [diff] [blame] | 3641 | return IS_GEN9_BC(dev_priv) || IS_BROXTON(dev_priv); |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3642 | } |
| 3643 | |
Paulo Zanoni | 56feca9 | 2016-09-22 18:00:28 -0300 | [diff] [blame] | 3644 | static bool |
| 3645 | intel_has_sagv(struct drm_i915_private *dev_priv) |
| 3646 | { |
Rodrigo Vivi | 1ca2b06 | 2018-10-26 13:03:17 -0700 | [diff] [blame] | 3647 | return (IS_GEN9_BC(dev_priv) || INTEL_GEN(dev_priv) >= 10) && |
| 3648 | dev_priv->sagv_status != I915_SAGV_NOT_CONTROLLED; |
Paulo Zanoni | 56feca9 | 2016-09-22 18:00:28 -0300 | [diff] [blame] | 3649 | } |
| 3650 | |
James Ausmus | b068a86 | 2019-10-09 10:23:14 -0700 | [diff] [blame] | 3651 | static void |
| 3652 | skl_setup_sagv_block_time(struct drm_i915_private *dev_priv) |
| 3653 | { |
James Ausmus | da80f04 | 2019-10-09 10:23:15 -0700 | [diff] [blame] | 3654 | if (INTEL_GEN(dev_priv) >= 12) { |
| 3655 | u32 val = 0; |
| 3656 | int ret; |
| 3657 | |
| 3658 | ret = sandybridge_pcode_read(dev_priv, |
| 3659 | GEN12_PCODE_READ_SAGV_BLOCK_TIME_US, |
| 3660 | &val, NULL); |
| 3661 | if (!ret) { |
| 3662 | dev_priv->sagv_block_time_us = val; |
| 3663 | return; |
| 3664 | } |
| 3665 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3666 | drm_dbg(&dev_priv->drm, "Couldn't read SAGV block time!\n"); |
James Ausmus | da80f04 | 2019-10-09 10:23:15 -0700 | [diff] [blame] | 3667 | } else if (IS_GEN(dev_priv, 11)) { |
James Ausmus | b068a86 | 2019-10-09 10:23:14 -0700 | [diff] [blame] | 3668 | dev_priv->sagv_block_time_us = 10; |
| 3669 | return; |
| 3670 | } else if (IS_GEN(dev_priv, 10)) { |
| 3671 | dev_priv->sagv_block_time_us = 20; |
| 3672 | return; |
| 3673 | } else if (IS_GEN(dev_priv, 9)) { |
| 3674 | dev_priv->sagv_block_time_us = 30; |
| 3675 | return; |
| 3676 | } else { |
| 3677 | MISSING_CASE(INTEL_GEN(dev_priv)); |
| 3678 | } |
| 3679 | |
| 3680 | /* Default to an unusable block time */ |
| 3681 | dev_priv->sagv_block_time_us = -1; |
| 3682 | } |
| 3683 | |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3684 | /* |
| 3685 | * SAGV dynamically adjusts the system agent voltage and clock frequencies |
| 3686 | * depending on power and performance requirements. The display engine access |
| 3687 | * to system memory is blocked during the adjustment time. Because of the |
| 3688 | * blocking time, having this enabled can cause full system hangs and/or pipe |
| 3689 | * underruns if we don't meet all of the following requirements: |
| 3690 | * |
| 3691 | * - <= 1 pipe enabled |
| 3692 | * - All planes can enable watermarks for latencies >= SAGV engine block time |
| 3693 | * - We're not using an interlaced display configuration |
| 3694 | */ |
| 3695 | int |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3696 | intel_enable_sagv(struct drm_i915_private *dev_priv) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3697 | { |
| 3698 | int ret; |
| 3699 | |
Paulo Zanoni | 56feca9 | 2016-09-22 18:00:28 -0300 | [diff] [blame] | 3700 | if (!intel_has_sagv(dev_priv)) |
| 3701 | return 0; |
| 3702 | |
| 3703 | if (dev_priv->sagv_status == I915_SAGV_ENABLED) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3704 | return 0; |
| 3705 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3706 | drm_dbg_kms(&dev_priv->drm, "Enabling SAGV\n"); |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3707 | ret = sandybridge_pcode_write(dev_priv, GEN9_PCODE_SAGV_CONTROL, |
| 3708 | GEN9_SAGV_ENABLE); |
| 3709 | |
Ville Syrjälä | ff61a97 | 2018-12-21 19:14:34 +0200 | [diff] [blame] | 3710 | /* We don't need to wait for SAGV when enabling */ |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3711 | |
| 3712 | /* |
| 3713 | * Some skl systems, pre-release machines in particular, |
Ville Syrjälä | ff61a97 | 2018-12-21 19:14:34 +0200 | [diff] [blame] | 3714 | * don't actually have SAGV. |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3715 | */ |
Paulo Zanoni | 6e3100e | 2016-09-22 18:00:29 -0300 | [diff] [blame] | 3716 | if (IS_SKYLAKE(dev_priv) && ret == -ENXIO) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3717 | drm_dbg(&dev_priv->drm, "No SAGV found on system, ignoring\n"); |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3718 | dev_priv->sagv_status = I915_SAGV_NOT_CONTROLLED; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3719 | return 0; |
| 3720 | } else if (ret < 0) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3721 | drm_err(&dev_priv->drm, "Failed to enable SAGV\n"); |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3722 | return ret; |
| 3723 | } |
| 3724 | |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3725 | dev_priv->sagv_status = I915_SAGV_ENABLED; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3726 | return 0; |
| 3727 | } |
| 3728 | |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3729 | int |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3730 | intel_disable_sagv(struct drm_i915_private *dev_priv) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3731 | { |
Imre Deak | b3b8e99 | 2016-12-05 18:27:38 +0200 | [diff] [blame] | 3732 | int ret; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3733 | |
Paulo Zanoni | 56feca9 | 2016-09-22 18:00:28 -0300 | [diff] [blame] | 3734 | if (!intel_has_sagv(dev_priv)) |
| 3735 | return 0; |
| 3736 | |
| 3737 | if (dev_priv->sagv_status == I915_SAGV_DISABLED) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3738 | return 0; |
| 3739 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3740 | drm_dbg_kms(&dev_priv->drm, "Disabling SAGV\n"); |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3741 | /* bspec says to keep retrying for at least 1 ms */ |
Imre Deak | b3b8e99 | 2016-12-05 18:27:38 +0200 | [diff] [blame] | 3742 | ret = skl_pcode_request(dev_priv, GEN9_PCODE_SAGV_CONTROL, |
| 3743 | GEN9_SAGV_DISABLE, |
| 3744 | GEN9_SAGV_IS_DISABLED, GEN9_SAGV_IS_DISABLED, |
| 3745 | 1); |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3746 | /* |
| 3747 | * Some skl systems, pre-release machines in particular, |
Ville Syrjälä | ff61a97 | 2018-12-21 19:14:34 +0200 | [diff] [blame] | 3748 | * don't actually have SAGV. |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3749 | */ |
Imre Deak | b3b8e99 | 2016-12-05 18:27:38 +0200 | [diff] [blame] | 3750 | if (IS_SKYLAKE(dev_priv) && ret == -ENXIO) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3751 | drm_dbg(&dev_priv->drm, "No SAGV found on system, ignoring\n"); |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3752 | dev_priv->sagv_status = I915_SAGV_NOT_CONTROLLED; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3753 | return 0; |
Imre Deak | b3b8e99 | 2016-12-05 18:27:38 +0200 | [diff] [blame] | 3754 | } else if (ret < 0) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 3755 | drm_err(&dev_priv->drm, "Failed to disable SAGV (%d)\n", ret); |
Imre Deak | b3b8e99 | 2016-12-05 18:27:38 +0200 | [diff] [blame] | 3756 | return ret; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3757 | } |
| 3758 | |
Paulo Zanoni | 16dcdc4 | 2016-09-22 18:00:27 -0300 | [diff] [blame] | 3759 | dev_priv->sagv_status = I915_SAGV_DISABLED; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3760 | return 0; |
| 3761 | } |
| 3762 | |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3763 | void intel_sagv_pre_plane_update(struct intel_atomic_state *state) |
| 3764 | { |
| 3765 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3766 | const struct intel_bw_state *new_bw_state; |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3767 | const struct intel_bw_state *old_bw_state; |
| 3768 | u32 new_mask = 0; |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3769 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3770 | /* |
| 3771 | * Just return if we can't control SAGV or don't have it. |
| 3772 | * This is different from the situation where we have SAGV but just can't |
| 3773 | * afford it due to DBuf limitations: if SAGV is completely |
| 3774 | * disabled in the BIOS, we are not even allowed to send a PCode request, |
| 3775 | * as it would throw an error. So we have to check for it here. |
| 3776 | */ |
| 3777 | if (!intel_has_sagv(dev_priv)) |
| 3778 | return; |
| 3779 | |
| 3780 | new_bw_state = intel_atomic_get_new_bw_state(state); |
| 3781 | if (!new_bw_state) |
| 3782 | return; |
| 3783 | |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3784 | if (INTEL_GEN(dev_priv) < 11 && !intel_can_enable_sagv(dev_priv, new_bw_state)) { |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3785 | intel_disable_sagv(dev_priv); |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3786 | return; |
| 3787 | } |
| 3788 | |
| 3789 | old_bw_state = intel_atomic_get_old_bw_state(state); |
| 3790 | /* |
| 3791 | * Nothing to mask |
| 3792 | */ |
| 3793 | if (new_bw_state->qgv_points_mask == old_bw_state->qgv_points_mask) |
| 3794 | return; |
| 3795 | |
| 3796 | new_mask = old_bw_state->qgv_points_mask | new_bw_state->qgv_points_mask; |
| 3797 | |
| 3798 | /* |
| 3799 | * If the new mask is zero there is nothing to mask; |
| 3800 | * we can only unmask, which is done in the post plane update. |
| 3801 | */ |
| 3802 | if (!new_mask) |
| 3803 | return; |
| 3804 | |
| 3805 | /* |
| 3806 | * Restrict required qgv points before updating the configuration. |
| 3807 | * According to BSpec we can't mask and unmask qgv points at the same |
| 3808 | * time. Also masking should be done before updating the configuration |
| 3809 | * and unmasking afterwards. |
| 3810 | */ |
| 3811 | icl_pcode_restrict_qgv_points(dev_priv, new_mask); |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3812 | } |
| 3813 | |
| 3814 | void intel_sagv_post_plane_update(struct intel_atomic_state *state) |
| 3815 | { |
| 3816 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3817 | const struct intel_bw_state *new_bw_state; |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3818 | const struct intel_bw_state *old_bw_state; |
| 3819 | u32 new_mask = 0; |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3820 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3821 | /* |
| 3822 | * Just return if we can't control SAGV or don't have it. |
| 3823 | * This is different from the situation where we have SAGV but just can't |
| 3824 | * afford it due to DBuf limitations: if SAGV is completely |
| 3825 | * disabled in the BIOS, we are not even allowed to send a PCode request, |
| 3826 | * as it would throw an error. So we have to check for it here. |
| 3827 | */ |
| 3828 | if (!intel_has_sagv(dev_priv)) |
| 3829 | return; |
| 3830 | |
| 3831 | new_bw_state = intel_atomic_get_new_bw_state(state); |
| 3832 | if (!new_bw_state) |
| 3833 | return; |
| 3834 | |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3835 | if (INTEL_GEN(dev_priv) < 11 && intel_can_enable_sagv(dev_priv, new_bw_state)) { |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3836 | intel_enable_sagv(dev_priv); |
Stanislav Lisovskiy | 20f505f | 2020-05-14 10:48:52 +0300 | [diff] [blame] | 3837 | return; |
| 3838 | } |
| 3839 | |
| 3840 | old_bw_state = intel_atomic_get_old_bw_state(state); |
| 3841 | /* |
| 3842 | * Nothing to unmask |
| 3843 | */ |
| 3844 | if (new_bw_state->qgv_points_mask == old_bw_state->qgv_points_mask) |
| 3845 | return; |
| 3846 | |
| 3847 | new_mask = new_bw_state->qgv_points_mask; |
| 3848 | |
| 3849 | /* |
| 3850 | * Allow required qgv points after updating the configuration. |
| 3851 | * According to BSpec we can't mask and unmask qgv points at the same |
| 3852 | * time. Also masking should be done before updating the configuration |
| 3853 | * and unmasking afterwards. |
| 3854 | */ |
| 3855 | icl_pcode_restrict_qgv_points(dev_priv, new_mask); |
Stanislav Lisovskiy | 680e1af | 2020-04-15 17:39:04 +0300 | [diff] [blame] | 3856 | } |
| 3857 | |
Stanislav Lisovskiy | 1d0a6c8 | 2020-05-13 12:38:12 +0300 | [diff] [blame] | 3858 | static bool skl_crtc_can_enable_sagv(const struct intel_crtc_state *crtc_state) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3859 | { |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3860 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3861 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3862 | struct intel_plane *plane; |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3863 | const struct intel_plane_state *plane_state; |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 3864 | int level, latency; |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3865 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3866 | if (!intel_has_sagv(dev_priv)) |
| 3867 | return false; |
| 3868 | |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3869 | if (!crtc_state->hw.active) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3870 | return true; |
Lucas De Marchi | da17223 | 2019-04-04 16:04:26 -0700 | [diff] [blame] | 3871 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 3872 | if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3873 | return false; |
| 3874 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3875 | intel_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state) { |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3876 | const struct skl_plane_wm *wm = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 3877 | &crtc_state->wm.skl.optimal.planes[plane->id]; |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3878 | |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3879 | /* Skip this plane if it's not enabled */ |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 3880 | if (!wm->wm[0].plane_en) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3881 | continue; |
| 3882 | |
| 3883 | /* Find the highest enabled wm level for this plane */ |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 3884 | for (level = ilk_wm_max_level(dev_priv); |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 3885 | !wm->wm[level].plane_en; --level) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3886 | { } |
| 3887 | |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3888 | latency = dev_priv->wm.skl_latency[level]; |
| 3889 | |
Ville Syrjälä | 60e983f | 2018-12-21 19:14:33 +0200 | [diff] [blame] | 3890 | if (skl_needs_memory_bw_wa(dev_priv) && |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3891 | plane_state->uapi.fb->modifier == |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 3892 | I915_FORMAT_MOD_X_TILED) |
| 3893 | latency += 15; |
| 3894 | |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3895 | /* |
Paulo Zanoni | fdd11c2 | 2017-08-09 13:52:45 -0700 | [diff] [blame] | 3896 | * If any of the planes on this pipe don't enable WM levels that |
| 3897 | * cover memory latencies higher than sagv_block_time_us, we |
Ville Syrjälä | ff61a97 | 2018-12-21 19:14:34 +0200 | [diff] [blame] | 3898 | * can't enable SAGV. |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3899 | */ |
James Ausmus | b068a86 | 2019-10-09 10:23:14 -0700 | [diff] [blame] | 3900 | if (latency < dev_priv->sagv_block_time_us) |
Lyude | 656d1b8 | 2016-08-17 15:55:54 -0400 | [diff] [blame] | 3901 | return false; |
| 3902 | } |
| 3903 | |
| 3904 | return true; |
| 3905 | } |
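| | /*
| |  * Example for skl_crtc_can_enable_sagv() above (illustrative numbers):
| |  * on a gen9 platform with sagv_block_time_us == 30, a plane whose highest
| |  * enabled WM level has a latency of 20 us makes this return false
| |  * (20 < 30); with an X-tiled fb and the memory bandwidth WA the plane is
| |  * judged against 20 + 15 = 35 us instead, which would pass.
| |  */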
| 3906 | |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 3907 | static bool tgl_crtc_can_enable_sagv(const struct intel_crtc_state *crtc_state) |
| 3908 | { |
| 3909 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
| 3910 | enum plane_id plane_id; |
| 3911 | |
| 3912 | if (!crtc_state->hw.active) |
| 3913 | return true; |
| 3914 | |
| 3915 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 3916 | const struct skl_ddb_entry *plane_alloc = |
| 3917 | &crtc_state->wm.skl.plane_ddb_y[plane_id]; |
| 3918 | const struct skl_plane_wm *wm = |
| 3919 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
| 3920 | |
| 3921 | if (skl_ddb_entry_size(plane_alloc) < wm->sagv_wm0.min_ddb_alloc) |
| 3922 | return false; |
| 3923 | } |
| 3924 | |
| 3925 | return true; |
| 3926 | } |
| 3927 | |
Stanislav Lisovskiy | 1d0a6c8 | 2020-05-13 12:38:12 +0300 | [diff] [blame] | 3928 | static bool intel_crtc_can_enable_sagv(const struct intel_crtc_state *crtc_state) |
| 3929 | { |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 3930 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
| 3931 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 3932 | |
| 3933 | if (INTEL_GEN(dev_priv) >= 12) |
| 3934 | return tgl_crtc_can_enable_sagv(crtc_state); |
| 3935 | else |
| 3936 | return skl_crtc_can_enable_sagv(crtc_state); |
Stanislav Lisovskiy | 1d0a6c8 | 2020-05-13 12:38:12 +0300 | [diff] [blame] | 3937 | } |
| 3938 | |
Stanislav Lisovskiy | d8d5afe | 2020-05-13 12:38:13 +0300 | [diff] [blame] | 3939 | bool intel_can_enable_sagv(struct drm_i915_private *dev_priv, |
| 3940 | const struct intel_bw_state *bw_state) |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3941 | { |
Stanislav Lisovskiy | d8d5afe | 2020-05-13 12:38:13 +0300 | [diff] [blame] | 3942 | if (INTEL_GEN(dev_priv) < 11 && |
| 3943 | bw_state->active_pipes && !is_power_of_2(bw_state->active_pipes)) |
Stanislav Lisovskiy | ecab0f3 | 2020-04-30 22:56:34 +0300 | [diff] [blame] | 3944 | return false; |
| 3945 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3946 | return bw_state->pipe_sagv_reject == 0; |
| 3947 | } |
| 3948 | |
| 3949 | static int intel_compute_sagv_mask(struct intel_atomic_state *state) |
| 3950 | { |
Stanislav Lisovskiy | d8d5afe | 2020-05-13 12:38:13 +0300 | [diff] [blame] | 3951 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3952 | int ret; |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3953 | struct intel_crtc *crtc; |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 3954 | struct intel_crtc_state *new_crtc_state; |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3955 | struct intel_bw_state *new_bw_state = NULL; |
| 3956 | const struct intel_bw_state *old_bw_state = NULL; |
| 3957 | int i; |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3958 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3959 | for_each_new_intel_crtc_in_state(state, crtc, |
| 3960 | new_crtc_state, i) { |
| 3961 | new_bw_state = intel_atomic_get_bw_state(state); |
| 3962 | if (IS_ERR(new_bw_state)) |
| 3963 | return PTR_ERR(new_bw_state); |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3964 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3965 | old_bw_state = intel_atomic_get_old_bw_state(state); |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3966 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3967 | if (intel_crtc_can_enable_sagv(new_crtc_state)) |
| 3968 | new_bw_state->pipe_sagv_reject &= ~BIT(crtc->pipe); |
| 3969 | else |
| 3970 | new_bw_state->pipe_sagv_reject |= BIT(crtc->pipe); |
| 3971 | } |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3972 | |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 3973 | if (!new_bw_state) |
| 3974 | return 0; |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 3975 | |
Stanislav Lisovskiy | ecab0f3 | 2020-04-30 22:56:34 +0300 | [diff] [blame] | 3976 | new_bw_state->active_pipes = |
| 3977 | intel_calc_active_pipes(state, old_bw_state->active_pipes); |
Stanislav Lisovskiy | 1d0a6c8 | 2020-05-13 12:38:12 +0300 | [diff] [blame] | 3978 | |
Stanislav Lisovskiy | ecab0f3 | 2020-04-30 22:56:34 +0300 | [diff] [blame] | 3979 | if (new_bw_state->active_pipes != old_bw_state->active_pipes) { |
| 3980 | ret = intel_atomic_lock_global_state(&new_bw_state->base); |
| 3981 | if (ret) |
| 3982 | return ret; |
| 3983 | } |
| 3984 | |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 3985 | for_each_new_intel_crtc_in_state(state, crtc, |
| 3986 | new_crtc_state, i) { |
| 3987 | struct skl_pipe_wm *pipe_wm = &new_crtc_state->wm.skl.optimal; |
| 3988 | |
| 3989 | /* |
| 3990 | * We store use_sagv_wm in the crtc state rather than relying on |
| 3991 | * that bw state since we have no convenient way to get at the |
| 3992 | * latter from the plane commit hooks (especially in the legacy |
| 3993 | * cursor case) |
| 3994 | */ |
| 3995 | pipe_wm->use_sagv_wm = INTEL_GEN(dev_priv) >= 12 && |
| 3996 | intel_can_enable_sagv(dev_priv, new_bw_state); |
| 3997 | } |
| 3998 | |
Stanislav Lisovskiy | d8d5afe | 2020-05-13 12:38:13 +0300 | [diff] [blame] | 3999 | if (intel_can_enable_sagv(dev_priv, new_bw_state) != |
| 4000 | intel_can_enable_sagv(dev_priv, old_bw_state)) { |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 4001 | ret = intel_atomic_serialize_global_state(&new_bw_state->base); |
| 4002 | if (ret) |
| 4003 | return ret; |
| 4004 | } else if (new_bw_state->pipe_sagv_reject != old_bw_state->pipe_sagv_reject) { |
| 4005 | ret = intel_atomic_lock_global_state(&new_bw_state->base); |
| 4006 | if (ret) |
| 4007 | return ret; |
| 4008 | } |
| 4009 | |
| 4010 | return 0; |
Stanislav Lisovskiy | a389c49 | 2020-04-15 17:57:40 +0300 | [diff] [blame] | 4011 | } |
| 4012 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4013 | /* |
| 4014 | * Calculate initial DBuf slice offset, based on slice size |
| 4015 | * and mask (i.e. if slice size is 1024 and only the second slice is enabled, |
| 4016 | * the offset would be 1024) |
| 4017 | */ |
| 4018 | static unsigned int |
| 4019 | icl_get_first_dbuf_slice_offset(u32 dbuf_slice_mask, |
| 4020 | u32 slice_size, |
| 4021 | u32 ddb_size) |
Mahesh Kumar | aa9664f | 2018-04-26 19:55:16 +0530 | [diff] [blame] | 4022 | { |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4023 | unsigned int offset = 0; |
| 4024 | |
| 4025 | if (!dbuf_slice_mask) |
| 4026 | return 0; |
| 4027 | |
| 4028 | offset = (ffs(dbuf_slice_mask) - 1) * slice_size; |
| 4029 | |
| 4030 | WARN_ON(offset >= ddb_size); |
| 4031 | return offset; |
| 4032 | } |
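| | /*
| |  * Example for icl_get_first_dbuf_slice_offset() above (illustrative):
| |  * with dbuf_slice_mask == BIT(DBUF_S2), ffs() returns 2, so the offset is
| |  * (2 - 1) * slice_size; with a 1024 block slice that is 1024, matching
| |  * the comment above.
| |  */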
| 4033 | |
Stanislav Lisovskiy | cd19154 | 2020-05-20 18:00:58 +0300 | [diff] [blame] | 4034 | u16 intel_get_ddb_size(struct drm_i915_private *dev_priv) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4035 | { |
Mahesh Kumar | aa9664f | 2018-04-26 19:55:16 +0530 | [diff] [blame] | 4036 | u16 ddb_size = INTEL_INFO(dev_priv)->ddb_size; |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 4037 | drm_WARN_ON(&dev_priv->drm, ddb_size == 0); |
Mahesh Kumar | aa9664f | 2018-04-26 19:55:16 +0530 | [diff] [blame] | 4038 | |
| 4039 | if (INTEL_GEN(dev_priv) < 11) |
| 4040 | return ddb_size - 4; /* 4 blocks for bypass path allocation */ |
| 4041 | |
Mahesh Kumar | aa9664f | 2018-04-26 19:55:16 +0530 | [diff] [blame] | 4042 | return ddb_size; |
| 4043 | } |
| 4044 | |
Stanislav Lisovskiy | cd19154 | 2020-05-20 18:00:58 +0300 | [diff] [blame] | 4045 | u32 skl_ddb_dbuf_slice_mask(struct drm_i915_private *dev_priv, |
| 4046 | const struct skl_ddb_entry *entry) |
| 4047 | { |
| 4048 | u32 slice_mask = 0; |
| 4049 | u16 ddb_size = intel_get_ddb_size(dev_priv); |
| 4050 | u16 num_supported_slices = INTEL_INFO(dev_priv)->num_supported_dbuf_slices; |
| 4051 | u16 slice_size = ddb_size / num_supported_slices; |
| 4052 | u16 start_slice; |
| 4053 | u16 end_slice; |
| 4054 | |
| 4055 | if (!skl_ddb_entry_size(entry)) |
| 4056 | return 0; |
| 4057 | |
| 4058 | start_slice = entry->start / slice_size; |
| 4059 | end_slice = (entry->end - 1) / slice_size; |
| 4060 | |
| 4061 | /* |
| 4062 | * A per-plane DDB entry can, in the worst case, span multiple slices, |
| 4063 | * but a single entry is always contiguous. |
| 4064 | */ |
| 4065 | while (start_slice <= end_slice) { |
| 4066 | slice_mask |= BIT(start_slice); |
| 4067 | start_slice++; |
| 4068 | } |
| 4069 | |
| 4070 | return slice_mask; |
| 4071 | } |
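| | /*
| |  * Example for skl_ddb_dbuf_slice_mask() above (illustrative numbers):
| |  * with ddb_size == 2048 and two supported slices, slice_size is 1024;
| |  * an entry { .start = 512, .end = 1536 } gives start_slice 0 and
| |  * end_slice (1536 - 1) / 1024 == 1, so the returned mask is
| |  * BIT(0) | BIT(1).
| |  */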
| 4072 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4073 | static u8 skl_compute_dbuf_slices(const struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4074 | u8 active_pipes); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4075 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4076 | static int |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4077 | skl_ddb_get_pipe_allocation_limits(struct drm_i915_private *dev_priv, |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4078 | const struct intel_crtc_state *crtc_state, |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4079 | const u64 total_data_rate, |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4080 | struct skl_ddb_entry *alloc, /* out */ |
| 4081 | int *num_active /* out */) |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4082 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 4083 | struct drm_atomic_state *state = crtc_state->uapi.state; |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4084 | struct intel_atomic_state *intel_state = to_intel_atomic_state(state); |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 4085 | struct drm_crtc *for_crtc = crtc_state->uapi.crtc; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4086 | const struct intel_crtc *crtc; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4087 | u32 pipe_width = 0, total_width_in_range = 0, width_before_pipe_in_range = 0; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4088 | enum pipe for_pipe = to_intel_crtc(for_crtc)->pipe; |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4089 | struct intel_dbuf_state *new_dbuf_state = |
| 4090 | intel_atomic_get_new_dbuf_state(intel_state); |
| 4091 | const struct intel_dbuf_state *old_dbuf_state = |
| 4092 | intel_atomic_get_old_dbuf_state(intel_state); |
| 4093 | u8 active_pipes = new_dbuf_state->active_pipes; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4094 | u16 ddb_size; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4095 | u32 ddb_range_size; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4096 | u32 i; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4097 | u32 dbuf_slice_mask; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4098 | u32 offset; |
| 4099 | u32 slice_size; |
| 4100 | u32 total_slice_mask; |
| 4101 | u32 start, end; |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4102 | int ret; |
Matt Roper | a6d3460e | 2016-05-12 07:06:04 -0700 | [diff] [blame] | 4103 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4104 | *num_active = hweight8(active_pipes); |
| 4105 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4106 | if (!crtc_state->hw.active) { |
| 4107 | alloc->start = 0; |
| 4108 | alloc->end = 0; |
| 4109 | return 0; |
| 4110 | } |
| 4111 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4112 | ddb_size = intel_get_ddb_size(dev_priv); |
| 4113 | |
| 4114 | slice_size = ddb_size / INTEL_INFO(dev_priv)->num_supported_dbuf_slices; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4115 | |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4116 | /* |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4117 | * If the state doesn't change the active CRTCs or there is no |
| 4118 | * modeset request, then there's no need to recalculate; |
| 4119 | * the existing pipe allocation limits should remain unchanged. |
| 4120 | * Note that we're safe from racing commits since any racing commit |
| 4121 | * that changes the active CRTC list or does a modeset would need to |
| 4122 | * grab _all_ crtc locks, including the one we currently hold. |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4123 | */ |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4124 | if (old_dbuf_state->active_pipes == new_dbuf_state->active_pipes && |
| 4125 | !dev_priv->wm.distrust_bios_wm) { |
Maarten Lankhorst | 512b552 | 2016-11-08 13:55:34 +0100 | [diff] [blame] | 4126 | /* |
| 4127 | * alloc may be cleared by clear_intel_crtc_state, |
| 4128 | * copy from old state to be sure |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4129 | * |
| 4130 | * FIXME get rid of this mess |
Maarten Lankhorst | 512b552 | 2016-11-08 13:55:34 +0100 | [diff] [blame] | 4131 | */ |
| 4132 | *alloc = to_intel_crtc_state(for_crtc->state)->wm.skl.ddb; |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4133 | return 0; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4134 | } |
Matt Roper | a6d3460e | 2016-05-12 07:06:04 -0700 | [diff] [blame] | 4135 | |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4136 | /* |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4137 | * Get the allowed DBuf slices for the corresponding pipe and platform. |
| 4138 | */ |
| 4139 | dbuf_slice_mask = skl_compute_dbuf_slices(crtc_state, active_pipes); |
| 4140 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4141 | /* |
| 4142 | * Figure out at which DBuf slice we start, i.e. if we start at DBuf S2 |
| 4143 | * and slice size is 1024, the offset would be 1024 |
| 4144 | */ |
| 4145 | offset = icl_get_first_dbuf_slice_offset(dbuf_slice_mask, |
| 4146 | slice_size, ddb_size); |
| 4147 | |
| 4148 | /* |
| 4149 | * Figure out the total size of the allowed DBuf slices, which is basically |
| 4150 | * the number of allowed slices for that pipe multiplied by the slice size. |
| 4151 | * Inside of this range, DDB entries are still allocated in |
| 4152 | * proportion to display width. |
| 4153 | */ |
| 4154 | ddb_range_size = hweight8(dbuf_slice_mask) * slice_size; |
| 4155 | |
| 4156 | /* |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4157 | * Watermark/ddb requirement highly depends upon width of the |
| 4158 | * framebuffer, So instead of allocating DDB equally among pipes |
| 4159 | * distribute DDB based on resolution/width of the display. |
| 4160 | */ |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4161 | total_slice_mask = dbuf_slice_mask; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4162 | for_each_new_intel_crtc_in_state(intel_state, crtc, crtc_state, i) { |
| 4163 | const struct drm_display_mode *adjusted_mode = |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 4164 | &crtc_state->hw.adjusted_mode; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4165 | enum pipe pipe = crtc->pipe; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4166 | int hdisplay, vdisplay; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4167 | u32 pipe_dbuf_slice_mask; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4168 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4169 | if (!crtc_state->hw.active) |
| 4170 | continue; |
| 4171 | |
| 4172 | pipe_dbuf_slice_mask = skl_compute_dbuf_slices(crtc_state, |
| 4173 | active_pipes); |
| 4174 | |
| 4175 | /* |
| 4176 | * According to BSpec a pipe can share one DBuf slice with other |
| 4177 | * pipes, or a pipe can use multiple DBuf slices; in both cases we |
| 4178 | * account for other pipes only if they have exactly the same mask. |
| 4179 | * However, we need to account for how many slices we should enable |
| 4180 | * in total. |
| 4181 | */ |
| 4182 | total_slice_mask |= pipe_dbuf_slice_mask; |
| 4183 | |
| 4184 | /* |
| 4185 | * Do not account for pipes using other slice sets. |
| 4186 | * Luckily, as of the current BSpec, slice sets do not partially |
| 4187 | * intersect (pipes share either the same single slice or the same |
| 4188 | * slice set, i.e. no partial intersection), so it is enough to |
| 4189 | * check for equality for now. |
| 4190 | */ |
| 4191 | if (dbuf_slice_mask != pipe_dbuf_slice_mask) |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4192 | continue; |
| 4193 | |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4194 | drm_mode_get_hv_timing(adjusted_mode, &hdisplay, &vdisplay); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4195 | |
| 4196 | total_width_in_range += hdisplay; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4197 | |
| 4198 | if (pipe < for_pipe) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4199 | width_before_pipe_in_range += hdisplay; |
Mahesh Kumar | cf1f697 | 2018-08-01 20:41:13 +0530 | [diff] [blame] | 4200 | else if (pipe == for_pipe) |
| 4201 | pipe_width = hdisplay; |
| 4202 | } |
| 4203 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4204 | /* |
| 4205 | * FIXME: For now we always enable slice S1 as per |
| 4206 | * the Bspec display initialization sequence. |
| 4207 | */ |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4208 | new_dbuf_state->enabled_slices = total_slice_mask | BIT(DBUF_S1); |
| 4209 | |
| 4210 | if (old_dbuf_state->enabled_slices != new_dbuf_state->enabled_slices) { |
| 4211 | ret = intel_atomic_serialize_global_state(&new_dbuf_state->base); |
| 4212 | if (ret) |
| 4213 | return ret; |
| 4214 | } |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4215 | |
| 4216 | start = ddb_range_size * width_before_pipe_in_range / total_width_in_range; |
| 4217 | end = ddb_range_size * |
| 4218 | (width_before_pipe_in_range + pipe_width) / total_width_in_range; |
| 4219 | |
| 4220 | alloc->start = offset + start; |
| 4221 | alloc->end = offset + end; |
| 4222 | |
Ville Syrjälä | 70b1a26 | 2020-02-25 19:11:16 +0200 | [diff] [blame] | 4223 | drm_dbg_kms(&dev_priv->drm, |
| 4224 | "[CRTC:%d:%s] dbuf slices 0x%x, ddb (%d - %d), active pipes 0x%x\n", |
| 4225 | for_crtc->base.id, for_crtc->name, |
| 4226 | dbuf_slice_mask, alloc->start, alloc->end, active_pipes); |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4227 | |
| 4228 | return 0; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4229 | } |
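| | /*
| |  * Example of the proportional split above (illustrative numbers): two
| |  * active pipes sharing the same slice set with hdisplay 1920 and 3840 and
| |  * ddb_range_size 1024 give total_width_in_range 5760; the 1920-wide pipe
| |  * gets [offset, offset + 1024 * 1920 / 5760) = [offset, offset + 341) and
| |  * the 3840-wide pipe gets the remaining [offset + 341, offset + 1024).
| |  */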
| 4230 | |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4231 | static int skl_compute_wm_params(const struct intel_crtc_state *crtc_state, |
| 4232 | int width, const struct drm_format_info *format, |
| 4233 | u64 modifier, unsigned int rotation, |
| 4234 | u32 plane_pixel_rate, struct skl_wm_params *wp, |
| 4235 | int color_plane); |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4236 | static void skl_compute_plane_wm(const struct intel_crtc_state *crtc_state, |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4237 | int level, |
Stanislav Lisovskiy | 7b99475 | 2020-04-09 18:47:18 +0300 | [diff] [blame] | 4238 | unsigned int latency, |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4239 | const struct skl_wm_params *wp, |
| 4240 | const struct skl_wm_level *result_prev, |
| 4241 | struct skl_wm_level *result /* out */); |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4242 | |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4243 | static unsigned int |
| 4244 | skl_cursor_allocation(const struct intel_crtc_state *crtc_state, |
| 4245 | int num_active) |
| 4246 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 4247 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4248 | int level, max_level = ilk_wm_max_level(dev_priv); |
| 4249 | struct skl_wm_level wm = {}; |
| 4250 | int ret, min_ddb_alloc = 0; |
| 4251 | struct skl_wm_params wp; |
| 4252 | |
| 4253 | ret = skl_compute_wm_params(crtc_state, 256, |
| 4254 | drm_format_info(DRM_FORMAT_ARGB8888), |
| 4255 | DRM_FORMAT_MOD_LINEAR, |
| 4256 | DRM_MODE_ROTATE_0, |
| 4257 | crtc_state->pixel_rate, &wp, 0); |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 4258 | drm_WARN_ON(&dev_priv->drm, ret); |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4259 | |
| 4260 | for (level = 0; level <= max_level; level++) { |
Stanislav Lisovskiy | 7b99475 | 2020-04-09 18:47:18 +0300 | [diff] [blame] | 4261 | unsigned int latency = dev_priv->wm.skl_latency[level]; |
| 4262 | |
| 4263 | skl_compute_plane_wm(crtc_state, level, latency, &wp, &wm, &wm); |
Ville Syrjälä | df331de | 2019-03-19 18:03:11 +0200 | [diff] [blame] | 4264 | if (wm.min_ddb_alloc == U16_MAX) |
| 4265 | break; |
| 4266 | |
| 4267 | min_ddb_alloc = wm.min_ddb_alloc; |
| 4268 | } |
| 4269 | |
| 4270 | return max(num_active == 1 ? 32 : 8, min_ddb_alloc); |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4271 | } |
| 4272 | |
Mahesh Kumar | 37cde11 | 2018-04-26 19:55:17 +0530 | [diff] [blame] | 4273 | static void skl_ddb_entry_init_from_hw(struct drm_i915_private *dev_priv, |
| 4274 | struct skl_ddb_entry *entry, u32 reg) |
Damien Lespiau | a269c58 | 2014-11-04 17:06:49 +0000 | [diff] [blame] | 4275 | { |
Ville Syrjälä | d7e449a | 2019-02-05 22:50:56 +0200 | [diff] [blame] | 4277 | entry->start = reg & DDB_ENTRY_MASK; |
| 4278 | entry->end = (reg >> DDB_ENTRY_END_SHIFT) & DDB_ENTRY_MASK; |
Mahesh Kumar | 37cde11 | 2018-04-26 19:55:17 +0530 | [diff] [blame] | 4279 | |
Damien Lespiau | 16160e3 | 2014-11-04 17:06:53 +0000 | [diff] [blame] | 4280 | if (entry->end) |
| 4281 | entry->end += 1; |
Damien Lespiau | a269c58 | 2014-11-04 17:06:49 +0000 | [diff] [blame] | 4282 | } |
| 4283 | |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4284 | static void |
| 4285 | skl_ddb_get_hw_plane_state(struct drm_i915_private *dev_priv, |
| 4286 | const enum pipe pipe, |
| 4287 | const enum plane_id plane_id, |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4288 | struct skl_ddb_entry *ddb_y, |
| 4289 | struct skl_ddb_entry *ddb_uv) |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4290 | { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4291 | u32 val, val2; |
| 4292 | u32 fourcc = 0; |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4293 | |
| 4294 | /* Cursor doesn't support NV12/planar, so no extra calculation needed */ |
| 4295 | if (plane_id == PLANE_CURSOR) { |
| 4296 | val = I915_READ(CUR_BUF_CFG(pipe)); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4297 | skl_ddb_entry_init_from_hw(dev_priv, ddb_y, val); |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4298 | return; |
| 4299 | } |
| 4300 | |
| 4301 | val = I915_READ(PLANE_CTL(pipe, plane_id)); |
| 4302 | |
| 4303 | /* No DDB allocated for disabled planes */ |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4304 | if (val & PLANE_CTL_ENABLE) |
| 4305 | fourcc = skl_format_to_fourcc(val & PLANE_CTL_FORMAT_MASK, |
| 4306 | val & PLANE_CTL_ORDER_RGBX, |
| 4307 | val & PLANE_CTL_ALPHA_MASK); |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4308 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4309 | if (INTEL_GEN(dev_priv) >= 11) { |
| 4310 | val = I915_READ(PLANE_BUF_CFG(pipe, plane_id)); |
| 4311 | skl_ddb_entry_init_from_hw(dev_priv, ddb_y, val); |
| 4312 | } else { |
| 4313 | val = I915_READ(PLANE_BUF_CFG(pipe, plane_id)); |
Paulo Zanoni | 12a6c93 | 2018-07-31 17:46:14 -0700 | [diff] [blame] | 4314 | val2 = I915_READ(PLANE_NV12_BUF_CFG(pipe, plane_id)); |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4315 | |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4316 | if (fourcc && |
| 4317 | drm_format_info_is_yuv_semiplanar(drm_format_info(fourcc))) |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4318 | swap(val, val2); |
| 4319 | |
| 4320 | skl_ddb_entry_init_from_hw(dev_priv, ddb_y, val); |
| 4321 | skl_ddb_entry_init_from_hw(dev_priv, ddb_uv, val2); |
Mahesh Kumar | ddf3431 | 2018-04-09 09:11:03 +0530 | [diff] [blame] | 4322 | } |
| 4323 | } |
| 4324 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4325 | void skl_pipe_ddb_get_hw_state(struct intel_crtc *crtc, |
| 4326 | struct skl_ddb_entry *ddb_y, |
| 4327 | struct skl_ddb_entry *ddb_uv) |
| 4328 | { |
| 4329 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 4330 | enum intel_display_power_domain power_domain; |
| 4331 | enum pipe pipe = crtc->pipe; |
Chris Wilson | 0e6e0be | 2019-01-14 14:21:24 +0000 | [diff] [blame] | 4332 | intel_wakeref_t wakeref; |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4333 | enum plane_id plane_id; |
| 4334 | |
| 4335 | power_domain = POWER_DOMAIN_PIPE(pipe); |
Chris Wilson | 0e6e0be | 2019-01-14 14:21:24 +0000 | [diff] [blame] | 4336 | wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain); |
| 4337 | if (!wakeref) |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4338 | return; |
| 4339 | |
| 4340 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 4341 | skl_ddb_get_hw_plane_state(dev_priv, pipe, |
| 4342 | plane_id, |
| 4343 | &ddb_y[plane_id], |
| 4344 | &ddb_uv[plane_id]); |
| 4345 | |
Chris Wilson | 0e6e0be | 2019-01-14 14:21:24 +0000 | [diff] [blame] | 4346 | intel_display_power_put(dev_priv, power_domain, wakeref); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 4347 | } |
| 4348 | |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4349 | /* |
| 4350 | * Determines the downscale amount of a plane for the purposes of watermark calculations. |
| 4351 | * The bspec defines downscale amount as: |
| 4352 | * |
| 4353 | * """ |
| 4354 | * Horizontal down scale amount = maximum[1, Horizontal source size / |
| 4355 | * Horizontal destination size] |
| 4356 | * Vertical down scale amount = maximum[1, Vertical source size / |
| 4357 | * Vertical destination size] |
| 4358 | * Total down scale amount = Horizontal down scale amount * |
| 4359 | * Vertical down scale amount |
| 4360 | * """ |
| 4361 | * |
| 4362 | * Return value is provided in 16.16 fixed point form to retain fractional part. |
| 4363 | * Caller should take care of dividing & rounding off the value. |
| 4364 | */ |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 4365 | static uint_fixed_16_16_t |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4366 | skl_plane_downscale_amount(const struct intel_crtc_state *crtc_state, |
| 4367 | const struct intel_plane_state *plane_state) |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4368 | { |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 4369 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 4370 | u32 src_w, src_h, dst_w, dst_h; |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 4371 | uint_fixed_16_16_t fp_w_ratio, fp_h_ratio; |
| 4372 | uint_fixed_16_16_t downscale_h, downscale_w; |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4373 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 4374 | if (drm_WARN_ON(&dev_priv->drm, |
| 4375 | !intel_wm_plane_visible(crtc_state, plane_state))) |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 4376 | return u32_to_fixed16(0); |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4377 | |
Maarten Lankhorst | 3a61276 | 2019-10-04 13:34:54 +0200 | [diff] [blame] | 4378 | /* |
| 4379 | * Src coordinates are already rotated by 270 degrees for |
| 4380 | * the 90/270 degree plane rotation cases (to match the |
| 4381 | * GTT mapping), hence no need to account for rotation here. |
| 4382 | * |
| 4383 | * n.b., src is 16.16 fixed point, dst is whole integer. |
| 4384 | */ |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 4385 | src_w = drm_rect_width(&plane_state->uapi.src) >> 16; |
| 4386 | src_h = drm_rect_height(&plane_state->uapi.src) >> 16; |
| 4387 | dst_w = drm_rect_width(&plane_state->uapi.dst); |
| 4388 | dst_h = drm_rect_height(&plane_state->uapi.dst); |
Ville Syrjälä | 93aa2a1 | 2017-03-14 17:10:50 +0200 | [diff] [blame] | 4389 | |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 4390 | fp_w_ratio = div_fixed16(src_w, dst_w); |
| 4391 | fp_h_ratio = div_fixed16(src_h, dst_h); |
| 4392 | downscale_w = max_fixed16(fp_w_ratio, u32_to_fixed16(1)); |
| 4393 | downscale_h = max_fixed16(fp_h_ratio, u32_to_fixed16(1)); |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4394 | |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 4395 | return mul_fixed16(downscale_w, downscale_h); |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 4396 | } |
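| | /*
| |  * Example for skl_plane_downscale_amount() above (illustrative): a
| |  * 3840-wide source scaled down to a 1920-wide destination gives a
| |  * horizontal ratio of 2.0, i.e. 0x20000 in 16.16 fixed point; with no
| |  * vertical scaling the total downscale amount is also 2.0. A plane that
| |  * is not downscaled clamps both ratios to 1.0 (0x10000).
| |  */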
| 4397 | |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4398 | struct dbuf_slice_conf_entry { |
| 4399 | u8 active_pipes; |
| 4400 | u8 dbuf_mask[I915_MAX_PIPES]; |
| 4401 | }; |
| 4402 | |
| 4403 | /* |
| 4404 | * Table taken from Bspec 12716 |
| 4405 | * Pipes do have some preferred DBuf slice affinity, |
| 4406 | * plus there are some hardcoded requirements on how |
| 4407 | * those should be distributed for multipipe scenarios. |
| 4408 | * With more DBuf slices the algorithm can get even more messy |
| 4409 | * and less readable, so we decided to use a table almost |
| 4410 | * as-is from the BSpec itself - that way it is at least easier |
| 4411 | * to compare, change and check. |
| 4412 | */ |
Jani Nikula | f8226d0 | 2020-02-19 17:45:42 +0200 | [diff] [blame] | 4413 | static const struct dbuf_slice_conf_entry icl_allowed_dbufs[] = |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4414 | /* Autogenerated with igt/tools/intel_dbuf_map tool: */ |
| 4415 | { |
| 4416 | { |
| 4417 | .active_pipes = BIT(PIPE_A), |
| 4418 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4419 | [PIPE_A] = BIT(DBUF_S1), |
| 4420 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4421 | }, |
| 4422 | { |
| 4423 | .active_pipes = BIT(PIPE_B), |
| 4424 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4425 | [PIPE_B] = BIT(DBUF_S1), |
| 4426 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4427 | }, |
| 4428 | { |
| 4429 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B), |
| 4430 | .dbuf_mask = { |
| 4431 | [PIPE_A] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4432 | [PIPE_B] = BIT(DBUF_S2), |
| 4433 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4434 | }, |
| 4435 | { |
| 4436 | .active_pipes = BIT(PIPE_C), |
| 4437 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4438 | [PIPE_C] = BIT(DBUF_S2), |
| 4439 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4440 | }, |
| 4441 | { |
| 4442 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_C), |
| 4443 | .dbuf_mask = { |
| 4444 | [PIPE_A] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4445 | [PIPE_C] = BIT(DBUF_S2), |
| 4446 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4447 | }, |
| 4448 | { |
| 4449 | .active_pipes = BIT(PIPE_B) | BIT(PIPE_C), |
| 4450 | .dbuf_mask = { |
| 4451 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4452 | [PIPE_C] = BIT(DBUF_S2), |
| 4453 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4454 | }, |
| 4455 | { |
| 4456 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), |
| 4457 | .dbuf_mask = { |
| 4458 | [PIPE_A] = BIT(DBUF_S1), |
| 4459 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4460 | [PIPE_C] = BIT(DBUF_S2), |
| 4461 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4462 | }, |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4463 | {} |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4464 | }; |
| 4465 | |
| 4466 | /* |
| 4467 | * Table taken from Bspec 49255 |
| 4468 | * Pipes do have some preferred DBuf slice affinity, |
| 4469 | * plus there are some hardcoded requirements on how |
| 4470 | * those should be distributed for multipipe scenarios. |
| 4471 |  * With more DBuf slices the algorithm can get even messier
| 4472 |  * and less readable, so we use a table taken almost as-is
| 4473 |  * from BSpec itself - that way it is at least easier
| 4474 |  * to compare, change and check.
| 4475 | */ |
Jani Nikula | f8226d0 | 2020-02-19 17:45:42 +0200 | [diff] [blame] | 4476 | static const struct dbuf_slice_conf_entry tgl_allowed_dbufs[] = |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4477 | /* Autogenerated with igt/tools/intel_dbuf_map tool: */ |
| 4478 | { |
| 4479 | { |
| 4480 | .active_pipes = BIT(PIPE_A), |
| 4481 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4482 | [PIPE_A] = BIT(DBUF_S1) | BIT(DBUF_S2), |
| 4483 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4484 | }, |
| 4485 | { |
| 4486 | .active_pipes = BIT(PIPE_B), |
| 4487 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4488 | [PIPE_B] = BIT(DBUF_S1) | BIT(DBUF_S2), |
| 4489 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4490 | }, |
| 4491 | { |
| 4492 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B), |
| 4493 | .dbuf_mask = { |
| 4494 | [PIPE_A] = BIT(DBUF_S2), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4495 | [PIPE_B] = BIT(DBUF_S1), |
| 4496 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4497 | }, |
| 4498 | { |
| 4499 | .active_pipes = BIT(PIPE_C), |
| 4500 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4501 | [PIPE_C] = BIT(DBUF_S2) | BIT(DBUF_S1), |
| 4502 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4503 | }, |
| 4504 | { |
| 4505 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_C), |
| 4506 | .dbuf_mask = { |
| 4507 | [PIPE_A] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4508 | [PIPE_C] = BIT(DBUF_S2), |
| 4509 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4510 | }, |
| 4511 | { |
| 4512 | .active_pipes = BIT(PIPE_B) | BIT(PIPE_C), |
| 4513 | .dbuf_mask = { |
| 4514 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4515 | [PIPE_C] = BIT(DBUF_S2), |
| 4516 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4517 | }, |
| 4518 | { |
| 4519 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C), |
| 4520 | .dbuf_mask = { |
| 4521 | [PIPE_A] = BIT(DBUF_S1), |
| 4522 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4523 | [PIPE_C] = BIT(DBUF_S2), |
| 4524 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4525 | }, |
| 4526 | { |
| 4527 | .active_pipes = BIT(PIPE_D), |
| 4528 | .dbuf_mask = { |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4529 | [PIPE_D] = BIT(DBUF_S2) | BIT(DBUF_S1), |
| 4530 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4531 | }, |
| 4532 | { |
| 4533 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_D), |
| 4534 | .dbuf_mask = { |
| 4535 | [PIPE_A] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4536 | [PIPE_D] = BIT(DBUF_S2), |
| 4537 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4538 | }, |
| 4539 | { |
| 4540 | .active_pipes = BIT(PIPE_B) | BIT(PIPE_D), |
| 4541 | .dbuf_mask = { |
| 4542 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4543 | [PIPE_D] = BIT(DBUF_S2), |
| 4544 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4545 | }, |
| 4546 | { |
| 4547 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_D), |
| 4548 | .dbuf_mask = { |
| 4549 | [PIPE_A] = BIT(DBUF_S1), |
| 4550 | [PIPE_B] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4551 | [PIPE_D] = BIT(DBUF_S2), |
| 4552 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4553 | }, |
| 4554 | { |
| 4555 | .active_pipes = BIT(PIPE_C) | BIT(PIPE_D), |
| 4556 | .dbuf_mask = { |
| 4557 | [PIPE_C] = BIT(DBUF_S1), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4558 | [PIPE_D] = BIT(DBUF_S2), |
| 4559 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4560 | }, |
| 4561 | { |
| 4562 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_C) | BIT(PIPE_D), |
| 4563 | .dbuf_mask = { |
| 4564 | [PIPE_A] = BIT(DBUF_S1), |
| 4565 | [PIPE_C] = BIT(DBUF_S2), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4566 | [PIPE_D] = BIT(DBUF_S2), |
| 4567 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4568 | }, |
| 4569 | { |
| 4570 | .active_pipes = BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D), |
| 4571 | .dbuf_mask = { |
| 4572 | [PIPE_B] = BIT(DBUF_S1), |
| 4573 | [PIPE_C] = BIT(DBUF_S2), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4574 | [PIPE_D] = BIT(DBUF_S2), |
| 4575 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4576 | }, |
| 4577 | { |
| 4578 | .active_pipes = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C) | BIT(PIPE_D), |
| 4579 | .dbuf_mask = { |
| 4580 | [PIPE_A] = BIT(DBUF_S1), |
| 4581 | [PIPE_B] = BIT(DBUF_S1), |
| 4582 | [PIPE_C] = BIT(DBUF_S2), |
Ville Syrjälä | 06812bd | 2020-02-25 19:11:08 +0200 | [diff] [blame] | 4583 | [PIPE_D] = BIT(DBUF_S2), |
| 4584 | }, |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4585 | }, |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4586 | {} |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4587 | }; |
| 4588 | |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4589 | static u8 compute_dbuf_slices(enum pipe pipe, u8 active_pipes, |
| 4590 | const struct dbuf_slice_conf_entry *dbuf_slices) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4591 | { |
| 4592 | int i; |
| 4593 | |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4594 | 	for (i = 0; dbuf_slices[i].active_pipes != 0; i++) {
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4595 | if (dbuf_slices[i].active_pipes == active_pipes) |
| 4596 | return dbuf_slices[i].dbuf_mask[pipe]; |
| 4597 | } |
| 4598 | return 0; |
| 4599 | } |
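/*
 * Example lookup for the helper above, using the ICL table: with
 * active_pipes = BIT(PIPE_A) | BIT(PIPE_B) the matching entry maps
 * PIPE_A to BIT(DBUF_S1) and PIPE_B to BIT(DBUF_S2), so
 * compute_dbuf_slices(PIPE_B, active_pipes, icl_allowed_dbufs)
 * returns BIT(DBUF_S2). A pipe combination not present in the table
 * returns 0.
 */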
| 4600 | |
| 4601 | /* |
| 4602 |  * This function finds an entry with the same enabled pipe configuration and
| 4603 |  * returns the corresponding DBuf slice mask as stated in BSpec for the
| 4604 |  * particular platform.
| 4605 | */ |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4606 | static u8 icl_compute_dbuf_slices(enum pipe pipe, u8 active_pipes) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4607 | { |
| 4608 | /* |
| 4609 |  * FIXME: For ICL this is still a bit unclear, as a previous BSpec
| 4610 |  * revision required calculating a "pipe ratio" in order to determine
| 4611 |  * whether one or two slices can be used for single pipe configurations,
| 4612 |  * as an additional constraint on top of the existing table.
| 4613 |  * However, based on more recent info, it should not be a "pipe ratio"
| 4614 |  * but rather the ratio between pixel_rate and cdclk with additional
| 4615 |  * constants, so for now only the table is used until this is
| 4616 |  * clarified. This is also why the crtc state is still being passed
| 4617 |  * around by the callers - we will need it once those additional
| 4618 |  * constraints pop up.
| 4619 | */ |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4620 | return compute_dbuf_slices(pipe, active_pipes, icl_allowed_dbufs); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4621 | } |
| 4622 | |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4623 | static u8 tgl_compute_dbuf_slices(enum pipe pipe, u8 active_pipes) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4624 | { |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4625 | return compute_dbuf_slices(pipe, active_pipes, tgl_allowed_dbufs); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4626 | } |
| 4627 | |
| 4628 | static u8 skl_compute_dbuf_slices(const struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4629 | u8 active_pipes) |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4630 | { |
| 4631 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
| 4632 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 4633 | enum pipe pipe = crtc->pipe; |
| 4634 | |
| 4635 | if (IS_GEN(dev_priv, 12)) |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4636 | return tgl_compute_dbuf_slices(pipe, active_pipes); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4637 | else if (IS_GEN(dev_priv, 11)) |
Ville Syrjälä | 05e8155 | 2020-02-25 19:11:09 +0200 | [diff] [blame] | 4638 | return icl_compute_dbuf_slices(pipe, active_pipes); |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4639 | /* |
| 4640 |  * For anything else just return one slice for now.
| 4641 | * Should be extended for other platforms. |
| 4642 | */ |
Ville Syrjälä | 2f9078c | 2020-02-25 19:11:10 +0200 | [diff] [blame] | 4643 | return active_pipes & BIT(pipe) ? BIT(DBUF_S1) : 0; |
Stanislav Lisovskiy | ff2cd86 | 2020-02-03 01:06:30 +0200 | [diff] [blame] | 4644 | } |
| 4645 | |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4646 | static u64 |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4647 | skl_plane_relative_data_rate(const struct intel_crtc_state *crtc_state, |
| 4648 | const struct intel_plane_state *plane_state, |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4649 | int color_plane) |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4650 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 4651 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 4652 | const struct drm_framebuffer *fb = plane_state->hw.fb; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 4653 | u32 data_rate; |
| 4654 | u32 width = 0, height = 0; |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 4655 | uint_fixed_16_16_t down_scale_amount; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4656 | u64 rate; |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4657 | |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 4658 | if (!plane_state->uapi.visible) |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4659 | return 0; |
Ville Syrjälä | 8305494 | 2016-11-18 21:53:00 +0200 | [diff] [blame] | 4660 | |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4661 | if (plane->id == PLANE_CURSOR) |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4662 | return 0; |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4663 | |
| 4664 | if (color_plane == 1 && |
Imre Deak | 4941f35 | 2019-12-21 14:05:43 +0200 | [diff] [blame] | 4665 | !intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier)) |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4666 | return 0; |
Kumar, Mahesh | a280f7d | 2016-04-06 08:26:39 -0700 | [diff] [blame] | 4667 | |
Ville Syrjälä | fce5adf | 2017-03-31 21:00:55 +0300 | [diff] [blame] | 4668 | /* |
| 4669 | * Src coordinates are already rotated by 270 degrees for |
| 4670 | * the 90/270 degree plane rotation cases (to match the |
| 4671 | * GTT mapping), hence no need to account for rotation here. |
| 4672 | */ |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 4673 | width = drm_rect_width(&plane_state->uapi.src) >> 16; |
| 4674 | height = drm_rect_height(&plane_state->uapi.src) >> 16; |
Kumar, Mahesh | a280f7d | 2016-04-06 08:26:39 -0700 | [diff] [blame] | 4675 | |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4676 | /* UV plane does 1/2 pixel sub-sampling */ |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4677 | if (color_plane == 1) { |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4678 | width /= 2; |
| 4679 | height /= 2; |
Chandra Konduru | 2cd601c | 2015-04-27 15:47:37 -0700 | [diff] [blame] | 4680 | } |
| 4681 | |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4682 | data_rate = width * height; |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4683 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4684 | down_scale_amount = skl_plane_downscale_amount(crtc_state, plane_state); |
Kumar, Mahesh | 8d19d7d | 2016-05-19 15:03:01 -0700 | [diff] [blame] | 4685 | |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4686 | rate = mul_round_up_u32_fixed16(data_rate, down_scale_amount); |
| 4687 | |
Ville Syrjälä | d1d23d7 | 2019-09-13 22:31:54 +0300 | [diff] [blame] | 4688 | rate *= fb->format->cpp[color_plane]; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4689 | return rate; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4690 | } |
| 4691 | |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4692 | static u64 |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4693 | skl_get_total_relative_data_rate(struct intel_crtc_state *crtc_state, |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4694 | u64 *plane_data_rate, |
| 4695 | u64 *uv_plane_data_rate) |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4696 | { |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 4697 | struct intel_plane *plane; |
| 4698 | const struct intel_plane_state *plane_state; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4699 | u64 total_data_rate = 0; |
Matt Roper | a6d3460e | 2016-05-12 07:06:04 -0700 | [diff] [blame] | 4700 | |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4701 | /* Calculate and cache data rate for each plane */ |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 4702 | intel_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state) { |
| 4703 | enum plane_id plane_id = plane->id; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4704 | u64 rate; |
Matt Roper | 024c904 | 2015-09-24 15:53:11 -0700 | [diff] [blame] | 4705 | |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4706 | /* packed/y */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4707 | rate = skl_plane_relative_data_rate(crtc_state, plane_state, 0); |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 4708 | plane_data_rate[plane_id] = rate; |
Maarten Lankhorst | 1e6ee54 | 2016-10-26 15:41:32 +0200 | [diff] [blame] | 4709 | total_data_rate += rate; |
Matt Roper | 9c74d82 | 2016-05-12 07:05:58 -0700 | [diff] [blame] | 4710 | |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4711 | /* uv-plane */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4712 | rate = skl_plane_relative_data_rate(crtc_state, plane_state, 1); |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 4713 | uv_plane_data_rate[plane_id] = rate; |
Maarten Lankhorst | 1e6ee54 | 2016-10-26 15:41:32 +0200 | [diff] [blame] | 4714 | total_data_rate += rate; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4715 | } |
| 4716 | |
| 4717 | return total_data_rate; |
| 4718 | } |
| 4719 | |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4720 | static u64 |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4721 | icl_get_total_relative_data_rate(struct intel_crtc_state *crtc_state, |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4722 | u64 *plane_data_rate) |
| 4723 | { |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 4724 | struct intel_plane *plane; |
| 4725 | const struct intel_plane_state *plane_state; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4726 | u64 total_data_rate = 0; |
| 4727 | |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4728 | /* Calculate and cache data rate for each plane */ |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 4729 | intel_atomic_crtc_state_for_each_plane_state(plane, plane_state, crtc_state) { |
| 4730 | enum plane_id plane_id = plane->id; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4731 | u64 rate; |
| 4732 | |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 4733 | if (!plane_state->planar_linked_plane) { |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4734 | rate = skl_plane_relative_data_rate(crtc_state, plane_state, 0); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4735 | plane_data_rate[plane_id] = rate; |
| 4736 | total_data_rate += rate; |
| 4737 | } else { |
| 4738 | enum plane_id y_plane_id; |
| 4739 | |
| 4740 | /* |
| 4741 | 			 * The slave plane might not be iterated over by
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 4742 | 			 * intel_atomic_crtc_state_for_each_plane_state(),
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4743 | 			 * and it needs the master plane state, which may be
| 4744 | 			 * NULL if we try to look it up via get_new_plane_state(),
| 4745 | 			 * so we always calculate from the master.
| 4746 | */ |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 4747 | if (plane_state->planar_slave) |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4748 | continue; |
| 4749 | |
| 4750 | /* Y plane rate is calculated on the slave */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4751 | rate = skl_plane_relative_data_rate(crtc_state, plane_state, 0); |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 4752 | y_plane_id = plane_state->planar_linked_plane->id; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4753 | plane_data_rate[y_plane_id] = rate; |
| 4754 | total_data_rate += rate; |
| 4755 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4756 | rate = skl_plane_relative_data_rate(crtc_state, plane_state, 1); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4757 | plane_data_rate[plane_id] = rate; |
| 4758 | total_data_rate += rate; |
| 4759 | } |
| 4760 | } |
| 4761 | |
| 4762 | return total_data_rate; |
| 4763 | } |
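/*
 * Note on the planar (e.g. NV12) accounting above: the loop skips
 * planar slaves and, for each planar master, computes both rates from
 * the master's state - the color plane 0 rate is booked under the
 * linked slave plane's id and the color plane 1 (UV) rate under the
 * master's own id, so each hardware plane ends up with its own entry
 * in plane_data_rate[].
 */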
| 4764 | |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 4765 | static const struct skl_wm_level * |
| 4766 | skl_plane_wm_level(const struct intel_crtc_state *crtc_state, |
| 4767 | enum plane_id plane_id, |
| 4768 | int level) |
| 4769 | { |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 4770 | const struct skl_pipe_wm *pipe_wm = &crtc_state->wm.skl.optimal; |
| 4771 | const struct skl_plane_wm *wm = &pipe_wm->planes[plane_id]; |
| 4772 | |
| 4773 | if (level == 0 && pipe_wm->use_sagv_wm) |
| 4774 | return &wm->sagv_wm0; |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 4775 | |
| 4776 | return &wm->wm[level]; |
| 4777 | } |
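/*
 * Note on the helper above: when the pipe is flagged to use the SAGV
 * watermark (use_sagv_wm), level 0 is served from sagv_wm0 instead of
 * wm[0]; every other level comes straight from the wm[] array.
 */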
| 4778 | |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4779 | static int |
Stanislav Lisovskiy | 072fcc3 | 2020-02-03 01:06:25 +0200 | [diff] [blame] | 4780 | skl_allocate_pipe_ddb(struct intel_crtc_state *crtc_state) |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4781 | { |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 4782 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
| 4783 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4784 | struct skl_ddb_entry *alloc = &crtc_state->wm.skl.ddb; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 4785 | u16 alloc_size, start = 0; |
| 4786 | u16 total[I915_MAX_PLANES] = {}; |
| 4787 | u16 uv_total[I915_MAX_PLANES] = {}; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4788 | u64 total_data_rate; |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 4789 | enum plane_id plane_id; |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4790 | int num_active; |
Maarten Lankhorst | 24719e9 | 2018-10-22 12:20:00 +0200 | [diff] [blame] | 4791 | u64 plane_data_rate[I915_MAX_PLANES] = {}; |
| 4792 | u64 uv_plane_data_rate[I915_MAX_PLANES] = {}; |
Ville Syrjälä | 0aded17 | 2019-02-05 17:50:53 +0200 | [diff] [blame] | 4793 | u32 blocks; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4794 | int level; |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4795 | int ret; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4796 | |
Paulo Zanoni | 5a920b8 | 2016-10-04 14:37:32 -0300 | [diff] [blame] | 4797 | /* Clear the partitioning for disabled planes. */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4798 | memset(crtc_state->wm.skl.plane_ddb_y, 0, sizeof(crtc_state->wm.skl.plane_ddb_y)); |
| 4799 | memset(crtc_state->wm.skl.plane_ddb_uv, 0, sizeof(crtc_state->wm.skl.plane_ddb_uv)); |
Paulo Zanoni | 5a920b8 | 2016-10-04 14:37:32 -0300 | [diff] [blame] | 4800 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 4801 | if (!crtc_state->hw.active) { |
Ville Syrjälä | b6a13a3 | 2020-05-18 15:13:54 +0300 | [diff] [blame] | 4802 | struct intel_atomic_state *state = |
| 4803 | to_intel_atomic_state(crtc_state->uapi.state); |
| 4804 | struct intel_dbuf_state *new_dbuf_state = |
| 4805 | intel_atomic_get_new_dbuf_state(state); |
| 4806 | const struct intel_dbuf_state *old_dbuf_state = |
| 4807 | intel_atomic_get_old_dbuf_state(state); |
| 4808 | |
| 4809 | /* |
| 4810 | * FIXME hack to make sure we compute this sensibly when |
| 4811 | * turning off all the pipes. Otherwise we leave it at |
| 4812 | * whatever we had previously, and then runtime PM will |
| 4813 | * mess it up by turning off all but S1. Remove this |
| 4814 | * once the dbuf state computation flow becomes sane. |
| 4815 | */ |
| 4816 | if (new_dbuf_state->active_pipes == 0) { |
| 4817 | new_dbuf_state->enabled_slices = BIT(DBUF_S1); |
| 4818 | |
| 4819 | if (old_dbuf_state->enabled_slices != new_dbuf_state->enabled_slices) { |
| 4820 | ret = intel_atomic_serialize_global_state(&new_dbuf_state->base); |
| 4821 | if (ret) |
| 4822 | return ret; |
| 4823 | } |
| 4824 | } |
| 4825 | |
Lyude | ce0ba28 | 2016-09-15 10:46:35 -0400 | [diff] [blame] | 4826 | alloc->start = alloc->end = 0; |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4827 | return 0; |
| 4828 | } |
| 4829 | |
Lucas De Marchi | 323b0a8 | 2019-04-04 16:04:25 -0700 | [diff] [blame] | 4830 | if (INTEL_GEN(dev_priv) >= 11) |
| 4831 | total_data_rate = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4832 | icl_get_total_relative_data_rate(crtc_state, |
Lucas De Marchi | 323b0a8 | 2019-04-04 16:04:25 -0700 | [diff] [blame] | 4833 | plane_data_rate); |
| 4834 | else |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4835 | total_data_rate = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4836 | skl_get_total_relative_data_rate(crtc_state, |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4837 | plane_data_rate, |
| 4838 | uv_plane_data_rate); |
Lucas De Marchi | 323b0a8 | 2019-04-04 16:04:25 -0700 | [diff] [blame] | 4839 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 4840 | ret = skl_ddb_get_pipe_allocation_limits(dev_priv, crtc_state, |
| 4841 | total_data_rate, |
| 4842 | alloc, &num_active); |
| 4843 | if (ret) |
| 4844 | return ret; |
| 4845 | |
Damien Lespiau | 34bb56a | 2014-11-04 17:07:01 +0000 | [diff] [blame] | 4846 | alloc_size = skl_ddb_entry_size(alloc); |
Kumar, Mahesh | 336031e | 2017-05-17 17:28:25 +0530 | [diff] [blame] | 4847 | if (alloc_size == 0) |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4848 | return 0; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4849 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4850 | /* Allocate fixed number of blocks for cursor. */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4851 | total[PLANE_CURSOR] = skl_cursor_allocation(crtc_state, num_active); |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4852 | alloc_size -= total[PLANE_CURSOR]; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4853 | crtc_state->wm.skl.plane_ddb_y[PLANE_CURSOR].start = |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4854 | alloc->end - total[PLANE_CURSOR]; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4855 | crtc_state->wm.skl.plane_ddb_y[PLANE_CURSOR].end = alloc->end; |
Maarten Lankhorst | 49845a7 | 2016-10-26 15:41:34 +0200 | [diff] [blame] | 4856 | |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 4857 | if (total_data_rate == 0) |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4858 | return 0; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4859 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4860 | /* |
| 4861 | * Find the highest watermark level for which we can satisfy the block |
| 4862 | * requirement of active planes. |
| 4863 | */ |
| 4864 | for (level = ilk_wm_max_level(dev_priv); level >= 0; level--) { |
Matt Roper | 25db2ea | 2018-12-12 11:17:20 -0800 | [diff] [blame] | 4865 | blocks = 0; |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 4866 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 4867 | const struct skl_plane_wm *wm = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4868 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
Ville Syrjälä | 10a7e07 | 2019-03-12 22:58:40 +0200 | [diff] [blame] | 4869 | |
| 4870 | if (plane_id == PLANE_CURSOR) { |
Vandita Kulkarni | 4ba4870 | 2019-12-16 13:36:19 +0530 | [diff] [blame] | 4871 | if (wm->wm[level].min_ddb_alloc > total[PLANE_CURSOR]) { |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 4872 | drm_WARN_ON(&dev_priv->drm, |
| 4873 | wm->wm[level].min_ddb_alloc != U16_MAX); |
Ville Syrjälä | 10a7e07 | 2019-03-12 22:58:40 +0200 | [diff] [blame] | 4874 | blocks = U32_MAX; |
| 4875 | break; |
| 4876 | } |
| 4877 | continue; |
| 4878 | } |
| 4879 | |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 4880 | blocks += wm->wm[level].min_ddb_alloc; |
| 4881 | blocks += wm->uv_wm[level].min_ddb_alloc; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4882 | } |
| 4883 | |
Ville Syrjälä | 3cf963c | 2019-03-12 22:58:36 +0200 | [diff] [blame] | 4884 | if (blocks <= alloc_size) { |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4885 | alloc_size -= blocks; |
| 4886 | break; |
| 4887 | } |
| 4888 | } |
| 4889 | |
| 4890 | if (level < 0) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 4891 | drm_dbg_kms(&dev_priv->drm, |
| 4892 | "Requested display configuration exceeds system DDB limitations"); |
| 4893 | drm_dbg_kms(&dev_priv->drm, "minimum required %d/%d\n", |
| 4894 | blocks, alloc_size); |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4895 | return -EINVAL; |
| 4896 | } |
| 4897 | |
| 4898 | /* |
| 4899 | * Grant each plane the blocks it requires at the highest achievable |
| 4900 | * watermark level, plus an extra share of the leftover blocks |
| 4901 | * proportional to its relative data rate. |
| 4902 | */ |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 4903 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 4904 | const struct skl_plane_wm *wm = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4905 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4906 | u64 rate; |
| 4907 | u16 extra; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4908 | |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 4909 | if (plane_id == PLANE_CURSOR) |
Maarten Lankhorst | 49845a7 | 2016-10-26 15:41:34 +0200 | [diff] [blame] | 4910 | continue; |
| 4911 | |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4912 | /* |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4913 | * We've accounted for all active planes; remaining planes are |
| 4914 | * all disabled. |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4915 | */ |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4916 | if (total_data_rate == 0) |
| 4917 | break; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 4918 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4919 | rate = plane_data_rate[plane_id]; |
| 4920 | extra = min_t(u16, alloc_size, |
| 4921 | DIV64_U64_ROUND_UP(alloc_size * rate, |
| 4922 | total_data_rate)); |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 4923 | total[plane_id] = wm->wm[level].min_ddb_alloc + extra; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4924 | alloc_size -= extra; |
| 4925 | total_data_rate -= rate; |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 4926 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4927 | if (total_data_rate == 0) |
| 4928 | break; |
Chandra Konduru | 2cd601c | 2015-04-27 15:47:37 -0700 | [diff] [blame] | 4929 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4930 | rate = uv_plane_data_rate[plane_id]; |
| 4931 | extra = min_t(u16, alloc_size, |
| 4932 | DIV64_U64_ROUND_UP(alloc_size * rate, |
| 4933 | total_data_rate)); |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 4934 | uv_total[plane_id] = wm->uv_wm[level].min_ddb_alloc + extra; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4935 | alloc_size -= extra; |
| 4936 | total_data_rate -= rate; |
| 4937 | } |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 4938 | drm_WARN_ON(&dev_priv->drm, alloc_size != 0 || total_data_rate != 0); |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4939 | |
| 4940 | /* Set the actual DDB start/end points for each plane */ |
| 4941 | start = alloc->start; |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 4942 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 4943 | struct skl_ddb_entry *plane_alloc = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4944 | &crtc_state->wm.skl.plane_ddb_y[plane_id]; |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 4945 | struct skl_ddb_entry *uv_plane_alloc = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4946 | &crtc_state->wm.skl.plane_ddb_uv[plane_id]; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4947 | |
| 4948 | if (plane_id == PLANE_CURSOR) |
| 4949 | continue; |
| 4950 | |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4951 | /* Gen11+ uses a separate plane for UV watermarks */ |
Pankaj Bharadiya | 48a1b8d | 2020-01-15 09:14:53 +0530 | [diff] [blame] | 4952 | drm_WARN_ON(&dev_priv->drm, |
| 4953 | INTEL_GEN(dev_priv) >= 11 && uv_total[plane_id]); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 4954 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4955 | /* Leave disabled planes at (0,0) */ |
| 4956 | if (total[plane_id]) { |
| 4957 | plane_alloc->start = start; |
| 4958 | start += total[plane_id]; |
| 4959 | plane_alloc->end = start; |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 4960 | } |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 4961 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 4962 | if (uv_total[plane_id]) { |
| 4963 | uv_plane_alloc->start = start; |
| 4964 | start += uv_total[plane_id]; |
| 4965 | uv_plane_alloc->end = start; |
| 4966 | } |
| 4967 | } |
| 4968 | |
| 4969 | /* |
| 4970 | * When we calculated watermark values we didn't know how high |
| 4971 | * of a level we'd actually be able to hit, so we just marked |
| 4972 | * all levels as "enabled." Go back now and disable the ones |
| 4973 | * that aren't actually possible. |
| 4974 | */ |
| 4975 | for (level++; level <= ilk_wm_max_level(dev_priv); level++) { |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 4976 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 4977 | struct skl_plane_wm *wm = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 4978 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
Ville Syrjälä | a301cb0 | 2019-03-12 22:58:41 +0200 | [diff] [blame] | 4979 | |
| 4980 | /* |
| 4981 | * We only disable the watermarks for each plane if |
| 4982 | * they exceed the ddb allocation of said plane. This |
| 4983 | * is done so that we don't end up touching cursor |
| 4984 | * watermarks needlessly when some other plane reduces |
| 4985 | * our max possible watermark level. |
| 4986 | * |
| 4987 | * Bspec has this to say about the PLANE_WM enable bit: |
| 4988 | * "All the watermarks at this level for all enabled |
| 4989 | * planes must be enabled before the level will be used." |
| 4990 | * So this is actually safe to do. |
| 4991 | */ |
| 4992 | if (wm->wm[level].min_ddb_alloc > total[plane_id] || |
| 4993 | wm->uv_wm[level].min_ddb_alloc > uv_total[plane_id]) |
| 4994 | memset(&wm->wm[level], 0, sizeof(wm->wm[level])); |
Ville Syrjälä | 290248c | 2019-02-13 18:54:24 +0200 | [diff] [blame] | 4995 | |
Ville Syrjälä | c384afe | 2019-02-28 19:36:39 +0200 | [diff] [blame] | 4996 | /* |
Bob Paauwe | 39564ae | 2019-04-12 11:09:20 -0700 | [diff] [blame] | 4997 | * Wa_1408961008:icl, ehl |
Ville Syrjälä | c384afe | 2019-02-28 19:36:39 +0200 | [diff] [blame] | 4998 | * Underruns with WM1+ disabled |
| 4999 | */ |
Bob Paauwe | 39564ae | 2019-04-12 11:09:20 -0700 | [diff] [blame] | 5000 | if (IS_GEN(dev_priv, 11) && |
Ville Syrjälä | 290248c | 2019-02-13 18:54:24 +0200 | [diff] [blame] | 5001 | level == 1 && wm->wm[0].plane_en) { |
| 5002 | wm->wm[level].plane_res_b = wm->wm[0].plane_res_b; |
Ville Syrjälä | c384afe | 2019-02-28 19:36:39 +0200 | [diff] [blame] | 5003 | wm->wm[level].plane_res_l = wm->wm[0].plane_res_l; |
| 5004 | wm->wm[level].ignore_lines = wm->wm[0].ignore_lines; |
Ville Syrjälä | 290248c | 2019-02-13 18:54:24 +0200 | [diff] [blame] | 5005 | } |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5006 | } |
| 5007 | } |
| 5008 | |
| 5009 | /* |
| 5010 | * Go back and disable the transition watermark if it turns out we |
| 5011 | * don't have enough DDB blocks for it. |
| 5012 | */ |
Ville Syrjälä | 2a67054b | 2020-02-25 19:11:06 +0200 | [diff] [blame] | 5013 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 5014 | struct skl_plane_wm *wm = |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5015 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
Ville Syrjälä | 5e6037c | 2019-03-12 22:58:42 +0200 | [diff] [blame] | 5016 | |
Ville Syrjälä | b19c9bc | 2018-12-21 19:14:31 +0200 | [diff] [blame] | 5017 | if (wm->trans_wm.plane_res_b >= total[plane_id]) |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5018 | memset(&wm->trans_wm, 0, sizeof(wm->trans_wm)); |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 5019 | } |
| 5020 | |
Matt Roper | c107acf | 2016-05-12 07:06:01 -0700 | [diff] [blame] | 5021 | return 0; |
Damien Lespiau | b9cec07 | 2014-11-04 17:06:43 +0000 | [diff] [blame] | 5022 | } |
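/*
 * Worked example of the proportional split above, with made-up numbers
 * and UV rates assumed to be zero: suppose 100 blocks remain after the
 * cursor and the per-plane minimums, plane A has a data rate of 300 and
 * plane B of 100 (total 400). Plane A gets
 * extra = DIV64_U64_ROUND_UP(100 * 300, 400) = 75 blocks, leaving 25
 * blocks and a remaining rate of 100; plane B then gets
 * min(25, DIV64_U64_ROUND_UP(25 * 100, 100)) = 25 blocks, so both the
 * leftover blocks and the remaining data rate reach exactly zero.
 */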
| 5023 | |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5024 | /* |
| 5025 | * The max latency should be 257 (max the punit can code is 255 and we add 2us |
Ville Syrjälä | ac48496 | 2016-01-20 21:05:26 +0200 | [diff] [blame] | 5026 | * for the read latency) and cpp should always be <= 8, so that |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5027 | * should allow pixel_rate up to ~2 GHz which seems sufficient since max |
| 5028 | * 2xcdclk is 1350 MHz and the pixel rate should never exceed that. |
| 5029 | */ |
Paulo Zanoni | 6c64dd3 | 2017-08-11 16:38:25 -0700 | [diff] [blame] | 5030 | static uint_fixed_16_16_t |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5031 | skl_wm_method1(const struct drm_i915_private *dev_priv, u32 pixel_rate, |
| 5032 | u8 cpp, u32 latency, u32 dbuf_block_size) |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5033 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5034 | u32 wm_intermediate_val; |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5035 | uint_fixed_16_16_t ret; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5036 | |
| 5037 | if (latency == 0) |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5038 | return FP_16_16_MAX; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5039 | |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5040 | wm_intermediate_val = latency * pixel_rate * cpp; |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5041 | ret = div_fixed16(wm_intermediate_val, 1000 * dbuf_block_size); |
Paulo Zanoni | 6c64dd3 | 2017-08-11 16:38:25 -0700 | [diff] [blame] | 5042 | |
Ville Syrjälä | 260a6c1b | 2020-04-30 15:58:21 +0300 | [diff] [blame] | 5043 | if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) |
Paulo Zanoni | 6c64dd3 | 2017-08-11 16:38:25 -0700 | [diff] [blame] | 5044 | ret = add_fixed16_u32(ret, 1); |
| 5045 | |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5046 | return ret; |
| 5047 | } |
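/*
 * Example with illustrative numbers: pixel_rate = 148500 (kHz),
 * cpp = 4, latency = 5 us and dbuf_block_size = 512 give
 * 5 * 148500 * 4 / (1000 * 512) ~= 5.8 blocks (plus one more on
 * gen10+/GLK), i.e. roughly the DDB consumed during the memory latency
 * at that plane data rate.
 */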
| 5048 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5049 | static uint_fixed_16_16_t |
| 5050 | skl_wm_method2(u32 pixel_rate, u32 pipe_htotal, u32 latency, |
| 5051 | uint_fixed_16_16_t plane_blocks_per_line) |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5052 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5053 | u32 wm_intermediate_val; |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5054 | uint_fixed_16_16_t ret; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5055 | |
| 5056 | if (latency == 0) |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5057 | return FP_16_16_MAX; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5058 | |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5059 | wm_intermediate_val = latency * pixel_rate; |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5060 | wm_intermediate_val = DIV_ROUND_UP(wm_intermediate_val, |
| 5061 | pipe_htotal * 1000); |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 5062 | ret = mul_u32_fixed16(wm_intermediate_val, plane_blocks_per_line); |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5063 | return ret; |
| 5064 | } |
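/*
 * Example, continuing the illustrative numbers above with
 * pipe_htotal = 2200 and plane_blocks_per_line = 30: 5 us of latency
 * spans DIV_ROUND_UP(5 * 148500, 2200 * 1000) = 1 line, so method2
 * evaluates to 1 * 30 = 30 blocks - one full line's worth of DDB.
 */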
| 5065 | |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5066 | static uint_fixed_16_16_t |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5067 | intel_get_linetime_us(const struct intel_crtc_state *crtc_state) |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5068 | { |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5069 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5070 | u32 pixel_rate; |
| 5071 | u32 crtc_htotal; |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5072 | uint_fixed_16_16_t linetime_us; |
| 5073 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 5074 | if (!crtc_state->hw.active) |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 5075 | return u32_to_fixed16(0); |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5076 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5077 | pixel_rate = crtc_state->pixel_rate; |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5078 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5079 | if (drm_WARN_ON(&dev_priv->drm, pixel_rate == 0)) |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 5080 | return u32_to_fixed16(0); |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5081 | |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 5082 | crtc_htotal = crtc_state->hw.adjusted_mode.crtc_htotal; |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 5083 | linetime_us = div_fixed16(crtc_htotal * 1000, pixel_rate); |
Kumar, Mahesh | d555cb5 | 2017-05-17 17:28:29 +0530 | [diff] [blame] | 5084 | |
| 5085 | return linetime_us; |
| 5086 | } |
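/*
 * Example: with crtc_htotal = 2200 and a pixel rate of 148500 kHz
 * (a standard 1080p60 timing), the helper above returns
 * 2200 * 1000 / 148500 ~= 14.8 us per line, in 16.16 fixed point.
 */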
| 5087 | |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5088 | static u32 |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5089 | skl_adjusted_plane_pixel_rate(const struct intel_crtc_state *crtc_state, |
| 5090 | const struct intel_plane_state *plane_state) |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 5091 | { |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5092 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5093 | u64 adjusted_pixel_rate; |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 5094 | uint_fixed_16_16_t downscale_amount; |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 5095 | |
| 5096 | /* Shouldn't reach here on disabled planes... */ |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5097 | if (drm_WARN_ON(&dev_priv->drm, |
| 5098 | !intel_wm_plane_visible(crtc_state, plane_state))) |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 5099 | return 0; |
| 5100 | |
| 5101 | /* |
| 5102 | * Adjusted plane pixel rate is just the pipe's adjusted pixel rate |
| 5103 | * with additional adjustments for plane-specific scaling. |
| 5104 | */ |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5105 | adjusted_pixel_rate = crtc_state->pixel_rate; |
| 5106 | downscale_amount = skl_plane_downscale_amount(crtc_state, plane_state); |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 5107 | |
Kumar, Mahesh | 7084b50 | 2017-05-17 17:28:23 +0530 | [diff] [blame] | 5108 | return mul_round_up_u32_fixed16(adjusted_pixel_rate, |
| 5109 | downscale_amount); |
Kumar, Mahesh | 9c2f7a9 | 2016-05-16 15:52:00 -0700 | [diff] [blame] | 5110 | } |
| 5111 | |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5112 | static int |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5113 | skl_compute_wm_params(const struct intel_crtc_state *crtc_state, |
| 5114 | int width, const struct drm_format_info *format, |
| 5115 | u64 modifier, unsigned int rotation, |
| 5116 | u32 plane_pixel_rate, struct skl_wm_params *wp, |
| 5117 | int color_plane) |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5118 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5119 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5120 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5121 | u32 interm_pbpl; |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5122 | |
Juha-Pekka Heikkila | df7d415 | 2019-03-04 17:26:31 +0530 | [diff] [blame] | 5123 | 	/* only planar formats have two color planes */
Imre Deak | 4941f35 | 2019-12-21 14:05:43 +0200 | [diff] [blame] | 5124 | if (color_plane == 1 && |
| 5125 | !intel_format_info_is_yuv_semiplanar(format, modifier)) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5126 | drm_dbg_kms(&dev_priv->drm, |
| 5127 | 			    "Non-planar formats have a single plane\n");
Mahesh Kumar | 942aa2d | 2018-04-09 09:11:04 +0530 | [diff] [blame] | 5128 | return -EINVAL; |
| 5129 | } |
| 5130 | |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5131 | wp->y_tiled = modifier == I915_FORMAT_MOD_Y_TILED || |
| 5132 | modifier == I915_FORMAT_MOD_Yf_TILED || |
| 5133 | modifier == I915_FORMAT_MOD_Y_TILED_CCS || |
| 5134 | modifier == I915_FORMAT_MOD_Yf_TILED_CCS; |
| 5135 | wp->x_tiled = modifier == I915_FORMAT_MOD_X_TILED; |
| 5136 | wp->rc_surface = modifier == I915_FORMAT_MOD_Y_TILED_CCS || |
| 5137 | modifier == I915_FORMAT_MOD_Yf_TILED_CCS; |
Imre Deak | 4941f35 | 2019-12-21 14:05:43 +0200 | [diff] [blame] | 5138 | wp->is_planar = intel_format_info_is_yuv_semiplanar(format, modifier); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5139 | |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5140 | wp->width = width; |
Ville Syrjälä | 45bee43 | 2018-11-14 23:07:28 +0200 | [diff] [blame] | 5141 | if (color_plane == 1 && wp->is_planar) |
Mahesh Kumar | 942aa2d | 2018-04-09 09:11:04 +0530 | [diff] [blame] | 5142 | wp->width /= 2; |
| 5143 | |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5144 | wp->cpp = format->cpp[color_plane]; |
| 5145 | wp->plane_pixel_rate = plane_pixel_rate; |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5146 | |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5147 | if (INTEL_GEN(dev_priv) >= 11 && |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5148 | modifier == I915_FORMAT_MOD_Yf_TILED && wp->cpp == 1) |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5149 | wp->dbuf_block_size = 256; |
| 5150 | else |
| 5151 | wp->dbuf_block_size = 512; |
| 5152 | |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5153 | if (drm_rotation_90_or_270(rotation)) { |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5154 | switch (wp->cpp) { |
| 5155 | case 1: |
| 5156 | wp->y_min_scanlines = 16; |
| 5157 | break; |
| 5158 | case 2: |
| 5159 | wp->y_min_scanlines = 8; |
| 5160 | break; |
| 5161 | case 4: |
| 5162 | wp->y_min_scanlines = 4; |
| 5163 | break; |
| 5164 | default: |
| 5165 | MISSING_CASE(wp->cpp); |
| 5166 | return -EINVAL; |
| 5167 | } |
| 5168 | } else { |
| 5169 | wp->y_min_scanlines = 4; |
| 5170 | } |
| 5171 | |
Ville Syrjälä | 60e983f | 2018-12-21 19:14:33 +0200 | [diff] [blame] | 5172 | if (skl_needs_memory_bw_wa(dev_priv)) |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5173 | wp->y_min_scanlines *= 2; |
| 5174 | |
| 5175 | wp->plane_bytes_per_line = wp->width * wp->cpp; |
| 5176 | if (wp->y_tiled) { |
| 5177 | interm_pbpl = DIV_ROUND_UP(wp->plane_bytes_per_line * |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5178 | wp->y_min_scanlines, |
| 5179 | wp->dbuf_block_size); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5180 | |
Ville Syrjälä | 260a6c1b | 2020-04-30 15:58:21 +0300 | [diff] [blame] | 5181 | if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5182 | interm_pbpl++; |
| 5183 | |
| 5184 | wp->plane_blocks_per_line = div_fixed16(interm_pbpl, |
| 5185 | wp->y_min_scanlines); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5186 | } else { |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5187 | interm_pbpl = DIV_ROUND_UP(wp->plane_bytes_per_line, |
Ville Syrjälä | 260a6c1b | 2020-04-30 15:58:21 +0300 | [diff] [blame] | 5188 | wp->dbuf_block_size); |
| 5189 | |
| 5190 | if (!wp->x_tiled || |
| 5191 | INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) |
| 5192 | interm_pbpl++; |
| 5193 | |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5194 | wp->plane_blocks_per_line = u32_to_fixed16(interm_pbpl); |
| 5195 | } |
| 5196 | |
| 5197 | wp->y_tile_minimum = mul_u32_fixed16(wp->y_min_scanlines, |
| 5198 | wp->plane_blocks_per_line); |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5199 | |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5200 | wp->linetime_us = fixed16_to_u32_round_up( |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5201 | intel_get_linetime_us(crtc_state)); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5202 | |
| 5203 | return 0; |
| 5204 | } |
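/*
 * Illustrative Y-tiled example for the parameter computation above
 * (gen9, no rotation, no memory bandwidth WA): width = 3840 and cpp = 4
 * give plane_bytes_per_line = 15360; with y_min_scanlines = 4 and
 * dbuf_block_size = 512, interm_pbpl = DIV_ROUND_UP(15360 * 4, 512) =
 * 120, so plane_blocks_per_line = 120 / 4 = 30 and
 * y_tile_minimum = 4 * 30 = 120 blocks.
 */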
| 5205 | |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5206 | static int |
| 5207 | skl_compute_plane_wm_params(const struct intel_crtc_state *crtc_state, |
| 5208 | const struct intel_plane_state *plane_state, |
| 5209 | struct skl_wm_params *wp, int color_plane) |
| 5210 | { |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 5211 | const struct drm_framebuffer *fb = plane_state->hw.fb; |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5212 | int width; |
| 5213 | |
Maarten Lankhorst | 3a61276 | 2019-10-04 13:34:54 +0200 | [diff] [blame] | 5214 | /* |
| 5215 | * Src coordinates are already rotated by 270 degrees for |
| 5216 | * the 90/270 degree plane rotation cases (to match the |
| 5217 | * GTT mapping), hence no need to account for rotation here. |
| 5218 | */ |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 5219 | width = drm_rect_width(&plane_state->uapi.src) >> 16; |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5220 | |
| 5221 | return skl_compute_wm_params(crtc_state, width, |
| 5222 | fb->format, fb->modifier, |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 5223 | plane_state->hw.rotation, |
Ville Syrjälä | c92558a | 2019-03-12 22:58:38 +0200 | [diff] [blame] | 5224 | skl_adjusted_plane_pixel_rate(crtc_state, plane_state), |
| 5225 | wp, color_plane); |
| 5226 | } |
| 5227 | |
Ville Syrjälä | b52c273 | 2018-12-21 19:14:28 +0200 | [diff] [blame] | 5228 | static bool skl_wm_has_lines(struct drm_i915_private *dev_priv, int level) |
| 5229 | { |
| 5230 | if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) |
| 5231 | return true; |
| 5232 | |
| 5233 | 	/* The number of lines is ignored for the level 0 watermark. */
| 5234 | return level > 0; |
| 5235 | } |
| 5236 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5237 | static void skl_compute_plane_wm(const struct intel_crtc_state *crtc_state, |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5238 | int level, |
Stanislav Lisovskiy | 7b99475 | 2020-04-09 18:47:18 +0300 | [diff] [blame] | 5239 | unsigned int latency, |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5240 | const struct skl_wm_params *wp, |
| 5241 | const struct skl_wm_level *result_prev, |
| 5242 | struct skl_wm_level *result /* out */) |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5243 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5244 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5245 | uint_fixed_16_16_t method1, method2; |
Mahesh Kumar | b95320b | 2016-12-01 21:19:37 +0530 | [diff] [blame] | 5246 | uint_fixed_16_16_t selected_result; |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 5247 | u32 res_blocks, res_lines, min_ddb_alloc = 0; |
Ville Syrjälä | ce110ec | 2018-11-14 23:07:21 +0200 | [diff] [blame] | 5248 | |
Ville Syrjälä | 0aded17 | 2019-02-05 17:50:53 +0200 | [diff] [blame] | 5249 | if (latency == 0) { |
| 5250 | /* reject it */ |
| 5251 | result->min_ddb_alloc = U16_MAX; |
Ville Syrjälä | 692927f | 2018-12-21 19:14:29 +0200 | [diff] [blame] | 5252 | return; |
Ville Syrjälä | 0aded17 | 2019-02-05 17:50:53 +0200 | [diff] [blame] | 5253 | } |
Ville Syrjälä | 692927f | 2018-12-21 19:14:29 +0200 | [diff] [blame] | 5254 | |
Ville Syrjälä | 25312ef | 2019-05-03 20:38:05 +0300 | [diff] [blame] | 5255 | /* |
| 5256 | * WaIncreaseLatencyIPCEnabled: kbl,cfl |
| 5257 | * Display WA #1141: kbl,cfl |
| 5258 | */ |
Chris Wilson | 5f4ae27 | 2020-06-02 15:05:40 +0100 | [diff] [blame] | 5259 | if ((IS_KABYLAKE(dev_priv) || |
| 5260 | IS_COFFEELAKE(dev_priv) || |
| 5261 | IS_COMETLAKE(dev_priv)) && |
Rodrigo Vivi | 82525c1 | 2017-06-08 08:50:00 -0700 | [diff] [blame] | 5262 | dev_priv->ipc_enabled) |
Mahesh Kumar | 4b7b233 | 2016-12-01 21:19:35 +0530 | [diff] [blame] | 5263 | latency += 4; |
| 5264 | |
Ville Syrjälä | 60e983f | 2018-12-21 19:14:33 +0200 | [diff] [blame] | 5265 | if (skl_needs_memory_bw_wa(dev_priv) && wp->x_tiled) |
Paulo Zanoni | ee3d532 | 2016-10-11 15:25:38 -0300 | [diff] [blame] | 5266 | latency += 15; |
| 5267 | |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5268 | method1 = skl_wm_method1(dev_priv, wp->plane_pixel_rate, |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5269 | wp->cpp, latency, wp->dbuf_block_size); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5270 | method2 = skl_wm_method2(wp->plane_pixel_rate, |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 5271 | crtc_state->hw.adjusted_mode.crtc_htotal, |
Paulo Zanoni | 1186fa8 | 2016-09-22 18:00:31 -0300 | [diff] [blame] | 5272 | latency, |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5273 | wp->plane_blocks_per_line); |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5274 | |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5275 | if (wp->y_tiled) { |
| 5276 | selected_result = max_fixed16(method2, wp->y_tile_minimum); |
Tvrtko Ursulin | 0fda656 | 2015-02-27 15:12:35 +0000 | [diff] [blame] | 5277 | } else { |
Maarten Lankhorst | 1326a92 | 2019-10-31 12:26:02 +0100 | [diff] [blame] | 5278 | if ((wp->cpp * crtc_state->hw.adjusted_mode.crtc_htotal / |
Mahesh Kumar | df8ee19 | 2018-01-30 11:49:11 -0200 | [diff] [blame] | 5279 | wp->dbuf_block_size < 1) && |
Paulo Zanoni | 077b582 | 2018-10-04 16:15:57 -0700 | [diff] [blame] | 5280 | (wp->plane_bytes_per_line / wp->dbuf_block_size < 1)) { |
Paulo Zanoni | f1db3ea | 2016-09-22 18:00:34 -0300 | [diff] [blame] | 5281 | selected_result = method2; |
Paulo Zanoni | 077b582 | 2018-10-04 16:15:57 -0700 | [diff] [blame] | 5282 | } else if (latency >= wp->linetime_us) { |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 5283 | if (IS_GEN(dev_priv, 9) && |
Paulo Zanoni | 077b582 | 2018-10-04 16:15:57 -0700 | [diff] [blame] | 5284 | !IS_GEMINILAKE(dev_priv)) |
| 5285 | selected_result = min_fixed16(method1, method2); |
| 5286 | else |
| 5287 | selected_result = method2; |
| 5288 | } else { |
Tvrtko Ursulin | 0fda656 | 2015-02-27 15:12:35 +0000 | [diff] [blame] | 5289 | selected_result = method1; |
Paulo Zanoni | 077b582 | 2018-10-04 16:15:57 -0700 | [diff] [blame] | 5290 | } |
Tvrtko Ursulin | 0fda656 | 2015-02-27 15:12:35 +0000 | [diff] [blame] | 5291 | } |
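/*
 * Illustrative note (editorial, not part of the original code; numbers are
 * assumed): method1 scales with how many blocks the plane fetches during the
 * memory latency window, while method2 scales with how many lines are crossed
 * in that window times the blocks needed per line. For example, with an
 * assumed latency of 7 us and a line time of 11 us, the latency does not
 * cover a full line, so the linear (non Y-tiled) path above would normally
 * fall through to method1; Y-tiled surfaces instead take
 * max(method2, y_tile_minimum).
 */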
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5292 | |
Kumar, Mahesh | eac2cb8 | 2017-07-05 20:01:46 +0530 | [diff] [blame] | 5293 | res_blocks = fixed16_to_u32_round_up(selected_result) + 1; |
Kumar, Mahesh | d273ecc | 2017-05-17 17:28:22 +0530 | [diff] [blame] | 5294 | res_lines = div_round_up_fixed16(selected_result, |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5295 | wp->plane_blocks_per_line); |
Damien Lespiau | e6d6617 | 2014-11-04 17:06:55 +0000 | [diff] [blame] | 5296 | |
Paulo Zanoni | a5b79d3 | 2018-11-13 17:24:32 -0800 | [diff] [blame] | 5297 | if (IS_GEN9_BC(dev_priv) || IS_BROXTON(dev_priv)) { |
| 5298 | /* Display WA #1125: skl,bxt,kbl */ |
| 5299 | if (level == 0 && wp->rc_surface) |
| 5300 | res_blocks += |
| 5301 | fixed16_to_u32_round_up(wp->y_tile_minimum); |
Ville Syrjälä | 2e2adb0 | 2017-08-01 09:58:13 -0700 | [diff] [blame] | 5302 | |
Paulo Zanoni | a5b79d3 | 2018-11-13 17:24:32 -0800 | [diff] [blame] | 5303 | /* Display WA #1126: skl,bxt,kbl */ |
| 5304 | if (level >= 1 && level <= 7) { |
| 5305 | if (wp->y_tiled) { |
| 5306 | res_blocks += |
| 5307 | fixed16_to_u32_round_up(wp->y_tile_minimum); |
| 5308 | res_lines += wp->y_min_scanlines; |
| 5309 | } else { |
| 5310 | res_blocks++; |
| 5311 | } |
| 5312 | |
| 5313 | /* |
| 5314 |  * Make sure the result blocks for higher latency levels are at |
| 5315 |  * least as high as for the level below the current one. The DDB |
| 5316 |  * algorithm optimization for special cases assumes this. It also |
| 5317 |  * covers Display WA #1125 for RC. |
| 5318 | */ |
| 5319 | if (result_prev->plane_res_b > res_blocks) |
| 5320 | res_blocks = result_prev->plane_res_b; |
Paulo Zanoni | 75676ed | 2016-09-22 18:00:33 -0300 | [diff] [blame] | 5321 | } |
Tvrtko Ursulin | 0fda656 | 2015-02-27 15:12:35 +0000 | [diff] [blame] | 5322 | } |
Tvrtko Ursulin | d4c2aa6 | 2015-02-27 11:15:22 +0000 | [diff] [blame] | 5323 | |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 5324 | if (INTEL_GEN(dev_priv) >= 11) { |
| 5325 | if (wp->y_tiled) { |
| 5326 | int extra_lines; |
| 5327 | |
| 5328 | if (res_lines % wp->y_min_scanlines == 0) |
| 5329 | extra_lines = wp->y_min_scanlines; |
| 5330 | else |
| 5331 | extra_lines = wp->y_min_scanlines * 2 - |
| 5332 | res_lines % wp->y_min_scanlines; |
| 5333 | |
| 5334 | min_ddb_alloc = mul_round_up_u32_fixed16(res_lines + extra_lines, |
| 5335 | wp->plane_blocks_per_line); |
| 5336 | } else { |
| 5337 | min_ddb_alloc = res_blocks + |
| 5338 | DIV_ROUND_UP(res_blocks, 10); |
| 5339 | } |
| 5340 | } |
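/*
 * Worked example for the gen11+ min_ddb_alloc above (editorial, illustrative
 * numbers only): with res_lines = 7 and y_min_scanlines = 4, 7 % 4 = 3, so
 * extra_lines = 2 * 4 - 3 = 5 and the allocation is sized for
 * 7 + 5 = 12 lines worth of blocks. On the non Y-tiled path the allocation
 * is simply padded by roughly 10%, e.g. 50 blocks -> 50 + 5 = 55.
 */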
| 5341 | |
Ville Syrjälä | b52c273 | 2018-12-21 19:14:28 +0200 | [diff] [blame] | 5342 | if (!skl_wm_has_lines(dev_priv, level)) |
| 5343 | res_lines = 0; |
| 5344 | |
Ville Syrjälä | 0aded17 | 2019-02-05 17:50:53 +0200 | [diff] [blame] | 5345 | if (res_lines > 31) { |
| 5346 | /* reject it */ |
| 5347 | result->min_ddb_alloc = U16_MAX; |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5348 | return; |
Ville Syrjälä | 0aded17 | 2019-02-05 17:50:53 +0200 | [diff] [blame] | 5349 | } |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5350 | |
| 5351 | /* |
| 5352 | * If res_lines is valid, assume we can use this watermark level |
| 5353 | * for now. We'll come back and disable it after we calculate the |
| 5354 | * DDB allocation if it turns out we don't actually have enough |
| 5355 | * blocks to satisfy it. |
| 5356 | */ |
Mahesh Kumar | 62027b7 | 2018-04-09 09:11:05 +0530 | [diff] [blame] | 5357 | result->plane_res_b = res_blocks; |
| 5358 | result->plane_res_l = res_lines; |
Ville Syrjälä | 961d95e | 2018-12-21 19:14:32 +0200 | [diff] [blame] | 5359 | /* Bspec says: value >= plane ddb allocation -> invalid, hence the +1 here */ |
| 5360 | result->min_ddb_alloc = max(min_ddb_alloc, res_blocks) + 1; |
Mahesh Kumar | 62027b7 | 2018-04-09 09:11:05 +0530 | [diff] [blame] | 5361 | result->plane_en = true; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5362 | } |
| 5363 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5364 | static void |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5365 | skl_compute_wm_levels(const struct intel_crtc_state *crtc_state, |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5366 | const struct skl_wm_params *wm_params, |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5367 | struct skl_wm_level *levels) |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5368 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5369 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Kumar, Mahesh | d2f5e36 | 2017-05-17 17:28:28 +0530 | [diff] [blame] | 5370 | int level, max_level = ilk_wm_max_level(dev_priv); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5371 | struct skl_wm_level *result_prev = &levels[0]; |
Lyude | a62163e | 2016-10-04 14:28:20 -0400 | [diff] [blame] | 5372 | |
Kumar, Mahesh | d2f5e36 | 2017-05-17 17:28:28 +0530 | [diff] [blame] | 5373 | for (level = 0; level <= max_level; level++) { |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5374 | struct skl_wm_level *result = &levels[level]; |
Stanislav Lisovskiy | 7b99475 | 2020-04-09 18:47:18 +0300 | [diff] [blame] | 5375 | unsigned int latency = dev_priv->wm.skl_latency[level]; |
Kumar, Mahesh | d2f5e36 | 2017-05-17 17:28:28 +0530 | [diff] [blame] | 5376 | |
Stanislav Lisovskiy | 7b99475 | 2020-04-09 18:47:18 +0300 | [diff] [blame] | 5377 | skl_compute_plane_wm(crtc_state, level, latency, |
| 5378 | wm_params, result_prev, result); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5379 | |
| 5380 | result_prev = result; |
Kumar, Mahesh | d2f5e36 | 2017-05-17 17:28:28 +0530 | [diff] [blame] | 5381 | } |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5382 | } |
| 5383 | |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5384 | static void tgl_compute_sagv_wm(const struct intel_crtc_state *crtc_state, |
| 5385 | const struct skl_wm_params *wm_params, |
| 5386 | struct skl_plane_wm *plane_wm) |
| 5387 | { |
| 5388 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
| 5389 | struct skl_wm_level *sagv_wm = &plane_wm->sagv_wm0; |
| 5390 | struct skl_wm_level *levels = plane_wm->wm; |
| 5391 | unsigned int latency = dev_priv->wm.skl_latency[0] + dev_priv->sagv_block_time_us; |
| 5392 | |
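	/*
	 * Editorial note with assumed example numbers: the SAGV watermark is
	 * just a level 0 watermark recomputed with the SAGV block time added
	 * to the level 0 latency, e.g. 2 us + 30 us = 32 us of effective
	 * latency (actual values are platform dependent).
	 */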
| 5393 | skl_compute_plane_wm(crtc_state, 0, latency, |
| 5394 | wm_params, &levels[0], |
| 5395 | sagv_wm); |
| 5396 | } |
| 5397 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5398 | static void skl_compute_transition_wm(const struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 6a3c910b | 2018-11-14 23:07:23 +0200 | [diff] [blame] | 5399 | const struct skl_wm_params *wp, |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5400 | struct skl_plane_wm *wm) |
Damien Lespiau | 407b50f | 2014-11-04 17:06:57 +0000 | [diff] [blame] | 5401 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5402 | struct drm_device *dev = crtc_state->uapi.crtc->dev; |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5403 | const struct drm_i915_private *dev_priv = to_i915(dev); |
Ville Syrjälä | c834d03 | 2020-02-28 22:35:52 +0200 | [diff] [blame] | 5404 | u16 trans_min, trans_amount, trans_y_tile_min; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5405 | u16 wm0_sel_res_b, trans_offset_b, res_blocks; |
Damien Lespiau | 9414f56 | 2014-11-04 17:06:58 +0000 | [diff] [blame] | 5406 | |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5407 | /* Transition WMs don't make any sense if IPC is disabled */ |
| 5408 | if (!dev_priv->ipc_enabled) |
Ville Syrjälä | 14a4306 | 2018-11-14 23:07:22 +0200 | [diff] [blame] | 5409 | return; |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5410 | |
Ville Syrjälä | a7f1e8e | 2020-02-28 22:35:51 +0200 | [diff] [blame] | 5411 | /* |
| 5412 | * WaDisableTWM:skl,kbl,cfl,bxt |
| 5413 |  * Transition WMs are not recommended by the HW team for GEN9 |
| 5414 | */ |
| 5415 | if (IS_GEN9_BC(dev_priv) || IS_BROXTON(dev_priv)) |
| 5416 | return; |
| 5417 | |
Paulo Zanoni | 91961a8 | 2018-10-04 16:15:56 -0700 | [diff] [blame] | 5418 | if (INTEL_GEN(dev_priv) >= 11) |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5419 | trans_min = 4; |
Ville Syrjälä | c834d03 | 2020-02-28 22:35:52 +0200 | [diff] [blame] | 5420 | else |
| 5421 | trans_min = 14; |
| 5422 | |
| 5423 | /* Display WA #1140: glk,cnl */ |
| 5424 | if (IS_CANNONLAKE(dev_priv) || IS_GEMINILAKE(dev_priv)) |
| 5425 | trans_amount = 0; |
| 5426 | else |
| 5427 | trans_amount = 10; /* This is a configurable amount */ |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5428 | |
| 5429 | trans_offset_b = trans_min + trans_amount; |
| 5430 | |
Paulo Zanoni | cbacc79 | 2018-10-04 16:15:58 -0700 | [diff] [blame] | 5431 | /* |
| 5432 | * The spec asks for Selected Result Blocks for wm0 (the real value), |
| 5433 | * not Result Blocks (the integer value). Pay attention to the capital |
| 5434 | * letters. The value wm_l0->plane_res_b is actually Result Blocks, but |
| 5435 | * since Result Blocks is the ceiling of Selected Result Blocks plus 1, |
| 5436 | * and since we later will have to get the ceiling of the sum in the |
| 5437 | * transition watermarks calculation, we can just pretend Selected |
| 5438 | * Result Blocks is Result Blocks minus 1 and it should work for the |
| 5439 | * current platforms. |
| 5440 | */ |
Ville Syrjälä | 6a3c910b | 2018-11-14 23:07:23 +0200 | [diff] [blame] | 5441 | wm0_sel_res_b = wm->wm[0].plane_res_b - 1; |
Paulo Zanoni | cbacc79 | 2018-10-04 16:15:58 -0700 | [diff] [blame] | 5442 | |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5443 | if (wp->y_tiled) { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5444 | trans_y_tile_min = |
| 5445 | (u16)mul_round_up_u32_fixed16(2, wp->y_tile_minimum); |
Paulo Zanoni | cbacc79 | 2018-10-04 16:15:58 -0700 | [diff] [blame] | 5446 | res_blocks = max(wm0_sel_res_b, trans_y_tile_min) + |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5447 | trans_offset_b; |
| 5448 | } else { |
Paulo Zanoni | cbacc79 | 2018-10-04 16:15:58 -0700 | [diff] [blame] | 5449 | res_blocks = wm0_sel_res_b + trans_offset_b; |
Kumar, Mahesh | ca47667 | 2017-08-17 19:15:24 +0530 | [diff] [blame] | 5450 | } |
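	/*
	 * Rough worked example of the above (editorial, illustrative numbers,
	 * not from any specific platform): on gen11+, trans_offset_b is
	 * 4 + 10 = 14. For a linear plane with wm[0].plane_res_b = 31,
	 * wm0_sel_res_b = 30 and res_blocks = 30 + 14 = 44, so the transition
	 * watermark below is programmed as 44 + 1 = 45 blocks. Y-tiled planes
	 * use max(wm0_sel_res_b, 2 * y_tile_minimum) instead of wm0_sel_res_b
	 * alone before adding the offset.
	 */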
| 5451 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5452 | /* |
| 5453 | * Just assume we can enable the transition watermark. After |
| 5454 | * computing the DDB we'll come back and disable it if that |
| 5455 | * assumption turns out to be false. |
| 5456 | */ |
| 5457 | wm->trans_wm.plane_res_b = res_blocks + 1; |
| 5458 | wm->trans_wm.plane_en = true; |
Damien Lespiau | 407b50f | 2014-11-04 17:06:57 +0000 | [diff] [blame] | 5459 | } |
| 5460 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5461 | static int skl_build_plane_wm_single(struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5462 | const struct intel_plane_state *plane_state, |
| 5463 | enum plane_id plane_id, int color_plane) |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5464 | { |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5465 | struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); |
| 5466 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5467 | struct skl_plane_wm *wm = &crtc_state->wm.skl.optimal.planes[plane_id]; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5468 | struct skl_wm_params wm_params; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5469 | int ret; |
| 5470 | |
Ville Syrjälä | 51de9c6 | 2018-11-14 23:07:25 +0200 | [diff] [blame] | 5471 | ret = skl_compute_plane_wm_params(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5472 | &wm_params, color_plane); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5473 | if (ret) |
| 5474 | return ret; |
| 5475 | |
Ville Syrjälä | 67155a6 | 2019-03-12 22:58:37 +0200 | [diff] [blame] | 5476 | skl_compute_wm_levels(crtc_state, &wm_params, wm->wm); |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5477 | |
| 5478 | if (INTEL_GEN(dev_priv) >= 12) |
| 5479 | tgl_compute_sagv_wm(crtc_state, &wm_params, wm); |
| 5480 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 5481 | skl_compute_transition_wm(crtc_state, &wm_params, wm); |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5482 | |
| 5483 | return 0; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5484 | } |
| 5485 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5486 | static int skl_build_plane_wm_uv(struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5487 | const struct intel_plane_state *plane_state, |
| 5488 | enum plane_id plane_id) |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5489 | { |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5490 | struct skl_plane_wm *wm = &crtc_state->wm.skl.optimal.planes[plane_id]; |
| 5491 | struct skl_wm_params wm_params; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5492 | int ret; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5493 | |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5494 | wm->is_planar = true; |
| 5495 | |
| 5496 | /* UV plane watermarks must also be validated for NV12/Planar */ |
Ville Syrjälä | 51de9c6 | 2018-11-14 23:07:25 +0200 | [diff] [blame] | 5497 | ret = skl_compute_plane_wm_params(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5498 | &wm_params, 1); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5499 | if (ret) |
| 5500 | return ret; |
| 5501 | |
Ville Syrjälä | 67155a6 | 2019-03-12 22:58:37 +0200 | [diff] [blame] | 5502 | skl_compute_wm_levels(crtc_state, &wm_params, wm->uv_wm); |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5503 | |
| 5504 | return 0; |
| 5505 | } |
| 5506 | |
Ville Syrjälä | 96cb7cd | 2019-03-12 22:58:43 +0200 | [diff] [blame] | 5507 | static int skl_build_plane_wm(struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5508 | const struct intel_plane_state *plane_state) |
| 5509 | { |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 5510 | struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 5511 | const struct drm_framebuffer *fb = plane_state->hw.fb; |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5512 | enum plane_id plane_id = plane->id; |
| 5513 | int ret; |
| 5514 | |
| 5515 | if (!intel_wm_plane_visible(crtc_state, plane_state)) |
| 5516 | return 0; |
| 5517 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5518 | ret = skl_build_plane_wm_single(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5519 | plane_id, 0); |
| 5520 | if (ret) |
| 5521 | return ret; |
| 5522 | |
| 5523 | if (fb->format->is_yuv && fb->format->num_planes > 1) { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5524 | ret = skl_build_plane_wm_uv(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5525 | plane_id); |
| 5526 | if (ret) |
| 5527 | return ret; |
| 5528 | } |
| 5529 | |
| 5530 | return 0; |
| 5531 | } |
| 5532 | |
Ville Syrjälä | 96cb7cd | 2019-03-12 22:58:43 +0200 | [diff] [blame] | 5533 | static int icl_build_plane_wm(struct intel_crtc_state *crtc_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5534 | const struct intel_plane_state *plane_state) |
| 5535 | { |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5536 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 5537 | enum plane_id plane_id = to_intel_plane(plane_state->uapi.plane)->id; |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5538 | int ret; |
| 5539 | |
| 5540 | /* Watermarks for a planar slave are calculated via its master plane */ |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 5541 | if (plane_state->planar_slave) |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5542 | return 0; |
| 5543 | |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 5544 | if (plane_state->planar_linked_plane) { |
Maarten Lankhorst | 7b3cb17 | 2019-10-31 12:26:07 +0100 | [diff] [blame] | 5545 | const struct drm_framebuffer *fb = plane_state->hw.fb; |
Maarten Lankhorst | c47b7dd | 2019-09-20 13:42:20 +0200 | [diff] [blame] | 5546 | enum plane_id y_plane_id = plane_state->planar_linked_plane->id; |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5547 | |
Pankaj Bharadiya | 19edeb38 | 2020-05-04 23:45:59 +0530 | [diff] [blame] | 5548 | drm_WARN_ON(&dev_priv->drm, |
| 5549 | !intel_wm_plane_visible(crtc_state, plane_state)); |
| 5550 | drm_WARN_ON(&dev_priv->drm, !fb->format->is_yuv || |
| 5551 | fb->format->num_planes == 1); |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5552 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5553 | ret = skl_build_plane_wm_single(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5554 | y_plane_id, 0); |
| 5555 | if (ret) |
| 5556 | return ret; |
| 5557 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5558 | ret = skl_build_plane_wm_single(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5559 | plane_id, 1); |
| 5560 | if (ret) |
| 5561 | return ret; |
| 5562 | } else if (intel_wm_plane_visible(crtc_state, plane_state)) { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5563 | ret = skl_build_plane_wm_single(crtc_state, plane_state, |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5564 | plane_id, 0); |
| 5565 | if (ret) |
| 5566 | return ret; |
| 5567 | } |
| 5568 | |
| 5569 | return 0; |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5570 | } |
| 5571 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5572 | static int skl_build_pipe_wm(struct intel_crtc_state *crtc_state) |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5573 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5574 | struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5575 | struct skl_pipe_wm *pipe_wm = &crtc_state->wm.skl.optimal; |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 5576 | struct intel_plane *plane; |
| 5577 | const struct intel_plane_state *plane_state; |
Matt Roper | 55994c2 | 2016-05-12 07:06:08 -0700 | [diff] [blame] | 5578 | int ret; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5579 | |
Lyude | a62163e | 2016-10-04 14:28:20 -0400 | [diff] [blame] | 5580 | /* |
| 5581 | * We'll only calculate watermarks for planes that are actually |
| 5582 | * enabled, so make sure all other planes are set as disabled. |
| 5583 | */ |
| 5584 | memset(pipe_wm->planes, 0, sizeof(pipe_wm->planes)); |
| 5585 | |
Maarten Lankhorst | af9fbfa | 2019-10-04 13:34:53 +0200 | [diff] [blame] | 5586 | intel_atomic_crtc_state_for_each_plane_state(plane, plane_state, |
| 5587 | crtc_state) { |
Kumar, Mahesh | eb2fdcd | 2017-05-17 17:28:27 +0530 | [diff] [blame] | 5588 | |
Ville Syrjälä | 8315847 | 2018-11-27 18:57:26 +0200 | [diff] [blame] | 5589 | if (INTEL_GEN(dev_priv) >= 11) |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5590 | ret = icl_build_plane_wm(crtc_state, plane_state); |
Maarten Lankhorst | b048a00 | 2018-10-18 13:51:30 +0200 | [diff] [blame] | 5591 | else |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 5592 | ret = skl_build_plane_wm(crtc_state, plane_state); |
Kumar, Mahesh | 7e452fd | 2017-08-17 19:15:23 +0530 | [diff] [blame] | 5593 | if (ret) |
| 5594 | return ret; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5595 | } |
Mahesh Kumar | 942aa2d | 2018-04-09 09:11:04 +0530 | [diff] [blame] | 5596 | |
Matt Roper | 55994c2 | 2016-05-12 07:06:08 -0700 | [diff] [blame] | 5597 | return 0; |
Pradeep Bhat | 2d41c0b | 2014-11-04 17:06:42 +0000 | [diff] [blame] | 5598 | } |
| 5599 | |
Ville Syrjälä | f0f59a0 | 2015-11-18 15:33:26 +0200 | [diff] [blame] | 5600 | static void skl_ddb_entry_write(struct drm_i915_private *dev_priv, |
| 5601 | i915_reg_t reg, |
Damien Lespiau | 16160e3 | 2014-11-04 17:06:53 +0000 | [diff] [blame] | 5602 | const struct skl_ddb_entry *entry) |
| 5603 | { |
| 5604 | if (entry->end) |
Jani Nikula | 9b6320a | 2020-01-23 16:00:04 +0200 | [diff] [blame] | 5605 | intel_de_write_fw(dev_priv, reg, |
| 5606 | (entry->end - 1) << 16 | entry->start); |
Damien Lespiau | 16160e3 | 2014-11-04 17:06:53 +0000 | [diff] [blame] | 5607 | else |
Jani Nikula | 9b6320a | 2020-01-23 16:00:04 +0200 | [diff] [blame] | 5608 | intel_de_write_fw(dev_priv, reg, 0); |
Damien Lespiau | 16160e3 | 2014-11-04 17:06:53 +0000 | [diff] [blame] | 5609 | } |
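/*
 * Editorial example of the register packing above (assumed values): a DDB
 * entry covering [0, 160) is written as (160 - 1) << 16 | 0, i.e. the end is
 * exclusive and gets programmed as the last block index; an empty entry is
 * written as 0.
 */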
| 5610 | |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5611 | static void skl_write_wm_level(struct drm_i915_private *dev_priv, |
| 5612 | i915_reg_t reg, |
| 5613 | const struct skl_wm_level *level) |
| 5614 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 5615 | u32 val = 0; |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5616 | |
Ville Syrjälä | 2ed8e1f | 2019-02-13 18:54:23 +0200 | [diff] [blame] | 5617 | if (level->plane_en) |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5618 | val |= PLANE_WM_EN; |
Ville Syrjälä | 2ed8e1f | 2019-02-13 18:54:23 +0200 | [diff] [blame] | 5619 | if (level->ignore_lines) |
| 5620 | val |= PLANE_WM_IGNORE_LINES; |
| 5621 | val |= level->plane_res_b; |
| 5622 | val |= level->plane_res_l << PLANE_WM_LINES_SHIFT; |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5623 | |
Jani Nikula | 9b6320a | 2020-01-23 16:00:04 +0200 | [diff] [blame] | 5624 | intel_de_write_fw(dev_priv, reg, val); |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5625 | } |
| 5626 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5627 | void skl_write_plane_wm(struct intel_plane *plane, |
| 5628 | const struct intel_crtc_state *crtc_state) |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5629 | { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5630 | struct drm_i915_private *dev_priv = to_i915(plane->base.dev); |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 5631 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5632 | enum plane_id plane_id = plane->id; |
| 5633 | enum pipe pipe = plane->pipe; |
| 5634 | const struct skl_plane_wm *wm = |
| 5635 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
| 5636 | const struct skl_ddb_entry *ddb_y = |
| 5637 | &crtc_state->wm.skl.plane_ddb_y[plane_id]; |
| 5638 | const struct skl_ddb_entry *ddb_uv = |
| 5639 | &crtc_state->wm.skl.plane_ddb_uv[plane_id]; |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5640 | |
| 5641 | for (level = 0; level <= max_level; level++) { |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 5642 | const struct skl_wm_level *wm_level; |
| 5643 | |
| 5644 | wm_level = skl_plane_wm_level(crtc_state, plane_id, level); |
| 5645 | |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 5646 | skl_write_wm_level(dev_priv, PLANE_WM(pipe, plane_id, level), |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 5647 | wm_level); |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5648 | } |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 5649 | skl_write_wm_level(dev_priv, PLANE_WM_TRANS(pipe, plane_id), |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5650 | &wm->trans_wm); |
Lyude | 2708249 | 2016-08-24 07:48:10 +0200 | [diff] [blame] | 5651 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5652 | if (INTEL_GEN(dev_priv) >= 11) { |
Mahesh Kumar | 234059d | 2018-01-30 11:49:13 -0200 | [diff] [blame] | 5653 | skl_ddb_entry_write(dev_priv, |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5654 | PLANE_BUF_CFG(pipe, plane_id), ddb_y); |
| 5655 | return; |
Mahesh Kumar | b879d58 | 2018-04-09 09:11:01 +0530 | [diff] [blame] | 5656 | } |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5657 | |
| 5658 | if (wm->is_planar) |
| 5659 | swap(ddb_y, ddb_uv); |
| 5660 | |
| 5661 | skl_ddb_entry_write(dev_priv, |
| 5662 | PLANE_BUF_CFG(pipe, plane_id), ddb_y); |
| 5663 | skl_ddb_entry_write(dev_priv, |
| 5664 | PLANE_NV12_BUF_CFG(pipe, plane_id), ddb_uv); |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5665 | } |
| 5666 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5667 | void skl_write_cursor_wm(struct intel_plane *plane, |
| 5668 | const struct intel_crtc_state *crtc_state) |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5669 | { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5670 | struct drm_i915_private *dev_priv = to_i915(plane->base.dev); |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 5671 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5672 | enum plane_id plane_id = plane->id; |
| 5673 | enum pipe pipe = plane->pipe; |
| 5674 | const struct skl_plane_wm *wm = |
| 5675 | &crtc_state->wm.skl.optimal.planes[plane_id]; |
| 5676 | const struct skl_ddb_entry *ddb = |
| 5677 | &crtc_state->wm.skl.plane_ddb_y[plane_id]; |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5678 | |
| 5679 | for (level = 0; level <= max_level; level++) { |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 5680 | const struct skl_wm_level *wm_level; |
| 5681 | |
| 5682 | wm_level = skl_plane_wm_level(crtc_state, plane_id, level); |
| 5683 | |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5684 | skl_write_wm_level(dev_priv, CUR_WM(pipe, level), |
Stanislav Lisovskiy | d916234 | 2020-05-13 12:38:11 +0300 | [diff] [blame] | 5685 | wm_level); |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5686 | } |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 5687 | skl_write_wm_level(dev_priv, CUR_WM_TRANS(pipe), &wm->trans_wm); |
Lyude | 2708249 | 2016-08-24 07:48:10 +0200 | [diff] [blame] | 5688 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5689 | skl_ddb_entry_write(dev_priv, CUR_BUF_CFG(pipe), ddb); |
Lyude | 62e0fb8 | 2016-08-22 12:50:08 -0400 | [diff] [blame] | 5690 | } |
| 5691 | |
cpaul@redhat.com | 45ece23 | 2016-10-14 17:31:56 -0400 | [diff] [blame] | 5692 | bool skl_wm_level_equals(const struct skl_wm_level *l1, |
| 5693 | const struct skl_wm_level *l2) |
| 5694 | { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5695 | return l1->plane_en == l2->plane_en && |
Ville Syrjälä | 2ed8e1f | 2019-02-13 18:54:23 +0200 | [diff] [blame] | 5696 | l1->ignore_lines == l2->ignore_lines && |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5697 | l1->plane_res_l == l2->plane_res_l && |
| 5698 | l1->plane_res_b == l2->plane_res_b; |
| 5699 | } |
cpaul@redhat.com | 45ece23 | 2016-10-14 17:31:56 -0400 | [diff] [blame] | 5700 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5701 | static bool skl_plane_wm_equals(struct drm_i915_private *dev_priv, |
| 5702 | const struct skl_plane_wm *wm1, |
| 5703 | const struct skl_plane_wm *wm2) |
| 5704 | { |
| 5705 | int level, max_level = ilk_wm_max_level(dev_priv); |
cpaul@redhat.com | 45ece23 | 2016-10-14 17:31:56 -0400 | [diff] [blame] | 5706 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5707 | for (level = 0; level <= max_level; level++) { |
Ville Syrjälä | e7f54e6 | 2020-02-28 22:35:49 +0200 | [diff] [blame] | 5708 | /* |
| 5709 | * We don't check uv_wm as the hardware doesn't actually |
| 5710 | * use it. It only gets used for calculating the required |
| 5711 | * ddb allocation. |
| 5712 | */ |
| 5713 | if (!skl_wm_level_equals(&wm1->wm[level], &wm2->wm[level])) |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5714 | return false; |
| 5715 | } |
| 5716 | |
| 5717 | return skl_wm_level_equals(&wm1->trans_wm, &wm2->trans_wm); |
cpaul@redhat.com | 45ece23 | 2016-10-14 17:31:56 -0400 | [diff] [blame] | 5718 | } |
| 5719 | |
Jani Nikula | 81b55ef | 2020-04-20 17:04:38 +0300 | [diff] [blame] | 5720 | static bool skl_ddb_entries_overlap(const struct skl_ddb_entry *a, |
| 5721 | const struct skl_ddb_entry *b) |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5722 | { |
Lyude | 2708249 | 2016-08-24 07:48:10 +0200 | [diff] [blame] | 5723 | return a->start < b->end && b->start < a->end; |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5724 | } |
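/*
 * Editorial example (assumed values): DDB entries are half-open ranges, so
 * [0, 512) and [512, 1024) do not overlap (512 < 512 is false), while
 * [0, 520) and [512, 1024) do.
 */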
| 5725 | |
Ville Syrjälä | 53cc6880 | 2018-11-01 17:05:59 +0200 | [diff] [blame] | 5726 | bool skl_ddb_allocation_overlaps(const struct skl_ddb_entry *ddb, |
Jani Nikula | 696173b | 2019-04-05 14:00:15 +0300 | [diff] [blame] | 5727 | const struct skl_ddb_entry *entries, |
Ville Syrjälä | 53cc6880 | 2018-11-01 17:05:59 +0200 | [diff] [blame] | 5728 | int num_entries, int ignore_idx) |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5729 | { |
Ville Syrjälä | 53cc6880 | 2018-11-01 17:05:59 +0200 | [diff] [blame] | 5730 | int i; |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5731 | |
Ville Syrjälä | 53cc6880 | 2018-11-01 17:05:59 +0200 | [diff] [blame] | 5732 | for (i = 0; i < num_entries; i++) { |
| 5733 | if (i != ignore_idx && |
| 5734 | skl_ddb_entries_overlap(ddb, &entries[i])) |
Lyude | 2708249 | 2016-08-24 07:48:10 +0200 | [diff] [blame] | 5735 | return true; |
Mika Kahola | 2b68504 | 2017-10-10 13:17:03 +0300 | [diff] [blame] | 5736 | } |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5737 | |
Lyude | 2708249 | 2016-08-24 07:48:10 +0200 | [diff] [blame] | 5738 | return false; |
Damien Lespiau | 0e8fb7b | 2014-11-04 17:07:02 +0000 | [diff] [blame] | 5739 | } |
| 5740 | |
Jani Nikula | bb7791b | 2016-10-04 12:29:17 +0300 | [diff] [blame] | 5741 | static int |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5742 | skl_ddb_add_affected_planes(const struct intel_crtc_state *old_crtc_state, |
| 5743 | struct intel_crtc_state *new_crtc_state) |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5744 | { |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 5745 | struct intel_atomic_state *state = to_intel_atomic_state(new_crtc_state->uapi.state); |
| 5746 | struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5747 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 5748 | struct intel_plane *plane; |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5749 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5750 | for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) { |
| 5751 | struct intel_plane_state *plane_state; |
| 5752 | enum plane_id plane_id = plane->id; |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5753 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5754 | if (skl_ddb_entry_equal(&old_crtc_state->wm.skl.plane_ddb_y[plane_id], |
| 5755 | &new_crtc_state->wm.skl.plane_ddb_y[plane_id]) && |
| 5756 | skl_ddb_entry_equal(&old_crtc_state->wm.skl.plane_ddb_uv[plane_id], |
| 5757 | &new_crtc_state->wm.skl.plane_ddb_uv[plane_id])) |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5758 | continue; |
| 5759 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5760 | plane_state = intel_atomic_get_plane_state(state, plane); |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5761 | if (IS_ERR(plane_state)) |
| 5762 | return PTR_ERR(plane_state); |
Maarten Lankhorst | 1ab554b | 2018-10-22 15:51:52 +0200 | [diff] [blame] | 5763 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5764 | new_crtc_state->update_planes |= BIT(plane_id); |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5765 | } |
| 5766 | |
| 5767 | return 0; |
| 5768 | } |
| 5769 | |
| 5770 | static int |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 5771 | skl_compute_ddb(struct intel_atomic_state *state) |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5772 | { |
Ville Syrjälä | 70b1a26 | 2020-02-25 19:11:16 +0200 | [diff] [blame] | 5773 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 5774 | const struct intel_dbuf_state *old_dbuf_state; |
| 5775 | const struct intel_dbuf_state *new_dbuf_state; |
| 5776 | const struct intel_crtc_state *old_crtc_state; |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5777 | struct intel_crtc_state *new_crtc_state; |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 5778 | struct intel_crtc *crtc; |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 5779 | int ret, i; |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5780 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 5781 | for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5782 | new_crtc_state, i) { |
Stanislav Lisovskiy | 072fcc3 | 2020-02-03 01:06:25 +0200 | [diff] [blame] | 5783 | ret = skl_allocate_pipe_ddb(new_crtc_state); |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5784 | if (ret) |
| 5785 | return ret; |
| 5786 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5787 | ret = skl_ddb_add_affected_planes(old_crtc_state, |
| 5788 | new_crtc_state); |
Rodrigo Vivi | 9a30a26 | 2017-06-13 10:52:30 -0700 | [diff] [blame] | 5789 | if (ret) |
| 5790 | return ret; |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5791 | } |
| 5792 | |
Ville Syrjälä | 70b1a26 | 2020-02-25 19:11:16 +0200 | [diff] [blame] | 5793 | old_dbuf_state = intel_atomic_get_old_dbuf_state(state); |
| 5794 | new_dbuf_state = intel_atomic_get_new_dbuf_state(state); |
| 5795 | |
| 5796 | if (new_dbuf_state && |
| 5797 | new_dbuf_state->enabled_slices != old_dbuf_state->enabled_slices) |
| 5798 | drm_dbg_kms(&dev_priv->drm, |
| 5799 | "Enabled dbuf slices 0x%x -> 0x%x (out of %d dbuf slices)\n", |
| 5800 | old_dbuf_state->enabled_slices, |
| 5801 | new_dbuf_state->enabled_slices, |
| 5802 | INTEL_INFO(dev_priv)->num_supported_dbuf_slices); |
| 5803 | |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5804 | return 0; |
| 5805 | } |
| 5806 | |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5807 | static char enast(bool enable) |
| 5808 | { |
| 5809 | return enable ? '*' : ' '; |
| 5810 | } |
| 5811 | |
Matt Roper | 2722efb | 2016-08-17 15:55:55 -0400 | [diff] [blame] | 5812 | static void |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5813 | skl_print_wm_changes(struct intel_atomic_state *state) |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5814 | { |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5815 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 5816 | const struct intel_crtc_state *old_crtc_state; |
| 5817 | const struct intel_crtc_state *new_crtc_state; |
| 5818 | struct intel_plane *plane; |
| 5819 | struct intel_crtc *crtc; |
Maarten Lankhorst | 7570498 | 2016-11-01 12:04:10 +0100 | [diff] [blame] | 5820 | int i; |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5821 | |
Jani Nikula | bdbf43d | 2019-10-28 12:38:15 +0200 | [diff] [blame] | 5822 | if (!drm_debug_enabled(DRM_UT_KMS)) |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5823 | return; |
| 5824 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5825 | for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, |
| 5826 | new_crtc_state, i) { |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5827 | const struct skl_pipe_wm *old_pipe_wm, *new_pipe_wm; |
| 5828 | |
| 5829 | old_pipe_wm = &old_crtc_state->wm.skl.optimal; |
| 5830 | new_pipe_wm = &new_crtc_state->wm.skl.optimal; |
| 5831 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5832 | for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) { |
| 5833 | enum plane_id plane_id = plane->id; |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5834 | const struct skl_ddb_entry *old, *new; |
| 5835 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 5836 | old = &old_crtc_state->wm.skl.plane_ddb_y[plane_id]; |
| 5837 | new = &new_crtc_state->wm.skl.plane_ddb_y[plane_id]; |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5838 | |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5839 | if (skl_ddb_entry_equal(old, new)) |
| 5840 | continue; |
| 5841 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5842 | drm_dbg_kms(&dev_priv->drm, |
| 5843 | "[PLANE:%d:%s] ddb (%4d - %4d) -> (%4d - %4d), size %4d -> %4d\n", |
| 5844 | plane->base.base.id, plane->base.name, |
| 5845 | old->start, old->end, new->start, new->end, |
| 5846 | skl_ddb_entry_size(old), skl_ddb_entry_size(new)); |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5847 | } |
| 5848 | |
| 5849 | for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) { |
| 5850 | enum plane_id plane_id = plane->id; |
| 5851 | const struct skl_plane_wm *old_wm, *new_wm; |
| 5852 | |
| 5853 | old_wm = &old_pipe_wm->planes[plane_id]; |
| 5854 | new_wm = &new_pipe_wm->planes[plane_id]; |
| 5855 | |
| 5856 | if (skl_plane_wm_equals(dev_priv, old_wm, new_wm)) |
| 5857 | continue; |
| 5858 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5859 | drm_dbg_kms(&dev_priv->drm, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5860 | "[PLANE:%d:%s] level %cwm0,%cwm1,%cwm2,%cwm3,%cwm4,%cwm5,%cwm6,%cwm7,%ctwm,%cswm" |
| 5861 | " -> %cwm0,%cwm1,%cwm2,%cwm3,%cwm4,%cwm5,%cwm6,%cwm7,%ctwm,%cswm\n", |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5862 | plane->base.base.id, plane->base.name, |
| 5863 | enast(old_wm->wm[0].plane_en), enast(old_wm->wm[1].plane_en), |
| 5864 | enast(old_wm->wm[2].plane_en), enast(old_wm->wm[3].plane_en), |
| 5865 | enast(old_wm->wm[4].plane_en), enast(old_wm->wm[5].plane_en), |
| 5866 | enast(old_wm->wm[6].plane_en), enast(old_wm->wm[7].plane_en), |
| 5867 | enast(old_wm->trans_wm.plane_en), |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5868 | enast(old_wm->sagv_wm0.plane_en), |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5869 | enast(new_wm->wm[0].plane_en), enast(new_wm->wm[1].plane_en), |
| 5870 | enast(new_wm->wm[2].plane_en), enast(new_wm->wm[3].plane_en), |
| 5871 | enast(new_wm->wm[4].plane_en), enast(new_wm->wm[5].plane_en), |
| 5872 | enast(new_wm->wm[6].plane_en), enast(new_wm->wm[7].plane_en), |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5873 | enast(new_wm->trans_wm.plane_en), |
| 5874 | enast(new_wm->sagv_wm0.plane_en)); |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5875 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5876 | drm_dbg_kms(&dev_priv->drm, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5877 | "[PLANE:%d:%s] lines %c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d" |
| 5878 | " -> %c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d\n", |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5879 | plane->base.base.id, plane->base.name, |
| 5880 | enast(old_wm->wm[0].ignore_lines), old_wm->wm[0].plane_res_l, |
| 5881 | enast(old_wm->wm[1].ignore_lines), old_wm->wm[1].plane_res_l, |
| 5882 | enast(old_wm->wm[2].ignore_lines), old_wm->wm[2].plane_res_l, |
| 5883 | enast(old_wm->wm[3].ignore_lines), old_wm->wm[3].plane_res_l, |
| 5884 | enast(old_wm->wm[4].ignore_lines), old_wm->wm[4].plane_res_l, |
| 5885 | enast(old_wm->wm[5].ignore_lines), old_wm->wm[5].plane_res_l, |
| 5886 | enast(old_wm->wm[6].ignore_lines), old_wm->wm[6].plane_res_l, |
| 5887 | enast(old_wm->wm[7].ignore_lines), old_wm->wm[7].plane_res_l, |
| 5888 | enast(old_wm->trans_wm.ignore_lines), old_wm->trans_wm.plane_res_l, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5889 | enast(old_wm->sagv_wm0.ignore_lines), old_wm->sagv_wm0.plane_res_l, |
Ville Syrjälä | 2ed8e1f | 2019-02-13 18:54:23 +0200 | [diff] [blame] | 5890 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5891 | enast(new_wm->wm[0].ignore_lines), new_wm->wm[0].plane_res_l, |
| 5892 | enast(new_wm->wm[1].ignore_lines), new_wm->wm[1].plane_res_l, |
| 5893 | enast(new_wm->wm[2].ignore_lines), new_wm->wm[2].plane_res_l, |
| 5894 | enast(new_wm->wm[3].ignore_lines), new_wm->wm[3].plane_res_l, |
| 5895 | enast(new_wm->wm[4].ignore_lines), new_wm->wm[4].plane_res_l, |
| 5896 | enast(new_wm->wm[5].ignore_lines), new_wm->wm[5].plane_res_l, |
| 5897 | enast(new_wm->wm[6].ignore_lines), new_wm->wm[6].plane_res_l, |
| 5898 | enast(new_wm->wm[7].ignore_lines), new_wm->wm[7].plane_res_l, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5899 | enast(new_wm->trans_wm.ignore_lines), new_wm->trans_wm.plane_res_l, |
| 5900 | enast(new_wm->sagv_wm0.ignore_lines), new_wm->sagv_wm0.plane_res_l); |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5901 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5902 | drm_dbg_kms(&dev_priv->drm, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5903 | "[PLANE:%d:%s] blocks %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d" |
| 5904 | " -> %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d\n", |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5905 | plane->base.base.id, plane->base.name, |
| 5906 | old_wm->wm[0].plane_res_b, old_wm->wm[1].plane_res_b, |
| 5907 | old_wm->wm[2].plane_res_b, old_wm->wm[3].plane_res_b, |
| 5908 | old_wm->wm[4].plane_res_b, old_wm->wm[5].plane_res_b, |
| 5909 | old_wm->wm[6].plane_res_b, old_wm->wm[7].plane_res_b, |
| 5910 | old_wm->trans_wm.plane_res_b, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5911 | old_wm->sagv_wm0.plane_res_b, |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5912 | new_wm->wm[0].plane_res_b, new_wm->wm[1].plane_res_b, |
| 5913 | new_wm->wm[2].plane_res_b, new_wm->wm[3].plane_res_b, |
| 5914 | new_wm->wm[4].plane_res_b, new_wm->wm[5].plane_res_b, |
| 5915 | new_wm->wm[6].plane_res_b, new_wm->wm[7].plane_res_b, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5916 | new_wm->trans_wm.plane_res_b, |
| 5917 | new_wm->sagv_wm0.plane_res_b); |
Ville Syrjälä | ab98e94 | 2019-02-08 22:05:27 +0200 | [diff] [blame] | 5918 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5919 | drm_dbg_kms(&dev_priv->drm, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5920 | "[PLANE:%d:%s] min_ddb %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d" |
| 5921 | " -> %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d\n", |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5922 | plane->base.base.id, plane->base.name, |
| 5923 | old_wm->wm[0].min_ddb_alloc, old_wm->wm[1].min_ddb_alloc, |
| 5924 | old_wm->wm[2].min_ddb_alloc, old_wm->wm[3].min_ddb_alloc, |
| 5925 | old_wm->wm[4].min_ddb_alloc, old_wm->wm[5].min_ddb_alloc, |
| 5926 | old_wm->wm[6].min_ddb_alloc, old_wm->wm[7].min_ddb_alloc, |
| 5927 | old_wm->trans_wm.min_ddb_alloc, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5928 | old_wm->sagv_wm0.min_ddb_alloc, |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 5929 | new_wm->wm[0].min_ddb_alloc, new_wm->wm[1].min_ddb_alloc, |
| 5930 | new_wm->wm[2].min_ddb_alloc, new_wm->wm[3].min_ddb_alloc, |
| 5931 | new_wm->wm[4].min_ddb_alloc, new_wm->wm[5].min_ddb_alloc, |
| 5932 | new_wm->wm[6].min_ddb_alloc, new_wm->wm[7].min_ddb_alloc, |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 5933 | new_wm->trans_wm.min_ddb_alloc, |
| 5934 | new_wm->sagv_wm0.min_ddb_alloc); |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 5935 | } |
| 5936 | } |
| 5937 | } |
| 5938 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5939 | static int intel_add_affected_pipes(struct intel_atomic_state *state, |
| 5940 | u8 pipe_mask) |
Ville Syrjälä | 49e0ed3 | 2019-10-11 23:09:43 +0300 | [diff] [blame] | 5941 | { |
| 5942 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 5943 | struct intel_crtc *crtc; |
| 5944 | |
| 5945 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
| 5946 | struct intel_crtc_state *crtc_state; |
| 5947 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5948 | if ((pipe_mask & BIT(crtc->pipe)) == 0) |
| 5949 | continue; |
| 5950 | |
Ville Syrjälä | 49e0ed3 | 2019-10-11 23:09:43 +0300 | [diff] [blame] | 5951 | crtc_state = intel_atomic_get_crtc_state(&state->base, crtc); |
| 5952 | if (IS_ERR(crtc_state)) |
| 5953 | return PTR_ERR(crtc_state); |
| 5954 | } |
| 5955 | |
| 5956 | return 0; |
| 5957 | } |
| 5958 | |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5959 | static int |
Ville Syrjälä | d7a1458 | 2019-10-11 23:09:42 +0300 | [diff] [blame] | 5960 | skl_ddb_add_affected_pipes(struct intel_atomic_state *state) |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5961 | { |
Ville Syrjälä | 49e0ed3 | 2019-10-11 23:09:43 +0300 | [diff] [blame] | 5962 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5963 | struct intel_crtc_state *crtc_state; |
| 5964 | struct intel_crtc *crtc; |
| 5965 | int i, ret; |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 5966 | |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 5967 | if (dev_priv->wm.distrust_bios_wm) { |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5968 | /* |
| 5969 | * skl_ddb_get_pipe_allocation_limits() currently requires |
| 5970 | * all active pipes to be included in the state so that |
| 5971 | * it can redistribute the dbuf among them, and it really |
| 5972 |  * wants to recompute things when distrust_bios_wm is set, |
| 5973 |  * so we add all the pipes to the state. |
| 5974 | */ |
| 5975 | ret = intel_add_affected_pipes(state, ~0); |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 5976 | if (ret) |
| 5977 | return ret; |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 5978 | } |
| 5979 | |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5980 | for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) { |
| 5981 | struct intel_dbuf_state *new_dbuf_state; |
| 5982 | const struct intel_dbuf_state *old_dbuf_state; |
| 5983 | |
| 5984 | new_dbuf_state = intel_atomic_get_dbuf_state(state); |
| 5985 | if (IS_ERR(new_dbuf_state)) |
Chris Wilson | cba597a | 2020-05-16 20:09:40 +0100 | [diff] [blame] | 5986 | return PTR_ERR(new_dbuf_state); |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5987 | |
| 5988 | old_dbuf_state = intel_atomic_get_old_dbuf_state(state); |
| 5989 | |
| 5990 | new_dbuf_state->active_pipes = |
| 5991 | intel_calc_active_pipes(state, old_dbuf_state->active_pipes); |
| 5992 | |
| 5993 | if (old_dbuf_state->active_pipes == new_dbuf_state->active_pipes) |
| 5994 | break; |
| 5995 | |
| 5996 | ret = intel_atomic_lock_global_state(&new_dbuf_state->base); |
Ville Syrjälä | 49e0ed3 | 2019-10-11 23:09:43 +0300 | [diff] [blame] | 5997 | if (ret) |
| 5998 | return ret; |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 5999 | |
| 6000 | /* |
| 6001 | * skl_ddb_get_pipe_allocation_limits() currently requires |
| 6002 | * all active pipes to be included in the state so that |
| 6003 | * it can redistribute the dbuf among them. |
| 6004 | */ |
| 6005 | ret = intel_add_affected_pipes(state, |
| 6006 | new_dbuf_state->active_pipes); |
| 6007 | if (ret) |
| 6008 | return ret; |
| 6009 | |
| 6010 | break; |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 6011 | } |
| 6012 | |
| 6013 | return 0; |
| 6014 | } |
| 6015 | |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 6016 | /* |
| 6017 | * To make sure the cursor watermark registers are always consistent |
| 6018 | * with our computed state the following scenario needs special |
| 6019 | * treatment: |
| 6020 | * |
| 6021 | * 1. enable cursor |
| 6022 | * 2. move cursor entirely offscreen |
| 6023 | * 3. disable cursor |
| 6024 | * |
| 6025 | * Step 2. does call .disable_plane() but does not zero the watermarks |
| 6026 | * (since we consider an offscreen cursor still active for the purposes |
| 6027 | * of watermarks). Step 3. would not normally call .disable_plane() |
| 6028 | * because the actual plane visibility isn't changing, and we don't |
| 6029 | * deallocate the cursor ddb until the pipe gets disabled. So we must |
| 6030 | * force step 3. to call .disable_plane() to update the watermark |
| 6031 | * registers properly. |
| 6032 | * |
| 6033 |  * Other planes do not suffer from this issue as their watermarks are |
| 6034 | * calculated based on the actual plane visibility. The only time this |
| 6035 | * can trigger for the other planes is during the initial readout as the |
| 6036 | * default value of the watermarks registers is not zero. |
| 6037 | */ |
| 6038 | static int skl_wm_add_affected_planes(struct intel_atomic_state *state, |
| 6039 | struct intel_crtc *crtc) |
| 6040 | { |
| 6041 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 6042 | const struct intel_crtc_state *old_crtc_state = |
| 6043 | intel_atomic_get_old_crtc_state(state, crtc); |
| 6044 | struct intel_crtc_state *new_crtc_state = |
| 6045 | intel_atomic_get_new_crtc_state(state, crtc); |
| 6046 | struct intel_plane *plane; |
| 6047 | |
| 6048 | for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) { |
| 6049 | struct intel_plane_state *plane_state; |
| 6050 | enum plane_id plane_id = plane->id; |
| 6051 | |
| 6052 | /* |
| 6053 | * Force a full wm update for every plane on modeset. |
| 6054 | * Required because the reset value of the wm registers |
| 6055 | * is non-zero, whereas we want all disabled planes to |
| 6056 | * have zero watermarks. So if we turn off the relevant |
| 6057 | * power well the hardware state will go out of sync |
| 6058 | * with the software state. |
| 6059 | */ |
Maarten Lankhorst | 2225f3c | 2019-10-31 12:26:03 +0100 | [diff] [blame] | 6060 | if (!drm_atomic_crtc_needs_modeset(&new_crtc_state->uapi) && |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 6061 | skl_plane_wm_equals(dev_priv, |
| 6062 | &old_crtc_state->wm.skl.optimal.planes[plane_id], |
| 6063 | &new_crtc_state->wm.skl.optimal.planes[plane_id])) |
| 6064 | continue; |
| 6065 | |
| 6066 | plane_state = intel_atomic_get_plane_state(state, plane); |
| 6067 | if (IS_ERR(plane_state)) |
| 6068 | return PTR_ERR(plane_state); |
| 6069 | |
| 6070 | new_crtc_state->update_planes |= BIT(plane_id); |
| 6071 | } |
| 6072 | |
| 6073 | return 0; |
| 6074 | } |
| 6075 | |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 6076 | static int |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6077 | skl_compute_wm(struct intel_atomic_state *state) |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 6078 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6079 | struct intel_crtc *crtc; |
Ville Syrjälä | 8cac9fd | 2019-03-12 22:58:44 +0200 | [diff] [blame] | 6080 | struct intel_crtc_state *new_crtc_state; |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6081 | struct intel_crtc_state *old_crtc_state; |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 6082 | int ret, i; |
| 6083 | |
Ville Syrjälä | d7a1458 | 2019-10-11 23:09:42 +0300 | [diff] [blame] | 6084 | ret = skl_ddb_add_affected_pipes(state); |
| 6085 | if (ret) |
Mahesh Kumar | e1f96a6 | 2018-04-09 09:11:08 +0530 | [diff] [blame] | 6086 | return ret; |
| 6087 | |
Matt Roper | 734fa01 | 2016-05-12 15:11:40 -0700 | [diff] [blame] | 6088 | /* |
| 6089 |  * Calculate WMs for all pipes that are part of this transaction. |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 6090 |  * Note that skl_ddb_add_affected_pipes may have added more CRTCs that |
Ville Syrjälä | f119a5e | 2020-01-20 19:47:13 +0200 | [diff] [blame] | 6091 | * weren't otherwise being modified if pipe allocations had to change. |
Matt Roper | 734fa01 | 2016-05-12 15:11:40 -0700 | [diff] [blame] | 6092 | */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6093 | for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, |
Ville Syrjälä | 8cac9fd | 2019-03-12 22:58:44 +0200 | [diff] [blame] | 6094 | new_crtc_state, i) { |
| 6095 | ret = skl_build_pipe_wm(new_crtc_state); |
Ville Syrjälä | ff43bc3 | 2018-11-27 18:59:00 +0200 | [diff] [blame] | 6096 | if (ret) |
| 6097 | return ret; |
Matt Roper | 734fa01 | 2016-05-12 15:11:40 -0700 | [diff] [blame] | 6098 | } |
| 6099 | |
Matt Roper | d8e8749 | 2018-12-11 09:31:07 -0800 | [diff] [blame] | 6100 | ret = skl_compute_ddb(state); |
| 6101 | if (ret) |
| 6102 | return ret; |
| 6103 | |
Stanislav Lisovskiy | ecab0f3 | 2020-04-30 22:56:34 +0300 | [diff] [blame] | 6104 | ret = intel_compute_sagv_mask(state); |
| 6105 | if (ret) |
| 6106 | return ret; |
Stanislav Lisovskiy | 9728889 | 2020-04-30 22:17:57 +0300 | [diff] [blame] | 6107 | |
Ville Syrjälä | 23baedd | 2020-02-28 22:35:50 +0200 | [diff] [blame] | 6108 | /* |
| 6109 | * skl_compute_ddb() will have adjusted the final watermarks |
| 6110 | * based on how much ddb is available. Now we can actually |
| 6111 | * check if the final watermarks changed. |
| 6112 | */ |
| 6113 | for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, |
| 6114 | new_crtc_state, i) { |
| 6115 | ret = skl_wm_add_affected_planes(state, crtc); |
| 6116 | if (ret) |
| 6117 | return ret; |
| 6118 | } |
| 6119 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6120 | skl_print_wm_changes(state); |
cpaul@redhat.com | 413fc53 | 2016-10-14 17:31:54 -0400 | [diff] [blame] | 6121 | |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 6122 | return 0; |
| 6123 | } |
| 6124 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6125 | static void ilk_compute_wm_config(struct drm_i915_private *dev_priv, |
Ville Syrjälä | d890565 | 2016-01-14 14:53:35 +0200 | [diff] [blame] | 6126 | struct intel_wm_config *config) |
| 6127 | { |
| 6128 | struct intel_crtc *crtc; |
| 6129 | |
| 6130 | /* Compute the currently _active_ config */ |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6131 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Ville Syrjälä | d890565 | 2016-01-14 14:53:35 +0200 | [diff] [blame] | 6132 | const struct intel_pipe_wm *wm = &crtc->wm.active.ilk; |
| 6133 | |
| 6134 | if (!wm->pipe_enabled) |
| 6135 | continue; |
| 6136 | |
| 6137 | config->sprites_enabled |= wm->sprites_enabled; |
| 6138 | config->sprites_scaled |= wm->sprites_scaled; |
| 6139 | config->num_pipes_active++; |
| 6140 | } |
| 6141 | } |
| 6142 | |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6143 | static void ilk_program_watermarks(struct drm_i915_private *dev_priv) |
Paulo Zanoni | 801bcff | 2013-05-31 10:08:35 -0300 | [diff] [blame] | 6144 | { |
Ville Syrjälä | b9d5c83 | 2015-09-24 15:53:14 -0700 | [diff] [blame] | 6145 | struct intel_pipe_wm lp_wm_1_2 = {}, lp_wm_5_6 = {}, *best_lp_wm; |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 6146 | struct ilk_wm_maximums max; |
Ville Syrjälä | d890565 | 2016-01-14 14:53:35 +0200 | [diff] [blame] | 6147 | struct intel_wm_config config = {}; |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 6148 | struct ilk_wm_values results = {}; |
Ville Syrjälä | 77c122b | 2013-08-06 22:24:04 +0300 | [diff] [blame] | 6149 | enum intel_ddb_partitioning partitioning; |
Matt Roper | 261a27d | 2015-10-08 15:28:25 -0700 | [diff] [blame] | 6150 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6151 | ilk_compute_wm_config(dev_priv, &config); |
Ville Syrjälä | d890565 | 2016-01-14 14:53:35 +0200 | [diff] [blame] | 6152 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6153 | ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_1_2, &max); |
| 6154 | ilk_wm_merge(dev_priv, &config, &max, &lp_wm_1_2); |
Ville Syrjälä | 0362c78 | 2013-10-09 19:17:57 +0300 | [diff] [blame] | 6155 | |
Ville Syrjälä | a485bfb | 2013-10-09 19:17:59 +0300 | [diff] [blame] | 6156 | /* 5/6 split only in single pipe config on IVB+ */ |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 6157 | if (INTEL_GEN(dev_priv) >= 7 && |
Ville Syrjälä | d890565 | 2016-01-14 14:53:35 +0200 | [diff] [blame] | 6158 | config.num_pipes_active == 1 && config.sprites_enabled) { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6159 | ilk_compute_wm_maximums(dev_priv, 1, &config, INTEL_DDB_PART_5_6, &max); |
| 6160 | ilk_wm_merge(dev_priv, &config, &max, &lp_wm_5_6); |
Ville Syrjälä | a485bfb | 2013-10-09 19:17:59 +0300 | [diff] [blame] | 6161 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6162 | best_lp_wm = ilk_find_best_result(dev_priv, &lp_wm_1_2, &lp_wm_5_6); |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 6163 | } else { |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 6164 | best_lp_wm = &lp_wm_1_2; |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 6165 | } |
| 6166 | |
Ville Syrjälä | 198a1e9 | 2013-10-09 19:17:58 +0300 | [diff] [blame] | 6167 | partitioning = (best_lp_wm == &lp_wm_1_2) ? |
Ville Syrjälä | 77c122b | 2013-08-06 22:24:04 +0300 | [diff] [blame] | 6168 | INTEL_DDB_PART_1_2 : INTEL_DDB_PART_5_6; |
Paulo Zanoni | 861f338 | 2013-05-31 10:19:21 -0300 | [diff] [blame] | 6169 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6170 | ilk_compute_wm_results(dev_priv, best_lp_wm, partitioning, &results); |
Ville Syrjälä | 609cede | 2013-10-09 19:18:03 +0300 | [diff] [blame] | 6171 | |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 6172 | ilk_write_wm_values(dev_priv, &results); |
Paulo Zanoni | 1011d8c | 2013-05-09 16:55:50 -0300 | [diff] [blame] | 6173 | } |
| 6174 | |
Maarten Lankhorst | ccf010f | 2016-11-08 13:55:32 +0100 | [diff] [blame] | 6175 | static void ilk_initial_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 6176 | struct intel_crtc *crtc) |
Ville Syrjälä | b9d5c83 | 2015-09-24 15:53:14 -0700 | [diff] [blame] | 6177 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 6178 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 6179 | const struct intel_crtc_state *crtc_state = |
| 6180 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | b9d5c83 | 2015-09-24 15:53:14 -0700 | [diff] [blame] | 6181 | |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6182 | mutex_lock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | 88016a9 | 2019-07-01 19:05:45 +0300 | [diff] [blame] | 6183 | crtc->wm.active.ilk = crtc_state->wm.ilk.intermediate; |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6184 | ilk_program_watermarks(dev_priv); |
| 6185 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 6186 | } |
Ville Syrjälä | b9d5c83 | 2015-09-24 15:53:14 -0700 | [diff] [blame] | 6187 | |
Maarten Lankhorst | ccf010f | 2016-11-08 13:55:32 +0100 | [diff] [blame] | 6188 | static void ilk_optimize_watermarks(struct intel_atomic_state *state, |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 6189 | struct intel_crtc *crtc) |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6190 | { |
Ville Syrjälä | 7a8fdb1f | 2019-11-18 18:44:26 +0200 | [diff] [blame] | 6191 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 6192 | const struct intel_crtc_state *crtc_state = |
| 6193 | intel_atomic_get_new_crtc_state(state, crtc); |
Ville Syrjälä | 88016a9 | 2019-07-01 19:05:45 +0300 | [diff] [blame] | 6194 | |
| 6195 | if (!crtc_state->wm.need_postvbl_update) |
| 6196 | return; |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6197 | |
| 6198 | mutex_lock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | 88016a9 | 2019-07-01 19:05:45 +0300 | [diff] [blame] | 6199 | crtc->wm.active.ilk = crtc_state->wm.ilk.optimal; |
| 6200 | ilk_program_watermarks(dev_priv); |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 6201 | mutex_unlock(&dev_priv->wm.wm_mutex); |
Ville Syrjälä | b9d5c83 | 2015-09-24 15:53:14 -0700 | [diff] [blame] | 6202 | } |
| 6203 | |
Jani Nikula | 81b55ef | 2020-04-20 17:04:38 +0300 | [diff] [blame] | 6204 | static void skl_wm_level_from_reg_val(u32 val, struct skl_wm_level *level) |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6205 | { |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 6206 | level->plane_en = val & PLANE_WM_EN; |
Ville Syrjälä | 2ed8e1f | 2019-02-13 18:54:23 +0200 | [diff] [blame] | 6207 | level->ignore_lines = val & PLANE_WM_IGNORE_LINES; |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 6208 | level->plane_res_b = val & PLANE_WM_BLOCKS_MASK; |
| 6209 | level->plane_res_l = (val >> PLANE_WM_LINES_SHIFT) & |
| 6210 | PLANE_WM_LINES_MASK; |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6211 | } |
| 6212 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6213 | void skl_pipe_wm_get_hw_state(struct intel_crtc *crtc, |
cpaul@redhat.com | bf9d99a | 2016-10-14 17:31:55 -0400 | [diff] [blame] | 6214 | struct skl_pipe_wm *out) |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6215 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6216 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
| 6217 | enum pipe pipe = crtc->pipe; |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 6218 | int level, max_level; |
| 6219 | enum plane_id plane_id; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6220 | u32 val; |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6221 | |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 6222 | max_level = ilk_wm_max_level(dev_priv); |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6223 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6224 | for_each_plane_id_on_crtc(crtc, plane_id) { |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 6225 | struct skl_plane_wm *wm = &out->planes[plane_id]; |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6226 | |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 6227 | for (level = 0; level <= max_level; level++) { |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 6228 | if (plane_id != PLANE_CURSOR) |
| 6229 | val = I915_READ(PLANE_WM(pipe, plane_id, level)); |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 6230 | else |
| 6231 | val = I915_READ(CUR_WM(pipe, level)); |
| 6232 | |
| 6233 | skl_wm_level_from_reg_val(val, &wm->wm[level]); |
| 6234 | } |
| 6235 | |
Stanislav Lisovskiy | 7241c57 | 2020-05-14 10:48:51 +0300 | [diff] [blame] | 6236 | if (INTEL_GEN(dev_priv) >= 12) |
| 6237 | wm->sagv_wm0 = wm->wm[0]; |
| 6238 | |
Ville Syrjälä | d5cdfdf5 | 2016-11-22 18:01:58 +0200 | [diff] [blame] | 6239 | if (plane_id != PLANE_CURSOR) |
| 6240 | val = I915_READ(PLANE_WM_TRANS(pipe, plane_id)); |
cpaul@redhat.com | d8c0faf | 2016-10-18 16:09:49 -0200 | [diff] [blame] | 6241 | else |
| 6242 | val = I915_READ(CUR_WM_TRANS(pipe)); |
| 6243 | |
| 6244 | skl_wm_level_from_reg_val(val, &wm->trans_wm); |
| 6245 | } |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6246 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6247 | if (!crtc->active) |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6248 | return; |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6249 | } |
| 6250 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6251 | void skl_wm_get_hw_state(struct drm_i915_private *dev_priv) |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6252 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6253 | struct intel_crtc *crtc; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 6254 | struct intel_crtc_state *crtc_state; |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6255 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6256 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 6257 | crtc_state = to_intel_crtc_state(crtc->base.state); |
cpaul@redhat.com | bf9d99a | 2016-10-14 17:31:55 -0400 | [diff] [blame] | 6258 | |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 6259 | skl_pipe_wm_get_hw_state(crtc, &crtc_state->wm.skl.optimal); |
cpaul@redhat.com | bf9d99a | 2016-10-14 17:31:55 -0400 | [diff] [blame] | 6260 | } |
Matt Roper | a1de91e | 2016-05-12 07:05:57 -0700 | [diff] [blame] | 6261 | |
Ville Syrjälä | d06a79d | 2019-08-21 20:30:29 +0300 | [diff] [blame] | 6262 | if (dev_priv->active_pipes) { |
Matt Roper | 279e99d | 2016-05-12 07:06:02 -0700 | [diff] [blame] | 6263 | /* Fully recompute DDB on first atomic commit */ |
| 6264 | dev_priv->wm.distrust_bios_wm = true; |
Matt Roper | 279e99d | 2016-05-12 07:06:02 -0700 | [diff] [blame] | 6265 | } |
Pradeep Bhat | 3078999 | 2014-11-04 17:06:45 +0000 | [diff] [blame] | 6266 | } |
| 6267 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6268 | static void ilk_pipe_wm_get_hw_state(struct intel_crtc *crtc) |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6269 | { |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6270 | struct drm_device *dev = crtc->base.dev; |
Chris Wilson | fac5e23 | 2016-07-04 11:34:36 +0100 | [diff] [blame] | 6271 | struct drm_i915_private *dev_priv = to_i915(dev); |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 6272 | struct ilk_wm_values *hw = &dev_priv->wm.hw; |
Maarten Lankhorst | ec19364 | 2019-06-28 10:55:17 +0200 | [diff] [blame] | 6273 | struct intel_crtc_state *crtc_state = to_intel_crtc_state(crtc->base.state); |
| 6274 | struct intel_pipe_wm *active = &crtc_state->wm.ilk.optimal; |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6275 | enum pipe pipe = crtc->pipe; |
Ville Syrjälä | f0f59a0 | 2015-11-18 15:33:26 +0200 | [diff] [blame] | 6276 | static const i915_reg_t wm0_pipe_reg[] = { |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6277 | [PIPE_A] = WM0_PIPEA_ILK, |
| 6278 | [PIPE_B] = WM0_PIPEB_ILK, |
| 6279 | [PIPE_C] = WM0_PIPEC_IVB, |
| 6280 | }; |
| 6281 | |
| 6282 | hw->wm_pipe[pipe] = I915_READ(wm0_pipe_reg[pipe]); |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6283 | |
Ville Syrjälä | 1560653 | 2016-05-13 17:55:17 +0300 | [diff] [blame] | 6284 | memset(active, 0, sizeof(*active)); |
| 6285 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6286 | active->pipe_enabled = crtc->active; |
Ville Syrjälä | 2a44b76 | 2014-03-07 18:32:09 +0200 | [diff] [blame] | 6287 | |
| 6288 | if (active->pipe_enabled) { |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6289 | u32 tmp = hw->wm_pipe[pipe]; |
| 6290 | |
| 6291 | /* |
| 6292 | * For active pipes LP0 watermark is marked as |
| 6293 |  * enabled, and LP1+ watermarks as disabled since |
| 6294 | * we can't really reverse compute them in case |
| 6295 | * multiple pipes are active. |
| 6296 | */ |
| 6297 | active->wm[0].enable = true; |
| 6298 | active->wm[0].pri_val = (tmp & WM0_PIPE_PLANE_MASK) >> WM0_PIPE_PLANE_SHIFT; |
| 6299 | active->wm[0].spr_val = (tmp & WM0_PIPE_SPRITE_MASK) >> WM0_PIPE_SPRITE_SHIFT; |
| 6300 | active->wm[0].cur_val = tmp & WM0_PIPE_CURSOR_MASK; |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6301 | } else { |
Tvrtko Ursulin | 5db9401 | 2016-10-13 11:03:10 +0100 | [diff] [blame] | 6302 | int level, max_level = ilk_wm_max_level(dev_priv); |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6303 | |
| 6304 | /* |
| 6305 | * For inactive pipes, all watermark levels |
| 6306 | * should be marked as enabled but zeroed, |
| 6307 | * which is what we'd compute them to. |
| 6308 | */ |
| 6309 | for (level = 0; level <= max_level; level++) |
| 6310 | active->wm[level].enable = true; |
| 6311 | } |
Matt Roper | 4e0963c | 2015-09-24 15:53:15 -0700 | [diff] [blame] | 6312 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6313 | crtc->wm.active.ilk = *active; |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6314 | } |
| 6315 | |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6316 | #define _FW_WM(value, plane) \ |
| 6317 | (((value) & DSPFW_ ## plane ## _MASK) >> DSPFW_ ## plane ## _SHIFT) |
| 6318 | #define _FW_WM_VLV(value, plane) \ |
| 6319 | (((value) & DSPFW_ ## plane ## _MASK_VLV) >> DSPFW_ ## plane ## _SHIFT) |
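| |  /* |
| |   * For example, by token pasting _FW_WM(tmp, SR) expands to |
| |   * (((tmp) & DSPFW_SR_MASK) >> DSPFW_SR_SHIFT), and _FW_WM_VLV(tmp, PLANEB) |
| |   * to (((tmp) & DSPFW_PLANEB_MASK_VLV) >> DSPFW_PLANEB_SHIFT). |
| |   */ |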
| 6320 | |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6321 | static void g4x_read_wm_values(struct drm_i915_private *dev_priv, |
| 6322 | struct g4x_wm_values *wm) |
| 6323 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6324 | u32 tmp; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6325 | |
| 6326 | tmp = I915_READ(DSPFW1); |
| 6327 | wm->sr.plane = _FW_WM(tmp, SR); |
| 6328 | wm->pipe[PIPE_B].plane[PLANE_CURSOR] = _FW_WM(tmp, CURSORB); |
| 6329 | wm->pipe[PIPE_B].plane[PLANE_PRIMARY] = _FW_WM(tmp, PLANEB); |
| 6330 | wm->pipe[PIPE_A].plane[PLANE_PRIMARY] = _FW_WM(tmp, PLANEA); |
| 6331 | |
| 6332 | tmp = I915_READ(DSPFW2); |
| 6333 | wm->fbc_en = tmp & DSPFW_FBC_SR_EN; |
| 6334 | wm->sr.fbc = _FW_WM(tmp, FBC_SR); |
| 6335 | wm->hpll.fbc = _FW_WM(tmp, FBC_HPLL_SR); |
| 6336 | wm->pipe[PIPE_B].plane[PLANE_SPRITE0] = _FW_WM(tmp, SPRITEB); |
| 6337 | wm->pipe[PIPE_A].plane[PLANE_CURSOR] = _FW_WM(tmp, CURSORA); |
| 6338 | wm->pipe[PIPE_A].plane[PLANE_SPRITE0] = _FW_WM(tmp, SPRITEA); |
| 6339 | |
| 6340 | tmp = I915_READ(DSPFW3); |
| 6341 | wm->hpll_en = tmp & DSPFW_HPLL_SR_EN; |
| 6342 | wm->sr.cursor = _FW_WM(tmp, CURSOR_SR); |
| 6343 | wm->hpll.cursor = _FW_WM(tmp, HPLL_CURSOR); |
| 6344 | wm->hpll.plane = _FW_WM(tmp, HPLL_SR); |
| 6345 | } |
| 6346 | |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6347 | static void vlv_read_wm_values(struct drm_i915_private *dev_priv, |
| 6348 | struct vlv_wm_values *wm) |
| 6349 | { |
| 6350 | enum pipe pipe; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6351 | u32 tmp; |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6352 | |
| 6353 | for_each_pipe(dev_priv, pipe) { |
| 6354 | tmp = I915_READ(VLV_DDL(pipe)); |
| 6355 | |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6356 | wm->ddl[pipe].plane[PLANE_PRIMARY] = |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6357 | (tmp >> DDL_PLANE_SHIFT) & (DDL_PRECISION_HIGH | DRAIN_LATENCY_MASK); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6358 | wm->ddl[pipe].plane[PLANE_CURSOR] = |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6359 | (tmp >> DDL_CURSOR_SHIFT) & (DDL_PRECISION_HIGH | DRAIN_LATENCY_MASK); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6360 | wm->ddl[pipe].plane[PLANE_SPRITE0] = |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6361 | (tmp >> DDL_SPRITE_SHIFT(0)) & (DDL_PRECISION_HIGH | DRAIN_LATENCY_MASK); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6362 | wm->ddl[pipe].plane[PLANE_SPRITE1] = |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6363 | (tmp >> DDL_SPRITE_SHIFT(1)) & (DDL_PRECISION_HIGH | DRAIN_LATENCY_MASK); |
| 6364 | } |
| 6365 | |
| 6366 | tmp = I915_READ(DSPFW1); |
| 6367 | wm->sr.plane = _FW_WM(tmp, SR); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6368 | wm->pipe[PIPE_B].plane[PLANE_CURSOR] = _FW_WM(tmp, CURSORB); |
| 6369 | wm->pipe[PIPE_B].plane[PLANE_PRIMARY] = _FW_WM_VLV(tmp, PLANEB); |
| 6370 | wm->pipe[PIPE_A].plane[PLANE_PRIMARY] = _FW_WM_VLV(tmp, PLANEA); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6371 | |
| 6372 | tmp = I915_READ(DSPFW2); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6373 | wm->pipe[PIPE_A].plane[PLANE_SPRITE1] = _FW_WM_VLV(tmp, SPRITEB); |
| 6374 | wm->pipe[PIPE_A].plane[PLANE_CURSOR] = _FW_WM(tmp, CURSORA); |
| 6375 | wm->pipe[PIPE_A].plane[PLANE_SPRITE0] = _FW_WM_VLV(tmp, SPRITEA); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6376 | |
| 6377 | tmp = I915_READ(DSPFW3); |
| 6378 | wm->sr.cursor = _FW_WM(tmp, CURSOR_SR); |
| 6379 | |
| 6380 | if (IS_CHERRYVIEW(dev_priv)) { |
| 6381 | tmp = I915_READ(DSPFW7_CHV); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6382 | wm->pipe[PIPE_B].plane[PLANE_SPRITE1] = _FW_WM_VLV(tmp, SPRITED); |
| 6383 | wm->pipe[PIPE_B].plane[PLANE_SPRITE0] = _FW_WM_VLV(tmp, SPRITEC); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6384 | |
| 6385 | tmp = I915_READ(DSPFW8_CHV); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6386 | wm->pipe[PIPE_C].plane[PLANE_SPRITE1] = _FW_WM_VLV(tmp, SPRITEF); |
| 6387 | wm->pipe[PIPE_C].plane[PLANE_SPRITE0] = _FW_WM_VLV(tmp, SPRITEE); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6388 | |
| 6389 | tmp = I915_READ(DSPFW9_CHV); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6390 | wm->pipe[PIPE_C].plane[PLANE_PRIMARY] = _FW_WM_VLV(tmp, PLANEC); |
| 6391 | wm->pipe[PIPE_C].plane[PLANE_CURSOR] = _FW_WM(tmp, CURSORC); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6392 | |
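| |  		/* |
| |  		 * DSPHOWM holds the extra high bit of each plane/sprite |
| |  		 * watermark (OR'd in at bit 8 below) and of the SR plane |
| |  		 * watermark (at bit 9), extending the fields read above. |
| |  		 */ |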
| 6393 | tmp = I915_READ(DSPHOWM); |
| 6394 | wm->sr.plane |= _FW_WM(tmp, SR_HI) << 9; |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6395 | wm->pipe[PIPE_C].plane[PLANE_SPRITE1] |= _FW_WM(tmp, SPRITEF_HI) << 8; |
| 6396 | wm->pipe[PIPE_C].plane[PLANE_SPRITE0] |= _FW_WM(tmp, SPRITEE_HI) << 8; |
| 6397 | wm->pipe[PIPE_C].plane[PLANE_PRIMARY] |= _FW_WM(tmp, PLANEC_HI) << 8; |
| 6398 | wm->pipe[PIPE_B].plane[PLANE_SPRITE1] |= _FW_WM(tmp, SPRITED_HI) << 8; |
| 6399 | wm->pipe[PIPE_B].plane[PLANE_SPRITE0] |= _FW_WM(tmp, SPRITEC_HI) << 8; |
| 6400 | wm->pipe[PIPE_B].plane[PLANE_PRIMARY] |= _FW_WM(tmp, PLANEB_HI) << 8; |
| 6401 | wm->pipe[PIPE_A].plane[PLANE_SPRITE1] |= _FW_WM(tmp, SPRITEB_HI) << 8; |
| 6402 | wm->pipe[PIPE_A].plane[PLANE_SPRITE0] |= _FW_WM(tmp, SPRITEA_HI) << 8; |
| 6403 | wm->pipe[PIPE_A].plane[PLANE_PRIMARY] |= _FW_WM(tmp, PLANEA_HI) << 8; |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6404 | } else { |
| 6405 | tmp = I915_READ(DSPFW7); |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6406 | wm->pipe[PIPE_B].plane[PLANE_SPRITE1] = _FW_WM_VLV(tmp, SPRITED); |
| 6407 | wm->pipe[PIPE_B].plane[PLANE_SPRITE0] = _FW_WM_VLV(tmp, SPRITEC); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6408 | |
| 6409 | tmp = I915_READ(DSPHOWM); |
| 6410 | wm->sr.plane |= _FW_WM(tmp, SR_HI) << 9; |
Ville Syrjälä | 1b31389 | 2016-11-28 19:37:08 +0200 | [diff] [blame] | 6411 | wm->pipe[PIPE_B].plane[PLANE_SPRITE1] |= _FW_WM(tmp, SPRITED_HI) << 8; |
| 6412 | wm->pipe[PIPE_B].plane[PLANE_SPRITE0] |= _FW_WM(tmp, SPRITEC_HI) << 8; |
| 6413 | wm->pipe[PIPE_B].plane[PLANE_PRIMARY] |= _FW_WM(tmp, PLANEB_HI) << 8; |
| 6414 | wm->pipe[PIPE_A].plane[PLANE_SPRITE1] |= _FW_WM(tmp, SPRITEB_HI) << 8; |
| 6415 | wm->pipe[PIPE_A].plane[PLANE_SPRITE0] |= _FW_WM(tmp, SPRITEA_HI) << 8; |
| 6416 | wm->pipe[PIPE_A].plane[PLANE_PRIMARY] |= _FW_WM(tmp, PLANEA_HI) << 8; |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6417 | } |
| 6418 | } |
| 6419 | |
| 6420 | #undef _FW_WM |
| 6421 | #undef _FW_WM_VLV |
| 6422 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6423 | void g4x_wm_get_hw_state(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6424 | { |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6425 | struct g4x_wm_values *wm = &dev_priv->wm.g4x; |
| 6426 | struct intel_crtc *crtc; |
| 6427 | |
| 6428 | g4x_read_wm_values(dev_priv, wm); |
| 6429 | |
| 6430 | wm->cxsr = I915_READ(FW_BLC_SELF) & FW_BLC_SELF_EN; |
| 6431 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6432 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6433 | struct intel_crtc_state *crtc_state = |
| 6434 | to_intel_crtc_state(crtc->base.state); |
| 6435 | struct g4x_wm_state *active = &crtc->wm.active.g4x; |
| 6436 | struct g4x_pipe_wm *raw; |
| 6437 | enum pipe pipe = crtc->pipe; |
| 6438 | enum plane_id plane_id; |
| 6439 | int level, max_level; |
| 6440 | |
| 6441 | active->cxsr = wm->cxsr; |
| 6442 | active->hpll_en = wm->hpll_en; |
| 6443 | active->fbc_en = wm->fbc_en; |
| 6444 | |
| 6445 | active->sr = wm->sr; |
| 6446 | active->hpll = wm->hpll; |
| 6447 | |
| 6448 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 6449 | active->wm.plane[plane_id] = |
| 6450 | wm->pipe[pipe].plane[plane_id]; |
| 6451 | } |
| 6452 | |
| 6453 | if (wm->cxsr && wm->hpll_en) |
| 6454 | max_level = G4X_WM_LEVEL_HPLL; |
| 6455 | else if (wm->cxsr) |
| 6456 | max_level = G4X_WM_LEVEL_SR; |
| 6457 | else |
| 6458 | max_level = G4X_WM_LEVEL_NORMAL; |
| 6459 | |
| 6460 | level = G4X_WM_LEVEL_NORMAL; |
| 6461 | raw = &crtc_state->wm.g4x.raw[level]; |
| 6462 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 6463 | raw->plane[plane_id] = active->wm.plane[plane_id]; |
| 6464 | |
| 6465 | if (++level > max_level) |
| 6466 | goto out; |
| 6467 | |
| 6468 | raw = &crtc_state->wm.g4x.raw[level]; |
| 6469 | raw->plane[PLANE_PRIMARY] = active->sr.plane; |
| 6470 | raw->plane[PLANE_CURSOR] = active->sr.cursor; |
| 6471 | raw->plane[PLANE_SPRITE0] = 0; |
| 6472 | raw->fbc = active->sr.fbc; |
| 6473 | |
| 6474 | if (++level > max_level) |
| 6475 | goto out; |
| 6476 | |
| 6477 | raw = &crtc_state->wm.g4x.raw[level]; |
| 6478 | raw->plane[PLANE_PRIMARY] = active->hpll.plane; |
| 6479 | raw->plane[PLANE_CURSOR] = active->hpll.cursor; |
| 6480 | raw->plane[PLANE_SPRITE0] = 0; |
| 6481 | raw->fbc = active->hpll.fbc; |
| 6482 | |
| 6483 | out: |
| 6484 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 6485 | g4x_raw_plane_wm_set(crtc_state, level, |
| 6486 | plane_id, USHRT_MAX); |
| 6487 | g4x_raw_fbc_wm_set(crtc_state, level, USHRT_MAX); |
| 6488 | |
| 6489 | crtc_state->wm.g4x.optimal = *active; |
| 6490 | crtc_state->wm.g4x.intermediate = *active; |
| 6491 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6492 | drm_dbg_kms(&dev_priv->drm, |
| 6493 | "Initial watermarks: pipe %c, plane=%d, cursor=%d, sprite=%d\n", |
| 6494 | pipe_name(pipe), |
| 6495 | wm->pipe[pipe].plane[PLANE_PRIMARY], |
| 6496 | wm->pipe[pipe].plane[PLANE_CURSOR], |
| 6497 | wm->pipe[pipe].plane[PLANE_SPRITE0]); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6498 | } |
| 6499 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6500 | drm_dbg_kms(&dev_priv->drm, |
| 6501 | "Initial SR watermarks: plane=%d, cursor=%d fbc=%d\n", |
| 6502 | wm->sr.plane, wm->sr.cursor, wm->sr.fbc); |
| 6503 | drm_dbg_kms(&dev_priv->drm, |
| 6504 | "Initial HPLL watermarks: plane=%d, SR cursor=%d fbc=%d\n", |
| 6505 | wm->hpll.plane, wm->hpll.cursor, wm->hpll.fbc); |
| 6506 | drm_dbg_kms(&dev_priv->drm, "Initial SR=%s HPLL=%s FBC=%s\n", |
| 6507 | yesno(wm->cxsr), yesno(wm->hpll_en), yesno(wm->fbc_en)); |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6508 | } |
| 6509 | |
| 6510 | void g4x_wm_sanitize(struct drm_i915_private *dev_priv) |
| 6511 | { |
| 6512 | struct intel_plane *plane; |
| 6513 | struct intel_crtc *crtc; |
| 6514 | |
| 6515 | mutex_lock(&dev_priv->wm.wm_mutex); |
| 6516 | |
| 6517 | for_each_intel_plane(&dev_priv->drm, plane) { |
| 6518 | struct intel_crtc *crtc = |
| 6519 | intel_get_crtc_for_pipe(dev_priv, plane->pipe); |
| 6520 | struct intel_crtc_state *crtc_state = |
| 6521 | to_intel_crtc_state(crtc->base.state); |
| 6522 | struct intel_plane_state *plane_state = |
| 6523 | to_intel_plane_state(plane->base.state); |
| 6524 | struct g4x_wm_state *wm_state = &crtc_state->wm.g4x.optimal; |
| 6525 | enum plane_id plane_id = plane->id; |
| 6526 | int level; |
| 6527 | |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 6528 | if (plane_state->uapi.visible) |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 6529 | continue; |
| 6530 | |
| 6531 | for (level = 0; level < 3; level++) { |
| 6532 | struct g4x_pipe_wm *raw = |
| 6533 | &crtc_state->wm.g4x.raw[level]; |
| 6534 | |
| 6535 | raw->plane[plane_id] = 0; |
| 6536 | wm_state->wm.plane[plane_id] = 0; |
| 6537 | } |
| 6538 | |
| 6539 | if (plane_id == PLANE_PRIMARY) { |
| 6540 | for (level = 0; level < 3; level++) { |
| 6541 | struct g4x_pipe_wm *raw = |
| 6542 | &crtc_state->wm.g4x.raw[level]; |
| 6543 | raw->fbc = 0; |
| 6544 | } |
| 6545 | |
| 6546 | wm_state->sr.fbc = 0; |
| 6547 | wm_state->hpll.fbc = 0; |
| 6548 | wm_state->fbc_en = false; |
| 6549 | } |
| 6550 | } |
| 6551 | |
| 6552 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
| 6553 | struct intel_crtc_state *crtc_state = |
| 6554 | to_intel_crtc_state(crtc->base.state); |
| 6555 | |
| 6556 | crtc_state->wm.g4x.intermediate = |
| 6557 | crtc_state->wm.g4x.optimal; |
| 6558 | crtc->wm.active.g4x = crtc_state->wm.g4x.optimal; |
| 6559 | } |
| 6560 | |
| 6561 | g4x_program_watermarks(dev_priv); |
| 6562 | |
| 6563 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 6564 | } |
| 6565 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6566 | void vlv_wm_get_hw_state(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6567 | { |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6568 | struct vlv_wm_values *wm = &dev_priv->wm.vlv; |
Ville Syrjälä | f07d43d | 2017-03-02 19:14:52 +0200 | [diff] [blame] | 6569 | struct intel_crtc *crtc; |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6570 | u32 val; |
| 6571 | |
| 6572 | vlv_read_wm_values(dev_priv, wm); |
| 6573 | |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6574 | wm->cxsr = I915_READ(FW_BLC_SELF_VLV) & FW_CSPWRDWNEN; |
| 6575 | wm->level = VLV_WM_LEVEL_PM2; |
| 6576 | |
| 6577 | if (IS_CHERRYVIEW(dev_priv)) { |
Chris Wilson | 337fa6e | 2019-04-26 09:17:20 +0100 | [diff] [blame] | 6578 | vlv_punit_get(dev_priv); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6579 | |
Ville Syrjälä | c11b813 | 2018-11-29 19:55:03 +0200 | [diff] [blame] | 6580 | val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6581 | if (val & DSP_MAXFIFO_PM5_ENABLE) |
| 6582 | wm->level = VLV_WM_LEVEL_PM5; |
| 6583 | |
Ville Syrjälä | 58590c1 | 2015-09-08 21:05:12 +0300 | [diff] [blame] | 6584 | /* |
| 6585 | * If DDR DVFS is disabled in the BIOS, Punit |
| 6586 | * will never ack the request. So if that happens |
| 6587 | * assume we don't have to enable/disable DDR DVFS |
| 6588 | * dynamically. To test that just set the REQ_ACK |
| 6589 | * bit to poke the Punit, but don't change the |
| 6590 | * HIGH/LOW bits so that we don't actually change |
| 6591 | * the current state. |
| 6592 | */ |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6593 | val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2); |
Ville Syrjälä | 58590c1 | 2015-09-08 21:05:12 +0300 | [diff] [blame] | 6594 | val |= FORCE_DDR_FREQ_REQ_ACK; |
| 6595 | vlv_punit_write(dev_priv, PUNIT_REG_DDR_SETUP2, val); |
| 6596 | |
| 6597 | if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2) & |
| 6598 | FORCE_DDR_FREQ_REQ_ACK) == 0, 3)) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6599 | drm_dbg_kms(&dev_priv->drm, |
| 6600 | "Punit not acking DDR DVFS request, " |
| 6601 | "assuming DDR DVFS is disabled\n"); |
Ville Syrjälä | 58590c1 | 2015-09-08 21:05:12 +0300 | [diff] [blame] | 6602 | dev_priv->wm.max_level = VLV_WM_LEVEL_PM5; |
| 6603 | } else { |
| 6604 | val = vlv_punit_read(dev_priv, PUNIT_REG_DDR_SETUP2); |
| 6605 | if ((val & FORCE_DDR_HIGH_FREQ) == 0) |
| 6606 | wm->level = VLV_WM_LEVEL_DDR_DVFS; |
| 6607 | } |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6608 | |
Chris Wilson | 337fa6e | 2019-04-26 09:17:20 +0100 | [diff] [blame] | 6609 | vlv_punit_put(dev_priv); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6610 | } |
| 6611 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6612 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 6613 | struct intel_crtc_state *crtc_state = |
| 6614 | to_intel_crtc_state(crtc->base.state); |
| 6615 | struct vlv_wm_state *active = &crtc->wm.active.vlv; |
| 6616 | const struct vlv_fifo_state *fifo_state = |
| 6617 | &crtc_state->wm.vlv.fifo_state; |
| 6618 | enum pipe pipe = crtc->pipe; |
| 6619 | enum plane_id plane_id; |
| 6620 | int level; |
| 6621 | |
| 6622 | vlv_get_fifo_size(crtc_state); |
| 6623 | |
| 6624 | active->num_levels = wm->level + 1; |
| 6625 | active->cxsr = wm->cxsr; |
| 6626 | |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 6627 | for (level = 0; level < active->num_levels; level++) { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 6628 | struct g4x_pipe_wm *raw = |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 6629 | &crtc_state->wm.vlv.raw[level]; |
| 6630 | |
| 6631 | active->sr[level].plane = wm->sr.plane; |
| 6632 | active->sr[level].cursor = wm->sr.cursor; |
| 6633 | |
| 6634 | for_each_plane_id_on_crtc(crtc, plane_id) { |
| 6635 | active->wm[level].plane[plane_id] = |
| 6636 | wm->pipe[pipe].plane[plane_id]; |
| 6637 | |
| 6638 | raw->plane[plane_id] = |
| 6639 | vlv_invert_wm_value(active->wm[level].plane[plane_id], |
| 6640 | fifo_state->plane[plane_id]); |
| 6641 | } |
| 6642 | } |
| 6643 | |
| 6644 | for_each_plane_id_on_crtc(crtc, plane_id) |
| 6645 | vlv_raw_plane_wm_set(crtc_state, level, |
| 6646 | plane_id, USHRT_MAX); |
| 6647 | vlv_invalidate_wms(crtc, active, level); |
| 6648 | |
| 6649 | crtc_state->wm.vlv.optimal = *active; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 6650 | crtc_state->wm.vlv.intermediate = *active; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 6651 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6652 | drm_dbg_kms(&dev_priv->drm, |
| 6653 | "Initial watermarks: pipe %c, plane=%d, cursor=%d, sprite0=%d, sprite1=%d\n", |
| 6654 | pipe_name(pipe), |
| 6655 | wm->pipe[pipe].plane[PLANE_PRIMARY], |
| 6656 | wm->pipe[pipe].plane[PLANE_CURSOR], |
| 6657 | wm->pipe[pipe].plane[PLANE_SPRITE0], |
| 6658 | wm->pipe[pipe].plane[PLANE_SPRITE1]); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 6659 | } |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6660 | |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6661 | drm_dbg_kms(&dev_priv->drm, |
| 6662 | "Initial watermarks: SR plane=%d, SR cursor=%d level=%d cxsr=%d\n", |
| 6663 | wm->sr.plane, wm->sr.cursor, wm->level, wm->cxsr); |
Ville Syrjälä | 6eb1a68 | 2015-06-24 22:00:03 +0300 | [diff] [blame] | 6664 | } |
| 6665 | |
Ville Syrjälä | 602ae83 | 2017-03-02 19:15:02 +0200 | [diff] [blame] | 6666 | void vlv_wm_sanitize(struct drm_i915_private *dev_priv) |
| 6667 | { |
| 6668 | struct intel_plane *plane; |
| 6669 | struct intel_crtc *crtc; |
| 6670 | |
| 6671 | mutex_lock(&dev_priv->wm.wm_mutex); |
| 6672 | |
| 6673 | for_each_intel_plane(&dev_priv->drm, plane) { |
| 6674 | struct intel_crtc *crtc = |
| 6675 | intel_get_crtc_for_pipe(dev_priv, plane->pipe); |
| 6676 | struct intel_crtc_state *crtc_state = |
| 6677 | to_intel_crtc_state(crtc->base.state); |
| 6678 | struct intel_plane_state *plane_state = |
| 6679 | to_intel_plane_state(plane->base.state); |
| 6680 | struct vlv_wm_state *wm_state = &crtc_state->wm.vlv.optimal; |
| 6681 | const struct vlv_fifo_state *fifo_state = |
| 6682 | &crtc_state->wm.vlv.fifo_state; |
| 6683 | enum plane_id plane_id = plane->id; |
| 6684 | int level; |
| 6685 | |
Maarten Lankhorst | f90a85e | 2019-10-31 12:26:08 +0100 | [diff] [blame] | 6686 | if (plane_state->uapi.visible) |
Ville Syrjälä | 602ae83 | 2017-03-02 19:15:02 +0200 | [diff] [blame] | 6687 | continue; |
| 6688 | |
| 6689 | for (level = 0; level < wm_state->num_levels; level++) { |
Ville Syrjälä | 114d7dc | 2017-04-21 21:14:21 +0300 | [diff] [blame] | 6690 | struct g4x_pipe_wm *raw = |
Ville Syrjälä | 602ae83 | 2017-03-02 19:15:02 +0200 | [diff] [blame] | 6691 | &crtc_state->wm.vlv.raw[level]; |
| 6692 | |
| 6693 | raw->plane[plane_id] = 0; |
| 6694 | |
| 6695 | wm_state->wm[level].plane[plane_id] = |
| 6696 | vlv_invert_wm_value(raw->plane[plane_id], |
| 6697 | fifo_state->plane[plane_id]); |
| 6698 | } |
| 6699 | } |
| 6700 | |
| 6701 | for_each_intel_crtc(&dev_priv->drm, crtc) { |
| 6702 | struct intel_crtc_state *crtc_state = |
| 6703 | to_intel_crtc_state(crtc->base.state); |
| 6704 | |
| 6705 | crtc_state->wm.vlv.intermediate = |
| 6706 | crtc_state->wm.vlv.optimal; |
| 6707 | crtc->wm.active.vlv = crtc_state->wm.vlv.optimal; |
| 6708 | } |
| 6709 | |
| 6710 | vlv_program_watermarks(dev_priv); |
| 6711 | |
| 6712 | mutex_unlock(&dev_priv->wm.wm_mutex); |
| 6713 | } |
| 6714 | |
Ville Syrjälä | f72b84c | 2017-11-08 15:35:55 +0200 | [diff] [blame] | 6715 | /* |
| 6716 | * FIXME should probably kill this and improve |
| 6717 | * the real watermark readout/sanitation instead |
| 6718 | */ |
| 6719 | static void ilk_init_lp_watermarks(struct drm_i915_private *dev_priv) |
| 6720 | { |
| 6721 | I915_WRITE(WM3_LP_ILK, I915_READ(WM3_LP_ILK) & ~WM1_LP_SR_EN); |
| 6722 | I915_WRITE(WM2_LP_ILK, I915_READ(WM2_LP_ILK) & ~WM1_LP_SR_EN); |
| 6723 | I915_WRITE(WM1_LP_ILK, I915_READ(WM1_LP_ILK) & ~WM1_LP_SR_EN); |
| 6724 | |
| 6725 | /* |
| 6726 | * Don't touch WM1S_LP_EN here. |
| 6727 | * Doing so could cause underruns. |
| 6728 | */ |
| 6729 | } |
| 6730 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6731 | void ilk_wm_get_hw_state(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6732 | { |
Imre Deak | 820c198 | 2013-12-17 14:46:36 +0200 | [diff] [blame] | 6733 | struct ilk_wm_values *hw = &dev_priv->wm.hw; |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6734 | struct intel_crtc *crtc; |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6735 | |
Ville Syrjälä | f72b84c | 2017-11-08 15:35:55 +0200 | [diff] [blame] | 6736 | ilk_init_lp_watermarks(dev_priv); |
| 6737 | |
Matt Roper | cd1d3ee | 2018-12-10 13:54:14 -0800 | [diff] [blame] | 6738 | for_each_intel_crtc(&dev_priv->drm, crtc) |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6739 | ilk_pipe_wm_get_hw_state(crtc); |
| 6740 | |
| 6741 | hw->wm_lp[0] = I915_READ(WM1_LP_ILK); |
| 6742 | hw->wm_lp[1] = I915_READ(WM2_LP_ILK); |
| 6743 | hw->wm_lp[2] = I915_READ(WM3_LP_ILK); |
| 6744 | |
| 6745 | hw->wm_lp_spr[0] = I915_READ(WM1S_LP_ILK); |
Tvrtko Ursulin | 175fded | 2016-11-16 08:55:42 +0000 | [diff] [blame] | 6746 | if (INTEL_GEN(dev_priv) >= 7) { |
Ville Syrjälä | cfa7698 | 2014-03-07 18:32:08 +0200 | [diff] [blame] | 6747 | hw->wm_lp_spr[1] = I915_READ(WM2S_LP_IVB); |
| 6748 | hw->wm_lp_spr[2] = I915_READ(WM3S_LP_IVB); |
| 6749 | } |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6750 | |
Tvrtko Ursulin | 8652744 | 2016-10-13 11:03:00 +0100 | [diff] [blame] | 6751 | if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) |
Ville Syrjälä | ac9545f | 2013-12-05 15:51:28 +0200 | [diff] [blame] | 6752 | hw->partitioning = (I915_READ(WM_MISC) & WM_MISC_DATA_PARTITION_5_6) ? |
| 6753 | INTEL_DDB_PART_5_6 : INTEL_DDB_PART_1_2; |
Tvrtko Ursulin | fd6b8f4 | 2016-10-14 10:13:06 +0100 | [diff] [blame] | 6754 | else if (IS_IVYBRIDGE(dev_priv)) |
Ville Syrjälä | ac9545f | 2013-12-05 15:51:28 +0200 | [diff] [blame] | 6755 | hw->partitioning = (I915_READ(DISP_ARB_CTL2) & DISP_DATA_PARTITION_5_6) ? |
| 6756 | INTEL_DDB_PART_5_6 : INTEL_DDB_PART_1_2; |
Ville Syrjälä | 243e6a4 | 2013-10-14 14:55:24 +0300 | [diff] [blame] | 6757 | |
| 6758 | hw->enable_fbc_wm = |
| 6759 | !(I915_READ(DISP_ARB_CTL) & DISP_FBC_WM_DIS); |
| 6760 | } |
| 6761 | |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 6762 | /** |
| 6763 | * intel_update_watermarks - update FIFO watermark values based on current modes |
Chris Wilson | 3138341 | 2018-02-14 14:03:03 +0000 | [diff] [blame] | 6764 | * @crtc: the #intel_crtc on which to compute the WM |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 6765 | * |
| 6766 | * Calculate watermark values for the various WM regs based on current mode |
| 6767 | * and plane configuration. |
| 6768 | * |
| 6769 | * There are several cases to deal with here: |
| 6770 | * - normal (i.e. non-self-refresh) |
| 6771 | * - self-refresh (SR) mode |
| 6772 | * - lines are large relative to FIFO size (buffer can hold up to 2) |
| 6773 | * - lines are small relative to FIFO size (buffer can hold more than 2 |
| 6774 | * lines), so need to account for TLB latency |
| 6775 | * |
| 6776 | * The normal calculation is: |
| 6777 | * watermark = dotclock * bytes per pixel * latency |
| 6778 | * where latency is platform & configuration dependent (we assume pessimal |
| 6779 | * values here). |
| 6780 | * |
| 6781 | * The SR calculation is: |
| 6782 | * watermark = (trunc(latency/line time)+1) * surface width * |
| 6783 | * bytes per pixel |
| 6784 | * where |
| 6785 | * line time = htotal / dotclock |
| 6786 | * surface width = hdisplay for normal plane and 64 for cursor |
| 6787 | * and latency is assumed to be high, as above. |
| 6788 | * |
| 6789 | * The final value programmed to the register should always be rounded up, |
| 6790 | * and include an extra 2 entries to account for clock crossings. |
| 6791 | * |
| 6792 | * We don't use the sprite, so we can ignore that. And on Crestline we have |
| 6793 | * to set the non-SR watermarks to 8. |
| 6794 | */ |
Ville Syrjälä | 432081b | 2016-10-31 22:37:03 +0200 | [diff] [blame] | 6795 | void intel_update_watermarks(struct intel_crtc *crtc) |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 6796 | { |
Ville Syrjälä | 432081b | 2016-10-31 22:37:03 +0200 | [diff] [blame] | 6797 | struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 6798 | |
| 6799 | if (dev_priv->display.update_wm) |
Ville Syrjälä | 46ba614 | 2013-09-10 11:40:40 +0300 | [diff] [blame] | 6800 | dev_priv->display.update_wm(crtc); |
Eugeni Dodonov | b445e3b | 2012-04-16 22:20:35 -0300 | [diff] [blame] | 6801 | } |
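| |  |
| |  /* |
| |   * Worked example for the formulas above (illustrative numbers only, not |
| |   * taken from any platform's latency tables): with a 148.5 MHz dot clock, |
| |   * 4 bytes per pixel and an assumed 30 us latency, the normal watermark is |
| |   * 148,500,000 * 4 * 30e-6 ~= 17,820 bytes of FIFO. For the SR case with |
| |   * htotal = 2200 and hdisplay = 1920, line time = 2200 / 148,500,000 ~= |
| |   * 14.8 us, so (trunc(30 / 14.8) + 1) * 1920 * 4 = 3 * 1920 * 4 = 23,040 |
| |   * bytes. Both results would then be rounded up and padded by the extra |
| |   * two entries for clock crossings, as described above. |
| |   */ |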
| 6802 | |
Kumar, Mahesh | 2503a0f | 2017-08-17 19:15:28 +0530 | [diff] [blame] | 6803 | void intel_enable_ipc(struct drm_i915_private *dev_priv) |
| 6804 | { |
| 6805 | u32 val; |
| 6806 | |
José Roberto de Souza | fd847b8 | 2018-09-18 13:47:11 -0700 | [diff] [blame] | 6807 | if (!HAS_IPC(dev_priv)) |
| 6808 | return; |
| 6809 | |
Kumar, Mahesh | 2503a0f | 2017-08-17 19:15:28 +0530 | [diff] [blame] | 6810 | val = I915_READ(DISP_ARB_CTL2); |
| 6811 | |
| 6812 | if (dev_priv->ipc_enabled) |
| 6813 | val |= DISP_IPC_ENABLE; |
| 6814 | else |
| 6815 | val &= ~DISP_IPC_ENABLE; |
| 6816 | |
| 6817 | I915_WRITE(DISP_ARB_CTL2, val); |
| 6818 | } |
| 6819 | |
Ville Syrjälä | c91a45f | 2019-05-03 20:38:07 +0300 | [diff] [blame] | 6820 | static bool intel_can_enable_ipc(struct drm_i915_private *dev_priv) |
| 6821 | { |
| 6822 | /* Display WA #0477 WaDisableIPC: skl */ |
| 6823 | if (IS_SKYLAKE(dev_priv)) |
| 6824 | return false; |
| 6825 | |
| 6826 | /* Display WA #1141: SKL:all KBL:all CFL */ |
Chris Wilson | 5f4ae27 | 2020-06-02 15:05:40 +0100 | [diff] [blame] | 6827 | if (IS_KABYLAKE(dev_priv) || |
| 6828 | IS_COFFEELAKE(dev_priv) || |
| 6829 | IS_COMETLAKE(dev_priv)) |
Ville Syrjälä | c91a45f | 2019-05-03 20:38:07 +0300 | [diff] [blame] | 6830 | return dev_priv->dram_info.symmetric_memory; |
| 6831 | |
| 6832 | return true; |
| 6833 | } |
| 6834 | |
Kumar, Mahesh | 2503a0f | 2017-08-17 19:15:28 +0530 | [diff] [blame] | 6835 | void intel_init_ipc(struct drm_i915_private *dev_priv) |
| 6836 | { |
Kumar, Mahesh | 2503a0f | 2017-08-17 19:15:28 +0530 | [diff] [blame] | 6837 | if (!HAS_IPC(dev_priv)) |
| 6838 | return; |
| 6839 | |
Ville Syrjälä | c91a45f | 2019-05-03 20:38:07 +0300 | [diff] [blame] | 6840 | dev_priv->ipc_enabled = intel_can_enable_ipc(dev_priv); |
José Roberto de Souza | c9b818d | 2018-09-18 13:47:13 -0700 | [diff] [blame] | 6841 | |
Kumar, Mahesh | 2503a0f | 2017-08-17 19:15:28 +0530 | [diff] [blame] | 6842 | intel_enable_ipc(dev_priv); |
| 6843 | } |
| 6844 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6845 | static void ibx_init_clock_gating(struct drm_i915_private *dev_priv) |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6846 | { |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6847 | /* |
| 6848 | * On Ibex Peak and Cougar Point, we need to disable clock |
| 6849 | * gating for the panel power sequencer or it will fail to |
| 6850 | * start up when no ports are active. |
| 6851 | */ |
| 6852 | I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE); |
| 6853 | } |
| 6854 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6855 | static void g4x_disable_trickle_feed(struct drm_i915_private *dev_priv) |
Ville Syrjälä | 0e088b8 | 2013-06-07 10:47:04 +0300 | [diff] [blame] | 6856 | { |
Ville Syrjälä | b12ce1d | 2015-05-26 20:27:23 +0300 | [diff] [blame] | 6857 | enum pipe pipe; |
Ville Syrjälä | 0e088b8 | 2013-06-07 10:47:04 +0300 | [diff] [blame] | 6858 | |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 6859 | for_each_pipe(dev_priv, pipe) { |
Ville Syrjälä | 0e088b8 | 2013-06-07 10:47:04 +0300 | [diff] [blame] | 6860 | I915_WRITE(DSPCNTR(pipe), |
| 6861 | I915_READ(DSPCNTR(pipe)) | |
| 6862 | DISPPLANE_TRICKLE_FEED_DISABLE); |
Ville Syrjälä | b12ce1d | 2015-05-26 20:27:23 +0300 | [diff] [blame] | 6863 | |
| 6864 | I915_WRITE(DSPSURF(pipe), I915_READ(DSPSURF(pipe))); |
| 6865 | POSTING_READ(DSPSURF(pipe)); |
Ville Syrjälä | 0e088b8 | 2013-06-07 10:47:04 +0300 | [diff] [blame] | 6866 | } |
| 6867 | } |
| 6868 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 6869 | static void ilk_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6870 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6871 | u32 dspclk_gate = ILK_VRHUNIT_CLOCK_GATE_DISABLE; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6872 | |
Damien Lespiau | f1e8fa5 | 2013-06-07 17:41:09 +0100 | [diff] [blame] | 6873 | /* |
| 6874 | * Required for FBC |
| 6875 | * WaFbcDisableDpfcClockGating:ilk |
| 6876 | */ |
Damien Lespiau | 4d47e4f | 2012-10-19 17:55:42 +0100 | [diff] [blame] | 6877 | dspclk_gate |= ILK_DPFCRUNIT_CLOCK_GATE_DISABLE | |
| 6878 | ILK_DPFCUNIT_CLOCK_GATE_DISABLE | |
| 6879 | ILK_DPFDUNIT_CLOCK_GATE_ENABLE; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6880 | |
| 6881 | I915_WRITE(PCH_3DCGDIS0, |
| 6882 | MARIUNIT_CLOCK_GATE_DISABLE | |
| 6883 | SVSMUNIT_CLOCK_GATE_DISABLE); |
| 6884 | I915_WRITE(PCH_3DCGDIS1, |
| 6885 | VFMUNIT_CLOCK_GATE_DISABLE); |
| 6886 | |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6887 | /* |
| 6888 | * According to the spec the following bits should be set in |
| 6889 |  * order to enable memory self-refresh: |
| 6890 | * The bit 22/21 of 0x42004 |
| 6891 | * The bit 5 of 0x42020 |
| 6892 | * The bit 15 of 0x45000 |
| 6893 | */ |
| 6894 | I915_WRITE(ILK_DISPLAY_CHICKEN2, |
| 6895 | (I915_READ(ILK_DISPLAY_CHICKEN2) | |
| 6896 | ILK_DPARB_GATE | ILK_VSDPFD_FULL)); |
Damien Lespiau | 4d47e4f | 2012-10-19 17:55:42 +0100 | [diff] [blame] | 6897 | dspclk_gate |= ILK_DPARBUNIT_CLOCK_GATE_ENABLE; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6898 | I915_WRITE(DISP_ARB_CTL, |
| 6899 | (I915_READ(DISP_ARB_CTL) | |
| 6900 | DISP_FBC_WM_DIS)); |
Ville Syrjälä | 017636c | 2013-12-05 15:51:37 +0200 | [diff] [blame] | 6901 | |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6902 | /* |
| 6903 | * Based on the document from hardware guys the following bits |
| 6904 | * should be set unconditionally in order to enable FBC. |
| 6905 | * The bit 22 of 0x42000 |
| 6906 | * The bit 22 of 0x42004 |
| 6907 | * The bit 7,8,9 of 0x42020. |
| 6908 | */ |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 6909 | if (IS_IRONLAKE_M(dev_priv)) { |
Damien Lespiau | 4bb3533 | 2013-06-14 15:23:24 +0100 | [diff] [blame] | 6910 | /* WaFbcAsynchFlipDisableFbcQueue:ilk */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6911 | I915_WRITE(ILK_DISPLAY_CHICKEN1, |
| 6912 | I915_READ(ILK_DISPLAY_CHICKEN1) | |
| 6913 | ILK_FBCQ_DIS); |
| 6914 | I915_WRITE(ILK_DISPLAY_CHICKEN2, |
| 6915 | I915_READ(ILK_DISPLAY_CHICKEN2) | |
| 6916 | ILK_DPARB_GATE); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6917 | } |
| 6918 | |
Damien Lespiau | 4d47e4f | 2012-10-19 17:55:42 +0100 | [diff] [blame] | 6919 | I915_WRITE(ILK_DSPCLK_GATE_D, dspclk_gate); |
| 6920 | |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6921 | I915_WRITE(ILK_DISPLAY_CHICKEN2, |
| 6922 | I915_READ(ILK_DISPLAY_CHICKEN2) | |
| 6923 | ILK_ELPIN_409_SELECT); |
| 6924 | I915_WRITE(_3D_CHICKEN2, |
| 6925 | _3D_CHICKEN2_WM_READ_PIPELINED << 16 | |
| 6926 | _3D_CHICKEN2_WM_READ_PIPELINED); |
Daniel Vetter | 4358a37 | 2012-10-18 11:49:51 +0200 | [diff] [blame] | 6927 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 6928 | /* WaDisableRenderCachePipelinedFlush:ilk */ |
Daniel Vetter | 4358a37 | 2012-10-18 11:49:51 +0200 | [diff] [blame] | 6929 | I915_WRITE(CACHE_MODE_0, |
| 6930 | _MASKED_BIT_ENABLE(CM0_PIPELINED_RENDER_FLUSH_DISABLE)); |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6931 | |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 6932 | /* WaDisable_RenderCache_OperationalFlush:ilk */ |
| 6933 | I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
| 6934 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6935 | g4x_disable_trickle_feed(dev_priv); |
Ville Syrjälä | bdad2b2 | 2013-06-07 10:47:03 +0300 | [diff] [blame] | 6936 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6937 | ibx_init_clock_gating(dev_priv); |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6938 | } |
| 6939 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6940 | static void cpt_init_clock_gating(struct drm_i915_private *dev_priv) |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6941 | { |
Ville Syrjälä | d048a26 | 2019-08-21 20:30:31 +0300 | [diff] [blame] | 6942 | enum pipe pipe; |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6943 | u32 val; |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6944 | |
| 6945 | /* |
| 6946 | * On Ibex Peak and Cougar Point, we need to disable clock |
| 6947 | * gating for the panel power sequencer or it will fail to |
| 6948 | * start up when no ports are active. |
| 6949 | */ |
Jesse Barnes | cd66407 | 2013-10-02 10:34:19 -0700 | [diff] [blame] | 6950 | I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE | |
| 6951 | PCH_DPLUNIT_CLOCK_GATE_DISABLE | |
| 6952 | PCH_CPUNIT_CLOCK_GATE_DISABLE); |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6953 | I915_WRITE(SOUTH_CHICKEN2, I915_READ(SOUTH_CHICKEN2) | |
| 6954 | DPLS_EDP_PPS_FIX_DIS); |
Takashi Iwai | 335c07b | 2012-12-11 11:46:29 +0100 | [diff] [blame] | 6955 |  /* The below fixes a weird display corruption (a few pixels shifted |
| 6956 |  * downward) seen only on the LVDS of some HP laptops with IVY. |
| 6957 | */ |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 6958 | for_each_pipe(dev_priv, pipe) { |
Paulo Zanoni | dc4bd2d | 2013-04-08 15:48:08 -0300 | [diff] [blame] | 6959 | val = I915_READ(TRANS_CHICKEN2(pipe)); |
| 6960 | val |= TRANS_CHICKEN2_TIMING_OVERRIDE; |
| 6961 | val &= ~TRANS_CHICKEN2_FDI_POLARITY_REVERSED; |
Rodrigo Vivi | 41aa344 | 2013-05-09 20:03:18 -0300 | [diff] [blame] | 6962 | if (dev_priv->vbt.fdi_rx_polarity_inverted) |
Paulo Zanoni | 3f704fa | 2013-04-08 15:48:07 -0300 | [diff] [blame] | 6963 | val |= TRANS_CHICKEN2_FDI_POLARITY_REVERSED; |
Paulo Zanoni | dc4bd2d | 2013-04-08 15:48:08 -0300 | [diff] [blame] | 6964 | val &= ~TRANS_CHICKEN2_DISABLE_DEEP_COLOR_COUNTER; |
| 6965 | val &= ~TRANS_CHICKEN2_DISABLE_DEEP_COLOR_MODESWITCH; |
Paulo Zanoni | 3f704fa | 2013-04-08 15:48:07 -0300 | [diff] [blame] | 6966 | I915_WRITE(TRANS_CHICKEN2(pipe), val); |
| 6967 | } |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6968 | /* WADP0ClockGatingDisable */ |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 6969 | for_each_pipe(dev_priv, pipe) { |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 6970 | I915_WRITE(TRANS_CHICKEN1(pipe), |
| 6971 | TRANS_CHICKEN1_DP0UNIT_GC_DISABLE); |
| 6972 | } |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6973 | } |
| 6974 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6975 | static void gen6_check_mch_setup(struct drm_i915_private *dev_priv) |
Daniel Vetter | 1d7aaa0 | 2013-02-09 21:03:42 +0100 | [diff] [blame] | 6976 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6977 | u32 tmp; |
Daniel Vetter | 1d7aaa0 | 2013-02-09 21:03:42 +0100 | [diff] [blame] | 6978 | |
| 6979 | tmp = I915_READ(MCH_SSKPD); |
Daniel Vetter | df662a2 | 2014-08-04 11:17:25 +0200 | [diff] [blame] | 6980 | if ((tmp & MCH_SSKPD_WM0_MASK) != MCH_SSKPD_WM0_VAL) |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 6981 | drm_dbg_kms(&dev_priv->drm, |
| 6982 | "Wrong MCH_SSKPD value: 0x%08x This can cause underruns.\n", |
| 6983 | tmp); |
Daniel Vetter | 1d7aaa0 | 2013-02-09 21:03:42 +0100 | [diff] [blame] | 6984 | } |
| 6985 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 6986 | static void gen6_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6987 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 6988 | u32 dspclk_gate = ILK_VRHUNIT_CLOCK_GATE_DISABLE; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6989 | |
Damien Lespiau | 231e54f | 2012-10-19 17:55:41 +0100 | [diff] [blame] | 6990 | I915_WRITE(ILK_DSPCLK_GATE_D, dspclk_gate); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 6991 | |
| 6992 | I915_WRITE(ILK_DISPLAY_CHICKEN2, |
| 6993 | I915_READ(ILK_DISPLAY_CHICKEN2) | |
| 6994 | ILK_ELPIN_409_SELECT); |
| 6995 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 6996 | /* WaDisableHiZPlanesWhenMSAAEnabled:snb */ |
Daniel Vetter | 4283908 | 2012-12-14 23:38:28 +0100 | [diff] [blame] | 6997 | I915_WRITE(_3D_CHICKEN, |
| 6998 | _MASKED_BIT_ENABLE(_3D_CHICKEN_HIZ_PLANE_DISABLE_MSAA_4X_SNB)); |
| 6999 | |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7000 | /* WaDisable_RenderCache_OperationalFlush:snb */ |
| 7001 | I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
| 7002 | |
Ville Syrjälä | 8d85d27 | 2014-02-04 21:59:15 +0200 | [diff] [blame] | 7003 | /* |
| 7004 | * BSpec recommends 8x4 when MSAA is used, |
| 7005 | * however in practice 16x4 seems fastest. |
Ville Syrjälä | c5c98a5 | 2014-02-05 12:43:47 +0200 | [diff] [blame] | 7006 | * |
| 7007 | * Note that PS/WM thread counts depend on the WIZ hashing |
| 7008 | * disable bit, which we don't touch here, but it's good |
| 7009 | * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM). |
Ville Syrjälä | 8d85d27 | 2014-02-04 21:59:15 +0200 | [diff] [blame] | 7010 | */ |
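| | /* |
| | * Note: GEN6_GT_MODE, like the other _MASKED_* writes in this file, is a |
| | * masked register; _MASKED_FIELD() carries the write-enable mask in the |
| | * upper 16 bits, so only the WIZ hashing field is modified by the write |
| | * below. |
| | */ |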
| 7011 | I915_WRITE(GEN6_GT_MODE, |
Damien Lespiau | 9853325 | 2014-12-08 17:33:51 +0000 | [diff] [blame] | 7012 | _MASKED_FIELD(GEN6_WIZ_HASHING_MASK, GEN6_WIZ_HASHING_16x4)); |
Ville Syrjälä | 8d85d27 | 2014-02-04 21:59:15 +0200 | [diff] [blame] | 7013 | |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7014 | I915_WRITE(CACHE_MODE_0, |
Daniel Vetter | 5074329 | 2012-04-26 22:02:54 +0200 | [diff] [blame] | 7015 | _MASKED_BIT_DISABLE(CM0_STC_EVICT_DISABLE_LRA_SNB)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7016 | |
| 7017 | I915_WRITE(GEN6_UCGCTL1, |
| 7018 | I915_READ(GEN6_UCGCTL1) | |
| 7019 | GEN6_BLBUNIT_CLOCK_GATE_DISABLE | |
| 7020 | GEN6_CSUNIT_CLOCK_GATE_DISABLE); |
| 7021 | |
| 7022 | /* According to the BSpec vol1g, bit 12 (RCPBUNIT) clock |
| 7023 | * gating disable must be set. Failure to set it results in |
| 7024 | * flickering pixels due to Z write ordering failures after |
| 7025 | * some amount of runtime in the Mesa "fire" demo, and Unigine |
| 7026 | * Sanctuary and Tropics, and apparently anything else with |
| 7027 | * alpha test or pixel discard. |
| 7028 | * |
| 7029 | * According to the spec, bit 11 (RCCUNIT) must also be set, |
| 7030 | * but we didn't debug actual testcases to find it out. |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7031 | * |
Ville Syrjälä | ef59318 | 2014-01-22 21:32:47 +0200 | [diff] [blame] | 7032 | * WaDisableRCCUnitClockGating:snb |
| 7033 | * WaDisableRCPBUnitClockGating:snb |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7034 | */ |
| 7035 | I915_WRITE(GEN6_UCGCTL2, |
| 7036 | GEN6_RCPBUNIT_CLOCK_GATE_DISABLE | |
| 7037 | GEN6_RCCUNIT_CLOCK_GATE_DISABLE); |
| 7038 | |
Ville Syrjälä | 5eb146d | 2014-02-04 21:59:16 +0200 | [diff] [blame] | 7039 | /* WaStripsFansDisableFastClipPerformanceFix:snb */ |
Ville Syrjälä | 743b57d | 2014-02-04 21:59:17 +0200 | [diff] [blame] | 7040 | I915_WRITE(_3D_CHICKEN3, |
| 7041 | _MASKED_BIT_ENABLE(_3D_CHICKEN3_SF_DISABLE_FASTCLIP_CULL)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7042 | |
| 7043 | /* |
Ville Syrjälä | e927ecd | 2014-02-04 21:59:18 +0200 | [diff] [blame] | 7044 | * BSpec says: |
| 7045 | * "This bit must be set if 3DSTATE_CLIP clip mode is set to normal and |
| 7046 | * 3DSTATE_SF number of SF output attributes is more than 16." |
| 7047 | */ |
| 7048 | I915_WRITE(_3D_CHICKEN3, |
| 7049 | _MASKED_BIT_ENABLE(_3D_CHICKEN3_SF_DISABLE_PIPELINED_ATTR_FETCH)); |
| 7050 | |
| 7051 | /* |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7052 | * According to the spec the following bits should be |
| 7053 | * set in order to enable memory self-refresh and fbc: |
| 7054 | * The bit21 and bit22 of 0x42000 |
| 7055 | * The bit21 and bit22 of 0x42004 |
| 7056 | * The bit5 and bit7 of 0x42020 |
| 7057 | * The bit14 of 0x70180 |
| 7058 | * The bit14 of 0x71180 |
Damien Lespiau | 4bb3533 | 2013-06-14 15:23:24 +0100 | [diff] [blame] | 7059 | * |
| 7060 | * WaFbcAsynchFlipDisableFbcQueue:snb |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7061 | */ |
| 7062 | I915_WRITE(ILK_DISPLAY_CHICKEN1, |
| 7063 | I915_READ(ILK_DISPLAY_CHICKEN1) | |
| 7064 | ILK_FBCQ_DIS | ILK_PABSTRETCH_DIS); |
| 7065 | I915_WRITE(ILK_DISPLAY_CHICKEN2, |
| 7066 | I915_READ(ILK_DISPLAY_CHICKEN2) | |
| 7067 | ILK_DPARB_GATE | ILK_VSDPFD_FULL); |
Damien Lespiau | 231e54f | 2012-10-19 17:55:41 +0100 | [diff] [blame] | 7068 | I915_WRITE(ILK_DSPCLK_GATE_D, |
| 7069 | I915_READ(ILK_DSPCLK_GATE_D) | |
| 7070 | ILK_DPARBUNIT_CLOCK_GATE_ENABLE | |
| 7071 | ILK_DPFDUNIT_CLOCK_GATE_ENABLE); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7072 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7073 | g4x_disable_trickle_feed(dev_priv); |
Ben Widawsky | f8f2ac9 | 2012-10-03 19:34:24 -0700 | [diff] [blame] | 7074 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7075 | cpt_init_clock_gating(dev_priv); |
Daniel Vetter | 1d7aaa0 | 2013-02-09 21:03:42 +0100 | [diff] [blame] | 7076 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7077 | gen6_check_mch_setup(dev_priv); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7078 | } |
| 7079 | |
| 7080 | static void gen7_setup_fixed_func_scheduler(struct drm_i915_private *dev_priv) |
| 7081 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 7082 | u32 reg = I915_READ(GEN7_FF_THREAD_MODE); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7083 | |
Ville Syrjälä | 3aad905 | 2014-01-22 21:32:59 +0200 | [diff] [blame] | 7084 | /* |
Ville Syrjälä | 46680e0 | 2014-01-22 21:33:01 +0200 | [diff] [blame] | 7085 | * WaVSThreadDispatchOverride:ivb,vlv |
Ville Syrjälä | 3aad905 | 2014-01-22 21:32:59 +0200 | [diff] [blame] | 7086 | * |
| 7087 | * This actually overrides the dispatch |
| 7088 | * mode for all thread types. |
| 7089 | */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7090 | reg &= ~GEN7_FF_SCHED_MASK; |
| 7091 | reg |= GEN7_FF_TS_SCHED_HW; |
| 7092 | reg |= GEN7_FF_VS_SCHED_HW; |
| 7093 | reg |= GEN7_FF_DS_SCHED_HW; |
| 7094 | |
| 7095 | I915_WRITE(GEN7_FF_THREAD_MODE, reg); |
| 7096 | } |
| 7097 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7098 | static void lpt_init_clock_gating(struct drm_i915_private *dev_priv) |
Paulo Zanoni | 17a303e | 2012-11-20 15:12:07 -0200 | [diff] [blame] | 7099 | { |
Paulo Zanoni | 17a303e | 2012-11-20 15:12:07 -0200 | [diff] [blame] | 7100 | /* |
| 7101 | * TODO: this bit should only be enabled when really needed, then |
| 7102 | * disabled when not needed anymore in order to save power. |
| 7103 | */ |
Tvrtko Ursulin | 4f8036a | 2016-10-13 11:02:52 +0100 | [diff] [blame] | 7104 | if (HAS_PCH_LPT_LP(dev_priv)) |
Paulo Zanoni | 17a303e | 2012-11-20 15:12:07 -0200 | [diff] [blame] | 7105 | I915_WRITE(SOUTH_DSPCLK_GATE_D, |
| 7106 | I915_READ(SOUTH_DSPCLK_GATE_D) | |
| 7107 | PCH_LP_PARTITION_LEVEL_DISABLE); |
Paulo Zanoni | 0a790cd | 2013-04-17 18:15:49 -0300 | [diff] [blame] | 7108 | |
| 7109 | /* WADPOClockGatingDisable:hsw */ |
Ville Syrjälä | 36c0d0c | 2015-09-18 20:03:31 +0300 | [diff] [blame] | 7110 | I915_WRITE(TRANS_CHICKEN1(PIPE_A), |
| 7111 | I915_READ(TRANS_CHICKEN1(PIPE_A)) | |
Paulo Zanoni | 0a790cd | 2013-04-17 18:15:49 -0300 | [diff] [blame] | 7112 | TRANS_CHICKEN1_DP0UNIT_GC_DISABLE); |
Paulo Zanoni | 17a303e | 2012-11-20 15:12:07 -0200 | [diff] [blame] | 7113 | } |
| 7114 | |
Ville Syrjälä | 712bf36 | 2016-10-31 22:37:23 +0200 | [diff] [blame] | 7115 | static void lpt_suspend_hw(struct drm_i915_private *dev_priv) |
Imre Deak | 7d708ee | 2013-04-17 14:04:50 +0300 | [diff] [blame] | 7116 | { |
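| | /* |
| | * Undo the LPT:LP setting from lpt_init_clock_gating(): clearing the |
| | * disable bit allows the PCH low power partition levels again while |
| | * the device is suspended. |
| | */ |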
Tvrtko Ursulin | 4f8036a | 2016-10-13 11:02:52 +0100 | [diff] [blame] | 7117 | if (HAS_PCH_LPT_LP(dev_priv)) { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 7118 | u32 val = I915_READ(SOUTH_DSPCLK_GATE_D); |
Imre Deak | 7d708ee | 2013-04-17 14:04:50 +0300 | [diff] [blame] | 7119 | |
| 7120 | val &= ~PCH_LP_PARTITION_LEVEL_DISABLE; |
| 7121 | I915_WRITE(SOUTH_DSPCLK_GATE_D, val); |
| 7122 | } |
| 7123 | } |
| 7124 | |
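| | /* |
| | * Shared bdw/chv helper: temporarily disable DOP clock gating, split the |
| | * L3 SQC credits between general and high priority requests, then |
| | * restore MISCCPCTL. |
| | */ |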
Imre Deak | 450174f | 2016-05-03 15:54:21 +0300 | [diff] [blame] | 7125 | static void gen8_set_l3sqc_credits(struct drm_i915_private *dev_priv, |
| 7126 | int general_prio_credits, |
| 7127 | int high_prio_credits) |
| 7128 | { |
| 7129 | u32 misccpctl; |
Oscar Mateo | 930a784 | 2017-10-17 13:25:45 -0700 | [diff] [blame] | 7130 | u32 val; |
Imre Deak | 450174f | 2016-05-03 15:54:21 +0300 | [diff] [blame] | 7131 | |
| 7132 | /* WaTempDisableDOPClkGating:bdw */ |
| 7133 | misccpctl = I915_READ(GEN7_MISCCPCTL); |
| 7134 | I915_WRITE(GEN7_MISCCPCTL, misccpctl & ~GEN7_DOP_CLOCK_GATE_ENABLE); |
| 7135 | |
Oscar Mateo | 930a784 | 2017-10-17 13:25:45 -0700 | [diff] [blame] | 7136 | val = I915_READ(GEN8_L3SQCREG1); |
| 7137 | val &= ~L3_PRIO_CREDITS_MASK; |
| 7138 | val |= L3_GENERAL_PRIO_CREDITS(general_prio_credits); |
| 7139 | val |= L3_HIGH_PRIO_CREDITS(high_prio_credits); |
| 7140 | I915_WRITE(GEN8_L3SQCREG1, val); |
Imre Deak | 450174f | 2016-05-03 15:54:21 +0300 | [diff] [blame] | 7141 | |
| 7142 | /* |
| 7143 | * Wait at least 100 clocks before re-enabling clock gating. |
| 7144 | * See the definition of L3SQCREG1 in BSpec. |
| 7145 | */ |
| 7146 | POSTING_READ(GEN8_L3SQCREG1); |
| 7147 | udelay(1); |
| 7148 | I915_WRITE(GEN7_MISCCPCTL, misccpctl); |
| 7149 | } |
| 7150 | |
Oscar Mateo | d65dc3e | 2018-05-08 14:29:24 -0700 | [diff] [blame] | 7151 | static void icl_init_clock_gating(struct drm_i915_private *dev_priv) |
| 7152 | { |
| 7153 | /* This is not a Wa. Enable to reduce Sampler power */ |
| 7154 | I915_WRITE(GEN10_DFR_RATIO_EN_AND_CHICKEN, |
| 7155 | I915_READ(GEN10_DFR_RATIO_EN_AND_CHICKEN) & ~DFR_DISABLE); |
Radhakrishna Sripada | 622b3f6 | 2018-10-30 01:45:01 -0700 | [diff] [blame] | 7156 | |
Matt Atwood | 6f4194c | 2020-01-13 23:11:28 -0500 | [diff] [blame] | 7157 | /* Wa_14010594013:icl, ehl */ |
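| | /* intel_uncore_rmw() clears the bits in its third argument and sets |
| | * those in the fourth, so this write only sets CNL_DELAY_PMRSP. |
| | */ |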
| 7158 | intel_uncore_rmw(&dev_priv->uncore, GEN8_CHICKEN_DCPR_1, |
| 7159 | 0, CNL_DELAY_PMRSP); |
Oscar Mateo | d65dc3e | 2018-05-08 14:29:24 -0700 | [diff] [blame] | 7160 | } |
| 7161 | |
Michel Thierry | 5d86923 | 2019-08-23 01:20:34 -0700 | [diff] [blame] | 7162 | static void tgl_init_clock_gating(struct drm_i915_private *dev_priv) |
| 7163 | { |
| 7164 | u32 vd_pg_enable = 0; |
| 7165 | unsigned int i; |
| 7166 | |
| 7167 | /* This is not a WA. Enable VD HCP & MFX_ENC powergate */ |
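| | /* Only video decode engines actually present (HAS_ENGINE) get their |
| | * powergate bits accumulated into vd_pg_enable. |
| | */ |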
| 7168 | for (i = 0; i < I915_MAX_VCS; i++) { |
| 7169 | if (HAS_ENGINE(dev_priv, _VCS(i))) |
| 7170 | vd_pg_enable |= VDN_HCP_POWERGATE_ENABLE(i) | |
| 7171 | VDN_MFX_POWERGATE_ENABLE(i); |
| 7172 | } |
| 7173 | |
| 7174 | I915_WRITE(POWERGATE_ENABLE, |
| 7175 | I915_READ(POWERGATE_ENABLE) | vd_pg_enable); |
Radhakrishna Sripada | f78d5da | 2020-01-09 14:37:27 -0800 | [diff] [blame] | 7176 | |
| 7177 | /* Wa_1409825376:tgl (pre-prod) */ |
| 7178 | if (IS_TGL_REVID(dev_priv, TGL_REVID_A0, TGL_REVID_A0)) |
| 7179 | I915_WRITE(GEN9_CLKGATE_DIS_3, I915_READ(GEN9_CLKGATE_DIS_3) | |
| 7180 | TGL_VRH_GATING_DIS); |
Matt Atwood | f9d7742 | 2020-04-15 15:35:35 -0400 | [diff] [blame] | 7181 | |
| 7182 | /* Wa_14011059788:tgl */ |
| 7183 | intel_uncore_rmw(&dev_priv->uncore, GEN10_DFR_RATIO_EN_AND_CHICKEN, |
| 7184 | 0, DFR_DISABLE); |
Michel Thierry | 5d86923 | 2019-08-23 01:20:34 -0700 | [diff] [blame] | 7185 | } |
| 7186 | |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7187 | static void cnp_init_clock_gating(struct drm_i915_private *dev_priv) |
| 7188 | { |
| 7189 | if (!HAS_PCH_CNP(dev_priv)) |
| 7190 | return; |
| 7191 | |
Rodrigo Vivi | 470e7c6 | 2018-03-05 17:28:12 -0800 | [diff] [blame] | 7192 | /* Display WA #1181 WaSouthDisplayDisablePWMCGEGating: cnp */ |
Rodrigo Vivi | 4cc6feb | 2017-09-08 16:45:33 -0700 | [diff] [blame] | 7193 | I915_WRITE(SOUTH_DSPCLK_GATE_D, I915_READ(SOUTH_DSPCLK_GATE_D) | |
| 7194 | CNP_PWM_CGE_GATING_DISABLE); |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7195 | } |
| 7196 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7197 | static void cnl_init_clock_gating(struct drm_i915_private *dev_priv) |
Rodrigo Vivi | 90007bc | 2017-08-15 16:16:48 -0700 | [diff] [blame] | 7198 | { |
Rodrigo Vivi | 8f06783 | 2017-09-05 12:30:13 -0700 | [diff] [blame] | 7199 | u32 val; |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7200 | cnp_init_clock_gating(dev_priv); |
| 7201 | |
Rodrigo Vivi | 1a25db6 | 2017-08-15 16:16:51 -0700 | [diff] [blame] | 7202 | /* This is not a Wa. Enable for better image quality */ |
| 7203 | I915_WRITE(_3D_CHICKEN3, |
| 7204 | _MASKED_BIT_ENABLE(_3D_CHICKEN3_AA_LINE_QUALITY_FIX_ENABLE)); |
| 7205 | |
Rodrigo Vivi | 90007bc | 2017-08-15 16:16:48 -0700 | [diff] [blame] | 7206 | /* WaEnableChickenDCPR:cnl */ |
| 7207 | I915_WRITE(GEN8_CHICKEN_DCPR_1, |
| 7208 | I915_READ(GEN8_CHICKEN_DCPR_1) | MASK_WAKEMEM); |
| 7209 | |
| 7210 | /* WaFbcWakeMemOn:cnl */ |
| 7211 | I915_WRITE(DISP_ARB_CTL, I915_READ(DISP_ARB_CTL) | |
| 7212 | DISP_FBC_MEMORY_WAKE); |
| 7213 | |
Chris Wilson | 34991bd | 2017-11-11 10:03:36 +0000 | [diff] [blame] | 7214 | val = I915_READ(SLICE_UNIT_LEVEL_CLKGATE); |
| 7215 | /* ReadHitWriteOnlyDisable:cnl */ |
| 7216 | val |= RCCUNIT_CLKGATE_DIS; |
Chris Wilson | 34991bd | 2017-11-11 10:03:36 +0000 | [diff] [blame] | 7217 | I915_WRITE(SLICE_UNIT_LEVEL_CLKGATE, val); |
Rafael Antognolli | 01ab0f9 | 2017-12-15 16:11:16 -0800 | [diff] [blame] | 7218 | |
Rodrigo Vivi | a4713c5 | 2018-03-07 14:09:12 -0800 | [diff] [blame] | 7219 | /* Wa_2201832410:cnl */ |
| 7220 | val = I915_READ(SUBSLICE_UNIT_LEVEL_CLKGATE); |
| 7221 | val |= GWUNIT_CLKGATE_DIS; |
| 7222 | I915_WRITE(SUBSLICE_UNIT_LEVEL_CLKGATE, val); |
| 7223 | |
Rafael Antognolli | 01ab0f9 | 2017-12-15 16:11:16 -0800 | [diff] [blame] | 7224 | /* WaDisableVFclkgate:cnl */ |
Rodrigo Vivi | 14941b6 | 2018-03-05 17:20:00 -0800 | [diff] [blame] | 7225 | /* WaVFUnitClockGatingDisable:cnl */ |
Rafael Antognolli | 01ab0f9 | 2017-12-15 16:11:16 -0800 | [diff] [blame] | 7226 | val = I915_READ(UNSLICE_UNIT_LEVEL_CLKGATE); |
| 7227 | val |= VFUNIT_CLKGATE_DIS; |
| 7228 | I915_WRITE(UNSLICE_UNIT_LEVEL_CLKGATE, val); |
Rodrigo Vivi | 90007bc | 2017-08-15 16:16:48 -0700 | [diff] [blame] | 7229 | } |
| 7230 | |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7231 | static void cfl_init_clock_gating(struct drm_i915_private *dev_priv) |
| 7232 | { |
| 7233 | cnp_init_clock_gating(dev_priv); |
| 7234 | gen9_init_clock_gating(dev_priv); |
| 7235 | |
| 7236 | /* WaFbcNukeOnHostModify:cfl */ |
| 7237 | I915_WRITE(ILK_DPFC_CHICKEN, I915_READ(ILK_DPFC_CHICKEN) | |
| 7238 | ILK_DPFC_NUKE_ON_ANY_MODIFICATION); |
| 7239 | } |
| 7240 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7241 | static void kbl_init_clock_gating(struct drm_i915_private *dev_priv) |
Mika Kuoppala | 9498dba | 2016-06-07 17:19:01 +0300 | [diff] [blame] | 7242 | { |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7243 | gen9_init_clock_gating(dev_priv); |
Mika Kuoppala | 9498dba | 2016-06-07 17:19:01 +0300 | [diff] [blame] | 7244 | |
| 7245 | /* WaDisableSDEUnitClockGating:kbl */ |
| 7246 | if (IS_KBL_REVID(dev_priv, 0, KBL_REVID_B0)) |
| 7247 | I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) | |
| 7248 | GEN8_SDEUNIT_CLOCK_GATE_DISABLE); |
Mika Kuoppala | 8aeb7f6 | 2016-06-07 17:19:05 +0300 | [diff] [blame] | 7249 | |
| 7250 | /* WaDisableGamClockGating:kbl */ |
| 7251 | if (IS_KBL_REVID(dev_priv, 0, KBL_REVID_B0)) |
| 7252 | I915_WRITE(GEN6_UCGCTL1, I915_READ(GEN6_UCGCTL1) | |
| 7253 | GEN6_GAMUNIT_CLOCK_GATE_DISABLE); |
Mika Kuoppala | 031cd8c | 2016-06-07 17:19:18 +0300 | [diff] [blame] | 7254 | |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7255 | /* WaFbcNukeOnHostModify:kbl */ |
Mika Kuoppala | 031cd8c | 2016-06-07 17:19:18 +0300 | [diff] [blame] | 7256 | I915_WRITE(ILK_DPFC_CHICKEN, I915_READ(ILK_DPFC_CHICKEN) | |
| 7257 | ILK_DPFC_NUKE_ON_ANY_MODIFICATION); |
Mika Kuoppala | 9498dba | 2016-06-07 17:19:01 +0300 | [diff] [blame] | 7258 | } |
| 7259 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7260 | static void skl_init_clock_gating(struct drm_i915_private *dev_priv) |
Daniel Vetter | dc00b6a | 2016-05-19 09:14:20 +0200 | [diff] [blame] | 7261 | { |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7262 | gen9_init_clock_gating(dev_priv); |
Mika Kuoppala | 44fff99 | 2016-06-07 17:19:09 +0300 | [diff] [blame] | 7263 | |
| 7264 | /* WAC6entrylatency:skl */ |
| 7265 | I915_WRITE(FBC_LLC_READ_CTRL, I915_READ(FBC_LLC_READ_CTRL) | |
| 7266 | FBC_LLC_FULLY_OPEN); |
Mika Kuoppala | 031cd8c | 2016-06-07 17:19:18 +0300 | [diff] [blame] | 7267 | |
| 7268 | /* WaFbcNukeOnHostModify:skl */ |
| 7269 | I915_WRITE(ILK_DPFC_CHICKEN, I915_READ(ILK_DPFC_CHICKEN) | |
| 7270 | ILK_DPFC_NUKE_ON_ANY_MODIFICATION); |
Daniel Vetter | dc00b6a | 2016-05-19 09:14:20 +0200 | [diff] [blame] | 7271 | } |
| 7272 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7273 | static void bdw_init_clock_gating(struct drm_i915_private *dev_priv) |
Ben Widawsky | 1020a5c | 2013-11-02 21:07:06 -0700 | [diff] [blame] | 7274 | { |
Damien Lespiau | 07d27e2 | 2014-03-03 17:31:46 +0000 | [diff] [blame] | 7275 | enum pipe pipe; |
Ben Widawsky | 1020a5c | 2013-11-02 21:07:06 -0700 | [diff] [blame] | 7276 | |
Ben Widawsky | ab57fff | 2013-12-12 15:28:04 -0800 | [diff] [blame] | 7277 | /* WaSwitchSolVfFArbitrationPriority:bdw */ |
Ben Widawsky | 50ed5fb | 2013-11-02 21:07:40 -0700 | [diff] [blame] | 7278 | I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) | HSW_ECOCHK_ARB_PRIO_SOL); |
Ben Widawsky | fe4ab3c | 2013-11-02 21:07:54 -0700 | [diff] [blame] | 7279 | |
Ben Widawsky | ab57fff | 2013-12-12 15:28:04 -0800 | [diff] [blame] | 7280 | /* WaPsrDPAMaskVBlankInSRD:bdw */ |
Ben Widawsky | fe4ab3c | 2013-11-02 21:07:54 -0700 | [diff] [blame] | 7281 | I915_WRITE(CHICKEN_PAR1_1, |
| 7282 | I915_READ(CHICKEN_PAR1_1) | DPA_MASK_VBLANK_SRD); |
| 7283 | |
Ben Widawsky | ab57fff | 2013-12-12 15:28:04 -0800 | [diff] [blame] | 7284 | /* WaPsrDPRSUnmaskVBlankInSRD:bdw */ |
Damien Lespiau | 055e393 | 2014-08-18 13:49:10 +0100 | [diff] [blame] | 7285 | for_each_pipe(dev_priv, pipe) { |
Damien Lespiau | 07d27e2 | 2014-03-03 17:31:46 +0000 | [diff] [blame] | 7286 | I915_WRITE(CHICKEN_PIPESL_1(pipe), |
Ville Syrjälä | c7c6562 | 2014-03-05 13:05:45 +0200 | [diff] [blame] | 7287 | I915_READ(CHICKEN_PIPESL_1(pipe)) | |
Ville Syrjälä | 8f670bb | 2014-03-05 13:05:47 +0200 | [diff] [blame] | 7288 | BDW_DPRS_MASK_VBLANK_SRD); |
Ben Widawsky | fe4ab3c | 2013-11-02 21:07:54 -0700 | [diff] [blame] | 7289 | } |
Ben Widawsky | 63801f2 | 2013-12-12 17:26:03 -0800 | [diff] [blame] | 7290 | |
Ben Widawsky | ab57fff | 2013-12-12 15:28:04 -0800 | [diff] [blame] | 7291 | /* WaVSRefCountFullforceMissDisable:bdw */ |
| 7292 | /* WaDSRefCountFullforceMissDisable:bdw */ |
| 7293 | I915_WRITE(GEN7_FF_THREAD_MODE, |
| 7294 | I915_READ(GEN7_FF_THREAD_MODE) & |
| 7295 | ~(GEN8_FF_DS_REF_CNT_FFME | GEN7_FF_VS_REF_CNT_FFME)); |
Ville Syrjälä | 36075a4 | 2014-02-04 21:59:21 +0200 | [diff] [blame] | 7296 | |
Ville Syrjälä | 295e8bb | 2014-02-27 21:59:01 +0200 | [diff] [blame] | 7297 | I915_WRITE(GEN6_RC_SLEEP_PSMI_CONTROL, |
| 7298 | _MASKED_BIT_ENABLE(GEN8_RC_SEMA_IDLE_MSG_DISABLE)); |
Ville Syrjälä | 4f1ca9e | 2014-02-27 21:59:02 +0200 | [diff] [blame] | 7299 | |
| 7300 | /* WaDisableSDEUnitClockGating:bdw */ |
| 7301 | I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) | |
| 7302 | GEN8_SDEUNIT_CLOCK_GATE_DISABLE); |
Damien Lespiau | 5d70868 | 2014-03-26 18:41:51 +0000 | [diff] [blame] | 7303 | |
Imre Deak | 450174f | 2016-05-03 15:54:21 +0300 | [diff] [blame] | 7304 | /* WaProgramL3SqcReg1Default:bdw */ |
| 7305 | gen8_set_l3sqc_credits(dev_priv, 30, 2); |
Ville Syrjälä | 4d487cf | 2015-05-19 20:32:56 +0300 | [diff] [blame] | 7306 | |
Mika Kuoppala | 17e0adf | 2016-06-07 17:19:02 +0300 | [diff] [blame] | 7307 | /* WaKVMNotificationOnConfigChange:bdw */ |
| 7308 | I915_WRITE(CHICKEN_PAR2_1, I915_READ(CHICKEN_PAR2_1) |
| 7309 | | KVM_CONFIG_CHANGE_NOTIFICATION_SELECT); |
| 7310 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7311 | lpt_init_clock_gating(dev_priv); |
Robert Bragg | 9cc1973 | 2017-02-12 13:32:52 +0000 | [diff] [blame] | 7312 | |
| 7313 | /* WaDisableDopClockGating:bdw |
| 7314 | * |
| 7315 | * Also see the CHICKEN2 write in bdw_init_workarounds() to disable DOP |
| 7316 | * clock gating. |
| 7317 | */ |
| 7318 | I915_WRITE(GEN6_UCGCTL1, |
| 7319 | I915_READ(GEN6_UCGCTL1) | GEN6_EU_TCUNIT_CLOCK_GATE_DISABLE); |
Ben Widawsky | 1020a5c | 2013-11-02 21:07:06 -0700 | [diff] [blame] | 7320 | } |
| 7321 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7322 | static void hsw_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | cad2a2d | 2012-07-02 11:51:09 -0300 | [diff] [blame] | 7323 | { |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7324 | /* This is required by WaCatErrorRejectionIssue:hsw */ |
Eugeni Dodonov | cad2a2d | 2012-07-02 11:51:09 -0300 | [diff] [blame] | 7325 | I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG, |
Chris Wilson | f93ec5f | 2020-06-11 10:30:15 +0100 | [diff] [blame^] | 7326 | I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) | |
| 7327 | GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB); |
Kenneth Graunke | 9441159 | 2014-12-31 16:23:00 -0800 | [diff] [blame] | 7328 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7329 | /* WaSwitchSolVfFArbitrationPriority:hsw */ |
Ben Widawsky | e3dff58 | 2013-03-20 14:49:14 -0700 | [diff] [blame] | 7330 | I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) | HSW_ECOCHK_ARB_PRIO_SOL); |
| 7331 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7332 | lpt_init_clock_gating(dev_priv); |
Eugeni Dodonov | cad2a2d | 2012-07-02 11:51:09 -0300 | [diff] [blame] | 7333 | } |
| 7334 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7335 | static void ivb_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7336 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 7337 | u32 snpcr; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7338 | |
Damien Lespiau | 231e54f | 2012-10-19 17:55:41 +0100 | [diff] [blame] | 7339 | I915_WRITE(ILK_DSPCLK_GATE_D, ILK_VRHUNIT_CLOCK_GATE_DISABLE); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7340 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7341 | /* WaDisableEarlyCull:ivb */ |
Jesse Barnes | 87f8020 | 2012-10-02 17:43:41 -0500 | [diff] [blame] | 7342 | I915_WRITE(_3D_CHICKEN3, |
| 7343 | _MASKED_BIT_ENABLE(_3D_CHICKEN_SF_DISABLE_OBJEND_CULL)); |
| 7344 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7345 | /* WaDisableBackToBackFlipFix:ivb */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7346 | I915_WRITE(IVB_CHICKEN3, |
| 7347 | CHICKEN3_DGMG_REQ_OUT_FIX_DISABLE | |
| 7348 | CHICKEN3_DGMG_DONE_FIX_DISABLE); |
| 7349 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7350 | /* WaDisablePSDDualDispatchEnable:ivb */ |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 7351 | if (IS_IVB_GT1(dev_priv)) |
Jesse Barnes | 12f3382 | 2012-10-25 12:15:45 -0700 | [diff] [blame] | 7352 | I915_WRITE(GEN7_HALF_SLICE_CHICKEN1, |
| 7353 | _MASKED_BIT_ENABLE(GEN7_PSD_SINGLE_PORT_DISPATCH_ENABLE)); |
Jesse Barnes | 12f3382 | 2012-10-25 12:15:45 -0700 | [diff] [blame] | 7354 | |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7355 | /* WaDisable_RenderCache_OperationalFlush:ivb */ |
| 7356 | I915_WRITE(CACHE_MODE_0_GEN7, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
| 7357 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7358 | /* Apply the WaDisableRHWOOptimizationForRenderHang:ivb workaround. */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7359 | I915_WRITE(GEN7_COMMON_SLICE_CHICKEN1, |
| 7360 | GEN7_CSC1_RHWO_OPT_DISABLE_IN_RCC); |
| 7361 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7362 | /* WaApplyL3ControlAndL3ChickenMode:ivb */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7363 | I915_WRITE(GEN7_L3CNTLREG1, |
| 7364 | GEN7_WA_FOR_GEN7_L3_CONTROL); |
| 7365 | I915_WRITE(GEN7_L3_CHICKEN_MODE_REGISTER, |
Jesse Barnes | 8ab4397 | 2012-10-25 12:15:42 -0700 | [diff] [blame] | 7366 | GEN7_WA_L3_CHICKEN_MODE); |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 7367 | if (IS_IVB_GT1(dev_priv)) |
Jesse Barnes | 8ab4397 | 2012-10-25 12:15:42 -0700 | [diff] [blame] | 7368 | I915_WRITE(GEN7_ROW_CHICKEN2, |
| 7369 | _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE)); |
Ville Syrjälä | 412236c | 2014-01-22 21:32:44 +0200 | [diff] [blame] | 7370 | else { |
| 7371 | /* must write both registers */ |
| 7372 | I915_WRITE(GEN7_ROW_CHICKEN2, |
| 7373 | _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE)); |
Jesse Barnes | 8ab4397 | 2012-10-25 12:15:42 -0700 | [diff] [blame] | 7374 | I915_WRITE(GEN7_ROW_CHICKEN2_GT2, |
| 7375 | _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE)); |
Ville Syrjälä | 412236c | 2014-01-22 21:32:44 +0200 | [diff] [blame] | 7376 | } |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7377 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7378 | /* WaForceL3Serialization:ivb */ |
Jesse Barnes | 61939d9 | 2012-10-02 17:43:38 -0500 | [diff] [blame] | 7379 | I915_WRITE(GEN7_L3SQCREG4, I915_READ(GEN7_L3SQCREG4) & |
| 7380 | ~L3SQ_URB_READ_CAM_MATCH_DISABLE); |
| 7381 | |
Ville Syrjälä | 1b80a19a | 2014-01-22 21:32:53 +0200 | [diff] [blame] | 7382 | /* |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7383 | * According to the spec, bit 13 (RCZUNIT) must be set on IVB. |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7384 | * This implements the WaDisableRCZUnitClockGating:ivb workaround. |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7385 | */ |
| 7386 | I915_WRITE(GEN6_UCGCTL2, |
Ville Syrjälä | 28acf3b | 2014-01-22 21:32:48 +0200 | [diff] [blame] | 7387 | GEN6_RCZUNIT_CLOCK_GATE_DISABLE); |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7388 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7389 | /* This is required by WaCatErrorRejectionIssue:ivb */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7390 | I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG, |
| 7391 | I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) | |
| 7392 | GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB); |
| 7393 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7394 | g4x_disable_trickle_feed(dev_priv); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7395 | |
| 7396 | gen7_setup_fixed_func_scheduler(dev_priv); |
Daniel Vetter | 97e1930 | 2012-04-24 16:00:21 +0200 | [diff] [blame] | 7397 | |
Chris Wilson | 2272134 | 2014-03-04 09:41:43 +0000 | [diff] [blame] | 7398 | if (0) { /* causes HiZ corruption on ivb:gt1 */ |
| 7399 | /* enable HiZ Raw Stall Optimization */ |
| 7400 | I915_WRITE(CACHE_MODE_0_GEN7, |
| 7401 | _MASKED_BIT_DISABLE(HIZ_RAW_STALL_OPT_DISABLE)); |
| 7402 | } |
Chia-I Wu | 116f2b6 | 2014-01-28 13:29:34 +0800 | [diff] [blame] | 7403 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7404 | /* WaDisable4x2SubspanOptimization:ivb */ |
Daniel Vetter | 97e1930 | 2012-04-24 16:00:21 +0200 | [diff] [blame] | 7405 | I915_WRITE(CACHE_MODE_1, |
| 7406 | _MASKED_BIT_ENABLE(PIXEL_SUBSPAN_COLLECT_OPT_DISABLE)); |
Ben Widawsky | 2084822 | 2012-05-04 18:58:59 -0700 | [diff] [blame] | 7407 | |
Ville Syrjälä | a607c1a | 2014-02-04 21:59:19 +0200 | [diff] [blame] | 7408 | /* |
| 7409 | * BSpec recommends 8x4 when MSAA is used, |
| 7410 | * however in practice 16x4 seems fastest. |
Ville Syrjälä | c5c98a5 | 2014-02-05 12:43:47 +0200 | [diff] [blame] | 7411 | * |
| 7412 | * Note that PS/WM thread counts depend on the WIZ hashing |
| 7413 | * disable bit, which we don't touch here, but it's good |
| 7414 | * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM). |
Ville Syrjälä | a607c1a | 2014-02-04 21:59:19 +0200 | [diff] [blame] | 7415 | */ |
| 7416 | I915_WRITE(GEN7_GT_MODE, |
Damien Lespiau | 9853325 | 2014-12-08 17:33:51 +0000 | [diff] [blame] | 7417 | _MASKED_FIELD(GEN6_WIZ_HASHING_MASK, GEN6_WIZ_HASHING_16x4)); |
Ville Syrjälä | a607c1a | 2014-02-04 21:59:19 +0200 | [diff] [blame] | 7418 | |
Ben Widawsky | 2084822 | 2012-05-04 18:58:59 -0700 | [diff] [blame] | 7419 | snpcr = I915_READ(GEN6_MBCUNIT_SNPCR); |
| 7420 | snpcr &= ~GEN6_MBC_SNPCR_MASK; |
| 7421 | snpcr |= GEN6_MBC_SNPCR_MED; |
| 7422 | I915_WRITE(GEN6_MBCUNIT_SNPCR, snpcr); |
Daniel Vetter | 3107bd4 | 2012-10-31 22:52:31 +0100 | [diff] [blame] | 7423 | |
Tvrtko Ursulin | 6e26695 | 2016-10-13 11:02:53 +0100 | [diff] [blame] | 7424 | if (!HAS_PCH_NOP(dev_priv)) |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7425 | cpt_init_clock_gating(dev_priv); |
Daniel Vetter | 1d7aaa0 | 2013-02-09 21:03:42 +0100 | [diff] [blame] | 7426 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7427 | gen6_check_mch_setup(dev_priv); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7428 | } |
| 7429 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7430 | static void vlv_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7431 | { |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7432 | /* WaDisableEarlyCull:vlv */ |
Jesse Barnes | 87f8020 | 2012-10-02 17:43:41 -0500 | [diff] [blame] | 7433 | I915_WRITE(_3D_CHICKEN3, |
| 7434 | _MASKED_BIT_ENABLE(_3D_CHICKEN_SF_DISABLE_OBJEND_CULL)); |
| 7435 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7436 | /* WaDisableBackToBackFlipFix:vlv */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7437 | I915_WRITE(IVB_CHICKEN3, |
| 7438 | CHICKEN3_DGMG_REQ_OUT_FIX_DISABLE | |
| 7439 | CHICKEN3_DGMG_DONE_FIX_DISABLE); |
| 7440 | |
Ville Syrjälä | fad7d36 | 2014-01-22 21:32:39 +0200 | [diff] [blame] | 7441 | /* WaPsdDispatchEnable:vlv */ |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7442 | /* WaDisablePSDDualDispatchEnable:vlv */ |
Jesse Barnes | 12f3382 | 2012-10-25 12:15:45 -0700 | [diff] [blame] | 7443 | I915_WRITE(GEN7_HALF_SLICE_CHICKEN1, |
Jesse Barnes | d3bc030 | 2013-03-08 10:45:51 -0800 | [diff] [blame] | 7444 | _MASKED_BIT_ENABLE(GEN7_MAX_PS_THREAD_DEP | |
| 7445 | GEN7_PSD_SINGLE_PORT_DISPATCH_ENABLE)); |
Jesse Barnes | 12f3382 | 2012-10-25 12:15:45 -0700 | [diff] [blame] | 7446 | |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7447 | /* WaDisable_RenderCache_OperationalFlush:vlv */ |
| 7448 | I915_WRITE(CACHE_MODE_0_GEN7, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
| 7449 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7450 | /* WaForceL3Serialization:vlv */ |
Jesse Barnes | 61939d9 | 2012-10-02 17:43:38 -0500 | [diff] [blame] | 7451 | I915_WRITE(GEN7_L3SQCREG4, I915_READ(GEN7_L3SQCREG4) & |
| 7452 | ~L3SQ_URB_READ_CAM_MATCH_DISABLE); |
| 7453 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7454 | /* WaDisableDopClockGating:vlv */ |
Jesse Barnes | 8ab4397 | 2012-10-25 12:15:42 -0700 | [diff] [blame] | 7455 | I915_WRITE(GEN7_ROW_CHICKEN2, |
| 7456 | _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE)); |
| 7457 | |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7458 | /* This is required by WaCatErrorRejectionIssue:vlv */ |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7459 | I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG, |
| 7460 | I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) | |
| 7461 | GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB); |
| 7462 | |
Ville Syrjälä | 46680e0 | 2014-01-22 21:33:01 +0200 | [diff] [blame] | 7463 | gen7_setup_fixed_func_scheduler(dev_priv); |
| 7464 | |
Ville Syrjälä | 3c0edae | 2014-01-22 21:32:56 +0200 | [diff] [blame] | 7465 | /* |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7466 | * According to the spec, bit 13 (RCZUNIT) must be set on IVB. |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7467 | * This implements the WaDisableRCZUnitClockGating:vlv workaround. |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7468 | */ |
| 7469 | I915_WRITE(GEN6_UCGCTL2, |
Ville Syrjälä | 3c0edae | 2014-01-22 21:32:56 +0200 | [diff] [blame] | 7470 | GEN6_RCZUNIT_CLOCK_GATE_DISABLE); |
Jesse Barnes | 0f846f8 | 2012-06-14 11:04:47 -0700 | [diff] [blame] | 7471 | |
Akash Goel | c98f506 | 2014-03-24 23:00:07 +0530 | [diff] [blame] | 7472 | /* WaDisableL3Bank2xClockGate:vlv |
| 7473 | * Disabling L3 clock gating - MMIO 940c[25] = 1 |
| 7474 | * Set bit 25 to disable L3_BANK_2x_CLK_GATING */ |
| 7475 | I915_WRITE(GEN7_UCGCTL4, |
| 7476 | I915_READ(GEN7_UCGCTL4) | GEN7_L3BANK2X_CLOCK_GATE_DISABLE); |
Jesse Barnes | e3f33d4 | 2012-06-14 11:04:50 -0700 | [diff] [blame] | 7477 | |
Ville Syrjälä | afd58e7 | 2014-01-22 21:33:03 +0200 | [diff] [blame] | 7478 | /* |
| 7479 | * BSpec says this must be set, even though |
| 7480 | * WaDisable4x2SubspanOptimization isn't listed for VLV. |
| 7481 | */ |
Daniel Vetter | 6b26c86 | 2012-04-24 14:04:12 +0200 | [diff] [blame] | 7482 | I915_WRITE(CACHE_MODE_1, |
| 7483 | _MASKED_BIT_ENABLE(PIXEL_SUBSPAN_COLLECT_OPT_DISABLE)); |
Jesse Barnes | 7983117 | 2012-06-20 10:53:12 -0700 | [diff] [blame] | 7484 | |
| 7485 | /* |
Ville Syrjälä | da2518f | 2015-01-21 19:38:01 +0200 | [diff] [blame] | 7486 | * BSpec recommends 8x4 when MSAA is used, |
| 7487 | * however in practice 16x4 seems fastest. |
| 7488 | * |
| 7489 | * Note that PS/WM thread counts depend on the WIZ hashing |
| 7490 | * disable bit, which we don't touch here, but it's good |
| 7491 | * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM). |
| 7492 | */ |
| 7493 | I915_WRITE(GEN7_GT_MODE, |
| 7494 | _MASKED_FIELD(GEN6_WIZ_HASHING_MASK, GEN6_WIZ_HASHING_16x4)); |
| 7495 | |
| 7496 | /* |
Ville Syrjälä | 031994e | 2014-01-22 21:32:46 +0200 | [diff] [blame] | 7497 | * WaIncreaseL3CreditsForVLVB0:vlv |
| 7498 | * This is the hardware default actually. |
| 7499 | */ |
| 7500 | I915_WRITE(GEN7_L3SQCREG1, VLV_B0_WA_L3SQCREG1_VALUE); |
| 7501 | |
| 7502 | /* |
Damien Lespiau | ecdb4eb7 | 2013-05-03 18:48:10 +0100 | [diff] [blame] | 7503 | * WaDisableVLVClockGating_VBIIssue:vlv |
Jesse Barnes | 2d80957 | 2012-10-25 12:15:44 -0700 | [diff] [blame] | 7504 | * Disable clock gating on the GCFG unit to prevent a delay |
| 7505 | * in the reporting of vblank events. |
| 7506 | */ |
Ville Syrjälä | 7a0d1ee | 2014-01-22 21:33:04 +0200 | [diff] [blame] | 7507 | I915_WRITE(VLV_GUNIT_CLOCK_GATE, GCFG_DIS); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7508 | } |
| 7509 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7510 | static void chv_init_clock_gating(struct drm_i915_private *dev_priv) |
Ville Syrjälä | a4565da | 2014-04-09 13:28:10 +0300 | [diff] [blame] | 7511 | { |
Ville Syrjälä | 232ce33 | 2014-04-09 13:28:35 +0300 | [diff] [blame] | 7512 | /* WaVSRefCountFullforceMissDisable:chv */ |
| 7513 | /* WaDSRefCountFullforceMissDisable:chv */ |
| 7514 | I915_WRITE(GEN7_FF_THREAD_MODE, |
| 7515 | I915_READ(GEN7_FF_THREAD_MODE) & |
| 7516 | ~(GEN8_FF_DS_REF_CNT_FFME | GEN7_FF_VS_REF_CNT_FFME)); |
Ville Syrjälä | acea6f9 | 2014-04-09 13:28:36 +0300 | [diff] [blame] | 7517 | |
| 7518 | /* WaDisableSemaphoreAndSyncFlipWait:chv */ |
| 7519 | I915_WRITE(GEN6_RC_SLEEP_PSMI_CONTROL, |
| 7520 | _MASKED_BIT_ENABLE(GEN8_RC_SEMA_IDLE_MSG_DISABLE)); |
Ville Syrjälä | 0846697 | 2014-04-09 13:28:37 +0300 | [diff] [blame] | 7521 | |
| 7522 | /* WaDisableCSUnitClockGating:chv */ |
| 7523 | I915_WRITE(GEN6_UCGCTL1, I915_READ(GEN6_UCGCTL1) | |
| 7524 | GEN6_CSUNIT_CLOCK_GATE_DISABLE); |
Ville Syrjälä | c631780 | 2014-04-09 13:28:38 +0300 | [diff] [blame] | 7525 | |
| 7526 | /* WaDisableSDEUnitClockGating:chv */ |
| 7527 | I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) | |
| 7528 | GEN8_SDEUNIT_CLOCK_GATE_DISABLE); |
Ville Syrjälä | 6d50b06 | 2015-05-19 20:32:57 +0300 | [diff] [blame] | 7529 | |
| 7530 | /* |
Imre Deak | 450174f | 2016-05-03 15:54:21 +0300 | [diff] [blame] | 7531 | * WaProgramL3SqcReg1Default:chv |
| 7532 | * See gfxspecs/Related Documents/Performance Guide/ |
| 7533 | * LSQC Setting Recommendations. |
| 7534 | */ |
| 7535 | gen8_set_l3sqc_credits(dev_priv, 38, 2); |
Ville Syrjälä | a4565da | 2014-04-09 13:28:10 +0300 | [diff] [blame] | 7536 | } |
| 7537 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7538 | static void g4x_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7539 | { |
Jani Nikula | 5ce9a649 | 2019-01-18 14:01:20 +0200 | [diff] [blame] | 7540 | u32 dspclk_gate; |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7541 | |
| 7542 | I915_WRITE(RENCLK_GATE_D1, 0); |
| 7543 | I915_WRITE(RENCLK_GATE_D2, VF_UNIT_CLOCK_GATE_DISABLE | |
| 7544 | GS_UNIT_CLOCK_GATE_DISABLE | |
| 7545 | CL_UNIT_CLOCK_GATE_DISABLE); |
| 7546 | I915_WRITE(RAMCLK_GATE_D, 0); |
| 7547 | dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE | |
| 7548 | OVRUNIT_CLOCK_GATE_DISABLE | |
| 7549 | OVCUNIT_CLOCK_GATE_DISABLE; |
Tvrtko Ursulin | 50a0bc9 | 2016-10-13 11:02:58 +0100 | [diff] [blame] | 7550 | if (IS_GM45(dev_priv)) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7551 | dspclk_gate |= DSSUNIT_CLOCK_GATE_DISABLE; |
| 7552 | I915_WRITE(DSPCLK_GATE_D, dspclk_gate); |
Daniel Vetter | 4358a37 | 2012-10-18 11:49:51 +0200 | [diff] [blame] | 7553 | |
| 7554 | /* WaDisableRenderCachePipelinedFlush */ |
| 7555 | I915_WRITE(CACHE_MODE_0, |
| 7556 | _MASKED_BIT_ENABLE(CM0_PIPELINED_RENDER_FLUSH_DISABLE)); |
Ville Syrjälä | de1aa62 | 2013-06-07 10:47:01 +0300 | [diff] [blame] | 7557 | |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7558 | /* WaDisable_RenderCache_OperationalFlush:g4x */ |
| 7559 | I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
| 7560 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7561 | g4x_disable_trickle_feed(dev_priv); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7562 | } |
| 7563 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7564 | static void i965gm_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7565 | { |
Tvrtko Ursulin | 4f5fd91 | 2019-06-11 11:45:48 +0100 | [diff] [blame] | 7566 | struct intel_uncore *uncore = &dev_priv->uncore; |
| 7567 | |
| 7568 | intel_uncore_write(uncore, RENCLK_GATE_D1, I965_RCC_CLOCK_GATE_DISABLE); |
| 7569 | intel_uncore_write(uncore, RENCLK_GATE_D2, 0); |
| 7570 | intel_uncore_write(uncore, DSPCLK_GATE_D, 0); |
| 7571 | intel_uncore_write(uncore, RAMCLK_GATE_D, 0); |
| 7572 | intel_uncore_write16(uncore, DEUC, 0); |
| 7573 | intel_uncore_write(uncore, |
| 7574 | MI_ARB_STATE, |
| 7575 | _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE)); |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7576 | |
| 7577 | /* WaDisable_RenderCache_OperationalFlush:gen4 */ |
Tvrtko Ursulin | 4f5fd91 | 2019-06-11 11:45:48 +0100 | [diff] [blame] | 7578 | intel_uncore_write(uncore, |
| 7579 | CACHE_MODE_0, |
| 7580 | _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7581 | } |
| 7582 | |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7583 | static void i965g_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7584 | { |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7585 | I915_WRITE(RENCLK_GATE_D1, I965_RCZ_CLOCK_GATE_DISABLE | |
| 7586 | I965_RCC_CLOCK_GATE_DISABLE | |
| 7587 | I965_RCPB_CLOCK_GATE_DISABLE | |
| 7588 | I965_ISC_CLOCK_GATE_DISABLE | |
| 7589 | I965_FBC_CLOCK_GATE_DISABLE); |
| 7590 | I915_WRITE(RENCLK_GATE_D2, 0); |
Ville Syrjälä | 20f9496 | 2013-06-07 10:47:02 +0300 | [diff] [blame] | 7591 | I915_WRITE(MI_ARB_STATE, |
| 7592 | _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE)); |
Akash Goel | 4e04632 | 2014-04-04 17:14:38 +0530 | [diff] [blame] | 7593 | |
| 7594 | /* WaDisable_RenderCache_OperationalFlush:gen4 */ |
| 7595 | I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7596 | } |
| 7597 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7598 | static void gen3_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7599 | { |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7600 | u32 dstate = I915_READ(D_STATE); |
| 7601 | |
| 7602 | dstate |= DSTATE_PLL_D3_OFF | DSTATE_GFX_CLOCK_GATING | |
| 7603 | DSTATE_DOT_CLOCK_GATING; |
| 7604 | I915_WRITE(D_STATE, dstate); |
Chris Wilson | 13a86b8 | 2012-04-24 14:51:43 +0100 | [diff] [blame] | 7605 | |
Ville Syrjälä | 9b1e14f | 2016-10-31 22:37:15 +0200 | [diff] [blame] | 7606 | if (IS_PINEVIEW(dev_priv)) |
Chris Wilson | 13a86b8 | 2012-04-24 14:51:43 +0100 | [diff] [blame] | 7607 | I915_WRITE(ECOSKPD, _MASKED_BIT_ENABLE(ECO_GATING_CX_ONLY)); |
Daniel Vetter | 974a3b0 | 2012-09-09 11:54:16 +0200 | [diff] [blame] | 7608 | |
| 7609 | /* IIR "flip pending" means done if this bit is set */ |
| 7610 | I915_WRITE(ECOSKPD, _MASKED_BIT_DISABLE(ECO_FLIP_DONE)); |
Ville Syrjälä | 12fabbcb9 | 2014-02-25 15:13:38 +0200 | [diff] [blame] | 7611 | |
| 7612 | /* interrupts should cause a wake up from C3 */ |
Ville Syrjälä | 3299254 | 2014-02-25 15:13:39 +0200 | [diff] [blame] | 7613 | I915_WRITE(INSTPM, _MASKED_BIT_ENABLE(INSTPM_AGPBUSY_INT_EN)); |
Ville Syrjälä | dbb4274 | 2014-02-25 15:13:41 +0200 | [diff] [blame] | 7614 | |
| 7615 | /* On GEN3 we really need to make sure the ARB C3 LP bit is set */ |
| 7616 | I915_WRITE(MI_ARB_STATE, _MASKED_BIT_ENABLE(MI_ARB_C3_LP_WRITE_ENABLE)); |
Ville Syrjälä | 1038392 | 2014-08-15 01:21:54 +0300 | [diff] [blame] | 7617 | |
| 7618 | I915_WRITE(MI_ARB_STATE, |
| 7619 | _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7620 | } |
| 7621 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7622 | static void i85x_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7623 | { |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7624 | I915_WRITE(RENCLK_GATE_D1, SV_CLOCK_GATE_DISABLE); |
Ville Syrjälä | 54e472a | 2014-02-25 15:13:40 +0200 | [diff] [blame] | 7625 | |
| 7626 | /* interrupts should cause a wake up from C3 */ |
| 7627 | I915_WRITE(MI_STATE, _MASKED_BIT_ENABLE(MI_AGPBUSY_INT_EN) | |
| 7628 | _MASKED_BIT_DISABLE(MI_AGPBUSY_830_MODE)); |
Ville Syrjälä | 1038392 | 2014-08-15 01:21:54 +0300 | [diff] [blame] | 7629 | |
| 7630 | I915_WRITE(MEM_MODE, |
| 7631 | _MASKED_BIT_ENABLE(MEM_DISPLAY_TRICKLE_FEED_DISABLE)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7632 | } |
| 7633 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7634 | static void i830_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7635 | { |
Ville Syrjälä | 1038392 | 2014-08-15 01:21:54 +0300 | [diff] [blame] | 7636 | I915_WRITE(MEM_MODE, |
| 7637 | _MASKED_BIT_ENABLE(MEM_DISPLAY_A_TRICKLE_FEED_DISABLE) | |
| 7638 | _MASKED_BIT_ENABLE(MEM_DISPLAY_B_TRICKLE_FEED_DISABLE)); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7639 | } |
| 7640 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7641 | void intel_init_clock_gating(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7642 | { |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7643 | dev_priv->display.init_clock_gating(dev_priv); |
Eugeni Dodonov | 6f1d69b | 2012-04-18 15:29:25 -0300 | [diff] [blame] | 7644 | } |
| 7645 | |
Ville Syrjälä | 712bf36 | 2016-10-31 22:37:23 +0200 | [diff] [blame] | 7646 | void intel_suspend_hw(struct drm_i915_private *dev_priv) |
Imre Deak | 7d708ee | 2013-04-17 14:04:50 +0300 | [diff] [blame] | 7647 | { |
Ville Syrjälä | 712bf36 | 2016-10-31 22:37:23 +0200 | [diff] [blame] | 7648 | if (HAS_PCH_LPT(dev_priv)) |
| 7649 | lpt_suspend_hw(dev_priv); |
Imre Deak | 7d708ee | 2013-04-17 14:04:50 +0300 | [diff] [blame] | 7650 | } |
| 7651 | |
Ville Syrjälä | 46f16e6 | 2016-10-31 22:37:22 +0200 | [diff] [blame] | 7652 | static void nop_init_clock_gating(struct drm_i915_private *dev_priv) |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7653 | { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 7654 | drm_dbg_kms(&dev_priv->drm, |
| 7655 | "No clock gating settings or workarounds applied.\n"); |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7656 | } |
| 7657 | |
| 7658 | /** |
| 7659 | * intel_init_clock_gating_hooks - setup the clock gating hooks |
| 7660 | * @dev_priv: device private |
| 7661 | * |
| 7662 | * Setup the hooks that configure which clocks of a given platform can be |
| 7663 | * gated and also apply various GT and display specific workarounds for these |
| 7664 | * platforms. Note that some GT specific workarounds are applied separately |
| 7665 | * when GPU contexts or batchbuffers start their execution. |
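| | * |
| | * The platform checks below are ordered roughly newest to oldest and the |
| | * first match wins. |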
| 7666 | */ |
| 7667 | void intel_init_clock_gating_hooks(struct drm_i915_private *dev_priv) |
| 7668 | { |
Lucas De Marchi | 13e53c5 | 2019-08-17 02:38:42 -0700 | [diff] [blame] | 7669 | if (IS_GEN(dev_priv, 12)) |
Michel Thierry | 5d86923 | 2019-08-23 01:20:34 -0700 | [diff] [blame] | 7670 | dev_priv->display.init_clock_gating = tgl_init_clock_gating; |
Lucas De Marchi | 13e53c5 | 2019-08-17 02:38:42 -0700 | [diff] [blame] | 7671 | else if (IS_GEN(dev_priv, 11)) |
Oscar Mateo | d65dc3e | 2018-05-08 14:29:24 -0700 | [diff] [blame] | 7672 | dev_priv->display.init_clock_gating = icl_init_clock_gating; |
Oscar Mateo | cc38cae | 2018-05-08 14:29:23 -0700 | [diff] [blame] | 7673 | else if (IS_CANNONLAKE(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7674 | dev_priv->display.init_clock_gating = cnl_init_clock_gating; |
Chris Wilson | 5f4ae27 | 2020-06-02 15:05:40 +0100 | [diff] [blame] | 7675 | else if (IS_COFFEELAKE(dev_priv) || IS_COMETLAKE(dev_priv)) |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7676 | dev_priv->display.init_clock_gating = cfl_init_clock_gating; |
Rodrigo Vivi | 90007bc | 2017-08-15 16:16:48 -0700 | [diff] [blame] | 7677 | else if (IS_SKYLAKE(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7678 | dev_priv->display.init_clock_gating = skl_init_clock_gating; |
Rodrigo Vivi | 0a46ddd | 2017-08-30 21:52:23 -0700 | [diff] [blame] | 7679 | else if (IS_KABYLAKE(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7680 | dev_priv->display.init_clock_gating = kbl_init_clock_gating; |
Ander Conselvan de Oliveira | 9fb5026 | 2017-01-26 11:16:58 +0200 | [diff] [blame] | 7681 | else if (IS_BROXTON(dev_priv)) |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7682 | dev_priv->display.init_clock_gating = bxt_init_clock_gating; |
Ander Conselvan de Oliveira | 9fb5026 | 2017-01-26 11:16:58 +0200 | [diff] [blame] | 7683 | else if (IS_GEMINILAKE(dev_priv)) |
| 7684 | dev_priv->display.init_clock_gating = glk_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7685 | else if (IS_BROADWELL(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7686 | dev_priv->display.init_clock_gating = bdw_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7687 | else if (IS_CHERRYVIEW(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7688 | dev_priv->display.init_clock_gating = chv_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7689 | else if (IS_HASWELL(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7690 | dev_priv->display.init_clock_gating = hsw_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7691 | else if (IS_IVYBRIDGE(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7692 | dev_priv->display.init_clock_gating = ivb_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7693 | else if (IS_VALLEYVIEW(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7694 | dev_priv->display.init_clock_gating = vlv_init_clock_gating; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7695 | else if (IS_GEN(dev_priv, 6)) |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7696 | dev_priv->display.init_clock_gating = gen6_init_clock_gating; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7697 | else if (IS_GEN(dev_priv, 5)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7698 | dev_priv->display.init_clock_gating = ilk_init_clock_gating; |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7699 | else if (IS_G4X(dev_priv)) |
| 7700 | dev_priv->display.init_clock_gating = g4x_init_clock_gating; |
Jani Nikula | c0f8683 | 2016-12-07 12:13:04 +0200 | [diff] [blame] | 7701 | else if (IS_I965GM(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7702 | dev_priv->display.init_clock_gating = i965gm_init_clock_gating; |
Jani Nikula | c0f8683 | 2016-12-07 12:13:04 +0200 | [diff] [blame] | 7703 | else if (IS_I965G(dev_priv)) |
Rodrigo Vivi | 91200c0 | 2017-08-28 22:20:26 -0700 | [diff] [blame] | 7704 | dev_priv->display.init_clock_gating = i965g_init_clock_gating; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7705 | else if (IS_GEN(dev_priv, 3)) |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7706 | dev_priv->display.init_clock_gating = gen3_init_clock_gating; |
| 7707 | else if (IS_I85X(dev_priv) || IS_I865G(dev_priv)) |
| 7708 | dev_priv->display.init_clock_gating = i85x_init_clock_gating; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7709 | else if (IS_GEN(dev_priv, 2)) |
Imre Deak | bb400da | 2016-03-16 13:38:54 +0200 | [diff] [blame] | 7710 | dev_priv->display.init_clock_gating = i830_init_clock_gating; |
| 7711 | else { |
| 7712 | MISSING_CASE(INTEL_DEVID(dev_priv)); |
| 7713 | dev_priv->display.init_clock_gating = nop_init_clock_gating; |
| 7714 | } |
| 7715 | } |
| 7716 | |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7717 | /* Set up chip specific power management-related functions */ |
Ville Syrjälä | 62d75df | 2016-10-31 22:37:25 +0200 | [diff] [blame] | 7718 | void intel_init_pm(struct drm_i915_private *dev_priv) |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7719 | { |
Daniel Vetter | c921aba | 2012-04-26 23:28:17 +0200 | [diff] [blame] | 7720 | /* For cxsr */ |
Ville Syrjälä | 9b1e14f | 2016-10-31 22:37:15 +0200 | [diff] [blame] | 7721 | if (IS_PINEVIEW(dev_priv)) |
Lucas De Marchi | 1d21822 | 2019-12-24 00:40:04 -0800 | [diff] [blame] | 7722 | pnv_get_mem_freq(dev_priv); |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7723 | else if (IS_GEN(dev_priv, 5)) |
Lucas De Marchi | 9eae5e2 | 2019-12-24 00:40:09 -0800 | [diff] [blame] | 7724 | ilk_get_mem_freq(dev_priv); |
Daniel Vetter | c921aba | 2012-04-26 23:28:17 +0200 | [diff] [blame] | 7725 | |
James Ausmus | b068a86 | 2019-10-09 10:23:14 -0700 | [diff] [blame] | 7726 | if (intel_has_sagv(dev_priv)) |
| 7727 | skl_setup_sagv_block_time(dev_priv); |
| 7728 | |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7729 | /* For FIFO watermark updates */ |
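| | /* |
| | * Gen9+ computes watermarks globally in the atomic state; older |
| | * platforms use the per-pipe compute/intermediate/initial/optimize |
| | * hooks or fall back to the legacy update_wm()/get_fifo_size() |
| | * callbacks below. |
| | */ |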
Ville Syrjälä | 62d75df | 2016-10-31 22:37:25 +0200 | [diff] [blame] | 7730 | if (INTEL_GEN(dev_priv) >= 9) { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 7731 | skl_setup_wm_latency(dev_priv); |
Matt Roper | 98d3949 | 2016-05-12 07:06:03 -0700 | [diff] [blame] | 7732 | dev_priv->display.compute_global_watermarks = skl_compute_wm; |
Tvrtko Ursulin | 6e26695 | 2016-10-13 11:02:53 +0100 | [diff] [blame] | 7733 | } else if (HAS_PCH_SPLIT(dev_priv)) { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 7734 | ilk_setup_wm_latency(dev_priv); |
Ville Syrjälä | 53615a5 | 2013-08-01 16:18:50 +0300 | [diff] [blame] | 7735 | |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7736 | if ((IS_GEN(dev_priv, 5) && dev_priv->wm.pri_latency[1] && |
Ville Syrjälä | bd60254 | 2014-01-07 16:14:10 +0200 | [diff] [blame] | 7737 | dev_priv->wm.spr_latency[1] && dev_priv->wm.cur_latency[1]) || |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7738 | (!IS_GEN(dev_priv, 5) && dev_priv->wm.pri_latency[0] && |
Ville Syrjälä | bd60254 | 2014-01-07 16:14:10 +0200 | [diff] [blame] | 7739 | dev_priv->wm.spr_latency[0] && dev_priv->wm.cur_latency[0])) { |
Matt Roper | 86c8bbb | 2015-09-24 15:53:16 -0700 | [diff] [blame] | 7740 | dev_priv->display.compute_pipe_wm = ilk_compute_pipe_wm; |
Matt Roper | ed4a6a7 | 2016-02-23 17:20:13 -0800 | [diff] [blame] | 7741 | dev_priv->display.compute_intermediate_wm = |
| 7742 | ilk_compute_intermediate_wm; |
| 7743 | dev_priv->display.initial_watermarks = |
| 7744 | ilk_initial_watermarks; |
| 7745 | dev_priv->display.optimize_watermarks = |
| 7746 | ilk_optimize_watermarks; |
Ville Syrjälä | bd60254 | 2014-01-07 16:14:10 +0200 | [diff] [blame] | 7747 | } else { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 7748 | drm_dbg_kms(&dev_priv->drm, |
| 7749 | "Failed to read display plane latency. " |
| 7750 | "Disable CxSR\n"); |
Ville Syrjälä | bd60254 | 2014-01-07 16:14:10 +0200 | [diff] [blame] | 7751 | } |
Ville Syrjälä | 6b6b3ee | 2016-11-28 19:37:07 +0200 | [diff] [blame] | 7752 | } else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) { |
Ville Syrjälä | bb72651 | 2016-10-31 22:37:24 +0200 | [diff] [blame] | 7753 | vlv_setup_wm_latency(dev_priv); |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 7754 | dev_priv->display.compute_pipe_wm = vlv_compute_pipe_wm; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 7755 | dev_priv->display.compute_intermediate_wm = vlv_compute_intermediate_wm; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 7756 | dev_priv->display.initial_watermarks = vlv_initial_watermarks; |
Ville Syrjälä | 4841da5 | 2017-03-02 19:14:59 +0200 | [diff] [blame] | 7757 | dev_priv->display.optimize_watermarks = vlv_optimize_watermarks; |
Ville Syrjälä | ff32c54 | 2017-03-02 19:14:57 +0200 | [diff] [blame] | 7758 | dev_priv->display.atomic_update_watermarks = vlv_atomic_update_fifo; |
Ville Syrjälä | 04548cb | 2017-04-21 21:14:29 +0300 | [diff] [blame] | 7759 | } else if (IS_G4X(dev_priv)) { |
| 7760 | g4x_setup_wm_latency(dev_priv); |
| 7761 | dev_priv->display.compute_pipe_wm = g4x_compute_pipe_wm; |
| 7762 | dev_priv->display.compute_intermediate_wm = g4x_compute_intermediate_wm; |
| 7763 | dev_priv->display.initial_watermarks = g4x_initial_watermarks; |
| 7764 | dev_priv->display.optimize_watermarks = g4x_optimize_watermarks; |
Ville Syrjälä | 9b1e14f | 2016-10-31 22:37:15 +0200 | [diff] [blame] | 7765 | } else if (IS_PINEVIEW(dev_priv)) { |
Tvrtko Ursulin | 86d35d4 | 2019-03-26 07:40:54 +0000 | [diff] [blame] | 7766 | if (!intel_get_cxsr_latency(!IS_MOBILE(dev_priv), |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7767 | dev_priv->is_ddr3, |
| 7768 | dev_priv->fsb_freq, |
| 7769 | dev_priv->mem_freq)) { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 7770 | drm_info(&dev_priv->drm, |
| 7771 | "failed to find known CxSR latency " |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7772 | "(found ddr%s fsb freq %d, mem freq %d), " |
| 7773 | "disabling CxSR\n", |
| 7774 | (dev_priv->is_ddr3 == 1) ? "3" : "2", |
| 7775 | dev_priv->fsb_freq, dev_priv->mem_freq); |
| 7776 | /* Disable CxSR and never update its watermark again */ |
Imre Deak | 5209b1f | 2014-07-01 12:36:17 +0300 | [diff] [blame] | 7777 | intel_set_memory_cxsr(dev_priv, false); |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7778 | dev_priv->display.update_wm = NULL; |
 | 7779 | 		} else { 
Lucas De Marchi | 1d21822 | 2019-12-24 00:40:04 -0800 | [diff] [blame] | 7780 | 			dev_priv->display.update_wm = pnv_update_wm; 
 | | 		} 
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7781 | } else if (IS_GEN(dev_priv, 4)) { |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7782 | dev_priv->display.update_wm = i965_update_wm; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7783 | } else if (IS_GEN(dev_priv, 3)) { |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7784 | dev_priv->display.update_wm = i9xx_update_wm; |
| 7785 | dev_priv->display.get_fifo_size = i9xx_get_fifo_size; |
Lucas De Marchi | cf819ef | 2018-12-12 10:10:43 -0800 | [diff] [blame] | 7786 | } else if (IS_GEN(dev_priv, 2)) { |
Jani Nikula | 2497787 | 2019-09-11 12:26:08 +0300 | [diff] [blame] | 7787 | if (INTEL_NUM_PIPES(dev_priv) == 1) { |
Daniel Vetter | feb56b9 | 2013-12-14 20:38:30 -0200 | [diff] [blame] | 7788 | dev_priv->display.update_wm = i845_update_wm; |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7789 | dev_priv->display.get_fifo_size = i845_get_fifo_size; |
Daniel Vetter | feb56b9 | 2013-12-14 20:38:30 -0200 | [diff] [blame] | 7790 | } else { |
| 7791 | dev_priv->display.update_wm = i9xx_update_wm; |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7792 | dev_priv->display.get_fifo_size = i830_get_fifo_size; |
Daniel Vetter | feb56b9 | 2013-12-14 20:38:30 -0200 | [diff] [blame] | 7793 | } |
Daniel Vetter | feb56b9 | 2013-12-14 20:38:30 -0200 | [diff] [blame] | 7794 | } else { |
Wambui Karuga | f8d18d5 | 2020-01-07 18:13:30 +0300 | [diff] [blame] | 7795 | drm_err(&dev_priv->drm, |
| 7796 | "unexpected fall-through in %s\n", __func__); |
Eugeni Dodonov | 1fa6110 | 2012-04-18 15:29:26 -0300 | [diff] [blame] | 7797 | } |
| 7798 | } |
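 | | 
 | | /*
 | |  * Illustrative note (not from this file): the rest of the driver reaches
 | |  * watermark code only through the hooks selected above, roughly
 | |  *
 | |  *	if (dev_priv->display.update_wm)
 | |  *		dev_priv->display.update_wm(crtc);
 | |  *
 | |  * so platforms that leave a hook NULL simply skip that path.
 | |  */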
| 7799 | |
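 | | /*
 | |  * Early power-management bookkeeping: runtime PM starts out active (not
 | |  * suspended) with the wakeref count reset to zero.
 | |  */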
Tvrtko Ursulin | 192aa18 | 2016-12-01 14:16:45 +0000 | [diff] [blame] | 7800 | void intel_pm_setup(struct drm_i915_private *dev_priv) |
Chris Wilson | 907b28c | 2013-07-19 20:36:52 +0100 | [diff] [blame] | 7801 | { |
Sagar Arun Kamble | ad1443f | 2017-10-10 22:30:04 +0100 | [diff] [blame] | 7802 | dev_priv->runtime_pm.suspended = false; |
| 7803 | atomic_set(&dev_priv->runtime_pm.wakeref_count, 0); |
Chris Wilson | 907b28c | 2013-07-19 20:36:52 +0100 | [diff] [blame] | 7804 | } |
Ville Syrjälä | 3cf43cd | 2020-02-25 19:11:13 +0200 | [diff] [blame] | 7805 | |
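 | | /*
 | |  * DBUF slice usage is tracked as an atomic "global" state object; the two
 | |  * hooks below duplicate the current DBUF state for a new commit and free
 | |  * it again when the commit state is destroyed.
 | |  */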
| 7806 | static struct intel_global_state *intel_dbuf_duplicate_state(struct intel_global_obj *obj) |
| 7807 | { |
| 7808 | struct intel_dbuf_state *dbuf_state; |
| 7809 | |
| 7810 | dbuf_state = kmemdup(obj->state, sizeof(*dbuf_state), GFP_KERNEL); |
| 7811 | if (!dbuf_state) |
| 7812 | return NULL; |
| 7813 | |
| 7814 | return &dbuf_state->base; |
| 7815 | } |
| 7816 | |
| 7817 | static void intel_dbuf_destroy_state(struct intel_global_obj *obj, |
| 7818 | struct intel_global_state *state) |
| 7819 | { |
| 7820 | kfree(state); |
| 7821 | } |
| 7822 | |
| 7823 | static const struct intel_global_state_funcs intel_dbuf_funcs = { |
| 7824 | .atomic_duplicate_state = intel_dbuf_duplicate_state, |
| 7825 | .atomic_destroy_state = intel_dbuf_destroy_state, |
| 7826 | }; |
| 7827 | |
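 | | /*
 | |  * Add the device's DBUF global object to this atomic state if it is not
 | |  * there yet and return this commit's copy of the DBUF state, or an
 | |  * ERR_PTR on failure.
 | |  */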
| 7828 | struct intel_dbuf_state * |
| 7829 | intel_atomic_get_dbuf_state(struct intel_atomic_state *state) |
| 7830 | { |
| 7831 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 7832 | struct intel_global_state *dbuf_state; |
| 7833 | |
| 7834 | dbuf_state = intel_atomic_get_global_obj_state(state, &dev_priv->dbuf.obj); |
| 7835 | if (IS_ERR(dbuf_state)) |
| 7836 | return ERR_CAST(dbuf_state); |
| 7837 | |
| 7838 | return to_intel_dbuf_state(dbuf_state); |
| 7839 | } |
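 | | 
 | | /*
 | |  * Typical caller pattern (illustrative):
 | |  *
 | |  *	new_dbuf_state = intel_atomic_get_dbuf_state(state);
 | |  *	if (IS_ERR(new_dbuf_state))
 | |  *		return PTR_ERR(new_dbuf_state);
 | |  */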
| 7840 | |
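 | | /*
 | |  * Driver-init time: allocate the initial (all-zero) DBUF state and
 | |  * register it as an atomic global object so later commits can duplicate
 | |  * and track it.
 | |  */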
| 7841 | int intel_dbuf_init(struct drm_i915_private *dev_priv) |
| 7842 | { |
| 7843 | struct intel_dbuf_state *dbuf_state; |
| 7844 | |
| 7845 | dbuf_state = kzalloc(sizeof(*dbuf_state), GFP_KERNEL); |
| 7846 | if (!dbuf_state) |
| 7847 | return -ENOMEM; |
| 7848 | |
| 7849 | intel_atomic_global_obj_init(dev_priv, &dev_priv->dbuf.obj, |
| 7850 | &dbuf_state->base, &intel_dbuf_funcs); |
| 7851 | |
| 7852 | return 0; |
| 7853 | } |
Ville Syrjälä | c7c0e7e | 2020-02-25 19:11:15 +0200 | [diff] [blame] | 7854 | |
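 | | /*
 | |  * Before the planes are reprogrammed: if the set of enabled DBUF slices
 | |  * is changing, power up the union of the old and new slices so every
 | |  * plane keeps a usable slice for the whole transition.
 | |  */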
| 7855 | void intel_dbuf_pre_plane_update(struct intel_atomic_state *state) |
| 7856 | { |
| 7857 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 7858 | const struct intel_dbuf_state *new_dbuf_state = |
| 7859 | intel_atomic_get_new_dbuf_state(state); |
| 7860 | const struct intel_dbuf_state *old_dbuf_state = |
| 7861 | intel_atomic_get_old_dbuf_state(state); |
| 7862 | |
| 7863 | if (!new_dbuf_state || |
| 7864 | new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices) |
| 7865 | return; |
| 7866 | |
| 7867 | WARN_ON(!new_dbuf_state->base.changed); |
| 7868 | |
| 7869 | gen9_dbuf_slices_update(dev_priv, |
| 7870 | old_dbuf_state->enabled_slices | |
| 7871 | new_dbuf_state->enabled_slices); |
| 7872 | } |
| 7873 | |
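 | | /*
 | |  * After the planes have been reprogrammed: shrink the enabled DBUF
 | |  * slices back down to just the new set, turning off any slice that is
 | |  * no longer needed.
 | |  */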
| 7874 | void intel_dbuf_post_plane_update(struct intel_atomic_state *state) |
| 7875 | { |
| 7876 | struct drm_i915_private *dev_priv = to_i915(state->base.dev); |
| 7877 | const struct intel_dbuf_state *new_dbuf_state = |
| 7878 | intel_atomic_get_new_dbuf_state(state); |
| 7879 | const struct intel_dbuf_state *old_dbuf_state = |
| 7880 | intel_atomic_get_old_dbuf_state(state); |
| 7881 | |
| 7882 | if (!new_dbuf_state || |
| 7883 | new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices) |
| 7884 | return; |
| 7885 | |
| 7886 | WARN_ON(!new_dbuf_state->base.changed); |
| 7887 | |
| 7888 | gen9_dbuf_slices_update(dev_priv, |
| 7889 | new_dbuf_state->enabled_slices); |
| 7890 | } |
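 | | 
 | | /*
 | |  * Rough commit-time ordering (illustrative, simplified from the actual
 | |  * atomic commit path):
 | |  *
 | |  *	intel_dbuf_pre_plane_update(state);	// old | new slices powered
 | |  *	... update planes / crtcs ...
 | |  *	intel_dbuf_post_plane_update(state);	// only new slices remain
 | |  *
 | |  * This keeps every DBUF slice a plane might still be using powered up
 | |  * until all planes have switched over to the new allocation.
 | |  */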