// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright © 2006-2011 Intel Corporation
 *
 * Authors:
 *	Eric Anholt <eric@anholt.net>
 *	Patrik Jakobsson <patrik.r.jakobsson@gmail.com>
 */

#include <linux/delay.h>
#include <linux/highmem.h>

#include <drm/drm_crtc.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_vblank.h>

#include "framebuffer.h"
#include "gma_display.h"
#include "psb_drv.h"
#include "psb_intel_drv.h"
#include "psb_intel_reg.h"

/*
 * Returns whether any output on the specified pipe is of the specified type
 */
bool gma_pipe_has_type(struct drm_crtc *crtc, int type)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_connector *l_entry;

	list_for_each_entry(l_entry, &mode_config->connector_list, head) {
		if (l_entry->encoder && l_entry->encoder->crtc == crtc) {
			struct gma_encoder *gma_encoder =
						gma_attached_encoder(l_entry);
			if (gma_encoder->type == type)
				return true;
		}
	}

	return false;
}

void gma_wait_for_vblank(struct drm_device *dev)
{
	/* Wait for 20ms, i.e. one cycle at 50 Hz. */
	mdelay(20);
}

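/* Pin the CRTC's framebuffer into the GTT and program it as the primary plane's scanout base */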
int gma_pipe_set_base(struct drm_crtc *crtc, int x, int y,
		      struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	struct drm_framebuffer *fb = crtc->primary->fb;
	struct gtt_range *gtt;
	int pipe = gma_crtc->pipe;
	const struct psb_offset *map = &dev_priv->regmap[pipe];
	unsigned long start, offset;
	u32 dspcntr;
	int ret = 0;

	if (!gma_power_begin(dev, true))
		return 0;

	/* no fb bound */
	if (!fb) {
		dev_err(dev->dev, "No FB bound\n");
		goto gma_pipe_cleaner;
	}

	gtt = to_gtt_range(fb->obj[0]);

	/* We are displaying this buffer, make sure it is actually loaded
	   into the GTT */
	ret = psb_gtt_pin(gtt);
	if (ret < 0)
		goto gma_pipe_set_base_exit;
	start = gtt->offset;
	offset = y * fb->pitches[0] + x * fb->format->cpp[0];

	REG_WRITE(map->stride, fb->pitches[0]);

	dspcntr = REG_READ(map->cntr);
	dspcntr &= ~DISPPLANE_PIXFORMAT_MASK;

	switch (fb->format->cpp[0] * 8) {
	case 8:
		dspcntr |= DISPPLANE_8BPP;
		break;
	case 16:
		if (fb->format->depth == 15)
			dspcntr |= DISPPLANE_15_16BPP;
		else
			dspcntr |= DISPPLANE_16BPP;
		break;
	case 24:
	case 32:
		dspcntr |= DISPPLANE_32BPP_NO_ALPHA;
		break;
	default:
		dev_err(dev->dev, "Unknown color depth\n");
		ret = -EINVAL;
		goto gma_pipe_set_base_exit;
	}
	REG_WRITE(map->cntr, dspcntr);

	dev_dbg(dev->dev,
		"Writing base %08lX %08lX %d %d\n", start, offset, x, y);

	/* FIXME: Investigate whether this really is the base for psb and why
	   the linear offset is named base for the other chips. map->surf
	   should be the base and map->linoff the offset for all chips */
	if (IS_PSB(dev)) {
		REG_WRITE(map->base, offset + start);
		REG_READ(map->base);
	} else {
		REG_WRITE(map->base, offset);
		REG_READ(map->base);
		REG_WRITE(map->surf, start);
		REG_READ(map->surf);
	}

gma_pipe_cleaner:
	/* If there was a previous display we can now unpin it */
	if (old_fb)
		psb_gtt_unpin(to_gtt_range(old_fb->obj[0]));

gma_pipe_set_base_exit:
	gma_power_end(dev);
	return ret;
}

/* Loads the palette/gamma unit for the CRTC with the prepared values */
void gma_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	const struct psb_offset *map = &dev_priv->regmap[gma_crtc->pipe];
	int palreg = map->palette;
	u16 *r, *g, *b;
	int i;

	/* The clocks have to be on to load the palette. */
	if (!crtc->enabled)
		return;

	r = crtc->gamma_store;
	g = r + crtc->gamma_size;
	b = g + crtc->gamma_size;

	if (gma_power_begin(dev, false)) {
		for (i = 0; i < 256; i++) {
			REG_WRITE(palreg + 4 * i,
				  (((*r++ >> 8) + gma_crtc->lut_adj[i]) << 16) |
				  (((*g++ >> 8) + gma_crtc->lut_adj[i]) << 8) |
				  ((*b++ >> 8) + gma_crtc->lut_adj[i]));
		}
		gma_power_end(dev);
	} else {
		for (i = 0; i < 256; i++) {
			/* FIXME: Why pipe[0] and not pipe[..._crtc->pipe]? */
			dev_priv->regs.pipe[0].palette[i] =
				(((*r++ >> 8) + gma_crtc->lut_adj[i]) << 16) |
				(((*g++ >> 8) + gma_crtc->lut_adj[i]) << 8) |
				((*b++ >> 8) + gma_crtc->lut_adj[i]);
		}
	}
}

int gma_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green, u16 *blue,
		       u32 size,
		       struct drm_modeset_acquire_ctx *ctx)
{
	gma_crtc_load_lut(crtc);

	return 0;
}

/*
 * Sets the power management mode of the pipe and plane.
 *
 * This code should probably grow support for turning the cursor off and back
 * on appropriately at the same time as we're turning the pipe off/on.
 */
void gma_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	int pipe = gma_crtc->pipe;
	const struct psb_offset *map = &dev_priv->regmap[pipe];
	u32 temp;

	/* XXX: When our outputs are all unaware of DPMS modes other than off
	 * and on, we should map those modes to DRM_MODE_DPMS_OFF in the CRTC.
	 */

	if (IS_CDV(dev))
		dev_priv->ops->disable_sr(dev);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
		if (gma_crtc->active)
			break;

		gma_crtc->active = true;

		/* Enable the DPLL */
		temp = REG_READ(map->dpll);
		if ((temp & DPLL_VCO_ENABLE) == 0) {
			REG_WRITE(map->dpll, temp);
			REG_READ(map->dpll);
			/* Wait for the clocks to stabilize. */
			udelay(150);
			REG_WRITE(map->dpll, temp | DPLL_VCO_ENABLE);
			REG_READ(map->dpll);
			/* Wait for the clocks to stabilize. */
			udelay(150);
			REG_WRITE(map->dpll, temp | DPLL_VCO_ENABLE);
			REG_READ(map->dpll);
			/* Wait for the clocks to stabilize. */
			udelay(150);
		}

		/* Enable the plane */
		temp = REG_READ(map->cntr);
		if ((temp & DISPLAY_PLANE_ENABLE) == 0) {
			REG_WRITE(map->cntr,
				  temp | DISPLAY_PLANE_ENABLE);
			/* Flush the plane changes */
			REG_WRITE(map->base, REG_READ(map->base));
		}

		udelay(150);

		/* Enable the pipe */
		temp = REG_READ(map->conf);
		if ((temp & PIPEACONF_ENABLE) == 0)
			REG_WRITE(map->conf, temp | PIPEACONF_ENABLE);

		temp = REG_READ(map->status);
		temp &= ~(0xFFFF);
		temp |= PIPE_FIFO_UNDERRUN;
		REG_WRITE(map->status, temp);
		REG_READ(map->status);

		gma_crtc_load_lut(crtc);

		/* Give the overlay scaler a chance to enable
		 * if it's on this pipe */
		/* psb_intel_crtc_dpms_video(crtc, true); TODO */

		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_OFF:
		if (!gma_crtc->active)
			break;

		gma_crtc->active = false;

		/* Give the overlay scaler a chance to disable
		 * if it's on this pipe */
		/* psb_intel_crtc_dpms_video(crtc, FALSE); TODO */

		/* Disable the VGA plane that we never use */
		REG_WRITE(VGACNTRL, VGA_DISP_DISABLE);

		/* Turn off vblank interrupts */
		drm_crtc_vblank_off(crtc);

		/* Wait for vblank for the disable to take effect */
		gma_wait_for_vblank(dev);

		/* Disable plane */
		temp = REG_READ(map->cntr);
		if ((temp & DISPLAY_PLANE_ENABLE) != 0) {
			REG_WRITE(map->cntr,
				  temp & ~DISPLAY_PLANE_ENABLE);
			/* Flush the plane changes */
			REG_WRITE(map->base, REG_READ(map->base));
			REG_READ(map->base);
		}

		/* Disable pipe */
		temp = REG_READ(map->conf);
		if ((temp & PIPEACONF_ENABLE) != 0) {
			REG_WRITE(map->conf, temp & ~PIPEACONF_ENABLE);
			REG_READ(map->conf);
		}

		/* Wait for vblank for the disable to take effect. */
		gma_wait_for_vblank(dev);

		udelay(150);

		/* Disable DPLL */
		temp = REG_READ(map->dpll);
		if ((temp & DPLL_VCO_ENABLE) != 0) {
			REG_WRITE(map->dpll, temp & ~DPLL_VCO_ENABLE);
			REG_READ(map->dpll);
		}

		/* Wait for the clocks to turn off. */
		udelay(150);
		break;
	}

	if (IS_CDV(dev))
		dev_priv->ops->update_wm(dev, crtc);

	/* Set FIFO watermarks */
	REG_WRITE(DSPARB, 0x3F3E);
}

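/* Set the 64x64 hardware cursor image from a GEM object, or disable the cursor when handle is 0 */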
int gma_crtc_cursor_set(struct drm_crtc *crtc,
			struct drm_file *file_priv,
			uint32_t handle,
			uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	int pipe = gma_crtc->pipe;
	uint32_t control = (pipe == 0) ? CURACNTR : CURBCNTR;
	uint32_t base = (pipe == 0) ? CURABASE : CURBBASE;
	uint32_t temp;
	size_t addr = 0;
	struct gtt_range *gt;
	struct gtt_range *cursor_gt = gma_crtc->cursor_gt;
	struct drm_gem_object *obj;
	void *tmp_dst, *tmp_src;
	int ret = 0, i, cursor_pages;

	/* If we didn't get a handle then turn the cursor off */
	if (!handle) {
		temp = CURSOR_MODE_DISABLE;
		if (gma_power_begin(dev, false)) {
			REG_WRITE(control, temp);
			REG_WRITE(base, 0);
			gma_power_end(dev);
		}

		/* Unpin the old GEM object */
		if (gma_crtc->cursor_obj) {
			gt = container_of(gma_crtc->cursor_obj,
					  struct gtt_range, gem);
			psb_gtt_unpin(gt);
			drm_gem_object_put(gma_crtc->cursor_obj);
			gma_crtc->cursor_obj = NULL;
		}
		return 0;
	}

	/* Currently we only support 64x64 cursors */
	if (width != 64 || height != 64) {
		dev_dbg(dev->dev, "We currently only support 64x64 cursors\n");
		return -EINVAL;
	}

	obj = drm_gem_object_lookup(file_priv, handle);
	if (!obj) {
		ret = -ENOENT;
		goto unlock;
	}

	if (obj->size < width * height * 4) {
		dev_dbg(dev->dev, "Buffer is too small\n");
		ret = -ENOMEM;
		goto unref_cursor;
	}

	gt = container_of(obj, struct gtt_range, gem);

	/* Pin the memory into the GTT */
	ret = psb_gtt_pin(gt);
	if (ret) {
		dev_err(dev->dev, "Can not pin down handle 0x%x\n", handle);
		goto unref_cursor;
	}

	if (dev_priv->ops->cursor_needs_phys) {
		if (cursor_gt == NULL) {
			dev_err(dev->dev, "No hardware cursor mem available");
			ret = -ENOMEM;
			goto unref_cursor;
		}

		/* Prevent overflow */
		if (gt->npage > 4)
			cursor_pages = 4;
		else
			cursor_pages = gt->npage;

		/* Copy the cursor to cursor mem */
		tmp_dst = dev_priv->vram_addr + cursor_gt->offset;
		for (i = 0; i < cursor_pages; i++) {
			tmp_src = kmap(gt->pages[i]);
			memcpy(tmp_dst, tmp_src, PAGE_SIZE);
			kunmap(gt->pages[i]);
			tmp_dst += PAGE_SIZE;
		}

		addr = gma_crtc->cursor_addr;
	} else {
		addr = gt->offset;
		gma_crtc->cursor_addr = addr;
	}

	temp = 0;
	/* set the pipe for the cursor */
	temp |= (pipe << 28);
	temp |= CURSOR_MODE_64_ARGB_AX | MCURSOR_GAMMA_ENABLE;

	if (gma_power_begin(dev, false)) {
		REG_WRITE(control, temp);
		REG_WRITE(base, addr);
		gma_power_end(dev);
	}

	/* unpin the old bo */
	if (gma_crtc->cursor_obj) {
		gt = container_of(gma_crtc->cursor_obj, struct gtt_range, gem);
		psb_gtt_unpin(gt);
		drm_gem_object_put(gma_crtc->cursor_obj);
	}

	gma_crtc->cursor_obj = obj;
unlock:
	return ret;

unref_cursor:
	drm_gem_object_put(obj);
	return ret;
}

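/* Program the hardware cursor position; the sign bits encode negative coordinates */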
int gma_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct drm_device *dev = crtc->dev;
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	int pipe = gma_crtc->pipe;
	uint32_t temp = 0;
	uint32_t addr;

	if (x < 0) {
		temp |= (CURSOR_POS_SIGN << CURSOR_X_SHIFT);
		x = -x;
	}
	if (y < 0) {
		temp |= (CURSOR_POS_SIGN << CURSOR_Y_SHIFT);
		y = -y;
	}

	temp |= ((x & CURSOR_POS_MASK) << CURSOR_X_SHIFT);
	temp |= ((y & CURSOR_POS_MASK) << CURSOR_Y_SHIFT);

	addr = gma_crtc->cursor_addr;

	if (gma_power_begin(dev, false)) {
		REG_WRITE((pipe == 0) ? CURAPOS : CURBPOS, temp);
		REG_WRITE((pipe == 0) ? CURABASE : CURBBASE, addr);
		gma_power_end(dev);
	}
	return 0;
}

void gma_crtc_prepare(struct drm_crtc *crtc)
{
	const struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;
	crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF);
}

void gma_crtc_commit(struct drm_crtc *crtc)
{
	const struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;
	crtc_funcs->dpms(crtc, DRM_MODE_DPMS_ON);
}

void gma_crtc_disable(struct drm_crtc *crtc)
{
	struct gtt_range *gt;
	const struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;

	crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF);

	if (crtc->primary->fb) {
		gt = to_gtt_range(crtc->primary->fb->obj[0]);
		psb_gtt_unpin(gt);
	}
}

void gma_crtc_destroy(struct drm_crtc *crtc)
{
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);

	kfree(gma_crtc->crtc_state);
	drm_crtc_cleanup(crtc);
	kfree(gma_crtc);
}

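/* Flip the primary plane to a new framebuffer via mode_set_base, optionally sending a vblank event */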
int gma_crtc_page_flip(struct drm_crtc *crtc,
		       struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event,
		       uint32_t page_flip_flags,
		       struct drm_modeset_acquire_ctx *ctx)
{
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	struct drm_framebuffer *current_fb = crtc->primary->fb;
	struct drm_framebuffer *old_fb = crtc->primary->old_fb;
	const struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private;
	struct drm_device *dev = crtc->dev;
	unsigned long flags;
	int ret;

	if (!crtc_funcs->mode_set_base)
		return -EINVAL;

	/* Using mode_set_base requires the new fb to be set already. */
	crtc->primary->fb = fb;

	if (event) {
		spin_lock_irqsave(&dev->event_lock, flags);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		gma_crtc->page_flip_event = event;

		/* Call this locked if we want an event at vblank interrupt. */
		ret = crtc_funcs->mode_set_base(crtc, crtc->x, crtc->y, old_fb);
		if (ret) {
			gma_crtc->page_flip_event = NULL;
			drm_crtc_vblank_put(crtc);
		}

		spin_unlock_irqrestore(&dev->event_lock, flags);
	} else {
		ret = crtc_funcs->mode_set_base(crtc, crtc->x, crtc->y, old_fb);
	}

	/* Restore previous fb in case of failure. */
	if (ret)
		crtc->primary->fb = current_fb;

	return ret;
}

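/* Perform a full mode set while keeping runtime PM from suspending the device part-way through */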
int gma_crtc_set_config(struct drm_mode_set *set,
			struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev = set->crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	int ret;

	if (!dev_priv->rpm_enabled)
		return drm_crtc_helper_set_config(set, ctx);

	pm_runtime_forbid(dev->dev);
	ret = drm_crtc_helper_set_config(set, ctx);
	pm_runtime_allow(dev->dev);

	return ret;
}

/*
 * Save HW states of given crtc
 */
void gma_crtc_save(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	struct psb_intel_crtc_state *crtc_state = gma_crtc->crtc_state;
	const struct psb_offset *map = &dev_priv->regmap[gma_crtc->pipe];
	uint32_t palette_reg;
	int i;

	if (!crtc_state) {
		dev_err(dev->dev, "No CRTC state found\n");
		return;
	}

	crtc_state->saveDSPCNTR = REG_READ(map->cntr);
	crtc_state->savePIPECONF = REG_READ(map->conf);
	crtc_state->savePIPESRC = REG_READ(map->src);
	crtc_state->saveFP0 = REG_READ(map->fp0);
	crtc_state->saveFP1 = REG_READ(map->fp1);
	crtc_state->saveDPLL = REG_READ(map->dpll);
	crtc_state->saveHTOTAL = REG_READ(map->htotal);
	crtc_state->saveHBLANK = REG_READ(map->hblank);
	crtc_state->saveHSYNC = REG_READ(map->hsync);
	crtc_state->saveVTOTAL = REG_READ(map->vtotal);
	crtc_state->saveVBLANK = REG_READ(map->vblank);
	crtc_state->saveVSYNC = REG_READ(map->vsync);
	crtc_state->saveDSPSTRIDE = REG_READ(map->stride);

	/* NOTE: DSPSIZE DSPPOS only for psb */
	crtc_state->saveDSPSIZE = REG_READ(map->size);
	crtc_state->saveDSPPOS = REG_READ(map->pos);

	crtc_state->saveDSPBASE = REG_READ(map->base);

	palette_reg = map->palette;
	for (i = 0; i < 256; ++i)
		crtc_state->savePalette[i] = REG_READ(palette_reg + (i << 2));
}

/*
 * Restore HW states of given crtc
 */
void gma_crtc_restore(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_psb_private *dev_priv = to_drm_psb_private(dev);
	struct gma_crtc *gma_crtc = to_gma_crtc(crtc);
	struct psb_intel_crtc_state *crtc_state = gma_crtc->crtc_state;
	const struct psb_offset *map = &dev_priv->regmap[gma_crtc->pipe];
	uint32_t palette_reg;
	int i;

	if (!crtc_state) {
		dev_err(dev->dev, "No crtc state\n");
		return;
	}

	if (crtc_state->saveDPLL & DPLL_VCO_ENABLE) {
		REG_WRITE(map->dpll,
			  crtc_state->saveDPLL & ~DPLL_VCO_ENABLE);
		REG_READ(map->dpll);
		udelay(150);
	}

	REG_WRITE(map->fp0, crtc_state->saveFP0);
	REG_READ(map->fp0);

	REG_WRITE(map->fp1, crtc_state->saveFP1);
	REG_READ(map->fp1);

	REG_WRITE(map->dpll, crtc_state->saveDPLL);
	REG_READ(map->dpll);
	udelay(150);

	REG_WRITE(map->htotal, crtc_state->saveHTOTAL);
	REG_WRITE(map->hblank, crtc_state->saveHBLANK);
	REG_WRITE(map->hsync, crtc_state->saveHSYNC);
	REG_WRITE(map->vtotal, crtc_state->saveVTOTAL);
	REG_WRITE(map->vblank, crtc_state->saveVBLANK);
	REG_WRITE(map->vsync, crtc_state->saveVSYNC);
	REG_WRITE(map->stride, crtc_state->saveDSPSTRIDE);

	REG_WRITE(map->size, crtc_state->saveDSPSIZE);
	REG_WRITE(map->pos, crtc_state->saveDSPPOS);

	REG_WRITE(map->src, crtc_state->savePIPESRC);
	REG_WRITE(map->base, crtc_state->saveDSPBASE);
	REG_WRITE(map->conf, crtc_state->savePIPECONF);

	gma_wait_for_vblank(dev);

	REG_WRITE(map->cntr, crtc_state->saveDSPCNTR);
	REG_WRITE(map->base, crtc_state->saveDSPBASE);

	gma_wait_for_vblank(dev);

	palette_reg = map->palette;
	for (i = 0; i < 256; ++i)
		REG_WRITE(palette_reg + (i << 2), crtc_state->savePalette[i]);
}

void gma_encoder_prepare(struct drm_encoder *encoder)
{
	const struct drm_encoder_helper_funcs *encoder_funcs =
		encoder->helper_private;
	/* lvds has its own version of prepare see psb_intel_lvds_prepare */
	encoder_funcs->dpms(encoder, DRM_MODE_DPMS_OFF);
}

void gma_encoder_commit(struct drm_encoder *encoder)
{
	const struct drm_encoder_helper_funcs *encoder_funcs =
		encoder->helper_private;
	/* lvds has its own version of commit see psb_intel_lvds_commit */
	encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON);
}

void gma_encoder_destroy(struct drm_encoder *encoder)
{
	struct gma_encoder *intel_encoder = to_gma_encoder(encoder);

	drm_encoder_cleanup(encoder);
	kfree(intel_encoder);
}

/* Currently there is only a 1:1 mapping of encoders and connectors */
struct drm_encoder *gma_best_encoder(struct drm_connector *connector)
{
	struct gma_encoder *gma_encoder = gma_attached_encoder(connector);

	return &gma_encoder->base;
}

void gma_connector_attach_encoder(struct gma_connector *connector,
				  struct gma_encoder *encoder)
{
	connector->encoder = encoder;
	drm_connector_attach_encoder(&connector->base,
				     &encoder->base);
}

#define GMA_PLL_INVALID(s) { /* DRM_ERROR(s); */ return false; }

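/* Check that every PLL divider value lies within the limits for this chip */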
bool gma_pll_is_valid(struct drm_crtc *crtc,
		      const struct gma_limit_t *limit,
		      struct gma_clock_t *clock)
{
	if (clock->p1 < limit->p1.min || limit->p1.max < clock->p1)
		GMA_PLL_INVALID("p1 out of range");
	if (clock->p < limit->p.min || limit->p.max < clock->p)
		GMA_PLL_INVALID("p out of range");
	if (clock->m2 < limit->m2.min || limit->m2.max < clock->m2)
		GMA_PLL_INVALID("m2 out of range");
	if (clock->m1 < limit->m1.min || limit->m1.max < clock->m1)
		GMA_PLL_INVALID("m1 out of range");
	/* On CDV m1 is always 0 */
	if (clock->m1 <= clock->m2 && clock->m1 != 0)
		GMA_PLL_INVALID("m1 <= m2 && m1 != 0");
	if (clock->m < limit->m.min || limit->m.max < clock->m)
		GMA_PLL_INVALID("m out of range");
	if (clock->n < limit->n.min || limit->n.max < clock->n)
		GMA_PLL_INVALID("n out of range");
	if (clock->vco < limit->vco.min || limit->vco.max < clock->vco)
		GMA_PLL_INVALID("vco out of range");
	/* XXX: We may need to be checking "Dot clock"
	 * depending on the multiplier, connector, etc.,
	 * rather than just a single range.
	 */
	if (clock->dot < limit->dot.min || limit->dot.max < clock->dot)
		GMA_PLL_INVALID("dot out of range");

	return true;
}

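/* Brute-force the divider ranges for the valid PLL configuration closest to the target dot clock */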
bool gma_find_best_pll(const struct gma_limit_t *limit,
		       struct drm_crtc *crtc, int target, int refclk,
		       struct gma_clock_t *best_clock)
{
	struct drm_device *dev = crtc->dev;
	const struct gma_clock_funcs *clock_funcs =
						to_gma_crtc(crtc)->clock_funcs;
	struct gma_clock_t clock;
	int err = target;

	if (gma_pipe_has_type(crtc, INTEL_OUTPUT_LVDS) &&
	    (REG_READ(LVDS) & LVDS_PORT_EN) != 0) {
		/*
		 * For LVDS, if the panel is on, just rely on its current
		 * settings for dual-channel. We haven't figured out how to
		 * reliably set up different single/dual channel state, if we
		 * even can.
		 */
		if ((REG_READ(LVDS) & LVDS_CLKB_POWER_MASK) ==
		    LVDS_CLKB_POWER_UP)
			clock.p2 = limit->p2.p2_fast;
		else
			clock.p2 = limit->p2.p2_slow;
	} else {
		if (target < limit->p2.dot_limit)
			clock.p2 = limit->p2.p2_slow;
		else
			clock.p2 = limit->p2.p2_fast;
	}

	memset(best_clock, 0, sizeof(*best_clock));

	/* m1 is always 0 on CDV so the outermost loop will run just once */
	for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max; clock.m1++) {
		for (clock.m2 = limit->m2.min;
		     (clock.m2 < clock.m1 || clock.m1 == 0) &&
		      clock.m2 <= limit->m2.max; clock.m2++) {
			for (clock.n = limit->n.min;
			     clock.n <= limit->n.max; clock.n++) {
				for (clock.p1 = limit->p1.min;
				     clock.p1 <= limit->p1.max;
				     clock.p1++) {
					int this_err;

					clock_funcs->clock(refclk, &clock);

					if (!clock_funcs->pll_is_valid(crtc,
								limit, &clock))
						continue;

					this_err = abs(clock.dot - target);
					if (this_err < err) {
						*best_clock = clock;
						err = this_err;
					}
				}
			}
		}
	}

	return err != target;
}