/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

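/* Submit a single method/data pair through an EVO channel's immediate
 * (PIO) command interface, polling until the hardware clears the busy
 * bit.  Currently unused here; register offsets follow the
 * reverse-engineered PDISP layout.
 */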
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

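/* Reserve room for 'nr' dwords in the page-sized push buffer of EVO
 * channel 'id'.  If the request doesn't fit before the end of the page,
 * write what appears to be a jump-to-start command (0x20000000), reset
 * PUT (0x640000) and wait for 0x640004 (presumably GET) to return to
 * zero before handing out the start of the buffer again.
 */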
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

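/* Method headers are encoded as (dword count << 18) | method offset; each
 * evo_data() appends one payload dword after the header.
 */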
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;

	outX = mode->hdisplay;
	outY = mode->vdisplay;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;

		switch (type) {
		case DRM_MODE_SCALE_ASPECT:
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 6);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

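/* Pin the new framebuffer into VRAM (so it can't be evicted while being
 * scanned out) and drop the pin on the framebuffer it replaces.
 */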
static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

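/* Program the head's timings: the DRM mode is converted into sync width,
 * sync-start-to-blank-end and sync-start-to-display-end values (hence the
 * hss2be/vss2de names) expected by the core channel methods.
 */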
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 syncs, *push;
	int ret;

	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, 0x00000000);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 1);
		evo_data(push, syncs);
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

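/* Upload the 256-entry gamma LUT; each hardware entry is 0x20 bytes with
 * 16-bit R/G/B slots (the >> 2 scaling and 0x6000 bias match what the
 * hardware appears to expect for this LUT mode).
 */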
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}

static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}

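/* Sense for an analog load: trigger load detection, give it time to
 * settle, then test the result bits (0x38000000, presumably one per
 * R/G/B channel).
 */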
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);
	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x80000000);

	load = nv_rd32(dev, 0x61a00c + (or * 0x800));
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

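/* Build the SOR_MODE_CTRL value: owning crtc in the low bits plus a
 * protocol selection that differs for link A and link B, with what
 * appears to be the dual-link TMDS encoding used at clocks of 165MHz
 * and above.
 */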
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
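/* Translate a method-control slot index back to its DCB entry: slots 0-3
 * appear to be the DACs and 4+ the SORs (TMDS here), matched against the
 * DCB by output type and OR mask.
 */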
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

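/* A modeset raises three interrupts in sequence (bits 1, 2 and 4 of
 * 0x6100ac).  From the code below: stage 1 works out which output is
 * being disabled and which enabled and runs the first disable script,
 * stage 2 finishes the disable, programs the pixel clock and runs the
 * enable script, and stage 4 runs a final script pass.  The exact
 * hardware meaning of each stage is undocumented, hence the unk* names.
 */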
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

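	/* queue an initial batch on the core channel: method 0x0088 binds the
	 * MEM_SYNC object (presumably the notifier ctxdma); the rest looks
	 * like an initial update request
	 */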
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

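	/* the DMA objects are built by hand at 0x1000, 0x1020, 0x1040 and
	 * 0x1060; each hash entry at the start of the object is a
	 * (handle, (offset << 9) | 1) pair referencing one of them
	 */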
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}