/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"

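/* handles for the DMA objects bound into the core channel's hash table in
 * nvd0_display_create()
 */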
#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

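/* per-device display state: core channel objects and the DMA-coherent
 * push buffer for EVO channel 0
 */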
struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

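/* submit a single method/data pair via the channel's immediate-command
 * registers (bypassing the push buffer) and wait for it to be accepted
 */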
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

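/* wait for 'nr' dwords of space in an EVO push buffer, wrapping back to the
 * start of the page when not enough room remains before the end
 */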
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

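/* submit everything written since evo_wait() by updating the channel's
 * PUT pointer
 */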
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

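/* helpers for writing a method header / data word into a push buffer */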
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;

	if (nv_encoder->crtc) {
		u32 *push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
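/* top-level PDISP interrupt handler: log EVO channel exceptions and
 * acknowledge the remaining status bits
 */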
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000009);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}