/*
 * Copyright (C) STMicroelectronics SA 2014
 * Authors: Benjamin Gaignard <benjamin.gaignard@st.com>
 *          Fabien Dessenne <fabien.dessenne@st.com>
 *          for STMicroelectronics.
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>

#include "sti_compositor.h"
#include "sti_gdp.h"
#include "sti_layer.h"
#include "sti_vtg.h"

#define ENA_COLOR_FILL		BIT(8)
#define WAIT_NEXT_VSYNC		BIT(31)

/* GDP color formats */
#define GDP_RGB565		0x00
#define GDP_RGB888		0x01
#define GDP_RGB888_32		0x02
#define GDP_ARGB8565		0x04
#define GDP_ARGB8888		0x05
#define GDP_ARGB1555		0x06
#define GDP_ARGB4444		0x07
#define GDP_CLUT8		0x0B
#define GDP_YCBR888		0x10
#define GDP_YCBR422R		0x12
#define GDP_AYCBR8888		0x15

#define GAM_GDP_CTL_OFFSET	0x00
#define GAM_GDP_AGC_OFFSET	0x04
#define GAM_GDP_VPO_OFFSET	0x0C
#define GAM_GDP_VPS_OFFSET	0x10
#define GAM_GDP_PML_OFFSET	0x14
#define GAM_GDP_PMP_OFFSET	0x18
#define GAM_GDP_SIZE_OFFSET	0x1C
#define GAM_GDP_NVN_OFFSET	0x24
#define GAM_GDP_KEY1_OFFSET	0x28
#define GAM_GDP_KEY2_OFFSET	0x2C
#define GAM_GDP_PPT_OFFSET	0x34
#define GAM_GDP_CML_OFFSET	0x3C
#define GAM_GDP_MST_OFFSET	0x68

#define GAM_GDP_ALPHARANGE_255	BIT(5)
#define GAM_GDP_AGC_FULL_RANGE	0x00808080
#define GAM_GDP_PPT_IGNORE	(BIT(1) | BIT(0))
#define GAM_GDP_SIZE_MAX	0x7FF

#define GDP_NODE_NB_BANK	2
#define GDP_NODE_PER_FIELD	2

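/*
 * GDP node: layout of one processing node as fetched by the hardware.
 * The fields mirror the GAM_GDP_* register offsets defined above.
 */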
struct sti_gdp_node {
	u32 gam_gdp_ctl;
	u32 gam_gdp_agc;
	u32 reserved1;
	u32 gam_gdp_vpo;
	u32 gam_gdp_vps;
	u32 gam_gdp_pml;
	u32 gam_gdp_pmp;
	u32 gam_gdp_size;
	u32 reserved2;
	u32 gam_gdp_nvn;
	u32 gam_gdp_key1;
	u32 gam_gdp_key2;
	u32 reserved3;
	u32 gam_gdp_ppt;
	u32 reserved4;
	u32 gam_gdp_cml;
};

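/*
 * GDP node list: one node for the top field and one for the bottom field,
 * kept with both their CPU and DMA addresses.
 */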
struct sti_gdp_node_list {
	struct sti_gdp_node *top_field;
	dma_addr_t top_field_paddr;
	struct sti_gdp_node *btm_field;
	dma_addr_t btm_field_paddr;
};

/**
 * STI GDP structure
 *
 * @layer: layer structure
 * @clk_pix: pixel clock for the current gdp
 * @vtg_field_nb: callback for VTG FIELD (top or bottom) notification
 * @is_curr_top: true if the current node processed is the top field
 * @node_list: array of node list
 */
struct sti_gdp {
	struct sti_layer layer;
	struct clk *clk_pix;
	struct notifier_block vtg_field_nb;
	bool is_curr_top;
	struct sti_gdp_node_list node_list[GDP_NODE_NB_BANK];
};

#define to_sti_gdp(x) container_of(x, struct sti_gdp, layer)

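/* DRM pixel formats supported by the GDP plane */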
static const uint32_t gdp_supported_formats[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ARGB4444,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_AYUV,
	DRM_FORMAT_YUV444,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_C8,
};

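/* sti_layer_funcs callbacks reporting the formats supported by the GDP */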
static const uint32_t *sti_gdp_get_formats(struct sti_layer *layer)
{
	return gdp_supported_formats;
}

static unsigned int sti_gdp_get_nb_formats(struct sti_layer *layer)
{
	return ARRAY_SIZE(gdp_supported_formats);
}

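/* Translate a DRM fourcc into a GDP format code, -1 if unsupported */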
static int sti_gdp_fourcc2format(int fourcc)
{
	switch (fourcc) {
	case DRM_FORMAT_XRGB8888:
		return GDP_RGB888_32;
	case DRM_FORMAT_ARGB8888:
		return GDP_ARGB8888;
	case DRM_FORMAT_ARGB4444:
		return GDP_ARGB4444;
	case DRM_FORMAT_ARGB1555:
		return GDP_ARGB1555;
	case DRM_FORMAT_RGB565:
		return GDP_RGB565;
	case DRM_FORMAT_RGB888:
		return GDP_RGB888;
	case DRM_FORMAT_AYUV:
		return GDP_AYCBR8888;
	case DRM_FORMAT_YUV444:
		return GDP_YCBR888;
	case DRM_FORMAT_VYUY:
		return GDP_YCBR422R;
	case DRM_FORMAT_C8:
		return GDP_CLUT8;
	}
	return -1;
}

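/* Formats with an 8-bit alpha component use the full 0-255 alpha range */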
static int sti_gdp_get_alpharange(int format)
{
	switch (format) {
	case GDP_ARGB8565:
	case GDP_ARGB8888:
	case GDP_AYCBR8888:
		return GAM_GDP_ALPHARANGE_255;
	}
	return 0;
}

/**
 * sti_gdp_get_free_nodes
 * @layer: gdp layer
 *
 * Look for a GDP node list that is not currently read by the HW.
 *
 * RETURNS:
 * Pointer to the free GDP node list
 */
static struct sti_gdp_node_list *sti_gdp_get_free_nodes(struct sti_layer *layer)
{
	int hw_nvn;
	struct sti_gdp *gdp = to_sti_gdp(layer);
	unsigned int i;

	hw_nvn = readl(layer->regs + GAM_GDP_NVN_OFFSET);
	if (!hw_nvn)
		goto end;

	for (i = 0; i < GDP_NODE_NB_BANK; i++)
		if ((hw_nvn != gdp->node_list[i].btm_field_paddr) &&
		    (hw_nvn != gdp->node_list[i].top_field_paddr))
			return &gdp->node_list[i];

	/* in hazardous cases, restart with the first node */
	DRM_ERROR("inconsistent NVN for %s: 0x%08X\n",
			sti_layer_to_str(layer), hw_nvn);

end:
	return &gdp->node_list[0];
}

/**
 * sti_gdp_get_current_nodes
 * @layer: GDP layer
 *
 * Look for GDP nodes that are currently read by the HW.
 *
 * RETURNS:
 * Pointer to the current GDP node list
 */
static
struct sti_gdp_node_list *sti_gdp_get_current_nodes(struct sti_layer *layer)
{
	int hw_nvn;
	struct sti_gdp *gdp = to_sti_gdp(layer);
	unsigned int i;

	hw_nvn = readl(layer->regs + GAM_GDP_NVN_OFFSET);
	if (!hw_nvn)
		goto end;

	for (i = 0; i < GDP_NODE_NB_BANK; i++)
		if ((hw_nvn == gdp->node_list[i].btm_field_paddr) ||
		    (hw_nvn == gdp->node_list[i].top_field_paddr))
			return &gdp->node_list[i];

end:
	DRM_DEBUG_DRIVER("Warning, NVN 0x%08X for %s does not match any node\n",
				hw_nvn, sti_layer_to_str(layer));

	return NULL;
}

/**
 * sti_gdp_prepare_layer
 * @layer: gdp layer
 * @first_prepare: true if it is the first time this function is called
 *
 * Update the free GDP node list according to the layer properties.
 *
 * RETURNS:
 * 0 on success.
 */
static int sti_gdp_prepare_layer(struct sti_layer *layer, bool first_prepare)
{
	struct sti_gdp_node_list *list;
	struct sti_gdp_node *top_field, *btm_field;
	struct drm_display_mode *mode = layer->mode;
	struct device *dev = layer->dev;
	struct sti_gdp *gdp = to_sti_gdp(layer);
	struct sti_compositor *compo = dev_get_drvdata(dev);
	int format;
	unsigned int depth, bpp;
	int rate = mode->clock * 1000;
	int res;
	u32 ydo, xdo, yds, xds;

	list = sti_gdp_get_free_nodes(layer);
	top_field = list->top_field;
	btm_field = list->btm_field;

	dev_dbg(dev, "%s %s top_node:0x%p btm_node:0x%p\n", __func__,
		sti_layer_to_str(layer), top_field, btm_field);

	/* Build the top field from layer params */
	top_field->gam_gdp_agc = GAM_GDP_AGC_FULL_RANGE;
	top_field->gam_gdp_ctl = WAIT_NEXT_VSYNC;
	format = sti_gdp_fourcc2format(layer->format);
	if (format == -1) {
		DRM_ERROR("Format not supported by GDP %.4s\n",
			  (char *)&layer->format);
		return 1;
	}
	top_field->gam_gdp_ctl |= format;
	top_field->gam_gdp_ctl |= sti_gdp_get_alpharange(format);
	top_field->gam_gdp_ppt &= ~GAM_GDP_PPT_IGNORE;

	/* pixel memory location */
	drm_fb_get_bpp_depth(layer->format, &depth, &bpp);
	top_field->gam_gdp_pml = (u32) layer->paddr + layer->offsets[0];
	top_field->gam_gdp_pml += layer->src_x * (bpp >> 3);
	top_field->gam_gdp_pml += layer->src_y * layer->pitches[0];

	/* input parameters */
	top_field->gam_gdp_pmp = layer->pitches[0];
	top_field->gam_gdp_size =
	    clamp_val(layer->src_h, 0, GAM_GDP_SIZE_MAX) << 16 |
	    clamp_val(layer->src_w, 0, GAM_GDP_SIZE_MAX);

	/* output parameters */
	ydo = sti_vtg_get_line_number(*mode, layer->dst_y);
	yds = sti_vtg_get_line_number(*mode, layer->dst_y + layer->dst_h - 1);
	xdo = sti_vtg_get_pixel_number(*mode, layer->dst_x);
	xds = sti_vtg_get_pixel_number(*mode, layer->dst_x + layer->dst_w - 1);
	top_field->gam_gdp_vpo = (ydo << 16) | xdo;
	top_field->gam_gdp_vps = (yds << 16) | xds;

	/* Same content and chained together */
	memcpy(btm_field, top_field, sizeof(*btm_field));
	top_field->gam_gdp_nvn = list->btm_field_paddr;
	btm_field->gam_gdp_nvn = list->top_field_paddr;

	/* Interlaced mode */
	if (layer->mode->flags & DRM_MODE_FLAG_INTERLACE)
		btm_field->gam_gdp_pml = top_field->gam_gdp_pml +
					 layer->pitches[0];

	if (first_prepare) {
		/* Register gdp callback */
		if (sti_vtg_register_client(layer->mixer_id == STI_MIXER_MAIN ?
				compo->vtg_main : compo->vtg_aux,
				&gdp->vtg_field_nb, layer->mixer_id)) {
			DRM_ERROR("Cannot register VTG notifier\n");
			return 1;
		}

		/* Set and enable gdp clock */
		if (gdp->clk_pix) {
			res = clk_set_rate(gdp->clk_pix, rate);
			if (res < 0) {
				DRM_ERROR("Cannot set rate (%dHz) for gdp\n",
					  rate);
				return 1;
			}

			if (clk_prepare_enable(gdp->clk_pix)) {
				DRM_ERROR("Failed to prepare/enable gdp\n");
				return 1;
			}
		}
	}

	return 0;
}

/**
 * sti_gdp_commit_layer
 * @layer: gdp layer
 *
 * Update the NVN field of the 'right' field of the current GDP node (being
 * used by the HW) with the address of the updated ('free') top field GDP node.
 * - In interlaced mode the 'right' field is the bottom field as we update
 *   frames starting from their top field
 * - In progressive mode, we update both bottom and top fields which are
 *   equal nodes.
 * At the next VSYNC, the updated node list will be used by the HW.
 *
 * RETURNS:
 * 0 on success.
 */
static int sti_gdp_commit_layer(struct sti_layer *layer)
{
	struct sti_gdp_node_list *updated_list = sti_gdp_get_free_nodes(layer);
	struct sti_gdp_node *updated_top_node = updated_list->top_field;
	struct sti_gdp_node *updated_btm_node = updated_list->btm_field;
	struct sti_gdp *gdp = to_sti_gdp(layer);
	u32 dma_updated_top = updated_list->top_field_paddr;
	u32 dma_updated_btm = updated_list->btm_field_paddr;
	struct sti_gdp_node_list *curr_list = sti_gdp_get_current_nodes(layer);

	dev_dbg(layer->dev, "%s %s top/btm_node:0x%p/0x%p\n", __func__,
		sti_layer_to_str(layer),
		updated_top_node, updated_btm_node);
	dev_dbg(layer->dev, "Current NVN:0x%X\n",
		readl(layer->regs + GAM_GDP_NVN_OFFSET));
	dev_dbg(layer->dev, "Posted buff: %lx current buff: %x\n",
		(unsigned long)layer->paddr,
		readl(layer->regs + GAM_GDP_PML_OFFSET));

	if (curr_list == NULL) {
		/* First update or invalid node: write directly in the
		 * HW register */
		DRM_DEBUG_DRIVER("%s first update (or invalid node)",
				sti_layer_to_str(layer));

		writel(gdp->is_curr_top == true ?
				dma_updated_btm : dma_updated_top,
				layer->regs + GAM_GDP_NVN_OFFSET);
		return 0;
	}

	if (layer->mode->flags & DRM_MODE_FLAG_INTERLACE) {
		if (gdp->is_curr_top == true) {
			/* Do not update in the middle of the frame, but
			 * postpone the update until after the bottom field
			 * has been displayed */
			curr_list->btm_field->gam_gdp_nvn = dma_updated_top;
		} else {
			/* Direct update to avoid one frame delay */
			writel(dma_updated_top,
				layer->regs + GAM_GDP_NVN_OFFSET);
		}
	} else {
		/* Direct update for progressive to avoid one frame delay */
		writel(dma_updated_top, layer->regs + GAM_GDP_NVN_OFFSET);
	}

	return 0;
}

/**
 * sti_gdp_disable_layer
 * @layer: gdp layer
 *
 * Disable a GDP.
 *
 * RETURNS:
 * 0 on success.
 */
static int sti_gdp_disable_layer(struct sti_layer *layer)
{
	unsigned int i;
	struct sti_gdp *gdp = to_sti_gdp(layer);
	struct sti_compositor *compo = dev_get_drvdata(layer->dev);

	DRM_DEBUG_DRIVER("%s\n", sti_layer_to_str(layer));

	/* Set the nodes as 'to be ignored on mixer' */
	for (i = 0; i < GDP_NODE_NB_BANK; i++) {
		gdp->node_list[i].top_field->gam_gdp_ppt |= GAM_GDP_PPT_IGNORE;
		gdp->node_list[i].btm_field->gam_gdp_ppt |= GAM_GDP_PPT_IGNORE;
	}

	if (sti_vtg_unregister_client(layer->mixer_id == STI_MIXER_MAIN ?
			compo->vtg_main : compo->vtg_aux, &gdp->vtg_field_nb))
		DRM_DEBUG_DRIVER("Warning: cannot unregister VTG notifier\n");

	if (gdp->clk_pix)
		clk_disable_unprepare(gdp->clk_pix);

	return 0;
}

/**
 * sti_gdp_field_cb
 * @nb: notifier block
 * @event: event message
 * @data: private data
 *
 * Handle VTG top field and bottom field event.
 *
 * RETURNS:
 * 0 on success.
 */
int sti_gdp_field_cb(struct notifier_block *nb,
		unsigned long event, void *data)
{
	struct sti_gdp *gdp = container_of(nb, struct sti_gdp, vtg_field_nb);

	switch (event) {
	case VTG_TOP_FIELD_EVENT:
		gdp->is_curr_top = true;
		break;
	case VTG_BOTTOM_FIELD_EVENT:
		gdp->is_curr_top = false;
		break;
	default:
		DRM_ERROR("unsupported event: %lu\n", event);
		break;
	}

	return 0;
}

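/**
 * sti_gdp_init
 * @layer: gdp layer
 *
 * Allocate the top/bottom field nodes of each bank in write-combined DMA
 * memory and, on STiH407, retrieve the pixel clock dedicated to this GDP.
 */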
static void sti_gdp_init(struct sti_layer *layer)
{
	struct sti_gdp *gdp = to_sti_gdp(layer);
	struct device_node *np = layer->dev->of_node;
	dma_addr_t dma_addr;
	void *base;
	unsigned int i, size;

	/* Allocate all the nodes within a single memory page */
	size = sizeof(struct sti_gdp_node) *
	    GDP_NODE_PER_FIELD * GDP_NODE_NB_BANK;
	base = dma_alloc_writecombine(layer->dev,
			size, &dma_addr, GFP_KERNEL | GFP_DMA);

	if (!base) {
		DRM_ERROR("Failed to allocate memory for GDP node\n");
		return;
	}
	memset(base, 0, size);

	for (i = 0; i < GDP_NODE_NB_BANK; i++) {
		if (dma_addr & 0xF) {
			DRM_ERROR("Mem alignment failed\n");
			return;
		}
		gdp->node_list[i].top_field = base;
		gdp->node_list[i].top_field_paddr = dma_addr;

		DRM_DEBUG_DRIVER("node[%d].top_field=%p\n", i, base);
		base += sizeof(struct sti_gdp_node);
		dma_addr += sizeof(struct sti_gdp_node);

		if (dma_addr & 0xF) {
			DRM_ERROR("Mem alignment failed\n");
			return;
		}
		gdp->node_list[i].btm_field = base;
		gdp->node_list[i].btm_field_paddr = dma_addr;
		DRM_DEBUG_DRIVER("node[%d].btm_field=%p\n", i, base);
		base += sizeof(struct sti_gdp_node);
		dma_addr += sizeof(struct sti_gdp_node);
	}

	if (of_device_is_compatible(np, "st,stih407-compositor")) {
		/* Each GDP of the STiH407 chip has its own pixel clock */
		char *clk_name;

		switch (layer->desc) {
		case STI_GDP_0:
			clk_name = "pix_gdp1";
			break;
		case STI_GDP_1:
			clk_name = "pix_gdp2";
			break;
		case STI_GDP_2:
			clk_name = "pix_gdp3";
			break;
		case STI_GDP_3:
			clk_name = "pix_gdp4";
			break;
		default:
			DRM_ERROR("GDP id not recognized\n");
			return;
		}

		gdp->clk_pix = devm_clk_get(layer->dev, clk_name);
		if (IS_ERR(gdp->clk_pix))
			DRM_ERROR("Cannot get %s clock\n", clk_name);
	}
}

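/* Layer operations plugged into the sti_layer framework */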
static const struct sti_layer_funcs gdp_ops = {
	.get_formats = sti_gdp_get_formats,
	.get_nb_formats = sti_gdp_get_nb_formats,
	.init = sti_gdp_init,
	.prepare = sti_gdp_prepare_layer,
	.commit = sti_gdp_commit_layer,
	.disable = sti_gdp_disable_layer,
};

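/**
 * sti_gdp_create
 * @dev: device
 * @id: gdp id
 *
 * Allocate a GDP layer and set up its operations and VTG field callback.
 *
 * RETURNS:
 * Pointer to the sti_layer, or NULL if the allocation fails.
 */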
struct sti_layer *sti_gdp_create(struct device *dev, int id)
{
	struct sti_gdp *gdp;

	gdp = devm_kzalloc(dev, sizeof(*gdp), GFP_KERNEL);
	if (!gdp) {
		DRM_ERROR("Failed to allocate memory for GDP\n");
		return NULL;
	}

	gdp->layer.ops = &gdp_ops;
	gdp->vtg_field_nb.notifier_call = sti_gdp_field_cb;

	return (struct sti_layer *)gdp;
}