// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx DRM CRTC DMA engine driver
 *
 * Copyright (C) 2017 - 2018 Xilinx, Inc.
 *
 * Author : Saurabh Sengar <saurabhs@xilinx.com>
 *        : Hyun Woo Kwon <hyun.kwon@xilinx.com>
 */

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_cma_helper.h>
#include <linux/component.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/dma/xilinx_frmbuf.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <video/videomode.h>
#include "xlnx_bridge.h"
#include "xlnx_crtc.h"
#include "xlnx_drv.h"

/*
 * Overview
 * --------
 *
 * This driver supports the display pipeline with a DMA engine driver by
 * initializing DRM CRTC and plane objects. The driver assumes a
 * single-plane pipeline, as a multi-plane pipeline would require
 * programming beyond the DMA engine interface.
 */
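
/*
 * For reference, a minimal device tree node for this driver. The property
 * names ("dma0", "xlnx,vformat", "xlnx,bridge") match what probe parses
 * below; the node name, phandles, and the format string are illustrative
 * placeholders that depend on the actual design:
 *
 *      drm-pl-disp {
 *              compatible = "xlnx,pl-disp";
 *              dmas = <&v_frmbuf_rd 0>;
 *              dma-names = "dma0";
 *              xlnx,vformat = "YUYV";
 *              xlnx,bridge = <&v_tc>;
 *      };
 */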

/**
 * struct xlnx_dma_chan - struct for DMA engine
 * @dma_chan: DMA channel
 * @xt: Interleaved desc config container
 * @sgl: Data chunk for dma_interleaved_template
 */
struct xlnx_dma_chan {
        struct dma_chan *dma_chan;
        struct dma_interleaved_template xt;
        struct data_chunk sgl[1];
};
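
/*
 * The interleaved template set up in xlnx_pl_disp_plane_mode_set() describes
 * one frame in memory: xt.numf lines are read from xt.src_start, each line
 * being sgl[0].size bytes of pixel data followed by sgl[0].icg gap bytes
 * (the framebuffer pitch minus the line width in bytes).
 */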

/**
 * struct xlnx_pl_disp - struct for display subsystem
 * @dev: device structure
 * @master: logical master device from xlnx drm
 * @xlnx_crtc: Xilinx DRM driver crtc object
 * @plane: base drm plane object
 * @chan: struct for DMA engine
 * @event: vblank pending event
 * @callback: callback for registering DMA callback function
 * @callback_param: parameter to pass to the DMA callback function
 * @drm: core drm object
 * @fmt: drm color format
 * @vtc_bridge: vtc_bridge structure
 * @fid: field id
 */
struct xlnx_pl_disp {
        struct device *dev;
        struct platform_device *master;
        struct xlnx_crtc xlnx_crtc;
        struct drm_plane plane;
        struct xlnx_dma_chan *chan;
        struct drm_pending_vblank_event *event;
        dma_async_tx_callback callback;
        void *callback_param;
        struct drm_device *drm;
        u32 fmt;
        struct xlnx_bridge *vtc_bridge;
        u32 fid;
};

/*
 * Xlnx crtc functions
 */
static inline struct xlnx_pl_disp *crtc_to_dma(struct xlnx_crtc *xlnx_crtc)
{
        return container_of(xlnx_crtc, struct xlnx_pl_disp, xlnx_crtc);
}

/**
 * xlnx_pl_disp_complete - vblank handler
 * @param: parameter to vblank handler
 *
 * This function handles the vblank interrupt and sends an event to
 * the CRTC object.
 */
static void xlnx_pl_disp_complete(void *param)
{
        struct xlnx_pl_disp *xlnx_pl_disp = param;
        struct drm_device *drm = xlnx_pl_disp->drm;

        drm_handle_vblank(drm, 0);
}

/**
 * xlnx_pl_disp_get_format - Get the current display pipeline format
 * @xlnx_crtc: xlnx crtc object
 *
 * Get the current format of the pipeline
 *
 * Return: the corresponding DRM_FORMAT_XXX
 */
static uint32_t xlnx_pl_disp_get_format(struct xlnx_crtc *xlnx_crtc)
{
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        return xlnx_pl_disp->fmt;
}

/**
 * xlnx_pl_disp_get_align - Get the alignment value for pitch
 * @xlnx_crtc: xlnx crtc object
 *
 * Get the alignment value for pitch from the plane
 *
 * Return: The alignment value in bytes
 */
static unsigned int xlnx_pl_disp_get_align(struct xlnx_crtc *xlnx_crtc)
{
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

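        /*
         * copy_align is the log2 of the DMA engine's required address
         * alignment (enum dmaengine_alignment), so shifting yields the
         * alignment in bytes.
         */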
        return 1 << xlnx_pl_disp->chan->dma_chan->device->copy_align;
}

/*
 * DRM plane functions
 */
static inline struct xlnx_pl_disp *plane_to_dma(struct drm_plane *plane)
{
        return container_of(plane, struct xlnx_pl_disp, plane);
}

/**
 * xlnx_pl_disp_plane_disable - Disables DRM plane
 * @plane: DRM plane object
 *
 * Disable the DRM plane by stopping the corresponding DMA
 */
static void xlnx_pl_disp_plane_disable(struct drm_plane *plane)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
}

/**
 * xlnx_pl_disp_plane_enable - Enables DRM plane
 * @plane: DRM plane object
 *
 * Enable the DRM plane by enabling the corresponding DMA
 */
static void xlnx_pl_disp_plane_enable(struct drm_plane *plane)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        struct dma_async_tx_descriptor *desc;
        enum dma_ctrl_flags flags;
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
        struct dma_chan *dma_chan = xlnx_dma_chan->dma_chan;
        struct dma_interleaved_template *xt = &xlnx_dma_chan->xt;

        flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
        desc = dmaengine_prep_interleaved_dma(dma_chan, xt, flags);
        if (!desc) {
                dev_err(xlnx_pl_disp->dev,
                        "failed to prepare DMA descriptor\n");
                return;
        }
        desc->callback = xlnx_pl_disp->callback;
        desc->callback_param = xlnx_pl_disp->callback_param;
        xilinx_xdma_set_earlycb(xlnx_dma_chan->dma_chan, desc, true);

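        /*
         * For interlaced (field-alternate) framebuffers, tell the DMA
         * engine which field this descriptor carries: field id 1 for the
         * top field and 0 for the bottom field.
         */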
        if (plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_TOP ||
            plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_BOTTOM) {
                if (plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_TOP)
                        xlnx_pl_disp->fid = 1;
                else
                        xlnx_pl_disp->fid = 0;

                xilinx_xdma_set_fid(xlnx_dma_chan->dma_chan, desc,
                                    xlnx_pl_disp->fid);
        }

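        /*
         * dmaengine_submit() only queues the descriptor on the pending
         * list; dma_async_issue_pending() actually starts the transfer.
         */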
        dmaengine_submit(desc);
        dma_async_issue_pending(xlnx_dma_chan->dma_chan);
}

static void xlnx_pl_disp_plane_atomic_disable(struct drm_plane *plane,
                                              struct drm_plane_state *old_state)
{
        xlnx_pl_disp_plane_disable(plane);
}

static int xlnx_pl_disp_plane_mode_set(struct drm_plane *plane,
                                       struct drm_framebuffer *fb,
                                       int crtc_x, int crtc_y,
                                       unsigned int crtc_w, unsigned int crtc_h,
                                       u32 src_x, u32 src_y,
                                       u32 src_w, u32 src_h)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        const struct drm_format_info *info = fb->format;
        dma_addr_t luma_paddr, chroma_paddr;
        size_t stride;
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        if (info->num_planes > 2) {
                dev_err(xlnx_pl_disp->dev, "Color format not supported\n");
                return -EINVAL;
        }
        luma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 0);
        if (!luma_paddr) {
                dev_err(xlnx_pl_disp->dev, "failed to get luma paddr\n");
                return -EINVAL;
        }

        dev_dbg(xlnx_pl_disp->dev, "num planes = %d\n", info->num_planes);
        xlnx_dma_chan->xt.numf = src_h;
        xlnx_dma_chan->sgl[0].size = drm_format_plane_width_bytes(info,
                                                                  0, src_w);
        xlnx_dma_chan->sgl[0].icg = fb->pitches[0] - xlnx_dma_chan->sgl[0].size;
        xlnx_dma_chan->xt.src_start = luma_paddr;
        xlnx_dma_chan->xt.frame_size = info->num_planes;
        xlnx_dma_chan->xt.dir = DMA_MEM_TO_DEV;
        xlnx_dma_chan->xt.src_sgl = true;
        xlnx_dma_chan->xt.dst_sgl = false;

        /*
         * Do we have a video-format-aware DMA channel? If so, modify the
         * descriptor accordingly. Heuristic test: we have a multi-plane
         * format but only one DMA channel.
         */
        if (info->num_planes > 1) {
                chroma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 1);
                if (!chroma_paddr) {
                        dev_err(xlnx_pl_disp->dev,
                                "failed to get chroma paddr\n");
                        return -EINVAL;
                }
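                /*
                 * src_icg is the gap between the end of the luma plane and
                 * the start of the chroma plane: the chroma address minus
                 * the luma start address minus the numf lines of stride
                 * spanned by the luma plane.
                 */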
                stride = xlnx_dma_chan->sgl[0].size +
                        xlnx_dma_chan->sgl[0].icg;
                xlnx_dma_chan->sgl[0].src_icg = chroma_paddr -
                        xlnx_dma_chan->xt.src_start -
                        (xlnx_dma_chan->xt.numf * stride);
        }

        return 0;
}

static void xlnx_pl_disp_plane_atomic_update(struct drm_plane *plane,
                                             struct drm_plane_state *old_state)
{
        int ret;
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);

        ret = xlnx_pl_disp_plane_mode_set(plane,
                                          plane->state->fb,
                                          plane->state->crtc_x,
                                          plane->state->crtc_y,
                                          plane->state->crtc_w,
                                          plane->state->crtc_h,
                                          plane->state->src_x >> 16,
                                          plane->state->src_y >> 16,
                                          plane->state->src_w >> 16,
                                          plane->state->src_h >> 16);
        if (ret) {
                dev_err(xlnx_pl_disp->dev, "failed to mode set a plane\n");
                return;
        }
        /* in case the framebuffer DMA IP is in use, set the color format */
        xilinx_xdma_drm_config(xlnx_pl_disp->chan->dma_chan,
                               xlnx_pl_disp->plane.state->fb->format->format);
        /* apply the new fb address and enable */
        xlnx_pl_disp_plane_enable(plane);
}

static const struct drm_plane_helper_funcs xlnx_pl_disp_plane_helper_funcs = {
        .atomic_update = xlnx_pl_disp_plane_atomic_update,
        .atomic_disable = xlnx_pl_disp_plane_atomic_disable,
};

static const struct drm_plane_funcs xlnx_pl_disp_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = drm_plane_cleanup,
        .reset = drm_atomic_helper_plane_reset,
        .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

static inline struct xlnx_pl_disp *drm_crtc_to_dma(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);

        return crtc_to_dma(xlnx_crtc);
}

static void xlnx_pl_disp_crtc_atomic_begin(struct drm_crtc *crtc,
                                           struct drm_crtc_state *old_state)
{
        spin_lock_irq(&crtc->dev->event_lock);
        if (crtc->state->event) {
                /* Consume the flip_done event from atomic helper */
                crtc->state->event->pipe = drm_crtc_index(crtc);
                WARN_ON(drm_crtc_vblank_get(crtc) != 0);
                drm_crtc_arm_vblank_event(crtc, crtc->state->event);
                crtc->state->event = NULL;
        }
        spin_unlock_irq(&crtc->dev->event_lock);
}

static void xlnx_pl_disp_clear_event(struct drm_crtc *crtc)
{
        if (crtc->state->event) {
                complete_all(crtc->state->event->base.completion);
                crtc->state->event = NULL;
        }
}

static void xlnx_pl_disp_crtc_atomic_enable(struct drm_crtc *crtc,
                                            struct drm_crtc_state *old_state)
{
        struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
        int vrefresh;
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
        struct videomode vm;

        if (xlnx_pl_disp->vtc_bridge) {
                /* set video timing */
                drm_display_mode_to_videomode(adjusted_mode, &vm);
                xlnx_bridge_set_timing(xlnx_pl_disp->vtc_bridge, &vm);
                xlnx_bridge_enable(xlnx_pl_disp->vtc_bridge);
        }

        xlnx_pl_disp_plane_enable(crtc->primary);

        /*
         * Delay for one vblank interval so the timing generator is stable.
         * mode->clock is in kHz, so clock * 1000 divided by the pixels per
         * frame (vtotal * htotal) gives the refresh rate in Hz.
         */
        vrefresh = (adjusted_mode->clock * 1000) /
                   (adjusted_mode->vtotal * adjusted_mode->htotal);
        msleep(1 * 1000 / vrefresh);
}

static void xlnx_pl_disp_crtc_atomic_disable(struct drm_crtc *crtc,
                                             struct drm_crtc_state *old_state)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        xlnx_pl_disp_plane_disable(crtc->primary);
        xlnx_pl_disp_clear_event(crtc);
        xlnx_bridge_disable(xlnx_pl_disp->vtc_bridge);
}

static int xlnx_pl_disp_crtc_atomic_check(struct drm_crtc *crtc,
                                          struct drm_crtc_state *state)
{
        return drm_atomic_add_affected_planes(state->state, crtc);
}

static const struct drm_crtc_helper_funcs xlnx_pl_disp_crtc_helper_funcs = {
        .atomic_enable = xlnx_pl_disp_crtc_atomic_enable,
        .atomic_disable = xlnx_pl_disp_crtc_atomic_disable,
        .atomic_check = xlnx_pl_disp_crtc_atomic_check,
        .atomic_begin = xlnx_pl_disp_crtc_atomic_begin,
};

static void xlnx_pl_disp_crtc_destroy(struct drm_crtc *crtc)
{
        xlnx_pl_disp_plane_disable(crtc->primary);
        drm_crtc_cleanup(crtc);
}

static int xlnx_pl_disp_crtc_enable_vblank(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        /*
         * Use the complete callback for the vblank event, assuming the DMA
         * engine starts on the next descriptor upon this event. This may
         * not be a safe assumption for some DMA engines.
         */
        xlnx_pl_disp->callback = xlnx_pl_disp_complete;
        xlnx_pl_disp->callback_param = xlnx_pl_disp;

        return 0;
}

static void xlnx_pl_disp_crtc_disable_vblank(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        xlnx_pl_disp->callback = NULL;
        xlnx_pl_disp->callback_param = NULL;
}

static const struct drm_crtc_funcs xlnx_pl_disp_crtc_funcs = {
        .destroy = xlnx_pl_disp_crtc_destroy,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .reset = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
        .enable_vblank = xlnx_pl_disp_crtc_enable_vblank,
        .disable_vblank = xlnx_pl_disp_crtc_disable_vblank,
};

static int xlnx_pl_disp_bind(struct device *dev, struct device *master,
                             void *data)
{
        struct drm_device *drm = data;
        struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
        int ret;
        u32 *fmts = NULL;
        unsigned int num_fmts = 0;

        /*
         * in case of a framebuffer IP, query the supported formats
         * and their count
         */
        xilinx_xdma_get_drm_vid_fmts(xlnx_pl_disp->chan->dma_chan,
                                     &num_fmts, &fmts);
        ret = drm_universal_plane_init(drm, &xlnx_pl_disp->plane, 0,
                                       &xlnx_pl_disp_plane_funcs,
                                       fmts ? fmts : &xlnx_pl_disp->fmt,
                                       num_fmts ? num_fmts : 1,
                                       NULL, DRM_PLANE_TYPE_PRIMARY, NULL);
        if (ret)
                return ret;

        drm_plane_helper_add(&xlnx_pl_disp->plane,
                             &xlnx_pl_disp_plane_helper_funcs);

        ret = drm_crtc_init_with_planes(drm, &xlnx_pl_disp->xlnx_crtc.crtc,
                                        &xlnx_pl_disp->plane, NULL,
                                        &xlnx_pl_disp_crtc_funcs, NULL);
        if (ret) {
                drm_plane_cleanup(&xlnx_pl_disp->plane);
                return ret;
        }

        drm_crtc_helper_add(&xlnx_pl_disp->xlnx_crtc.crtc,
                            &xlnx_pl_disp_crtc_helper_funcs);
        xlnx_pl_disp->xlnx_crtc.get_format = &xlnx_pl_disp_get_format;
        xlnx_pl_disp->xlnx_crtc.get_align = &xlnx_pl_disp_get_align;
        xlnx_pl_disp->drm = drm;
        xlnx_crtc_register(xlnx_pl_disp->drm, &xlnx_pl_disp->xlnx_crtc);

        return 0;
}

static void xlnx_pl_disp_unbind(struct device *dev, struct device *master,
                                void *data)
{
        struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);

        /* unregister from the xlnx framework to mirror bind() */
        xlnx_crtc_unregister(xlnx_pl_disp->drm, &xlnx_pl_disp->xlnx_crtc);
        drm_plane_cleanup(&xlnx_pl_disp->plane);
        drm_crtc_cleanup(&xlnx_pl_disp->xlnx_crtc.crtc);
}

static const struct component_ops xlnx_pl_disp_component_ops = {
        .bind   = xlnx_pl_disp_bind,
        .unbind = xlnx_pl_disp_unbind,
};

static int xlnx_pl_disp_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct device_node *vtc_node;
        struct xlnx_pl_disp *xlnx_pl_disp;
        int ret;
        const char *vformat;
        struct dma_chan *dma_chan;
        struct xlnx_dma_chan *xlnx_dma_chan;

        xlnx_pl_disp = devm_kzalloc(dev, sizeof(*xlnx_pl_disp), GFP_KERNEL);
        if (!xlnx_pl_disp)
                return -ENOMEM;

        dma_chan = of_dma_request_slave_channel(dev->of_node, "dma0");
        if (IS_ERR_OR_NULL(dma_chan)) {
                dev_err(dev, "failed to request dma channel\n");
                /* don't return 0 (success) when the channel is NULL */
                return dma_chan ? PTR_ERR(dma_chan) : -ENODEV;
        }

        xlnx_dma_chan = devm_kzalloc(dev, sizeof(*xlnx_dma_chan), GFP_KERNEL);
        if (!xlnx_dma_chan) {
                ret = -ENOMEM;
                goto err_dma;
        }

        xlnx_dma_chan->dma_chan = dma_chan;
        xlnx_pl_disp->chan = xlnx_dma_chan;
        ret = of_property_read_string(dev->of_node, "xlnx,vformat", &vformat);
        if (ret) {
                dev_err(dev, "No xlnx,vformat value in dts\n");
                goto err_dma;
        }

        /* the property holds a DRM fourcc string; copy at most 4 bytes */
        strncpy((char *)&xlnx_pl_disp->fmt, vformat,
                sizeof(xlnx_pl_disp->fmt));

        /* VTC Bridge support */
        vtc_node = of_parse_phandle(dev->of_node, "xlnx,bridge", 0);
        if (vtc_node) {
                xlnx_pl_disp->vtc_bridge = of_xlnx_bridge_get(vtc_node);
                of_node_put(vtc_node);
                if (!xlnx_pl_disp->vtc_bridge) {
                        dev_info(dev, "Didn't get vtc bridge instance\n");
                        ret = -EPROBE_DEFER;
                        goto err_dma;
                }
        } else {
                dev_info(dev, "vtc bridge property not present\n");
        }

        xlnx_pl_disp->dev = dev;
        platform_set_drvdata(pdev, xlnx_pl_disp);

        ret = component_add(dev, &xlnx_pl_disp_component_ops);
        if (ret)
                goto err_dma;

        xlnx_pl_disp->master = xlnx_drm_pipeline_init(pdev);
        if (IS_ERR(xlnx_pl_disp->master)) {
                ret = PTR_ERR(xlnx_pl_disp->master);
                dev_err(dev, "failed to initialize the drm pipeline\n");
                goto err_component;
        }

        dev_info(&pdev->dev, "Xlnx PL display driver probed\n");

        return 0;

err_component:
        component_del(dev, &xlnx_pl_disp_component_ops);
err_dma:
        dma_release_channel(dma_chan);

        return ret;
}

static int xlnx_pl_disp_remove(struct platform_device *pdev)
{
        struct xlnx_pl_disp *xlnx_pl_disp = platform_get_drvdata(pdev);
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        of_xlnx_bridge_put(xlnx_pl_disp->vtc_bridge);
        xlnx_drm_pipeline_exit(xlnx_pl_disp->master);
        component_del(&pdev->dev, &xlnx_pl_disp_component_ops);

        /* Make sure the channel is terminated before release */
        dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
        dma_release_channel(xlnx_dma_chan->dma_chan);

        return 0;
}

static const struct of_device_id xlnx_pl_disp_of_match[] = {
        { .compatible = "xlnx,pl-disp"},
        { }
};
MODULE_DEVICE_TABLE(of, xlnx_pl_disp_of_match);

static struct platform_driver xlnx_pl_disp_driver = {
        .probe = xlnx_pl_disp_probe,
        .remove = xlnx_pl_disp_remove,
        .driver = {
                .name = "xlnx-pl-disp",
                .of_match_table = xlnx_pl_disp_of_match,
        },
};

module_platform_driver(xlnx_pl_disp_driver);

MODULE_AUTHOR("Saurabh Sengar");
MODULE_DESCRIPTION("Xilinx DRM Display Driver for PL IPs");
MODULE_LICENSE("GPL v2");