drivers/gpu/drm/xlnx/xlnx_pl_disp.c (zynq/linux.git)
drm: xlnx: pl_disp: fix odd_ptr_err.cocci warnings
// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx DRM CRTC DMA engine driver
 *
 * Copyright (C) 2017 - 2018 Xilinx, Inc.
 *
 * Author : Saurabh Sengar <saurabhs@xilinx.com>
 *        : Hyun Woo Kwon <hyun.kwon@xilinx.com>
 */

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_cma_helper.h>
#include <linux/component.h>
#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/dma/xilinx_frmbuf.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include "xlnx_drv.h"
#include "xlnx_crtc.h"

/*
 * Overview
 * --------
 *
 * This driver supports the display pipeline through a DMA engine driver
 * by initializing DRM CRTC and plane objects. The driver assumes a
 * single-plane pipeline, as a multi-plane pipeline would require
 * programming beyond the DMA engine interface.
 */
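
/*
 * Illustrative device tree usage (a sketch of the expected properties, not
 * the authoritative binding): the driver matches "xlnx,pl-disp", requests
 * the DMA channel named "dma0" through the standard dmas/dma-names
 * properties, and reads the pixel format from the "xlnx,vformat" string.
 * The node name, phandle and format values below are made-up placeholders.
 *
 *	pl_disp {
 *		compatible = "xlnx,pl-disp";
 *		dmas = <&v_frmbuf_rd_0 0>;
 *		dma-names = "dma0";
 *		xlnx,vformat = "YUYV";
 *	};
 */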

/**
 * struct xlnx_dma_chan - struct for DMA engine
 * @dma_chan: DMA channel
 * @xt: Interleaved desc config container
 * @sgl: Data chunk backing the flexible sgl[] array at the end of @xt
 */
struct xlnx_dma_chan {
        struct dma_chan *dma_chan;
        struct dma_interleaved_template xt;
        struct data_chunk sgl[1];
};

/**
 * struct xlnx_pl_disp - struct for display subsystem
 * @dev: device structure
 * @master: logical master device from xlnx drm
 * @xlnx_crtc: Xilinx DRM driver crtc object
 * @plane: base drm plane object
 * @chan: struct for DMA engine
 * @event: vblank pending event
 * @callback: DMA completion callback, registered on each descriptor
 * @callback_param: parameter passed to the DMA completion callback
 * @drm: core drm object
 * @fmt: drm color format
 */
struct xlnx_pl_disp {
        struct device *dev;
        struct platform_device *master;
        struct xlnx_crtc xlnx_crtc;
        struct drm_plane plane;
        struct xlnx_dma_chan *chan;
        struct drm_pending_vblank_event *event;
        dma_async_tx_callback callback;
        void *callback_param;
        struct drm_device *drm;
        u32 fmt;
};

/*
 * Xlnx crtc functions
 */
static inline struct xlnx_pl_disp *crtc_to_dma(struct xlnx_crtc *xlnx_crtc)
{
        return container_of(xlnx_crtc, struct xlnx_pl_disp, xlnx_crtc);
}

/**
 * xlnx_pl_disp_complete - vblank handler
 * @param: parameter to vblank handler
 *
 * This function handles the vblank completion and reports the vblank event
 * on the driver's single CRTC (pipe 0) to the DRM core.
 */
static void xlnx_pl_disp_complete(void *param)
{
        struct xlnx_pl_disp *xlnx_pl_disp = param;
        struct drm_device *drm = xlnx_pl_disp->drm;

        drm_handle_vblank(drm, 0);
}

/**
 * xlnx_pl_disp_get_format - Get the current display pipeline format
 * @xlnx_crtc: xlnx crtc object
 *
 * Get the current format of the pipeline
 *
 * Return: the corresponding DRM_FORMAT_XXX
 */
static uint32_t xlnx_pl_disp_get_format(struct xlnx_crtc *xlnx_crtc)
{
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        return xlnx_pl_disp->fmt;
}

/**
 * xlnx_pl_disp_get_align - Get the alignment value for pitch
 * @xlnx_crtc: xlnx crtc object
 *
 * Get the pitch alignment required by the DMA device
 *
 * Return: the alignment value in bytes (1 << copy_align of the DMA device)
 */
static unsigned int xlnx_pl_disp_get_align(struct xlnx_crtc *xlnx_crtc)
{
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        return 1 << xlnx_pl_disp->chan->dma_chan->device->copy_align;
}

/*
 * DRM plane functions
 */
static inline struct xlnx_pl_disp *plane_to_dma(struct drm_plane *plane)
{
        return container_of(plane, struct xlnx_pl_disp, plane);
}

/**
 * xlnx_pl_disp_plane_disable - Disables DRM plane
 * @plane: DRM plane object
 *
 * Disable the DRM plane by stopping the corresponding DMA channel
 */
static void xlnx_pl_disp_plane_disable(struct drm_plane *plane)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
}

/**
 * xlnx_pl_disp_plane_enable - Enables DRM plane
 * @plane: DRM plane object
 *
 * Enable the DRM plane by submitting a transfer on the corresponding DMA
 * channel
 */
static void xlnx_pl_disp_plane_enable(struct drm_plane *plane)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        struct dma_async_tx_descriptor *desc;
        enum dma_ctrl_flags flags;
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
        struct dma_chan *dma_chan = xlnx_dma_chan->dma_chan;
        struct dma_interleaved_template *xt = &xlnx_dma_chan->xt;

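        /*
         * Build an interleaved transfer from the template filled in by
         * xlnx_pl_disp_plane_mode_set(), attach the completion callback used
         * for vblank reporting (if any), then queue the descriptor and kick
         * the channel.
         */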
        flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
        desc = dmaengine_prep_interleaved_dma(dma_chan, xt, flags);
        if (!desc) {
                dev_err(xlnx_pl_disp->dev,
                        "failed to prepare DMA descriptor\n");
                return;
        }
        desc->callback = xlnx_pl_disp->callback;
        desc->callback_param = xlnx_pl_disp->callback_param;

        dmaengine_submit(desc);
        dma_async_issue_pending(xlnx_dma_chan->dma_chan);
}

static void xlnx_pl_disp_plane_atomic_disable(struct drm_plane *plane,
                                              struct drm_plane_state *old_state)
{
        xlnx_pl_disp_plane_disable(plane);
}

static int xlnx_pl_disp_plane_mode_set(struct drm_plane *plane,
                                       struct drm_framebuffer *fb,
                                       int crtc_x, int crtc_y,
                                       unsigned int crtc_w, unsigned int crtc_h,
                                       u32 src_x, u32 src_y,
                                       u32 src_w, u32 src_h)
{
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
        const struct drm_format_info *info = fb->format;
        dma_addr_t luma_paddr, chroma_paddr;
        size_t stride;
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        if (info->num_planes > 2) {
                dev_err(xlnx_pl_disp->dev, "Color format not supported\n");
                return -EINVAL;
        }
        luma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 0);
        if (!luma_paddr) {
                dev_err(xlnx_pl_disp->dev, "failed to get luma paddr\n");
                return -EINVAL;
        }

        dev_dbg(xlnx_pl_disp->dev, "num planes = %d\n", info->num_planes);
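        /*
         * Describe one frame as an interleaved transfer: numf lines of
         * sgl[0].size bytes each, separated by sgl[0].icg bytes so that
         * size + icg equals the framebuffer pitch.
         */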
        xlnx_dma_chan->xt.numf = src_h;
        xlnx_dma_chan->sgl[0].size = drm_format_plane_width_bytes(info,
                                                                  0, src_w);
        xlnx_dma_chan->sgl[0].icg = fb->pitches[0] - xlnx_dma_chan->sgl[0].size;
        xlnx_dma_chan->xt.src_start = luma_paddr;
        xlnx_dma_chan->xt.frame_size = info->num_planes;
        xlnx_dma_chan->xt.dir = DMA_MEM_TO_DEV;
        xlnx_dma_chan->xt.src_sgl = true;
        xlnx_dma_chan->xt.dst_sgl = false;

        /*
         * Do we have a video format aware dma channel? If so, modify the
         * descriptor accordingly. Heuristic test: we have a multi-plane
         * format but only one dma channel.
         */
        if (info->num_planes > 1) {
                chroma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 1);
                if (!chroma_paddr) {
                        dev_err(xlnx_pl_disp->dev,
                                "failed to get chroma paddr\n");
                        return -EINVAL;
                }
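                /*
                 * For a multi-plane (semi-planar) format handled by a single
                 * format-aware channel, encode the chroma plane location as
                 * src_icg: the gap from the end of the luma plane data to the
                 * chroma base address.
                 */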
                stride = xlnx_dma_chan->sgl[0].size +
                        xlnx_dma_chan->sgl[0].icg;
                xlnx_dma_chan->sgl[0].src_icg = chroma_paddr -
                        xlnx_dma_chan->xt.src_start -
                        (xlnx_dma_chan->xt.numf * stride);
        }

        return 0;
}

static void xlnx_pl_disp_plane_atomic_update(struct drm_plane *plane,
                                             struct drm_plane_state *old_state)
{
        int ret;
        struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);

        ret = xlnx_pl_disp_plane_mode_set(plane,
                                          plane->state->fb,
                                          plane->state->crtc_x,
                                          plane->state->crtc_y,
                                          plane->state->crtc_w,
                                          plane->state->crtc_h,
                                          plane->state->src_x >> 16,
                                          plane->state->src_y >> 16,
                                          plane->state->src_w >> 16,
                                          plane->state->src_h >> 16);
        if (ret) {
                dev_err(xlnx_pl_disp->dev, "failed to mode set a plane\n");
                return;
        }
        /* If a video framebuffer DMA is in use, set the color format */
        xilinx_xdma_drm_config(xlnx_pl_disp->chan->dma_chan,
                               xlnx_pl_disp->plane.state->fb->format->format);
        /* Apply the new framebuffer address and enable the plane */
        xlnx_pl_disp_plane_enable(plane);
}

static const struct drm_plane_helper_funcs xlnx_pl_disp_plane_helper_funcs = {
        .atomic_update = xlnx_pl_disp_plane_atomic_update,
        .atomic_disable = xlnx_pl_disp_plane_atomic_disable,
};

static struct drm_plane_funcs xlnx_pl_disp_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = drm_plane_cleanup,
        .reset = drm_atomic_helper_plane_reset,
        .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

static inline struct xlnx_pl_disp *drm_crtc_to_dma(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);

        return crtc_to_dma(xlnx_crtc);
}

static void xlnx_pl_disp_crtc_atomic_begin(struct drm_crtc *crtc,
                                           struct drm_crtc_state *old_state)
{
        spin_lock_irq(&crtc->dev->event_lock);
        if (crtc->state->event) {
                /* Consume the flip_done event from atomic helper */
                crtc->state->event->pipe = drm_crtc_index(crtc);
                WARN_ON(drm_crtc_vblank_get(crtc) != 0);
                drm_crtc_arm_vblank_event(crtc, crtc->state->event);
                crtc->state->event = NULL;
        }
        spin_unlock_irq(&crtc->dev->event_lock);
}

static void xlnx_pl_disp_clear_event(struct drm_crtc *crtc)
{
        if (crtc->state->event) {
                complete_all(crtc->state->event->base.completion);
                crtc->state->event = NULL;
        }
}

static void xlnx_pl_disp_crtc_atomic_enable(struct drm_crtc *crtc,
                                            struct drm_crtc_state *old_state)
{
        xlnx_pl_disp_plane_enable(crtc->primary);
}

static void xlnx_pl_disp_crtc_atomic_disable(struct drm_crtc *crtc,
                                             struct drm_crtc_state *old_state)
{
        xlnx_pl_disp_plane_disable(crtc->primary);
        xlnx_pl_disp_clear_event(crtc);
}

static int xlnx_pl_disp_crtc_atomic_check(struct drm_crtc *crtc,
                                          struct drm_crtc_state *state)
{
        return drm_atomic_add_affected_planes(state->state, crtc);
}

static struct drm_crtc_helper_funcs xlnx_pl_disp_crtc_helper_funcs = {
        .atomic_enable = xlnx_pl_disp_crtc_atomic_enable,
        .atomic_disable = xlnx_pl_disp_crtc_atomic_disable,
        .atomic_check = xlnx_pl_disp_crtc_atomic_check,
        .atomic_begin = xlnx_pl_disp_crtc_atomic_begin,
};

static void xlnx_pl_disp_crtc_destroy(struct drm_crtc *crtc)
{
        xlnx_pl_disp_plane_disable(crtc->primary);
        drm_crtc_cleanup(crtc);
}

static int xlnx_pl_disp_crtc_enable_vblank(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        /*
         * Use the complete callback for the vblank event, assuming the dma
         * engine starts on the next descriptor upon this event. This may not
         * be a safe assumption for some dma engines.
         */
        xlnx_pl_disp->callback = xlnx_pl_disp_complete;
        xlnx_pl_disp->callback_param = xlnx_pl_disp;

        return 0;
}

static void xlnx_pl_disp_crtc_disable_vblank(struct drm_crtc *crtc)
{
        struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
        struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);

        xlnx_pl_disp->callback = NULL;
        xlnx_pl_disp->callback_param = NULL;
}

static struct drm_crtc_funcs xlnx_pl_disp_crtc_funcs = {
        .destroy = xlnx_pl_disp_crtc_destroy,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .reset = drm_atomic_helper_crtc_reset,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
        .enable_vblank = xlnx_pl_disp_crtc_enable_vblank,
        .disable_vblank = xlnx_pl_disp_crtc_disable_vblank,
};

static int xlnx_pl_disp_bind(struct device *dev, struct device *master,
                             void *data)
{
        struct drm_device *drm = data;
        struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
        int ret;
        u32 *fmts = NULL;
        unsigned int num_fmts = 0;

        /*
         * If the DMA engine is a video-format-aware framebuffer IP, query the
         * formats it supports and their count; otherwise fall back to the
         * single format read from the device tree.
         */
        xilinx_xdma_get_drm_vid_fmts(xlnx_pl_disp->chan->dma_chan,
                                     &num_fmts, &fmts);
        ret = drm_universal_plane_init(drm, &xlnx_pl_disp->plane, 0,
                                       &xlnx_pl_disp_plane_funcs,
                                       fmts ? fmts : &xlnx_pl_disp->fmt,
                                       num_fmts ? num_fmts : 1,
                                       NULL, DRM_PLANE_TYPE_PRIMARY, NULL);
        if (ret)
                return ret;

        drm_plane_helper_add(&xlnx_pl_disp->plane,
                             &xlnx_pl_disp_plane_helper_funcs);

        ret = drm_crtc_init_with_planes(drm, &xlnx_pl_disp->xlnx_crtc.crtc,
                                        &xlnx_pl_disp->plane, NULL,
                                        &xlnx_pl_disp_crtc_funcs, NULL);
        if (ret) {
                drm_plane_cleanup(&xlnx_pl_disp->plane);
                return ret;
        }

        drm_crtc_helper_add(&xlnx_pl_disp->xlnx_crtc.crtc,
                            &xlnx_pl_disp_crtc_helper_funcs);
        xlnx_pl_disp->xlnx_crtc.get_format = &xlnx_pl_disp_get_format;
        xlnx_pl_disp->xlnx_crtc.get_align = &xlnx_pl_disp_get_align;
        xlnx_pl_disp->drm = drm;
        xlnx_crtc_register(xlnx_pl_disp->drm, &xlnx_pl_disp->xlnx_crtc);

        return 0;
}

static void xlnx_pl_disp_unbind(struct device *dev, struct device *master,
                                void *data)
{
        struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);

        drm_plane_cleanup(&xlnx_pl_disp->plane);
        drm_crtc_cleanup(&xlnx_pl_disp->xlnx_crtc.crtc);
}

static const struct component_ops xlnx_pl_disp_component_ops = {
        .bind   = xlnx_pl_disp_bind,
        .unbind = xlnx_pl_disp_unbind,
};

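/*
 * probe() requests the DMA channel and registers the device as a component
 * of the xlnx DRM master; the plane and CRTC themselves are only created in
 * xlnx_pl_disp_bind() once the master assembles the full DRM device.
 */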
static int xlnx_pl_disp_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct xlnx_pl_disp *xlnx_pl_disp;
        int ret;
        const char *vformat;
        struct dma_chan *dma_chan;
        struct xlnx_dma_chan *xlnx_dma_chan;

        xlnx_pl_disp = devm_kzalloc(dev, sizeof(*xlnx_pl_disp), GFP_KERNEL);
        if (!xlnx_pl_disp)
                return -ENOMEM;

        dma_chan = of_dma_request_slave_channel(dev->of_node, "dma0");
        if (IS_ERR_OR_NULL(dma_chan)) {
                dev_err(dev, "failed to request dma channel\n");
                return dma_chan ? PTR_ERR(dma_chan) : -ENODEV;
        }

        xlnx_dma_chan = devm_kzalloc(dev, sizeof(*xlnx_dma_chan), GFP_KERNEL);
        if (!xlnx_dma_chan) {
                dma_release_channel(dma_chan);
                return -ENOMEM;
        }

        xlnx_dma_chan->dma_chan = dma_chan;
        xlnx_pl_disp->chan = xlnx_dma_chan;
        ret = of_property_read_string(dev->of_node, "xlnx,vformat", &vformat);
        if (ret) {
                dev_err(dev, "No xlnx,vformat value in dts\n");
                goto err_dma;
        }

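        /*
         * "xlnx,vformat" is expected to carry a DRM fourcc string (for
         * example "YUYV"); copying its four characters into the u32 below
         * yields the corresponding DRM_FORMAT_* code directly.
         */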
        strcpy((char *)&xlnx_pl_disp->fmt, vformat);
        xlnx_pl_disp->dev = dev;
        platform_set_drvdata(pdev, xlnx_pl_disp);

        ret = component_add(dev, &xlnx_pl_disp_component_ops);
        if (ret)
                goto err_dma;

        xlnx_pl_disp->master = xlnx_drm_pipeline_init(pdev);
        if (IS_ERR(xlnx_pl_disp->master)) {
                ret = PTR_ERR(xlnx_pl_disp->master);
                dev_err(dev, "failed to initialize the drm pipeline\n");
                goto err_component;
        }

        dev_info(&pdev->dev, "Xlnx PL display driver probed\n");

        return 0;

err_component:
        component_del(dev, &xlnx_pl_disp_component_ops);
err_dma:
        dma_release_channel(xlnx_pl_disp->chan->dma_chan);

        return ret;
}

static int xlnx_pl_disp_remove(struct platform_device *pdev)
{
        struct xlnx_pl_disp *xlnx_pl_disp = platform_get_drvdata(pdev);
        struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;

        xlnx_drm_pipeline_exit(xlnx_pl_disp->master);
        component_del(&pdev->dev, &xlnx_pl_disp_component_ops);

        /* Make sure the channel is terminated before release */
        dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
        dma_release_channel(xlnx_dma_chan->dma_chan);

        return 0;
}

static const struct of_device_id xlnx_pl_disp_of_match[] = {
        { .compatible = "xlnx,pl-disp"},
        { }
};
MODULE_DEVICE_TABLE(of, xlnx_pl_disp_of_match);

static struct platform_driver xlnx_pl_disp_driver = {
        .probe = xlnx_pl_disp_probe,
        .remove = xlnx_pl_disp_remove,
        .driver = {
                .name = "xlnx-pl-disp",
                .of_match_table = xlnx_pl_disp_of_match,
        },
};

module_platform_driver(xlnx_pl_disp_driver);

MODULE_AUTHOR("Saurabh Sengar");
MODULE_DESCRIPTION("Xilinx DRM Display Driver for PL IPs");
MODULE_LICENSE("GPL v2");