1 // SPDX-License-Identifier: GPL-2.0
3 * Xilinx DRM CRTC DMA engine driver
5 * Copyright (C) 2017 - 2018 Xilinx, Inc.
7 * Author : Saurabh Sengar <saurabhs@xilinx.com>
8 * : Hyun Woo Kwon <hyun.kwon@xilinx.com>
12 #include <drm/drm_atomic.h>
13 #include <drm/drm_atomic_helper.h>
14 #include <drm/drm_crtc.h>
15 #include <drm/drm_crtc_helper.h>
16 #include <drm/drm_fb_cma_helper.h>
17 #include <drm/drm_fourcc.h>
18 #include <drm/drm_gem_cma_helper.h>
19 #include <linux/component.h>
20 #include <linux/device.h>
21 #include <linux/dmaengine.h>
22 #include <linux/dma/xilinx_frmbuf.h>
24 #include <linux/of_dma.h>
25 #include <linux/platform_device.h>
27 #include "xlnx_crtc.h"
33 * This driver intends to support the display pipeline with DMA engine
34 * driver by initializing DRM crtc and plane objects. The driver makes
35 * an assumption that it's single plane pipeline, as multi-plane pipeline
36 * would require programming beyond the DMA engine interface.
40 * struct xlnx_dma_chan - struct for DMA engine
41 * @dma_chan: DMA channel
42 * @xt: Interleaved desc config container
43 * @sgl: Data chunk for dma_interleaved_template
45 struct xlnx_dma_chan {
46 struct dma_chan *dma_chan;
47 struct dma_interleaved_template xt;
48 struct data_chunk sgl[1];
52 * struct xlnx_pl_disp - struct for display subsystem
53 * @dev: device structure
54 * @master: logical master device from xlnx drm
55 * @xlnx_crtc: Xilinx DRM driver crtc object
56 * @plane: base drm plane object
57 * @chan: struct for DMA engine
58 * @event: vblank pending event
59 * @callback: callback for registering DMA callback function
60 * @callback_param: parameter for passing to DMA callback function
61 * @drm: core drm object
62 * @fmt: drm color format
66 struct platform_device *master;
67 struct xlnx_crtc xlnx_crtc;
68 struct drm_plane plane;
69 struct xlnx_dma_chan *chan;
70 struct drm_pending_vblank_event *event;
71 dma_async_tx_callback callback;
73 struct drm_device *drm;
80 static inline struct xlnx_pl_disp *crtc_to_dma(struct xlnx_crtc *xlnx_crtc)
82 return container_of(xlnx_crtc, struct xlnx_pl_disp, xlnx_crtc);
86 * xlnx_pl_disp_complete - vblank handler
87 * @param: parameter to vblank handler
89 * This function handles the vblank interrupt, and sends an event to
92 static void xlnx_pl_disp_complete(void *param)
94 struct xlnx_pl_disp *xlnx_pl_disp = param;
95 struct drm_device *drm = xlnx_pl_disp->drm;
97 drm_handle_vblank(drm, 0);
101 * xlnx_pl_disp_get_format - Get the current display pipeline format
102 * @xlnx_crtc: xlnx crtc object
104 * Get the current format of pipeline
106 * Return: the corresponding DRM_FORMAT_XXX
108 static uint32_t xlnx_pl_disp_get_format(struct xlnx_crtc *xlnx_crtc)
110 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
112 return xlnx_pl_disp->fmt;
116 * xlnx_pl_disp_get_align - Get the alignment value for pitch
117 * @xlnx_crtc: xlnx crtc object
119 * Get the alignment value for pitch from the plane
121 * Return: The alignment value if successful, or the error code.
123 static unsigned int xlnx_pl_disp_get_align(struct xlnx_crtc *xlnx_crtc)
125 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
127 return 1 << xlnx_pl_disp->chan->dma_chan->device->copy_align;
131 * DRM plane functions
133 static inline struct xlnx_pl_disp *plane_to_dma(struct drm_plane *plane)
135 return container_of(plane, struct xlnx_pl_disp, plane);
139 * xlnx_pl_disp_plane_disable - Disables DRM plane
140 * @plane: DRM plane object
142 * Disable the DRM plane, by stopping the corrosponding DMA
144 static void xlnx_pl_disp_plane_disable(struct drm_plane *plane)
146 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
147 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
149 dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
153 * xlnx_pl_disp_plane_enable - Enables DRM plane
154 * @plane: DRM plane object
156 * Enable the DRM plane, by enabling the corresponding DMA
158 static void xlnx_pl_disp_plane_enable(struct drm_plane *plane)
160 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
161 struct dma_async_tx_descriptor *desc;
162 enum dma_ctrl_flags flags;
163 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
164 struct dma_chan *dma_chan = xlnx_dma_chan->dma_chan;
165 struct dma_interleaved_template *xt = &xlnx_dma_chan->xt;
167 flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
168 desc = dmaengine_prep_interleaved_dma(dma_chan, xt, flags);
170 dev_err(xlnx_pl_disp->dev,
171 "failed to prepare DMA descriptor\n");
174 desc->callback = xlnx_pl_disp->callback;
175 desc->callback_param = xlnx_pl_disp->callback_param;
177 dmaengine_submit(desc);
178 dma_async_issue_pending(xlnx_dma_chan->dma_chan);
/* drm_plane_helper_funcs.atomic_disable: stop scanout DMA */
static void xlnx_pl_disp_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_plane_state *old_state)
{
	xlnx_pl_disp_plane_disable(plane);
}
187 static int xlnx_pl_disp_plane_mode_set(struct drm_plane *plane,
188 struct drm_framebuffer *fb,
189 int crtc_x, int crtc_y,
190 unsigned int crtc_w, unsigned int crtc_h,
191 u32 src_x, uint32_t src_y,
192 u32 src_w, uint32_t src_h)
194 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
195 const struct drm_format_info *info = fb->format;
196 dma_addr_t luma_paddr, chroma_paddr;
198 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
200 if (info->num_planes > 2) {
201 dev_err(xlnx_pl_disp->dev, "Color format not supported\n");
204 luma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 0);
206 dev_err(xlnx_pl_disp->dev, "failed to get luma paddr\n");
210 dev_dbg(xlnx_pl_disp->dev, "num planes = %d\n", info->num_planes);
211 xlnx_dma_chan->xt.numf = src_h;
212 xlnx_dma_chan->sgl[0].size = drm_format_plane_width_bytes(info,
214 xlnx_dma_chan->sgl[0].icg = fb->pitches[0] - xlnx_dma_chan->sgl[0].size;
215 xlnx_dma_chan->xt.src_start = luma_paddr;
216 xlnx_dma_chan->xt.frame_size = info->num_planes;
217 xlnx_dma_chan->xt.dir = DMA_MEM_TO_DEV;
218 xlnx_dma_chan->xt.src_sgl = true;
219 xlnx_dma_chan->xt.dst_sgl = false;
221 /* Do we have a video format aware dma channel?
222 * so, modify descriptor accordingly. Hueristic test:
223 * we have a multi-plane format but only one dma channel
225 if (info->num_planes > 1) {
226 chroma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 1);
228 dev_err(xlnx_pl_disp->dev,
229 "failed to get chroma paddr\n");
232 stride = xlnx_dma_chan->sgl[0].size +
233 xlnx_dma_chan->sgl[0].icg;
234 xlnx_dma_chan->sgl[0].src_icg = chroma_paddr -
235 xlnx_dma_chan->xt.src_start -
236 (xlnx_dma_chan->xt.numf * stride);
242 static void xlnx_pl_disp_plane_atomic_update(struct drm_plane *plane,
243 struct drm_plane_state *old_state)
246 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
248 ret = xlnx_pl_disp_plane_mode_set(plane,
250 plane->state->crtc_x,
251 plane->state->crtc_y,
252 plane->state->crtc_w,
253 plane->state->crtc_h,
254 plane->state->src_x >> 16,
255 plane->state->src_y >> 16,
256 plane->state->src_w >> 16,
257 plane->state->src_h >> 16);
259 dev_err(xlnx_pl_disp->dev, "failed to mode set a plane\n");
262 /* in case frame buffer is used set the color format */
263 xilinx_xdma_drm_config(xlnx_pl_disp->chan->dma_chan,
264 xlnx_pl_disp->plane.state->fb->format->format);
265 /* apply the new fb addr and enable */
266 xlnx_pl_disp_plane_enable(plane);
269 static const struct drm_plane_helper_funcs xlnx_pl_disp_plane_helper_funcs = {
270 .atomic_update = xlnx_pl_disp_plane_atomic_update,
271 .atomic_disable = xlnx_pl_disp_plane_atomic_disable,
274 static struct drm_plane_funcs xlnx_pl_disp_plane_funcs = {
275 .update_plane = drm_atomic_helper_update_plane,
276 .disable_plane = drm_atomic_helper_disable_plane,
277 .destroy = drm_plane_cleanup,
278 .reset = drm_atomic_helper_plane_reset,
279 .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
280 .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
/* drm_crtc is embedded in xlnx_crtc, which is embedded in xlnx_pl_disp */
static inline struct xlnx_pl_disp *drm_crtc_to_dma(struct drm_crtc *crtc)
{
	return crtc_to_dma(to_xlnx_crtc(crtc));
}
290 static void xlnx_pl_disp_crtc_atomic_begin(struct drm_crtc *crtc,
291 struct drm_crtc_state *old_state)
293 spin_lock_irq(&crtc->dev->event_lock);
294 if (crtc->state->event) {
295 /* Consume the flip_done event from atomic helper */
296 crtc->state->event->pipe = drm_crtc_index(crtc);
297 WARN_ON(drm_crtc_vblank_get(crtc) != 0);
298 drm_crtc_arm_vblank_event(crtc, crtc->state->event);
299 crtc->state->event = NULL;
301 spin_unlock_irq(&crtc->dev->event_lock);
304 static void xlnx_pl_disp_clear_event(struct drm_crtc *crtc)
306 if (crtc->state->event) {
307 complete_all(crtc->state->event->base.completion);
308 crtc->state->event = NULL;
312 static void xlnx_pl_disp_crtc_atomic_enable(struct drm_crtc *crtc,
313 struct drm_crtc_state *old_state)
315 xlnx_pl_disp_plane_enable(crtc->primary);
318 static void xlnx_pl_disp_crtc_atomic_disable(struct drm_crtc *crtc,
319 struct drm_crtc_state *old_state)
321 xlnx_pl_disp_plane_disable(crtc->primary);
322 xlnx_pl_disp_clear_event(crtc);
325 static int xlnx_pl_disp_crtc_atomic_check(struct drm_crtc *crtc,
326 struct drm_crtc_state *state)
328 return drm_atomic_add_affected_planes(state->state, crtc);
331 static struct drm_crtc_helper_funcs xlnx_pl_disp_crtc_helper_funcs = {
332 .atomic_enable = xlnx_pl_disp_crtc_atomic_enable,
333 .atomic_disable = xlnx_pl_disp_crtc_atomic_disable,
334 .atomic_check = xlnx_pl_disp_crtc_atomic_check,
335 .atomic_begin = xlnx_pl_disp_crtc_atomic_begin,
338 static void xlnx_pl_disp_crtc_destroy(struct drm_crtc *crtc)
340 xlnx_pl_disp_plane_disable(crtc->primary);
341 drm_crtc_cleanup(crtc);
344 static int xlnx_pl_disp_crtc_enable_vblank(struct drm_crtc *crtc)
346 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
347 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
350 * Use the complete callback for vblank event assuming the dma engine
351 * starts on the next descriptor upon this event. This may not be safe
352 * assumption for some dma engines.
354 xlnx_pl_disp->callback = xlnx_pl_disp_complete;
355 xlnx_pl_disp->callback_param = xlnx_pl_disp;
360 static void xlnx_pl_disp_crtc_disable_vblank(struct drm_crtc *crtc)
362 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
363 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
365 xlnx_pl_disp->callback = NULL;
366 xlnx_pl_disp->callback_param = NULL;
369 static struct drm_crtc_funcs xlnx_pl_disp_crtc_funcs = {
370 .destroy = xlnx_pl_disp_crtc_destroy,
371 .set_config = drm_atomic_helper_set_config,
372 .page_flip = drm_atomic_helper_page_flip,
373 .reset = drm_atomic_helper_crtc_reset,
374 .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
375 .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
376 .enable_vblank = xlnx_pl_disp_crtc_enable_vblank,
377 .disable_vblank = xlnx_pl_disp_crtc_disable_vblank,
380 static int xlnx_pl_disp_bind(struct device *dev, struct device *master,
383 struct drm_device *drm = data;
384 struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
387 unsigned int num_fmts = 0;
389 /* in case of fb IP query the supported formats and there count */
390 xilinx_xdma_get_drm_vid_fmts(xlnx_pl_disp->chan->dma_chan,
392 ret = drm_universal_plane_init(drm, &xlnx_pl_disp->plane, 0,
393 &xlnx_pl_disp_plane_funcs,
394 fmts ? fmts : &xlnx_pl_disp->fmt,
395 num_fmts ? num_fmts : 1,
396 NULL, DRM_PLANE_TYPE_PRIMARY, NULL);
400 drm_plane_helper_add(&xlnx_pl_disp->plane,
401 &xlnx_pl_disp_plane_helper_funcs);
403 ret = drm_crtc_init_with_planes(drm, &xlnx_pl_disp->xlnx_crtc.crtc,
404 &xlnx_pl_disp->plane, NULL,
405 &xlnx_pl_disp_crtc_funcs, NULL);
407 drm_plane_cleanup(&xlnx_pl_disp->plane);
411 drm_crtc_helper_add(&xlnx_pl_disp->xlnx_crtc.crtc,
412 &xlnx_pl_disp_crtc_helper_funcs);
413 xlnx_pl_disp->xlnx_crtc.get_format = &xlnx_pl_disp_get_format;
414 xlnx_pl_disp->xlnx_crtc.get_align = &xlnx_pl_disp_get_align;
415 xlnx_pl_disp->drm = drm;
416 xlnx_crtc_register(xlnx_pl_disp->drm, &xlnx_pl_disp->xlnx_crtc);
421 static void xlnx_pl_disp_unbind(struct device *dev, struct device *master,
424 struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
426 drm_plane_cleanup(&xlnx_pl_disp->plane);
427 drm_crtc_cleanup(&xlnx_pl_disp->xlnx_crtc.crtc);
430 static const struct component_ops xlnx_pl_disp_component_ops = {
431 .bind = xlnx_pl_disp_bind,
432 .unbind = xlnx_pl_disp_unbind,
435 static int xlnx_pl_disp_probe(struct platform_device *pdev)
437 struct device *dev = &pdev->dev;
438 struct xlnx_pl_disp *xlnx_pl_disp;
441 struct dma_chan *dma_chan;
442 struct xlnx_dma_chan *xlnx_dma_chan;
444 xlnx_pl_disp = devm_kzalloc(dev, sizeof(*xlnx_pl_disp), GFP_KERNEL);
448 dma_chan = of_dma_request_slave_channel(dev->of_node, "dma0");
449 if (IS_ERR_OR_NULL(dma_chan)) {
450 dev_err(dev, "failed to request dma channel\n");
451 return PTR_ERR(dma_chan);
454 xlnx_dma_chan = devm_kzalloc(dev, sizeof(*xlnx_dma_chan), GFP_KERNEL);
458 xlnx_dma_chan->dma_chan = dma_chan;
459 xlnx_pl_disp->chan = xlnx_dma_chan;
460 ret = of_property_read_string(dev->of_node, "xlnx,vformat", &vformat);
462 dev_err(dev, "No xlnx,vformat value in dts\n");
466 strcpy((char *)&xlnx_pl_disp->fmt, vformat);
467 xlnx_pl_disp->dev = dev;
468 platform_set_drvdata(pdev, xlnx_pl_disp);
470 ret = component_add(dev, &xlnx_pl_disp_component_ops);
474 xlnx_pl_disp->master = xlnx_drm_pipeline_init(pdev);
475 if (IS_ERR(xlnx_pl_disp->master)) {
476 ret = PTR_ERR(xlnx_pl_disp->master);
477 dev_err(dev, "failed to initialize the drm pipeline\n");
481 dev_info(&pdev->dev, "Xlnx PL display driver probed\n");
486 component_del(dev, &xlnx_pl_disp_component_ops);
488 dma_release_channel(xlnx_pl_disp->chan->dma_chan);
493 static int xlnx_pl_disp_remove(struct platform_device *pdev)
495 struct xlnx_pl_disp *xlnx_pl_disp = platform_get_drvdata(pdev);
496 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
498 xlnx_drm_pipeline_exit(xlnx_pl_disp->master);
499 component_del(&pdev->dev, &xlnx_pl_disp_component_ops);
501 /* Make sure the channel is terminated before release */
502 dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
503 dma_release_channel(xlnx_dma_chan->dma_chan);
508 static const struct of_device_id xlnx_pl_disp_of_match[] = {
509 { .compatible = "xlnx,pl-disp"},
512 MODULE_DEVICE_TABLE(of, xlnx_pl_disp_of_match);
514 static struct platform_driver xlnx_pl_disp_driver = {
515 .probe = xlnx_pl_disp_probe,
516 .remove = xlnx_pl_disp_remove,
518 .name = "xlnx-pl-disp",
519 .of_match_table = xlnx_pl_disp_of_match,
523 module_platform_driver(xlnx_pl_disp_driver);
525 MODULE_AUTHOR("Saurabh Sengar");
526 MODULE_DESCRIPTION("Xilinx DRM Display Driver for PL IPs");
527 MODULE_LICENSE("GPL v2");