// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx DRM CRTC DMA engine driver
 *
 * Copyright (C) 2017 - 2018 Xilinx, Inc.
 *
 * Author : Saurabh Sengar <saurabhs@xilinx.com>
 *        : Hyun Woo Kwon <hyun.kwon@xilinx.com>
 */
12 #include <drm/drm_atomic.h>
13 #include <drm/drm_atomic_helper.h>
14 #include <drm/drm_crtc.h>
15 #include <drm/drm_crtc_helper.h>
16 #include <drm/drm_fb_cma_helper.h>
17 #include <drm/drm_fourcc.h>
18 #include <drm/drm_gem_cma_helper.h>
19 #include <linux/component.h>
20 #include <linux/device.h>
21 #include <linux/dmaengine.h>
22 #include <linux/dma/xilinx_frmbuf.h>
24 #include <linux/of_dma.h>
25 #include <linux/platform_device.h>
26 #include <video/videomode.h>
27 #include "xlnx_bridge.h"
28 #include "xlnx_crtc.h"
/**
 * DOC: overview
 *
 * This driver intends to support the display pipeline with DMA engine
 * driver by initializing DRM crtc and plane objects. The driver makes
 * an assumption that it's a single-plane pipeline, as a multi-plane
 * pipeline would require programming beyond the DMA engine interface.
 */
42 * struct xlnx_dma_chan - struct for DMA engine
43 * @dma_chan: DMA channel
44 * @xt: Interleaved desc config container
45 * @sgl: Data chunk for dma_interleaved_template
47 struct xlnx_dma_chan {
48 struct dma_chan *dma_chan;
49 struct dma_interleaved_template xt;
50 struct data_chunk sgl[1];
54 * struct xlnx_pl_disp - struct for display subsystem
55 * @dev: device structure
56 * @master: logical master device from xlnx drm
57 * @xlnx_crtc: Xilinx DRM driver crtc object
58 * @plane: base drm plane object
59 * @chan: struct for DMA engine
60 * @event: vblank pending event
61 * @callback: callback for registering DMA callback function
62 * @callback_param: parameter for passing to DMA callback function
63 * @drm: core drm object
64 * @fmt: drm color format
65 * @vtc_bridge: vtc_bridge structure
70 struct platform_device *master;
71 struct xlnx_crtc xlnx_crtc;
72 struct drm_plane plane;
73 struct xlnx_dma_chan *chan;
74 struct drm_pending_vblank_event *event;
75 dma_async_tx_callback callback;
77 struct drm_device *drm;
79 struct xlnx_bridge *vtc_bridge;
86 static inline struct xlnx_pl_disp *crtc_to_dma(struct xlnx_crtc *xlnx_crtc)
88 return container_of(xlnx_crtc, struct xlnx_pl_disp, xlnx_crtc);
92 * xlnx_pl_disp_complete - vblank handler
93 * @param: parameter to vblank handler
95 * This function handles the vblank interrupt, and sends an event to
98 static void xlnx_pl_disp_complete(void *param)
100 struct xlnx_pl_disp *xlnx_pl_disp = param;
101 struct drm_device *drm = xlnx_pl_disp->drm;
103 drm_handle_vblank(drm, 0);
107 * xlnx_pl_disp_get_format - Get the current display pipeline format
108 * @xlnx_crtc: xlnx crtc object
110 * Get the current format of pipeline
112 * Return: the corresponding DRM_FORMAT_XXX
114 static uint32_t xlnx_pl_disp_get_format(struct xlnx_crtc *xlnx_crtc)
116 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
118 return xlnx_pl_disp->fmt;
122 * xlnx_pl_disp_get_align - Get the alignment value for pitch
123 * @xlnx_crtc: xlnx crtc object
125 * Get the alignment value for pitch from the plane
127 * Return: The alignment value if successful, or the error code.
129 static unsigned int xlnx_pl_disp_get_align(struct xlnx_crtc *xlnx_crtc)
131 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
133 return 1 << xlnx_pl_disp->chan->dma_chan->device->copy_align;
137 * DRM plane functions
139 static inline struct xlnx_pl_disp *plane_to_dma(struct drm_plane *plane)
141 return container_of(plane, struct xlnx_pl_disp, plane);
145 * xlnx_pl_disp_plane_disable - Disables DRM plane
146 * @plane: DRM plane object
148 * Disable the DRM plane, by stopping the corrosponding DMA
150 static void xlnx_pl_disp_plane_disable(struct drm_plane *plane)
152 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
153 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
155 dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
159 * xlnx_pl_disp_plane_enable - Enables DRM plane
160 * @plane: DRM plane object
162 * Enable the DRM plane, by enabling the corresponding DMA
164 static void xlnx_pl_disp_plane_enable(struct drm_plane *plane)
166 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
167 struct dma_async_tx_descriptor *desc;
168 enum dma_ctrl_flags flags;
169 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
170 struct dma_chan *dma_chan = xlnx_dma_chan->dma_chan;
171 struct dma_interleaved_template *xt = &xlnx_dma_chan->xt;
173 flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
174 desc = dmaengine_prep_interleaved_dma(dma_chan, xt, flags);
176 dev_err(xlnx_pl_disp->dev,
177 "failed to prepare DMA descriptor\n");
180 desc->callback = xlnx_pl_disp->callback;
181 desc->callback_param = xlnx_pl_disp->callback_param;
182 xilinx_xdma_set_earlycb(xlnx_dma_chan->dma_chan, desc, true);
184 if (plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_TOP ||
185 plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_BOTTOM) {
186 if (plane->state->fb->flags == DRM_MODE_FB_ALTERNATE_TOP)
187 xlnx_pl_disp->fid = 1;
189 xlnx_pl_disp->fid = 0;
191 xilinx_xdma_set_fid(xlnx_dma_chan->dma_chan, desc,
195 dmaengine_submit(desc);
196 dma_async_issue_pending(xlnx_dma_chan->dma_chan);
199 static void xlnx_pl_disp_plane_atomic_disable(struct drm_plane *plane,
200 struct drm_plane_state *old_state)
202 xlnx_pl_disp_plane_disable(plane);
205 static int xlnx_pl_disp_plane_mode_set(struct drm_plane *plane,
206 struct drm_framebuffer *fb,
207 int crtc_x, int crtc_y,
208 unsigned int crtc_w, unsigned int crtc_h,
209 u32 src_x, uint32_t src_y,
210 u32 src_w, uint32_t src_h)
212 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
213 const struct drm_format_info *info = fb->format;
214 dma_addr_t luma_paddr, chroma_paddr;
216 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
218 if (info->num_planes > 2) {
219 dev_err(xlnx_pl_disp->dev, "Color format not supported\n");
222 luma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 0);
224 dev_err(xlnx_pl_disp->dev, "failed to get luma paddr\n");
228 dev_dbg(xlnx_pl_disp->dev, "num planes = %d\n", info->num_planes);
229 xlnx_dma_chan->xt.numf = src_h;
230 xlnx_dma_chan->sgl[0].size = drm_format_plane_width_bytes(info,
232 xlnx_dma_chan->sgl[0].icg = fb->pitches[0] - xlnx_dma_chan->sgl[0].size;
233 xlnx_dma_chan->xt.src_start = luma_paddr;
234 xlnx_dma_chan->xt.frame_size = info->num_planes;
235 xlnx_dma_chan->xt.dir = DMA_MEM_TO_DEV;
236 xlnx_dma_chan->xt.src_sgl = true;
237 xlnx_dma_chan->xt.dst_sgl = false;
239 /* Do we have a video format aware dma channel?
240 * so, modify descriptor accordingly. Hueristic test:
241 * we have a multi-plane format but only one dma channel
243 if (info->num_planes > 1) {
244 chroma_paddr = drm_fb_cma_get_gem_addr(fb, plane->state, 1);
246 dev_err(xlnx_pl_disp->dev,
247 "failed to get chroma paddr\n");
250 stride = xlnx_dma_chan->sgl[0].size +
251 xlnx_dma_chan->sgl[0].icg;
252 xlnx_dma_chan->sgl[0].src_icg = chroma_paddr -
253 xlnx_dma_chan->xt.src_start -
254 (xlnx_dma_chan->xt.numf * stride);
260 static void xlnx_pl_disp_plane_atomic_update(struct drm_plane *plane,
261 struct drm_plane_state *old_state)
264 struct xlnx_pl_disp *xlnx_pl_disp = plane_to_dma(plane);
266 ret = xlnx_pl_disp_plane_mode_set(plane,
268 plane->state->crtc_x,
269 plane->state->crtc_y,
270 plane->state->crtc_w,
271 plane->state->crtc_h,
272 plane->state->src_x >> 16,
273 plane->state->src_y >> 16,
274 plane->state->src_w >> 16,
275 plane->state->src_h >> 16);
277 dev_err(xlnx_pl_disp->dev, "failed to mode set a plane\n");
280 /* in case frame buffer is used set the color format */
281 xilinx_xdma_drm_config(xlnx_pl_disp->chan->dma_chan,
282 xlnx_pl_disp->plane.state->fb->format->format);
283 /* apply the new fb addr and enable */
284 xlnx_pl_disp_plane_enable(plane);
287 static const struct drm_plane_helper_funcs xlnx_pl_disp_plane_helper_funcs = {
288 .atomic_update = xlnx_pl_disp_plane_atomic_update,
289 .atomic_disable = xlnx_pl_disp_plane_atomic_disable,
292 static struct drm_plane_funcs xlnx_pl_disp_plane_funcs = {
293 .update_plane = drm_atomic_helper_update_plane,
294 .disable_plane = drm_atomic_helper_disable_plane,
295 .destroy = drm_plane_cleanup,
296 .reset = drm_atomic_helper_plane_reset,
297 .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
298 .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
301 static inline struct xlnx_pl_disp *drm_crtc_to_dma(struct drm_crtc *crtc)
303 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
305 return crtc_to_dma(xlnx_crtc);
308 static void xlnx_pl_disp_crtc_atomic_begin(struct drm_crtc *crtc,
309 struct drm_crtc_state *old_state)
311 spin_lock_irq(&crtc->dev->event_lock);
312 if (crtc->state->event) {
313 /* Consume the flip_done event from atomic helper */
314 crtc->state->event->pipe = drm_crtc_index(crtc);
315 WARN_ON(drm_crtc_vblank_get(crtc) != 0);
316 drm_crtc_arm_vblank_event(crtc, crtc->state->event);
317 crtc->state->event = NULL;
319 spin_unlock_irq(&crtc->dev->event_lock);
322 static void xlnx_pl_disp_clear_event(struct drm_crtc *crtc)
324 if (crtc->state->event) {
325 complete_all(crtc->state->event->base.completion);
326 crtc->state->event = NULL;
330 static void xlnx_pl_disp_crtc_atomic_enable(struct drm_crtc *crtc,
331 struct drm_crtc_state *old_state)
333 struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
335 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
336 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
339 if (xlnx_pl_disp->vtc_bridge) {
340 /* set video timing */
341 drm_display_mode_to_videomode(adjusted_mode, &vm);
342 xlnx_bridge_set_timing(xlnx_pl_disp->vtc_bridge, &vm);
343 xlnx_bridge_enable(xlnx_pl_disp->vtc_bridge);
346 xlnx_pl_disp_plane_enable(crtc->primary);
348 /* Delay of 1 vblank interval for timing gen to be stable */
349 vrefresh = (adjusted_mode->clock * 1000) /
350 (adjusted_mode->vtotal * adjusted_mode->htotal);
351 msleep(1 * 1000 / vrefresh);
354 static void xlnx_pl_disp_crtc_atomic_disable(struct drm_crtc *crtc,
355 struct drm_crtc_state *old_state)
357 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
358 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
360 xlnx_pl_disp_plane_disable(crtc->primary);
361 xlnx_pl_disp_clear_event(crtc);
362 xlnx_bridge_disable(xlnx_pl_disp->vtc_bridge);
365 static int xlnx_pl_disp_crtc_atomic_check(struct drm_crtc *crtc,
366 struct drm_crtc_state *state)
368 return drm_atomic_add_affected_planes(state->state, crtc);
371 static struct drm_crtc_helper_funcs xlnx_pl_disp_crtc_helper_funcs = {
372 .atomic_enable = xlnx_pl_disp_crtc_atomic_enable,
373 .atomic_disable = xlnx_pl_disp_crtc_atomic_disable,
374 .atomic_check = xlnx_pl_disp_crtc_atomic_check,
375 .atomic_begin = xlnx_pl_disp_crtc_atomic_begin,
378 static void xlnx_pl_disp_crtc_destroy(struct drm_crtc *crtc)
380 xlnx_pl_disp_plane_disable(crtc->primary);
381 drm_crtc_cleanup(crtc);
384 static int xlnx_pl_disp_crtc_enable_vblank(struct drm_crtc *crtc)
386 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
387 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
390 * Use the complete callback for vblank event assuming the dma engine
391 * starts on the next descriptor upon this event. This may not be safe
392 * assumption for some dma engines.
394 xlnx_pl_disp->callback = xlnx_pl_disp_complete;
395 xlnx_pl_disp->callback_param = xlnx_pl_disp;
400 static void xlnx_pl_disp_crtc_disable_vblank(struct drm_crtc *crtc)
402 struct xlnx_crtc *xlnx_crtc = to_xlnx_crtc(crtc);
403 struct xlnx_pl_disp *xlnx_pl_disp = crtc_to_dma(xlnx_crtc);
405 xlnx_pl_disp->callback = NULL;
406 xlnx_pl_disp->callback_param = NULL;
409 static struct drm_crtc_funcs xlnx_pl_disp_crtc_funcs = {
410 .destroy = xlnx_pl_disp_crtc_destroy,
411 .set_config = drm_atomic_helper_set_config,
412 .page_flip = drm_atomic_helper_page_flip,
413 .reset = drm_atomic_helper_crtc_reset,
414 .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
415 .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
416 .enable_vblank = xlnx_pl_disp_crtc_enable_vblank,
417 .disable_vblank = xlnx_pl_disp_crtc_disable_vblank,
420 static int xlnx_pl_disp_bind(struct device *dev, struct device *master,
423 struct drm_device *drm = data;
424 struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
427 unsigned int num_fmts = 0;
429 /* in case of fb IP query the supported formats and there count */
430 xilinx_xdma_get_drm_vid_fmts(xlnx_pl_disp->chan->dma_chan,
432 ret = drm_universal_plane_init(drm, &xlnx_pl_disp->plane, 0,
433 &xlnx_pl_disp_plane_funcs,
434 fmts ? fmts : &xlnx_pl_disp->fmt,
435 num_fmts ? num_fmts : 1,
436 NULL, DRM_PLANE_TYPE_PRIMARY, NULL);
440 drm_plane_helper_add(&xlnx_pl_disp->plane,
441 &xlnx_pl_disp_plane_helper_funcs);
443 ret = drm_crtc_init_with_planes(drm, &xlnx_pl_disp->xlnx_crtc.crtc,
444 &xlnx_pl_disp->plane, NULL,
445 &xlnx_pl_disp_crtc_funcs, NULL);
447 drm_plane_cleanup(&xlnx_pl_disp->plane);
451 drm_crtc_helper_add(&xlnx_pl_disp->xlnx_crtc.crtc,
452 &xlnx_pl_disp_crtc_helper_funcs);
453 xlnx_pl_disp->xlnx_crtc.get_format = &xlnx_pl_disp_get_format;
454 xlnx_pl_disp->xlnx_crtc.get_align = &xlnx_pl_disp_get_align;
455 xlnx_pl_disp->drm = drm;
456 xlnx_crtc_register(xlnx_pl_disp->drm, &xlnx_pl_disp->xlnx_crtc);
461 static void xlnx_pl_disp_unbind(struct device *dev, struct device *master,
464 struct xlnx_pl_disp *xlnx_pl_disp = dev_get_drvdata(dev);
466 drm_plane_cleanup(&xlnx_pl_disp->plane);
467 drm_crtc_cleanup(&xlnx_pl_disp->xlnx_crtc.crtc);
470 static const struct component_ops xlnx_pl_disp_component_ops = {
471 .bind = xlnx_pl_disp_bind,
472 .unbind = xlnx_pl_disp_unbind,
475 static int xlnx_pl_disp_probe(struct platform_device *pdev)
477 struct device *dev = &pdev->dev;
478 struct device_node *vtc_node;
479 struct xlnx_pl_disp *xlnx_pl_disp;
482 struct dma_chan *dma_chan;
483 struct xlnx_dma_chan *xlnx_dma_chan;
485 xlnx_pl_disp = devm_kzalloc(dev, sizeof(*xlnx_pl_disp), GFP_KERNEL);
489 dma_chan = of_dma_request_slave_channel(dev->of_node, "dma0");
490 if (IS_ERR_OR_NULL(dma_chan)) {
491 dev_err(dev, "failed to request dma channel\n");
492 return PTR_ERR(dma_chan);
495 xlnx_dma_chan = devm_kzalloc(dev, sizeof(*xlnx_dma_chan), GFP_KERNEL);
499 xlnx_dma_chan->dma_chan = dma_chan;
500 xlnx_pl_disp->chan = xlnx_dma_chan;
501 ret = of_property_read_string(dev->of_node, "xlnx,vformat", &vformat);
503 dev_err(dev, "No xlnx,vformat value in dts\n");
507 strcpy((char *)&xlnx_pl_disp->fmt, vformat);
509 /* VTC Bridge support */
510 vtc_node = of_parse_phandle(dev->of_node, "xlnx,bridge", 0);
512 xlnx_pl_disp->vtc_bridge = of_xlnx_bridge_get(vtc_node);
513 if (!xlnx_pl_disp->vtc_bridge) {
514 dev_info(dev, "Didn't get vtc bridge instance\n");
515 return -EPROBE_DEFER;
518 dev_info(dev, "vtc bridge property not present\n");
521 xlnx_pl_disp->dev = dev;
522 platform_set_drvdata(pdev, xlnx_pl_disp);
524 ret = component_add(dev, &xlnx_pl_disp_component_ops);
528 xlnx_pl_disp->master = xlnx_drm_pipeline_init(pdev);
529 if (IS_ERR(xlnx_pl_disp->master)) {
530 ret = PTR_ERR(xlnx_pl_disp->master);
531 dev_err(dev, "failed to initialize the drm pipeline\n");
535 dev_info(&pdev->dev, "Xlnx PL display driver probed\n");
540 component_del(dev, &xlnx_pl_disp_component_ops);
542 dma_release_channel(xlnx_pl_disp->chan->dma_chan);
547 static int xlnx_pl_disp_remove(struct platform_device *pdev)
549 struct xlnx_pl_disp *xlnx_pl_disp = platform_get_drvdata(pdev);
550 struct xlnx_dma_chan *xlnx_dma_chan = xlnx_pl_disp->chan;
552 of_xlnx_bridge_put(xlnx_pl_disp->vtc_bridge);
553 xlnx_drm_pipeline_exit(xlnx_pl_disp->master);
554 component_del(&pdev->dev, &xlnx_pl_disp_component_ops);
556 /* Make sure the channel is terminated before release */
557 dmaengine_terminate_sync(xlnx_dma_chan->dma_chan);
558 dma_release_channel(xlnx_dma_chan->dma_chan);
563 static const struct of_device_id xlnx_pl_disp_of_match[] = {
564 { .compatible = "xlnx,pl-disp"},
567 MODULE_DEVICE_TABLE(of, xlnx_pl_disp_of_match);
569 static struct platform_driver xlnx_pl_disp_driver = {
570 .probe = xlnx_pl_disp_probe,
571 .remove = xlnx_pl_disp_remove,
573 .name = "xlnx-pl-disp",
574 .of_match_table = xlnx_pl_disp_of_match,
578 module_platform_driver(xlnx_pl_disp_driver);
580 MODULE_AUTHOR("Saurabh Sengar");
581 MODULE_DESCRIPTION("Xilinx DRM Display Driver for PL IPs");
582 MODULE_LICENSE("GPL v2");