/*
 * DMAEngine driver for Xilinx Framebuffer IP
 *
 * Copyright (C) 2016,2017 Xilinx, Inc. All rights reserved.
 *
 * Authors: Radhey Shyam Pandey <radheys@xilinx.com>
 *          John Nichols <jnichol@xilinx.com>
 *          Jeffrey Mouroux <jmouroux@xilinx.com>
 *
 * Based on the Freescale DMA driver.
 *
 * The AXI Framebuffer core is a soft Xilinx IP core that
 * provides high-bandwidth direct memory access between memory
 * and AXI4-Stream video type target peripherals.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/dma/xilinx_frmbuf.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/gpio/consumer.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/slab.h>
#include <linux/videodev2.h>

#include <drm/drm_fourcc.h>

#include "../dmaengine.h"
/* Register/Descriptor Offsets */
#define XILINX_FRMBUF_CTRL_OFFSET		0x00
#define XILINX_FRMBUF_GIE_OFFSET		0x04
#define XILINX_FRMBUF_IE_OFFSET			0x08
#define XILINX_FRMBUF_ISR_OFFSET		0x0c
#define XILINX_FRMBUF_WIDTH_OFFSET		0x10
#define XILINX_FRMBUF_HEIGHT_OFFSET		0x18
#define XILINX_FRMBUF_STRIDE_OFFSET		0x20
#define XILINX_FRMBUF_FMT_OFFSET		0x28
#define XILINX_FRMBUF_ADDR_OFFSET		0x30
#define XILINX_FRMBUF_ADDR2_OFFSET		0x3c
#define XILINX_FRMBUF_FID_OFFSET		0x48

/* Control Registers */
#define XILINX_FRMBUF_CTRL_AP_START		BIT(0)
#define XILINX_FRMBUF_CTRL_AP_DONE		BIT(1)
#define XILINX_FRMBUF_CTRL_AP_IDLE		BIT(2)
#define XILINX_FRMBUF_CTRL_AP_READY		BIT(3)
#define XILINX_FRMBUF_CTRL_FLUSH		BIT(5)
#define XILINX_FRMBUF_CTRL_FLUSH_DONE		BIT(6)
#define XILINX_FRMBUF_CTRL_AUTO_RESTART		BIT(7)
#define XILINX_FRMBUF_GIE_EN			BIT(0)

/* Interrupt Status and Control */
#define XILINX_FRMBUF_IE_AP_DONE		BIT(0)
#define XILINX_FRMBUF_IE_AP_READY		BIT(1)

#define XILINX_FRMBUF_ISR_AP_DONE_IRQ		BIT(0)
#define XILINX_FRMBUF_ISR_AP_READY_IRQ		BIT(1)

#define XILINX_FRMBUF_ISR_ALL_IRQ_MASK	\
		(XILINX_FRMBUF_ISR_AP_DONE_IRQ | \
		XILINX_FRMBUF_ISR_AP_READY_IRQ)

/* Video Format Register Settings */
#define XILINX_FRMBUF_FMT_RGBX8			10
#define XILINX_FRMBUF_FMT_YUVX8			11
#define XILINX_FRMBUF_FMT_YUYV8			12
#define XILINX_FRMBUF_FMT_RGBA8			13
#define XILINX_FRMBUF_FMT_YUVA8			14
#define XILINX_FRMBUF_FMT_RGBX10		15
#define XILINX_FRMBUF_FMT_YUVX10		16
#define XILINX_FRMBUF_FMT_Y_UV8			18
#define XILINX_FRMBUF_FMT_Y_UV8_420		19
#define XILINX_FRMBUF_FMT_RGB8			20
#define XILINX_FRMBUF_FMT_YUV8			21
#define XILINX_FRMBUF_FMT_Y_UV10		22
#define XILINX_FRMBUF_FMT_Y_UV10_420		23
#define XILINX_FRMBUF_FMT_Y8			24
#define XILINX_FRMBUF_FMT_Y10			25
#define XILINX_FRMBUF_FMT_BGRA8			26
#define XILINX_FRMBUF_FMT_BGRX8			27
#define XILINX_FRMBUF_FMT_UYVY8			28
#define XILINX_FRMBUF_FMT_BGR8			29

/* FID Register */
#define XILINX_FRMBUF_FID_MASK			BIT(0)

#define XILINX_FRMBUF_ALIGN_MUL			8

/* Maximum 2ms-step iterations to wait for a FIFO flush to complete */
#define WAIT_FOR_FLUSH_DONE			25

/* Pixels per clock property flag */
#define XILINX_PPC_PROP				BIT(0)
#define XILINX_FLUSH_PROP			BIT(1)
#define XILINX_FID_PROP				BIT(2)
112 * struct xilinx_frmbuf_desc_hw - Hardware Descriptor
113 * @luma_plane_addr: Luma or packed plane buffer address
114 * @chroma_plane_addr: Chroma plane buffer address
115 * @vsize: Vertical Size
116 * @hsize: Horizontal Size
117 * @stride: Number of bytes between the first
118 * pixels of each horizontal line
120 struct xilinx_frmbuf_desc_hw {
121 dma_addr_t luma_plane_addr;
122 dma_addr_t chroma_plane_addr;
129 * struct xilinx_frmbuf_tx_descriptor - Per Transaction structure
130 * @async_tx: Async transaction descriptor
131 * @hw: Hardware descriptor
132 * @node: Node in the channel descriptors list
133 * @fid: Field ID of buffer
135 struct xilinx_frmbuf_tx_descriptor {
136 struct dma_async_tx_descriptor async_tx;
137 struct xilinx_frmbuf_desc_hw hw;
138 struct list_head node;
143 * struct xilinx_frmbuf_chan - Driver specific dma channel structure
144 * @xdev: Driver specific device structure
145 * @lock: Descriptor operation lock
146 * @chan_node: Member of a list of framebuffer channel instances
147 * @pending_list: Descriptors waiting
148 * @done_list: Complete descriptors
149 * @staged_desc: Next buffer to be programmed
150 * @active_desc: Currently active buffer being read/written to
151 * @common: DMA common channel
152 * @dev: The dma device
153 * @write_addr: callback that will write dma addresses to IP (32 or 64 bit)
155 * @direction: Transfer direction
156 * @idle: Channel idle state
157 * @tasklet: Cleanup work after irq
158 * @vid_fmt: Reference to currently assigned video format description
159 * @hw_fid: FID enabled in hardware flag
161 struct xilinx_frmbuf_chan {
162 struct xilinx_frmbuf_device *xdev;
163 /* Descriptor operation lock */
165 struct list_head chan_node;
166 struct list_head pending_list;
167 struct list_head done_list;
168 struct xilinx_frmbuf_tx_descriptor *staged_desc;
169 struct xilinx_frmbuf_tx_descriptor *active_desc;
170 struct dma_chan common;
172 void (*write_addr)(struct xilinx_frmbuf_chan *chan, u32 reg,
175 enum dma_transfer_direction direction;
177 struct tasklet_struct tasklet;
178 const struct xilinx_frmbuf_format_desc *vid_fmt;
183 * struct xilinx_frmbuf_format_desc - lookup table to match fourcc to format
184 * @dts_name: Device tree name for this entry.
186 * @bpw: Bits of pixel data + padding in a 32-bit word (luma plane for semi-pl)
187 * @ppw: Number of pixels represented in a 32-bit word (luma plane for semi-pl)
188 * @num_planes: Expected number of plane buffers in framebuffer for this format
189 * @drm_fmt: DRM video framework equivalent fourcc code
190 * @v4l2_fmt: Video 4 Linux framework equivalent fourcc code
191 * @fmt_bitmask: Flag identifying this format in device-specific "enabled"
194 struct xilinx_frmbuf_format_desc {
195 const char *dts_name;
/* Global registry of framebuffer channels, used to recognize our channels */
static LIST_HEAD(frmbuf_chan_list);
static DEFINE_MUTEX(frmbuf_chan_list_lock);
/*
 * NOTE(review): this extraction dropped lines from the original table --
 * the .bpw/.ppw/.num_planes initializers, some .dts_name entries, and the
 * per-entry braces are missing.  Restore them from the original driver
 * source; the per-format values cannot be safely inferred from what remains.
 */
208 static const struct xilinx_frmbuf_format_desc xilinx_frmbuf_formats[] = {
210 .dts_name = "xbgr8888",
211 .id = XILINX_FRMBUF_FMT_RGBX8,
215 .drm_fmt = DRM_FORMAT_XBGR8888,
216 .v4l2_fmt = V4L2_PIX_FMT_BGRX32,
217 .fmt_bitmask = BIT(0),
220 .dts_name = "xbgr2101010",
221 .id = XILINX_FRMBUF_FMT_RGBX10,
225 .drm_fmt = DRM_FORMAT_XBGR2101010,
226 .v4l2_fmt = V4L2_PIX_FMT_XBGR30,
227 .fmt_bitmask = BIT(1),
230 .dts_name = "xrgb8888",
231 .id = XILINX_FRMBUF_FMT_BGRX8,
235 .drm_fmt = DRM_FORMAT_XRGB8888,
236 .v4l2_fmt = V4L2_PIX_FMT_XBGR32,
237 .fmt_bitmask = BIT(2),
240 .dts_name = "xvuy8888",
241 .id = XILINX_FRMBUF_FMT_YUVX8,
245 .drm_fmt = DRM_FORMAT_XVUY8888,
246 .v4l2_fmt = V4L2_PIX_FMT_XVUY32,
247 .fmt_bitmask = BIT(5),
250 .dts_name = "vuy888",
251 .id = XILINX_FRMBUF_FMT_YUV8,
255 .drm_fmt = DRM_FORMAT_VUY888,
256 .v4l2_fmt = V4L2_PIX_FMT_VUY24,
257 .fmt_bitmask = BIT(6),
260 .dts_name = "yuvx2101010",
261 .id = XILINX_FRMBUF_FMT_YUVX10,
265 .drm_fmt = DRM_FORMAT_XVUY2101010,
266 .v4l2_fmt = V4L2_PIX_FMT_XVUY10,
267 .fmt_bitmask = BIT(7),
271 .id = XILINX_FRMBUF_FMT_YUYV8,
275 .drm_fmt = DRM_FORMAT_YUYV,
276 .v4l2_fmt = V4L2_PIX_FMT_YUYV,
277 .fmt_bitmask = BIT(8),
281 .id = XILINX_FRMBUF_FMT_UYVY8,
285 .drm_fmt = DRM_FORMAT_UYVY,
286 .v4l2_fmt = V4L2_PIX_FMT_UYVY,
287 .fmt_bitmask = BIT(9),
291 .id = XILINX_FRMBUF_FMT_Y_UV8,
295 .drm_fmt = DRM_FORMAT_NV16,
296 .v4l2_fmt = V4L2_PIX_FMT_NV16M,
297 .fmt_bitmask = BIT(11),
301 .id = XILINX_FRMBUF_FMT_Y_UV8,
306 .v4l2_fmt = V4L2_PIX_FMT_NV16,
307 .fmt_bitmask = BIT(11),
311 .id = XILINX_FRMBUF_FMT_Y_UV8_420,
315 .drm_fmt = DRM_FORMAT_NV12,
316 .v4l2_fmt = V4L2_PIX_FMT_NV12M,
317 .fmt_bitmask = BIT(12),
321 .id = XILINX_FRMBUF_FMT_Y_UV8_420,
326 .v4l2_fmt = V4L2_PIX_FMT_NV12,
327 .fmt_bitmask = BIT(12),
331 .id = XILINX_FRMBUF_FMT_Y_UV10_420,
335 .drm_fmt = DRM_FORMAT_XV15,
336 .v4l2_fmt = V4L2_PIX_FMT_XV15M,
337 .fmt_bitmask = BIT(13),
341 .id = XILINX_FRMBUF_FMT_Y_UV10_420,
346 .v4l2_fmt = V4L2_PIX_FMT_XV15,
347 .fmt_bitmask = BIT(13),
351 .id = XILINX_FRMBUF_FMT_Y_UV10,
355 .drm_fmt = DRM_FORMAT_XV20,
356 .v4l2_fmt = V4L2_PIX_FMT_XV20M,
357 .fmt_bitmask = BIT(14),
361 .id = XILINX_FRMBUF_FMT_Y_UV10,
366 .v4l2_fmt = V4L2_PIX_FMT_XV20,
367 .fmt_bitmask = BIT(14),
370 .dts_name = "bgr888",
371 .id = XILINX_FRMBUF_FMT_RGB8,
375 .drm_fmt = DRM_FORMAT_BGR888,
376 .v4l2_fmt = V4L2_PIX_FMT_RGB24,
377 .fmt_bitmask = BIT(15),
381 .id = XILINX_FRMBUF_FMT_Y8,
385 .drm_fmt = DRM_FORMAT_Y8,
386 .v4l2_fmt = V4L2_PIX_FMT_GREY,
387 .fmt_bitmask = BIT(16),
391 .id = XILINX_FRMBUF_FMT_Y10,
395 .drm_fmt = DRM_FORMAT_Y10,
396 .v4l2_fmt = V4L2_PIX_FMT_Y10,
397 .fmt_bitmask = BIT(17),
400 .dts_name = "rgb888",
401 .id = XILINX_FRMBUF_FMT_BGR8,
405 .drm_fmt = DRM_FORMAT_RGB888,
406 .v4l2_fmt = V4L2_PIX_FMT_BGR24,
407 .fmt_bitmask = BIT(18),
410 .dts_name = "abgr8888",
411 .id = XILINX_FRMBUF_FMT_RGBA8,
415 .drm_fmt = DRM_FORMAT_ABGR8888,
417 .fmt_bitmask = BIT(19),
420 .dts_name = "argb8888",
421 .id = XILINX_FRMBUF_FMT_BGRA8,
425 .drm_fmt = DRM_FORMAT_ARGB8888,
427 .fmt_bitmask = BIT(20),
430 .dts_name = "avuy8888",
431 .id = XILINX_FRMBUF_FMT_YUVA8,
435 .drm_fmt = DRM_FORMAT_AVUY,
437 .fmt_bitmask = BIT(21),
442 * struct xilinx_frmbuf_feature - dt or IP property structure
443 * @direction: dma transfer mode and direction
444 * @flags: Bitmask of properties enabled in IP or dt
446 struct xilinx_frmbuf_feature {
447 enum dma_transfer_direction direction;
452 * struct xilinx_frmbuf_device - dma device structure
453 * @regs: I/O mapped base address
454 * @dev: Device Structure
455 * @common: DMA device structure
456 * @chan: Driver specific dma channel
457 * @rst_gpio: GPIO reset
458 * @enabled_vid_fmts: Bitmask of video formats enabled in hardware
459 * @drm_memory_fmts: Array of supported DRM fourcc codes
460 * @drm_fmt_cnt: Count of supported DRM fourcc codes
461 * @v4l2_memory_fmts: Array of supported V4L2 fourcc codes
462 * @v4l2_fmt_cnt: Count of supported V4L2 fourcc codes
463 * @cfg: Pointer to Framebuffer Feature config struct
465 struct xilinx_frmbuf_device {
468 struct dma_device common;
469 struct xilinx_frmbuf_chan chan;
470 struct gpio_desc *rst_gpio;
471 u32 enabled_vid_fmts;
472 u32 drm_memory_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
474 u32 v4l2_memory_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
476 const struct xilinx_frmbuf_feature *cfg;
479 static const struct xilinx_frmbuf_feature xlnx_fbwr_cfg_v20 = {
480 .direction = DMA_DEV_TO_MEM,
483 static const struct xilinx_frmbuf_feature xlnx_fbwr_cfg_v21 = {
484 .direction = DMA_DEV_TO_MEM,
485 .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP | XILINX_FID_PROP,
488 static const struct xilinx_frmbuf_feature xlnx_fbrd_cfg_v20 = {
489 .direction = DMA_MEM_TO_DEV,
492 static const struct xilinx_frmbuf_feature xlnx_fbrd_cfg_v21 = {
493 .direction = DMA_MEM_TO_DEV,
494 .flags = XILINX_PPC_PROP | XILINX_FLUSH_PROP | XILINX_FID_PROP,
497 static const struct of_device_id xilinx_frmbuf_of_ids[] = {
498 { .compatible = "xlnx,axi-frmbuf-wr-v2",
499 .data = (void *)&xlnx_fbwr_cfg_v20},
500 { .compatible = "xlnx,axi-frmbuf-wr-v2.1",
501 .data = (void *)&xlnx_fbwr_cfg_v21},
502 { .compatible = "xlnx,axi-frmbuf-rd-v2",
503 .data = (void *)&xlnx_fbrd_cfg_v20},
504 { .compatible = "xlnx,axi-frmbuf-rd-v2.1",
505 .data = (void *)&xlnx_fbrd_cfg_v21},
509 /******************************PROTOTYPES*************************************/
/* Convert generic dmaengine handles to the driver-private containers */
#define to_xilinx_chan(chan) \
	container_of(chan, struct xilinx_frmbuf_chan, common)
#define to_dma_tx_descriptor(tx) \
	container_of(tx, struct xilinx_frmbuf_tx_descriptor, async_tx)
515 static inline u32 frmbuf_read(struct xilinx_frmbuf_chan *chan, u32 reg)
517 return ioread32(chan->xdev->regs + reg);
520 static inline void frmbuf_write(struct xilinx_frmbuf_chan *chan, u32 reg,
523 iowrite32(value, chan->xdev->regs + reg);
526 static inline void frmbuf_writeq(struct xilinx_frmbuf_chan *chan, u32 reg,
529 iowrite32(lower_32_bits(value), chan->xdev->regs + reg);
530 iowrite32(upper_32_bits(value), chan->xdev->regs + reg + 4);
533 static void writeq_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
536 frmbuf_writeq(chan, reg, (u64)addr);
539 static void write_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
542 frmbuf_write(chan, reg, addr);
545 static inline void frmbuf_clr(struct xilinx_frmbuf_chan *chan, u32 reg,
548 frmbuf_write(chan, reg, frmbuf_read(chan, reg) & ~clr);
551 static inline void frmbuf_set(struct xilinx_frmbuf_chan *chan, u32 reg,
554 frmbuf_write(chan, reg, frmbuf_read(chan, reg) | set);
557 static void frmbuf_init_format_array(struct xilinx_frmbuf_device *xdev)
561 for (i = 0; i < ARRAY_SIZE(xilinx_frmbuf_formats); i++) {
562 if (!(xdev->enabled_vid_fmts &
563 xilinx_frmbuf_formats[i].fmt_bitmask))
566 if (xilinx_frmbuf_formats[i].drm_fmt) {
567 cnt = xdev->drm_fmt_cnt++;
568 xdev->drm_memory_fmts[cnt] =
569 xilinx_frmbuf_formats[i].drm_fmt;
572 if (xilinx_frmbuf_formats[i].v4l2_fmt) {
573 cnt = xdev->v4l2_fmt_cnt++;
574 xdev->v4l2_memory_fmts[cnt] =
575 xilinx_frmbuf_formats[i].v4l2_fmt;
580 static struct xilinx_frmbuf_device *frmbuf_find_dev(struct dma_chan *chan)
582 struct xilinx_frmbuf_chan *xchan, *temp;
583 struct xilinx_frmbuf_device *xdev;
584 bool is_frmbuf_chan = false;
586 list_for_each_entry_safe(xchan, temp, &frmbuf_chan_list, chan_node) {
587 if (chan == &xchan->common)
588 is_frmbuf_chan = true;
592 return ERR_PTR(-ENODEV);
594 xchan = to_xilinx_chan(chan);
595 xdev = container_of(xchan, struct xilinx_frmbuf_device, chan);
600 static int frmbuf_verify_format(struct dma_chan *chan, u32 fourcc, u32 type)
602 struct xilinx_frmbuf_chan *xil_chan = to_xilinx_chan(chan);
603 u32 i, sz = ARRAY_SIZE(xilinx_frmbuf_formats);
605 for (i = 0; i < sz; i++) {
606 if ((type == XDMA_DRM &&
607 fourcc != xilinx_frmbuf_formats[i].drm_fmt) ||
608 (type == XDMA_V4L2 &&
609 fourcc != xilinx_frmbuf_formats[i].v4l2_fmt))
612 if (!(xilinx_frmbuf_formats[i].fmt_bitmask &
613 xil_chan->xdev->enabled_vid_fmts))
617 * The Alpha color formats are supported in Framebuffer Read
618 * IP only as corresponding DRM formats.
620 if (type == XDMA_DRM &&
621 (xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_ABGR8888 ||
622 xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_ARGB8888 ||
623 xilinx_frmbuf_formats[i].drm_fmt == DRM_FORMAT_AVUY) &&
624 xil_chan->direction != DMA_MEM_TO_DEV)
627 xil_chan->vid_fmt = &xilinx_frmbuf_formats[i];
633 static void xilinx_xdma_set_config(struct dma_chan *chan, u32 fourcc, u32 type)
635 struct xilinx_frmbuf_chan *xil_chan;
636 bool found_xchan = false;
639 mutex_lock(&frmbuf_chan_list_lock);
640 list_for_each_entry(xil_chan, &frmbuf_chan_list, chan_node) {
641 if (chan == &xil_chan->common) {
646 mutex_unlock(&frmbuf_chan_list_lock);
649 dev_dbg(chan->device->dev,
650 "dma chan not a Video Framebuffer channel instance\n");
654 ret = frmbuf_verify_format(chan, fourcc, type);
655 if (ret == -EINVAL) {
656 dev_err(chan->device->dev,
657 "Framebuffer not configured for fourcc 0x%x\n",
663 void xilinx_xdma_drm_config(struct dma_chan *chan, u32 drm_fourcc)
665 xilinx_xdma_set_config(chan, drm_fourcc, XDMA_DRM);
667 } EXPORT_SYMBOL_GPL(xilinx_xdma_drm_config);
669 void xilinx_xdma_v4l2_config(struct dma_chan *chan, u32 v4l2_fourcc)
671 xilinx_xdma_set_config(chan, v4l2_fourcc, XDMA_V4L2);
673 } EXPORT_SYMBOL_GPL(xilinx_xdma_v4l2_config);
675 int xilinx_xdma_get_drm_vid_fmts(struct dma_chan *chan, u32 *fmt_cnt,
678 struct xilinx_frmbuf_device *xdev;
680 xdev = frmbuf_find_dev(chan);
683 return PTR_ERR(xdev);
685 *fmt_cnt = xdev->drm_fmt_cnt;
686 *fmts = xdev->drm_memory_fmts;
690 EXPORT_SYMBOL(xilinx_xdma_get_drm_vid_fmts);
692 int xilinx_xdma_get_v4l2_vid_fmts(struct dma_chan *chan, u32 *fmt_cnt,
695 struct xilinx_frmbuf_device *xdev;
697 xdev = frmbuf_find_dev(chan);
700 return PTR_ERR(xdev);
702 *fmt_cnt = xdev->v4l2_fmt_cnt;
703 *fmts = xdev->v4l2_memory_fmts;
707 EXPORT_SYMBOL(xilinx_xdma_get_v4l2_vid_fmts);
709 int xilinx_xdma_get_fid(struct dma_chan *chan,
710 struct dma_async_tx_descriptor *async_tx, u32 *fid)
712 struct xilinx_frmbuf_device *xdev;
713 struct xilinx_frmbuf_tx_descriptor *desc;
715 xdev = frmbuf_find_dev(chan);
717 return PTR_ERR(xdev);
719 if (!async_tx || !fid)
722 if (xdev->chan.direction != DMA_DEV_TO_MEM)
725 desc = to_dma_tx_descriptor(async_tx);
732 EXPORT_SYMBOL(xilinx_xdma_get_fid);
734 int xilinx_xdma_set_fid(struct dma_chan *chan,
735 struct dma_async_tx_descriptor *async_tx, u32 fid)
737 struct xilinx_frmbuf_device *xdev;
738 struct xilinx_frmbuf_tx_descriptor *desc;
740 if (fid > 1 || !async_tx)
743 xdev = frmbuf_find_dev(chan);
745 return PTR_ERR(xdev);
747 if (xdev->chan.direction != DMA_MEM_TO_DEV)
750 desc = to_dma_tx_descriptor(async_tx);
757 EXPORT_SYMBOL(xilinx_xdma_set_fid);
760 * of_dma_xilinx_xlate - Translation function
761 * @dma_spec: Pointer to DMA specifier as found in the device tree
762 * @ofdma: Pointer to DMA controller data
764 * Return: DMA channel pointer on success or error code on error
766 static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
767 struct of_dma *ofdma)
769 struct xilinx_frmbuf_device *xdev = ofdma->of_dma_data;
771 return dma_get_slave_channel(&xdev->chan.common);
774 /* -----------------------------------------------------------------------------
775 * Descriptors alloc and free
779 * xilinx_frmbuf_tx_descriptor - Allocate transaction descriptor
780 * @chan: Driver specific dma channel
782 * Return: The allocated descriptor on success and NULL on failure.
784 static struct xilinx_frmbuf_tx_descriptor *
785 xilinx_frmbuf_alloc_tx_descriptor(struct xilinx_frmbuf_chan *chan)
787 struct xilinx_frmbuf_tx_descriptor *desc;
789 desc = kzalloc(sizeof(*desc), GFP_KERNEL);
797 * xilinx_frmbuf_free_desc_list - Free descriptors list
798 * @chan: Driver specific dma channel
799 * @list: List to parse and delete the descriptor
801 static void xilinx_frmbuf_free_desc_list(struct xilinx_frmbuf_chan *chan,
802 struct list_head *list)
804 struct xilinx_frmbuf_tx_descriptor *desc, *next;
806 list_for_each_entry_safe(desc, next, list, node) {
807 list_del(&desc->node);
813 * xilinx_frmbuf_free_descriptors - Free channel descriptors
814 * @chan: Driver specific dma channel
816 static void xilinx_frmbuf_free_descriptors(struct xilinx_frmbuf_chan *chan)
820 spin_lock_irqsave(&chan->lock, flags);
822 xilinx_frmbuf_free_desc_list(chan, &chan->pending_list);
823 xilinx_frmbuf_free_desc_list(chan, &chan->done_list);
824 kfree(chan->active_desc);
825 kfree(chan->staged_desc);
827 chan->staged_desc = NULL;
828 chan->active_desc = NULL;
829 INIT_LIST_HEAD(&chan->pending_list);
830 INIT_LIST_HEAD(&chan->done_list);
832 spin_unlock_irqrestore(&chan->lock, flags);
/**
 * xilinx_frmbuf_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xilinx_frmbuf_free_chan_resources(struct dma_chan *dchan)
{
	struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);

	xilinx_frmbuf_free_descriptors(chan);
}
847 * xilinx_frmbuf_chan_desc_cleanup - Clean channel descriptors
848 * @chan: Driver specific dma channel
850 static void xilinx_frmbuf_chan_desc_cleanup(struct xilinx_frmbuf_chan *chan)
852 struct xilinx_frmbuf_tx_descriptor *desc, *next;
855 spin_lock_irqsave(&chan->lock, flags);
857 list_for_each_entry_safe(desc, next, &chan->done_list, node) {
858 dma_async_tx_callback callback;
859 void *callback_param;
861 list_del(&desc->node);
863 /* Run the link descriptor callback function */
864 callback = desc->async_tx.callback;
865 callback_param = desc->async_tx.callback_param;
867 spin_unlock_irqrestore(&chan->lock, flags);
868 callback(callback_param);
869 spin_lock_irqsave(&chan->lock, flags);
872 /* Run any dependencies, then free the descriptor */
873 dma_run_dependencies(&desc->async_tx);
877 spin_unlock_irqrestore(&chan->lock, flags);
/**
 * xilinx_frmbuf_do_tasklet - Schedule completion tasklet
 * @data: Pointer to the Xilinx frmbuf channel structure
 */
static void xilinx_frmbuf_do_tasklet(unsigned long data)
{
	struct xilinx_frmbuf_chan *chan = (struct xilinx_frmbuf_chan *)data;

	xilinx_frmbuf_chan_desc_cleanup(chan);
}
/**
 * xilinx_frmbuf_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_frmbuf_alloc_chan_resources(struct dma_chan *dchan)
{
	dma_cookie_init(dchan);

	return 0;
}
905 * xilinx_frmbuf_tx_status - Get frmbuf transaction status
906 * @dchan: DMA channel
907 * @cookie: Transaction identifier
908 * @txstate: Transaction state
910 * Return: fmrbuf transaction status
912 static enum dma_status xilinx_frmbuf_tx_status(struct dma_chan *dchan,
914 struct dma_tx_state *txstate)
916 return dma_cookie_status(dchan, cookie, txstate);
920 * xilinx_frmbuf_halt - Halt frmbuf channel
921 * @chan: Driver specific dma channel
923 static void xilinx_frmbuf_halt(struct xilinx_frmbuf_chan *chan)
925 frmbuf_clr(chan, XILINX_FRMBUF_CTRL_OFFSET,
926 XILINX_FRMBUF_CTRL_AP_START |
927 XILINX_FRMBUF_CTRL_AUTO_RESTART);
932 * xilinx_frmbuf_start - Start dma channel
933 * @chan: Driver specific dma channel
935 static void xilinx_frmbuf_start(struct xilinx_frmbuf_chan *chan)
937 frmbuf_set(chan, XILINX_FRMBUF_CTRL_OFFSET,
938 XILINX_FRMBUF_CTRL_AP_START |
939 XILINX_FRMBUF_CTRL_AUTO_RESTART);
944 * xilinx_frmbuf_complete_descriptor - Mark the active descriptor as complete
945 * This function is invoked with spinlock held
946 * @chan : xilinx frmbuf channel
950 static void xilinx_frmbuf_complete_descriptor(struct xilinx_frmbuf_chan *chan)
952 struct xilinx_frmbuf_tx_descriptor *desc = chan->active_desc;
955 * In case of frame buffer write, read the fid register
956 * and associate it with descriptor
958 if (chan->direction == DMA_DEV_TO_MEM && chan->hw_fid)
959 desc->fid = frmbuf_read(chan, XILINX_FRMBUF_FID_OFFSET) &
960 XILINX_FRMBUF_FID_MASK;
962 dma_cookie_complete(&desc->async_tx);
963 list_add_tail(&desc->node, &chan->done_list);
967 * xilinx_frmbuf_start_transfer - Starts frmbuf transfer
968 * @chan: Driver specific channel struct pointer
970 static void xilinx_frmbuf_start_transfer(struct xilinx_frmbuf_chan *chan)
972 struct xilinx_frmbuf_tx_descriptor *desc;
977 if (chan->active_desc) {
978 xilinx_frmbuf_complete_descriptor(chan);
979 chan->active_desc = NULL;
982 if (chan->staged_desc) {
983 chan->active_desc = chan->staged_desc;
984 chan->staged_desc = NULL;
987 if (list_empty(&chan->pending_list))
990 desc = list_first_entry(&chan->pending_list,
991 struct xilinx_frmbuf_tx_descriptor,
994 /* Start the transfer */
995 chan->write_addr(chan, XILINX_FRMBUF_ADDR_OFFSET,
996 desc->hw.luma_plane_addr);
997 chan->write_addr(chan, XILINX_FRMBUF_ADDR2_OFFSET,
998 desc->hw.chroma_plane_addr);
1000 /* HW expects these parameters to be same for one transaction */
1001 frmbuf_write(chan, XILINX_FRMBUF_WIDTH_OFFSET, desc->hw.hsize);
1002 frmbuf_write(chan, XILINX_FRMBUF_STRIDE_OFFSET, desc->hw.stride);
1003 frmbuf_write(chan, XILINX_FRMBUF_HEIGHT_OFFSET, desc->hw.vsize);
1004 frmbuf_write(chan, XILINX_FRMBUF_FMT_OFFSET, chan->vid_fmt->id);
1006 /* If it is framebuffer read IP set the FID */
1007 if (chan->direction == DMA_MEM_TO_DEV && chan->hw_fid)
1008 frmbuf_write(chan, XILINX_FRMBUF_FID_OFFSET, desc->fid);
1010 /* Start the hardware */
1011 xilinx_frmbuf_start(chan);
1012 list_del(&desc->node);
1013 chan->staged_desc = desc;
1017 * xilinx_frmbuf_issue_pending - Issue pending transactions
1018 * @dchan: DMA channel
1020 static void xilinx_frmbuf_issue_pending(struct dma_chan *dchan)
1022 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1023 unsigned long flags;
1025 spin_lock_irqsave(&chan->lock, flags);
1026 xilinx_frmbuf_start_transfer(chan);
1027 spin_unlock_irqrestore(&chan->lock, flags);
1031 * xilinx_frmbuf_reset - Reset frmbuf channel
1032 * @chan: Driver specific dma channel
1034 static void xilinx_frmbuf_reset(struct xilinx_frmbuf_chan *chan)
1037 gpiod_set_value(chan->xdev->rst_gpio, 1);
1039 gpiod_set_value(chan->xdev->rst_gpio, 0);
1043 * xilinx_frmbuf_chan_reset - Reset frmbuf channel and enable interrupts
1044 * @chan: Driver specific frmbuf channel
1046 static void xilinx_frmbuf_chan_reset(struct xilinx_frmbuf_chan *chan)
1048 xilinx_frmbuf_reset(chan);
1049 frmbuf_write(chan, XILINX_FRMBUF_IE_OFFSET, XILINX_FRMBUF_IE_AP_READY);
1050 frmbuf_write(chan, XILINX_FRMBUF_GIE_OFFSET, XILINX_FRMBUF_GIE_EN);
1054 * xilinx_frmbuf_irq_handler - frmbuf Interrupt handler
1056 * @data: Pointer to the Xilinx frmbuf channel structure
1058 * Return: IRQ_HANDLED/IRQ_NONE
1060 static irqreturn_t xilinx_frmbuf_irq_handler(int irq, void *data)
1062 struct xilinx_frmbuf_chan *chan = data;
1065 status = frmbuf_read(chan, XILINX_FRMBUF_ISR_OFFSET);
1066 if (!(status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK))
1069 frmbuf_write(chan, XILINX_FRMBUF_ISR_OFFSET,
1070 status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
1072 if (status & XILINX_FRMBUF_ISR_AP_READY_IRQ) {
1073 spin_lock(&chan->lock);
1075 xilinx_frmbuf_start_transfer(chan);
1076 spin_unlock(&chan->lock);
1079 tasklet_schedule(&chan->tasklet);
1084 * xilinx_frmbuf_tx_submit - Submit DMA transaction
1085 * @tx: Async transaction descriptor
1087 * Return: cookie value on success and failure value on error
1089 static dma_cookie_t xilinx_frmbuf_tx_submit(struct dma_async_tx_descriptor *tx)
1091 struct xilinx_frmbuf_tx_descriptor *desc = to_dma_tx_descriptor(tx);
1092 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(tx->chan);
1093 dma_cookie_t cookie;
1094 unsigned long flags;
1096 spin_lock_irqsave(&chan->lock, flags);
1097 cookie = dma_cookie_assign(tx);
1098 list_add_tail(&desc->node, &chan->pending_list);
1099 spin_unlock_irqrestore(&chan->lock, flags);
1105 * xilinx_frmbuf_dma_prep_interleaved - prepare a descriptor for a
1106 * DMA_SLAVE transaction
1107 * @dchan: DMA channel
1108 * @xt: Interleaved template pointer
1109 * @flags: transfer ack flags
1111 * Return: Async transaction descriptor on success and NULL on failure
1113 static struct dma_async_tx_descriptor *
1114 xilinx_frmbuf_dma_prep_interleaved(struct dma_chan *dchan,
1115 struct dma_interleaved_template *xt,
1116 unsigned long flags)
1118 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1119 struct xilinx_frmbuf_tx_descriptor *desc;
1120 struct xilinx_frmbuf_desc_hw *hw;
1122 if (chan->direction != xt->dir || !chan->vid_fmt)
1125 if (!xt->numf || !xt->sgl[0].size)
1128 if (xt->frame_size != chan->vid_fmt->num_planes)
1131 desc = xilinx_frmbuf_alloc_tx_descriptor(chan);
1135 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
1136 desc->async_tx.tx_submit = xilinx_frmbuf_tx_submit;
1137 async_tx_ack(&desc->async_tx);
1140 hw->vsize = xt->numf;
1141 hw->stride = xt->sgl[0].icg + xt->sgl[0].size;
1142 hw->hsize = (xt->sgl[0].size * chan->vid_fmt->ppw * 8) /
1145 /* hsize calc should not have resulted in an odd number */
1149 if (chan->direction == DMA_MEM_TO_DEV) {
1150 hw->luma_plane_addr = xt->src_start;
1151 if (xt->frame_size == 2)
1152 hw->chroma_plane_addr =
1154 xt->numf * hw->stride +
1157 hw->luma_plane_addr = xt->dst_start;
1158 if (xt->frame_size == 2)
1159 hw->chroma_plane_addr =
1161 xt->numf * hw->stride +
1165 return &desc->async_tx;
1168 dev_err(chan->xdev->dev,
1169 "Invalid dma template or missing dma video fmt config\n");
1174 * xilinx_frmbuf_terminate_all - Halt the channel and free descriptors
1175 * @dchan: Driver specific dma channel pointer
1179 static int xilinx_frmbuf_terminate_all(struct dma_chan *dchan)
1181 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1183 xilinx_frmbuf_halt(chan);
1184 xilinx_frmbuf_free_descriptors(chan);
1185 /* worst case frame-to-frame boundary; ensure frame output complete */
1188 if (chan->xdev->cfg->flags & XILINX_FLUSH_PROP) {
1192 * Flush the framebuffer FIFO and
1193 * wait for max 50ms for flush done
1195 frmbuf_set(chan, XILINX_FRMBUF_CTRL_OFFSET,
1196 XILINX_FRMBUF_CTRL_FLUSH);
1197 for (count = WAIT_FOR_FLUSH_DONE; count > 0; count--) {
1198 if (frmbuf_read(chan, XILINX_FRMBUF_CTRL_OFFSET) &
1199 XILINX_FRMBUF_CTRL_FLUSH_DONE)
1201 usleep_range(2000, 2100);
1205 dev_err(chan->xdev->dev, "Framebuffer Flush not done!\n");
1208 xilinx_frmbuf_chan_reset(chan);
1214 * xilinx_frmbuf_synchronize - kill tasklet to stop further descr processing
1215 * @dchan: Driver specific dma channel pointer
1217 static void xilinx_frmbuf_synchronize(struct dma_chan *dchan)
1219 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
1221 tasklet_kill(&chan->tasklet);
1224 /* -----------------------------------------------------------------------------
1229 * xilinx_frmbuf_chan_remove - Per Channel remove function
1230 * @chan: Driver specific dma channel
1232 static void xilinx_frmbuf_chan_remove(struct xilinx_frmbuf_chan *chan)
1234 /* Disable all interrupts */
1235 frmbuf_clr(chan, XILINX_FRMBUF_IE_OFFSET,
1236 XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
1238 tasklet_kill(&chan->tasklet);
1239 list_del(&chan->common.device_node);
1241 mutex_lock(&frmbuf_chan_list_lock);
1242 list_del(&chan->chan_node);
1243 mutex_unlock(&frmbuf_chan_list_lock);
1247 * xilinx_frmbuf_chan_probe - Per Channel Probing
1248 * It get channel features from the device tree entry and
1249 * initialize special channel handling routines
1251 * @xdev: Driver specific device structure
1252 * @node: Device node
1254 * Return: '0' on success and failure value on error
1256 static int xilinx_frmbuf_chan_probe(struct xilinx_frmbuf_device *xdev,
1257 struct device_node *node)
1259 struct xilinx_frmbuf_chan *chan;
1265 chan->dev = xdev->dev;
1269 err = of_property_read_u32(node, "xlnx,dma-addr-width",
1271 if (err || (dma_addr_size != 32 && dma_addr_size != 64)) {
1272 dev_err(xdev->dev, "missing or invalid addr width dts prop\n");
1276 if (dma_addr_size == 64 && sizeof(dma_addr_t) == sizeof(u64))
1277 chan->write_addr = writeq_addr;
1279 chan->write_addr = write_addr;
1281 if (xdev->cfg->flags & XILINX_FID_PROP)
1282 chan->hw_fid = of_property_read_bool(node, "xlnx,fid");
1284 spin_lock_init(&chan->lock);
1285 INIT_LIST_HEAD(&chan->pending_list);
1286 INIT_LIST_HEAD(&chan->done_list);
1288 chan->irq = irq_of_parse_and_map(node, 0);
1289 err = devm_request_irq(xdev->dev, chan->irq, xilinx_frmbuf_irq_handler,
1290 IRQF_SHARED, "xilinx_framebuffer", chan);
1293 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq);
1297 tasklet_init(&chan->tasklet, xilinx_frmbuf_do_tasklet,
1298 (unsigned long)chan);
1301 * Initialize the DMA channel and add it to the DMA engine channels
1304 chan->common.device = &xdev->common;
1306 list_add_tail(&chan->common.device_node, &xdev->common.channels);
1308 mutex_lock(&frmbuf_chan_list_lock);
1309 list_add_tail(&chan->chan_node, &frmbuf_chan_list);
1310 mutex_unlock(&frmbuf_chan_list_lock);
1312 xilinx_frmbuf_chan_reset(chan);
1318 * xilinx_frmbuf_probe - Driver probe function
1319 * @pdev: Pointer to the platform_device structure
1321 * Return: '0' on success and failure value on error
1323 static int xilinx_frmbuf_probe(struct platform_device *pdev)
1325 struct device_node *node = pdev->dev.of_node;
1326 struct xilinx_frmbuf_device *xdev;
1327 struct resource *io;
1328 enum dma_transfer_direction dma_dir;
1329 const struct of_device_id *match;
1331 u32 i, j, align, ppc;
1333 const char *vid_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
/* Device-managed allocation: freed automatically on probe failure/unbind. */
1335 xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL);
1339 xdev->dev = &pdev->dev;
/* Pick the per-variant config (flags, direction) from the OF match table. */
1341 match = of_match_node(xilinx_frmbuf_of_ids, node);
1345 xdev->cfg = match->data;
1347 dma_dir = (enum dma_transfer_direction)xdev->cfg->direction;
/*
 * The IP has a GPIO-driven reset line; devm_gpiod_get() may defer if the
 * GPIO provider is not yet probed, hence the -EPROBE_DEFER special case.
 */
1349 xdev->rst_gpio = devm_gpiod_get(&pdev->dev, "reset",
1351 if (IS_ERR(xdev->rst_gpio)) {
1352 err = PTR_ERR(xdev->rst_gpio);
1353 if (err == -EPROBE_DEFER)
1354 dev_info(&pdev->dev,
1355 "Probe deferred due to GPIO reset defer\n");
1358 "Unable to locate reset property in dt\n");
/* Drive the reset GPIO low — presumably releasing the core from reset;
 * TODO confirm polarity against the gpiod flags requested above (elided). */
1362 gpiod_set_value_cansleep(xdev->rst_gpio, 0x0);
/* Map the register space; devm_ioremap_resource() validates 'io' itself. */
1364 io = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1365 xdev->regs = devm_ioremap_resource(&pdev->dev, io);
1366 if (IS_ERR(xdev->regs))
1367 return PTR_ERR(xdev->regs);
1369 /* Initialize the DMA engine */
/* Only IP variants flagged XILINX_PPC_PROP carry the pixels-per-clock
 * property; valid values are restricted to 1, 2 or 4. */
1370 if (xdev->cfg->flags & XILINX_PPC_PROP) {
1371 err = of_property_read_u32(node, "xlnx,pixels-per-clock", &ppc);
1372 if (err || (ppc != 1 && ppc != 2 && ppc != 4)) {
1373 dev_err(&pdev->dev, "missing or invalid pixels per clock dts prop\n");
/* DMA alignment is optional in DT; default scales with pixels-per-clock. */
1377 err = of_property_read_u32(node, "xlnx,dma-align", &align);
1379 align = ppc * XILINX_FRMBUF_ALIGN_MUL;
/* ffs() != fls() rejects any alignment that is not a power of two. */
1381 if (align < (ppc * XILINX_FRMBUF_ALIGN_MUL) ||
1382 ffs(align) != fls(align)) {
1383 dev_err(&pdev->dev, "invalid dma align dts prop\n");
/* copy_align is expressed as log2 of the byte alignment. */
1390 xdev->common.copy_align = fls(align) - 1;
1391 xdev->common.dev = &pdev->dev;
1393 INIT_LIST_HEAD(&xdev->common.channels);
1394 dma_cap_set(DMA_SLAVE, xdev->common.cap_mask);
1395 dma_cap_set(DMA_PRIVATE, xdev->common.cap_mask);
1397 /* Initialize the channels */
1398 err = xilinx_frmbuf_chan_probe(xdev, node);
/* The core is unidirectional: advertise exactly one transfer direction,
 * anything else tears the channel back down (error path below). */
1402 xdev->chan.direction = dma_dir;
1404 if (xdev->chan.direction == DMA_DEV_TO_MEM) {
1405 xdev->common.directions = BIT(DMA_DEV_TO_MEM);
1406 dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_DEV_TO_MEM\n");
1407 } else if (xdev->chan.direction == DMA_MEM_TO_DEV) {
1408 xdev->common.directions = BIT(DMA_MEM_TO_DEV);
1409 dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_MEM_TO_DEV\n");
1411 xilinx_frmbuf_chan_remove(&xdev->chan);
1415 /* read supported video formats and update internal table */
1416 hw_vid_fmt_cnt = of_property_count_strings(node, "xlnx,vid-formats");
1418 err = of_property_read_string_array(node, "xlnx,vid-formats",
1419 vid_fmts, hw_vid_fmt_cnt);
1422 "Missing or invalid xlnx,vid-formats dts prop\n");
/* Build a bitmask of enabled formats by matching each DT string against
 * the driver's static format table. */
1426 for (i = 0; i < hw_vid_fmt_cnt; i++) {
1427 const char *vid_fmt_name = vid_fmts[i];
1429 for (j = 0; j < ARRAY_SIZE(xilinx_frmbuf_formats); j++) {
1430 const char *dts_name =
1431 xilinx_frmbuf_formats[j].dts_name;
/* Non-zero strcmp() means "no match" — presumably skipped via
 * continue in the elided line that follows. */
1433 if (strcmp(vid_fmt_name, dts_name))
1436 xdev->enabled_vid_fmts |=
1437 xilinx_frmbuf_formats[j].fmt_bitmask;
1441 /* Determine supported vid framework formats */
1442 frmbuf_init_format_array(xdev);
/* Wire up the dmaengine provider callbacks. */
1444 xdev->common.device_alloc_chan_resources =
1445 xilinx_frmbuf_alloc_chan_resources;
1446 xdev->common.device_free_chan_resources =
1447 xilinx_frmbuf_free_chan_resources;
1448 xdev->common.device_prep_interleaved_dma =
1449 xilinx_frmbuf_dma_prep_interleaved;
1450 xdev->common.device_terminate_all = xilinx_frmbuf_terminate_all;
1451 xdev->common.device_synchronize = xilinx_frmbuf_synchronize;
1452 xdev->common.device_tx_status = xilinx_frmbuf_tx_status;
1453 xdev->common.device_issue_pending = xilinx_frmbuf_issue_pending;
1455 platform_set_drvdata(pdev, xdev);
1457 /* Register the DMA engine with the core */
/* NOTE(review): dma_async_device_register() can fail but its return value
 * is not checked on this line — confirm against the elided surroundings. */
1458 dma_async_device_register(&xdev->common);
1459 err = of_dma_controller_register(node, of_dma_xilinx_xlate, xdev);
/* On DT-registration failure, unwind the channel and the DMA device. */
1462 dev_err(&pdev->dev, "Unable to register DMA to DT\n");
1463 xilinx_frmbuf_chan_remove(&xdev->chan);
1464 dma_async_device_unregister(&xdev->common);
1468 dev_info(&pdev->dev, "Xilinx AXI FrameBuffer Engine Driver Probed!!\n");
1474 * xilinx_frmbuf_remove - Driver remove function
1475 * @pdev: Pointer to the platform_device structure
1477 * Return: Always '0'
1479 static int xilinx_frmbuf_remove(struct platform_device *pdev)
1481 struct xilinx_frmbuf_device *xdev = platform_get_drvdata(pdev);
/* Unregister from the dmaengine core before tearing down the channel so no
 * new transactions can be issued against a dying channel. */
1483 dma_async_device_unregister(&xdev->common);
/* NOTE(review): probe calls of_dma_controller_register(), but no matching
 * of_dma_controller_free() is visible here — verify against the full file
 * that the DT DMA controller entry is released on remove. */
1484 xilinx_frmbuf_chan_remove(&xdev->chan);
/* Export the OF match table for module autoloading via modalias. */
1489 MODULE_DEVICE_TABLE(of, xilinx_frmbuf_of_ids);
/* Platform driver glue: binds probe/remove to DT-matched frmbuf instances. */
1491 static struct platform_driver xilinx_frmbuf_driver = {
1493 .name = "xilinx-frmbuf",
1494 .of_match_table = xilinx_frmbuf_of_ids,
1496 .probe = xilinx_frmbuf_probe,
1497 .remove = xilinx_frmbuf_remove,
/* Generates the module init/exit boilerplate for the driver above. */
1500 module_platform_driver(xilinx_frmbuf_driver);
1502 MODULE_AUTHOR("Xilinx, Inc.");
1503 MODULE_DESCRIPTION("Xilinx Framebuffer driver");
1504 MODULE_LICENSE("GPL v2");