/*
 * DMAEngine driver for Xilinx Framebuffer IP
 *
 * Copyright (C) 2016,2017 Xilinx, Inc. All rights reserved.
 *
 * Authors: Radhey Shyam Pandey <radheys@xilinx.com>
 *          John Nichols <jnichol@xilinx.com>
 *          Jeffrey Mouroux <jmouroux@xilinx.com>
 *
 * Based on the Freescale DMA driver.
 *
 * Description:
 * The AXI Framebuffer core is a soft Xilinx IP core that
 * provides high-bandwidth direct memory access between memory
 * and AXI4-Stream video type target peripherals.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 */
23 #include <linux/bitops.h>
24 #include <linux/dma/xilinx_frmbuf.h>
25 #include <linux/dmapool.h>
26 #include <linux/gpio/consumer.h>
27 #include <linux/init.h>
28 #include <linux/interrupt.h>
30 #include <linux/iopoll.h>
31 #include <linux/module.h>
32 #include <linux/of_address.h>
33 #include <linux/of_dma.h>
34 #include <linux/of_irq.h>
35 #include <linux/of_platform.h>
36 #include <linux/slab.h>
37 #include <linux/videodev2.h>
39 #include <drm/drm_fourcc.h>
41 #include "../dmaengine.h"
/* Register/Descriptor Offsets */
#define XILINX_FRMBUF_CTRL_OFFSET		0x00
#define XILINX_FRMBUF_GIE_OFFSET		0x04
#define XILINX_FRMBUF_IE_OFFSET			0x08
#define XILINX_FRMBUF_ISR_OFFSET		0x0c
#define XILINX_FRMBUF_WIDTH_OFFSET		0x10
#define XILINX_FRMBUF_HEIGHT_OFFSET		0x18
#define XILINX_FRMBUF_STRIDE_OFFSET		0x20
#define XILINX_FRMBUF_FMT_OFFSET		0x28
#define XILINX_FRMBUF_ADDR_OFFSET		0x30
#define XILINX_FRMBUF_ADDR2_OFFSET		0x3c

/* Control Registers */
#define XILINX_FRMBUF_CTRL_AP_START		BIT(0)
#define XILINX_FRMBUF_CTRL_AP_DONE		BIT(1)
#define XILINX_FRMBUF_CTRL_AP_IDLE		BIT(2)
#define XILINX_FRMBUF_CTRL_AP_READY		BIT(3)
#define XILINX_FRMBUF_CTRL_AUTO_RESTART		BIT(7)
#define XILINX_FRMBUF_GIE_EN			BIT(0)

/* Interrupt Status and Control */
#define XILINX_FRMBUF_IE_AP_DONE		BIT(0)
#define XILINX_FRMBUF_IE_AP_READY		BIT(1)

#define XILINX_FRMBUF_ISR_AP_DONE_IRQ		BIT(0)
#define XILINX_FRMBUF_ISR_AP_READY_IRQ		BIT(1)

#define XILINX_FRMBUF_ISR_ALL_IRQ_MASK	\
		(XILINX_FRMBUF_ISR_AP_DONE_IRQ | \
		 XILINX_FRMBUF_ISR_AP_READY_IRQ)

/* Video Format Register Settings */
#define XILINX_FRMBUF_FMT_RGBX8			10
#define XILINX_FRMBUF_FMT_YUVX8			11
#define XILINX_FRMBUF_FMT_YUYV8			12
#define XILINX_FRMBUF_FMT_Y_UV8			18
#define XILINX_FRMBUF_FMT_Y_UV8_420		19
#define XILINX_FRMBUF_FMT_RGB8			20
#define XILINX_FRMBUF_FMT_YUV8			21
#define XILINX_FRMBUF_FMT_Y8			24
85 * struct xilinx_frmbuf_desc_hw - Hardware Descriptor
86 * @luma_plane_addr: Luma or packed plane buffer address
87 * @chroma_plane_addr: Chroma plane buffer address
88 * @vsize: Vertical Size
89 * @hsize: Horizontal Size
90 * @stride: Number of bytes between the first
91 * pixels of each horizontal line
93 struct xilinx_frmbuf_desc_hw {
94 dma_addr_t luma_plane_addr;
95 dma_addr_t chroma_plane_addr;
102 * struct xilinx_frmbuf_tx_descriptor - Per Transaction structure
103 * @async_tx: Async transaction descriptor
104 * @hw: Hardware descriptor
105 * @node: Node in the channel descriptors list
107 struct xilinx_frmbuf_tx_descriptor {
108 struct dma_async_tx_descriptor async_tx;
109 struct xilinx_frmbuf_desc_hw hw;
110 struct list_head node;
114 * struct xilinx_frmbuf_chan - Driver specific dma channel structure
115 * @xdev: Driver specific device structure
116 * @lock: Descriptor operation lock
117 * @chan_node: Member of a list of framebuffer channel instances
118 * @pending_list: Descriptors waiting
119 * @done_list: Complete descriptors
120 * @staged_desc: Next buffer to be programmed
121 * @active_desc: Currently active buffer being read/written to
122 * @common: DMA common channel
123 * @dev: The dma device
124 * @write_addr: callback that will write dma addresses to IP (32 or 64 bit)
126 * @direction: Transfer direction
127 * @idle: Channel idle state
128 * @tasklet: Cleanup work after irq
129 * @vid_fmt: Reference to currently assigned video format description
131 struct xilinx_frmbuf_chan {
132 struct xilinx_frmbuf_device *xdev;
133 /* Descriptor operation lock */
135 struct list_head chan_node;
136 struct list_head pending_list;
137 struct list_head done_list;
138 struct xilinx_frmbuf_tx_descriptor *staged_desc;
139 struct xilinx_frmbuf_tx_descriptor *active_desc;
140 struct dma_chan common;
142 void (*write_addr)(struct xilinx_frmbuf_chan *chan, u32 reg,
145 enum dma_transfer_direction direction;
147 struct tasklet_struct tasklet;
148 const struct xilinx_frmbuf_format_desc *vid_fmt;
152 * struct xilinx_frmbuf_device - dma device structure
153 * @regs: I/O mapped base address
154 * @dev: Device Structure
155 * @common: DMA device structure
156 * @chan: Driver specific dma channel
157 * @rst_gpio: GPIO reset
158 * @enabled_vid_fmts: Bitmask of video formats enabled in hardware
160 struct xilinx_frmbuf_device {
163 struct dma_device common;
164 struct xilinx_frmbuf_chan chan;
165 struct gpio_desc *rst_gpio;
166 u32 enabled_vid_fmts;
170 * struct xilinx_frmbuf_format_desc - lookup table to match fourcc to format
171 * @dts_name: Device tree name for this entry.
173 * @bpp: Bytes per pixel
174 * @num_planes: Expected number of plane buffers in framebuffer for this format
175 * @drm_fmt: DRM video framework equivalent fourcc code
176 * @v4l2_fmt: Video 4 Linux framework equivalent fourcc code
177 * @fmt_bitmask: Flag identifying this format in device-specific "enabled"
180 struct xilinx_frmbuf_format_desc {
181 const char *dts_name;
/* Global registry of all framebuffer channels, guarded by its mutex; used by
 * xilinx_xdma_set_config() to validate that a dma_chan belongs to this driver.
 */
static LIST_HEAD(frmbuf_chan_list);
static DEFINE_MUTEX(frmbuf_chan_list_lock);
193 static const struct xilinx_frmbuf_format_desc xilinx_frmbuf_formats[] = {
195 .dts_name = "xbgr8888",
196 .id = XILINX_FRMBUF_FMT_RGBX8,
199 .drm_fmt = DRM_FORMAT_XBGR8888,
201 .fmt_bitmask = BIT(0),
204 .dts_name = "unsupported",
205 .id = XILINX_FRMBUF_FMT_YUVX8,
210 .fmt_bitmask = BIT(1),
214 .id = XILINX_FRMBUF_FMT_YUYV8,
217 .drm_fmt = DRM_FORMAT_YUYV,
218 .v4l2_fmt = V4L2_PIX_FMT_YUYV,
219 .fmt_bitmask = BIT(2),
223 .id = XILINX_FRMBUF_FMT_Y_UV8,
226 .drm_fmt = DRM_FORMAT_NV16,
227 .v4l2_fmt = V4L2_PIX_FMT_NV16,
228 .fmt_bitmask = BIT(3),
232 .id = XILINX_FRMBUF_FMT_Y_UV8_420,
235 .drm_fmt = DRM_FORMAT_NV12,
236 .v4l2_fmt = V4L2_PIX_FMT_NV12,
237 .fmt_bitmask = BIT(4),
240 .dts_name = "bgr888",
241 .id = XILINX_FRMBUF_FMT_RGB8,
244 .drm_fmt = DRM_FORMAT_BGR888,
245 .v4l2_fmt = V4L2_PIX_FMT_RGB24,
246 .fmt_bitmask = BIT(5),
249 .dts_name = "unsupported",
250 .id = XILINX_FRMBUF_FMT_YUV8,
255 .fmt_bitmask = BIT(6),
259 .id = XILINX_FRMBUF_FMT_Y8,
263 .v4l2_fmt = V4L2_PIX_FMT_GREY,
264 .fmt_bitmask = BIT(7),
268 static const struct of_device_id xilinx_frmbuf_of_ids[] = {
269 { .compatible = "xlnx,axi-frmbuf-wr-v2",
270 .data = (void *)DMA_DEV_TO_MEM},
271 { .compatible = "xlnx,axi-frmbuf-rd-v2",
272 .data = (void *)DMA_MEM_TO_DEV},
/******************************PROTOTYPES*************************************/

/* Map a generic dma_chan / dma_async_tx_descriptor back to driver types. */
#define to_xilinx_chan(chan) \
	container_of(chan, struct xilinx_frmbuf_chan, common)
#define to_dma_tx_descriptor(tx) \
	container_of(tx, struct xilinx_frmbuf_tx_descriptor, async_tx)
282 static inline u32 frmbuf_read(struct xilinx_frmbuf_chan *chan, u32 reg)
284 return ioread32(chan->xdev->regs + reg);
287 static inline void frmbuf_write(struct xilinx_frmbuf_chan *chan, u32 reg,
290 iowrite32(value, chan->xdev->regs + reg);
293 static inline void frmbuf_writeq(struct xilinx_frmbuf_chan *chan, u32 reg,
296 iowrite32(lower_32_bits(value), chan->xdev->regs + reg);
297 iowrite32(upper_32_bits(value), chan->xdev->regs + reg + 4);
300 static void writeq_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
303 frmbuf_writeq(chan, reg, (u64)addr);
306 static void write_addr(struct xilinx_frmbuf_chan *chan, u32 reg,
309 frmbuf_write(chan, reg, addr);
312 static inline void frmbuf_clr(struct xilinx_frmbuf_chan *chan, u32 reg,
315 frmbuf_write(chan, reg, frmbuf_read(chan, reg) & ~clr);
318 static inline void frmbuf_set(struct xilinx_frmbuf_chan *chan, u32 reg,
321 frmbuf_write(chan, reg, frmbuf_read(chan, reg) | set);
324 static int frmbuf_verify_format(struct dma_chan *chan, u32 fourcc, u32 type)
326 struct xilinx_frmbuf_chan *xil_chan = to_xilinx_chan(chan);
327 u32 i, sz = ARRAY_SIZE(xilinx_frmbuf_formats);
329 for (i = 0; i < sz; i++) {
330 if ((type == XDMA_DRM &&
331 fourcc != xilinx_frmbuf_formats[i].drm_fmt) ||
332 (type == XDMA_V4L2 &&
333 fourcc != xilinx_frmbuf_formats[i].v4l2_fmt))
336 if (!(xilinx_frmbuf_formats[i].fmt_bitmask &
337 xil_chan->xdev->enabled_vid_fmts))
340 xil_chan->vid_fmt = &xilinx_frmbuf_formats[i];
346 static void xilinx_xdma_set_config(struct dma_chan *chan, u32 fourcc, u32 type)
348 struct xilinx_frmbuf_chan *xil_chan;
349 bool found_xchan = false;
352 mutex_lock(&frmbuf_chan_list_lock);
353 list_for_each_entry(xil_chan, &frmbuf_chan_list, chan_node) {
354 if (chan == &xil_chan->common) {
359 mutex_unlock(&frmbuf_chan_list_lock);
362 dev_dbg(chan->device->dev,
363 "dma chan not a Video Framebuffer channel instance\n");
367 ret = frmbuf_verify_format(chan, fourcc, type);
368 if (ret == -EINVAL) {
369 dev_err(chan->device->dev,
370 "Framebuffer not configured for fourcc 0x%x\n",
376 void xilinx_xdma_drm_config(struct dma_chan *chan, u32 drm_fourcc)
378 xilinx_xdma_set_config(chan, drm_fourcc, XDMA_DRM);
380 } EXPORT_SYMBOL_GPL(xilinx_xdma_drm_config);
382 void xilinx_xdma_v4l2_config(struct dma_chan *chan, u32 v4l2_fourcc)
384 xilinx_xdma_set_config(chan, v4l2_fourcc, XDMA_V4L2);
386 } EXPORT_SYMBOL_GPL(xilinx_xdma_v4l2_config);
389 * of_dma_xilinx_xlate - Translation function
390 * @dma_spec: Pointer to DMA specifier as found in the device tree
391 * @ofdma: Pointer to DMA controller data
393 * Return: DMA channel pointer on success or error code on error
395 static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
396 struct of_dma *ofdma)
398 struct xilinx_frmbuf_device *xdev = ofdma->of_dma_data;
400 return dma_get_slave_channel(&xdev->chan.common);
403 /* -----------------------------------------------------------------------------
404 * Descriptors alloc and free
408 * xilinx_frmbuf_tx_descriptor - Allocate transaction descriptor
409 * @chan: Driver specific dma channel
411 * Return: The allocated descriptor on success and NULL on failure.
413 static struct xilinx_frmbuf_tx_descriptor *
414 xilinx_frmbuf_alloc_tx_descriptor(struct xilinx_frmbuf_chan *chan)
416 struct xilinx_frmbuf_tx_descriptor *desc;
418 desc = kzalloc(sizeof(*desc), GFP_KERNEL);
426 * xilinx_frmbuf_free_desc_list - Free descriptors list
427 * @chan: Driver specific dma channel
428 * @list: List to parse and delete the descriptor
430 static void xilinx_frmbuf_free_desc_list(struct xilinx_frmbuf_chan *chan,
431 struct list_head *list)
433 struct xilinx_frmbuf_tx_descriptor *desc, *next;
435 list_for_each_entry_safe(desc, next, list, node) {
436 list_del(&desc->node);
442 * xilinx_frmbuf_free_descriptors - Free channel descriptors
443 * @chan: Driver specific dma channel
445 static void xilinx_frmbuf_free_descriptors(struct xilinx_frmbuf_chan *chan)
449 spin_lock_irqsave(&chan->lock, flags);
451 xilinx_frmbuf_free_desc_list(chan, &chan->pending_list);
452 xilinx_frmbuf_free_desc_list(chan, &chan->done_list);
453 kfree(chan->active_desc);
454 kfree(chan->staged_desc);
456 chan->staged_desc = NULL;
457 chan->active_desc = NULL;
458 INIT_LIST_HEAD(&chan->pending_list);
459 INIT_LIST_HEAD(&chan->done_list);
461 spin_unlock_irqrestore(&chan->lock, flags);
/**
 * xilinx_frmbuf_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xilinx_frmbuf_free_chan_resources(struct dma_chan *dchan)
{
	struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);

	xilinx_frmbuf_free_descriptors(chan);
}
476 * xilinx_frmbuf_chan_desc_cleanup - Clean channel descriptors
477 * @chan: Driver specific dma channel
479 static void xilinx_frmbuf_chan_desc_cleanup(struct xilinx_frmbuf_chan *chan)
481 struct xilinx_frmbuf_tx_descriptor *desc, *next;
484 spin_lock_irqsave(&chan->lock, flags);
486 list_for_each_entry_safe(desc, next, &chan->done_list, node) {
487 dma_async_tx_callback callback;
488 void *callback_param;
490 list_del(&desc->node);
492 /* Run the link descriptor callback function */
493 callback = desc->async_tx.callback;
494 callback_param = desc->async_tx.callback_param;
496 spin_unlock_irqrestore(&chan->lock, flags);
497 callback(callback_param);
498 spin_lock_irqsave(&chan->lock, flags);
501 /* Run any dependencies, then free the descriptor */
502 dma_run_dependencies(&desc->async_tx);
506 spin_unlock_irqrestore(&chan->lock, flags);
/**
 * xilinx_frmbuf_do_tasklet - Schedule completion tasklet
 * @data: Pointer to the Xilinx frmbuf channel structure
 */
static void xilinx_frmbuf_do_tasklet(unsigned long data)
{
	struct xilinx_frmbuf_chan *chan = (struct xilinx_frmbuf_chan *)data;

	xilinx_frmbuf_chan_desc_cleanup(chan);
}
/**
 * xilinx_frmbuf_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xilinx_frmbuf_alloc_chan_resources(struct dma_chan *dchan)
{
	dma_cookie_init(dchan);

	return 0;
}
534 * xilinx_frmbuf_tx_status - Get frmbuf transaction status
535 * @dchan: DMA channel
536 * @cookie: Transaction identifier
537 * @txstate: Transaction state
539 * Return: fmrbuf transaction status
541 static enum dma_status xilinx_frmbuf_tx_status(struct dma_chan *dchan,
543 struct dma_tx_state *txstate)
545 return dma_cookie_status(dchan, cookie, txstate);
549 * xilinx_frmbuf_halt - Halt frmbuf channel
550 * @chan: Driver specific dma channel
552 static void xilinx_frmbuf_halt(struct xilinx_frmbuf_chan *chan)
554 frmbuf_clr(chan, XILINX_FRMBUF_CTRL_OFFSET,
555 XILINX_FRMBUF_CTRL_AP_START |
556 XILINX_FRMBUF_CTRL_AUTO_RESTART);
561 * xilinx_frmbuf_start - Start dma channel
562 * @chan: Driver specific dma channel
564 static void xilinx_frmbuf_start(struct xilinx_frmbuf_chan *chan)
566 frmbuf_set(chan, XILINX_FRMBUF_CTRL_OFFSET,
567 XILINX_FRMBUF_CTRL_AP_START |
568 XILINX_FRMBUF_CTRL_AUTO_RESTART);
573 * xilinx_frmbuf_complete_descriptor - Mark the active descriptor as complete
574 * This function is invoked with spinlock held
575 * @chan : xilinx frmbuf channel
579 static void xilinx_frmbuf_complete_descriptor(struct xilinx_frmbuf_chan *chan)
581 struct xilinx_frmbuf_tx_descriptor *desc = chan->active_desc;
583 dma_cookie_complete(&desc->async_tx);
584 list_add_tail(&desc->node, &chan->done_list);
588 * xilinx_frmbuf_start_transfer - Starts frmbuf transfer
589 * @chan: Driver specific channel struct pointer
591 static void xilinx_frmbuf_start_transfer(struct xilinx_frmbuf_chan *chan)
593 struct xilinx_frmbuf_tx_descriptor *desc;
598 if (chan->active_desc) {
599 xilinx_frmbuf_complete_descriptor(chan);
600 chan->active_desc = NULL;
603 if (chan->staged_desc) {
604 chan->active_desc = chan->staged_desc;
605 chan->staged_desc = NULL;
608 if (list_empty(&chan->pending_list))
611 desc = list_first_entry(&chan->pending_list,
612 struct xilinx_frmbuf_tx_descriptor,
615 /* Start the transfer */
616 chan->write_addr(chan, XILINX_FRMBUF_ADDR_OFFSET,
617 desc->hw.luma_plane_addr);
618 chan->write_addr(chan, XILINX_FRMBUF_ADDR2_OFFSET,
619 desc->hw.chroma_plane_addr);
621 /* HW expects these parameters to be same for one transaction */
622 frmbuf_write(chan, XILINX_FRMBUF_WIDTH_OFFSET, desc->hw.hsize);
623 frmbuf_write(chan, XILINX_FRMBUF_STRIDE_OFFSET, desc->hw.stride);
624 frmbuf_write(chan, XILINX_FRMBUF_HEIGHT_OFFSET, desc->hw.vsize);
625 frmbuf_write(chan, XILINX_FRMBUF_FMT_OFFSET, chan->vid_fmt->id);
627 /* Start the hardware */
628 xilinx_frmbuf_start(chan);
629 list_del(&desc->node);
630 chan->staged_desc = desc;
634 * xilinx_frmbuf_issue_pending - Issue pending transactions
635 * @dchan: DMA channel
637 static void xilinx_frmbuf_issue_pending(struct dma_chan *dchan)
639 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
642 spin_lock_irqsave(&chan->lock, flags);
643 xilinx_frmbuf_start_transfer(chan);
644 spin_unlock_irqrestore(&chan->lock, flags);
648 * xilinx_frmbuf_reset - Reset frmbuf channel
649 * @chan: Driver specific dma channel
651 static void xilinx_frmbuf_reset(struct xilinx_frmbuf_chan *chan)
653 frmbuf_write(chan, XILINX_FRMBUF_CTRL_OFFSET, 0);
657 * xilinx_frmbuf_chan_reset - Reset frmbuf channel and enable interrupts
658 * @chan: Driver specific frmbuf channel
660 static void xilinx_frmbuf_chan_reset(struct xilinx_frmbuf_chan *chan)
662 xilinx_frmbuf_reset(chan);
664 frmbuf_set(chan, XILINX_FRMBUF_IE_OFFSET,
665 XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
669 * xilinx_frmbuf_irq_handler - frmbuf Interrupt handler
671 * @data: Pointer to the Xilinx frmbuf channel structure
673 * Return: IRQ_HANDLED/IRQ_NONE
675 static irqreturn_t xilinx_frmbuf_irq_handler(int irq, void *data)
677 struct xilinx_frmbuf_chan *chan = data;
680 status = frmbuf_read(chan, XILINX_FRMBUF_ISR_OFFSET);
681 if (!(status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK))
684 frmbuf_write(chan, XILINX_FRMBUF_ISR_OFFSET,
685 status & XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
687 if (status & XILINX_FRMBUF_ISR_AP_READY_IRQ) {
688 spin_lock(&chan->lock);
690 xilinx_frmbuf_start_transfer(chan);
691 spin_unlock(&chan->lock);
694 tasklet_schedule(&chan->tasklet);
699 * xilinx_frmbuf_tx_submit - Submit DMA transaction
700 * @tx: Async transaction descriptor
702 * Return: cookie value on success and failure value on error
704 static dma_cookie_t xilinx_frmbuf_tx_submit(struct dma_async_tx_descriptor *tx)
706 struct xilinx_frmbuf_tx_descriptor *desc = to_dma_tx_descriptor(tx);
707 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(tx->chan);
711 spin_lock_irqsave(&chan->lock, flags);
712 cookie = dma_cookie_assign(tx);
713 list_add_tail(&desc->node, &chan->pending_list);
714 spin_unlock_irqrestore(&chan->lock, flags);
720 * xilinx_frmbuf_dma_prep_interleaved - prepare a descriptor for a
721 * DMA_SLAVE transaction
722 * @dchan: DMA channel
723 * @xt: Interleaved template pointer
724 * @flags: transfer ack flags
726 * Return: Async transaction descriptor on success and NULL on failure
728 static struct dma_async_tx_descriptor *
729 xilinx_frmbuf_dma_prep_interleaved(struct dma_chan *dchan,
730 struct dma_interleaved_template *xt,
733 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
734 struct xilinx_frmbuf_tx_descriptor *desc;
735 struct xilinx_frmbuf_desc_hw *hw;
737 if (chan->direction != xt->dir || !chan->vid_fmt)
740 if (!xt->numf || !xt->sgl[0].size)
743 if (xt->frame_size != chan->vid_fmt->num_planes)
746 desc = xilinx_frmbuf_alloc_tx_descriptor(chan);
750 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
751 desc->async_tx.tx_submit = xilinx_frmbuf_tx_submit;
752 async_tx_ack(&desc->async_tx);
755 hw->vsize = xt->numf;
756 hw->hsize = xt->sgl[0].size / chan->vid_fmt->bpp;
757 hw->stride = xt->sgl[0].icg + xt->sgl[0].size;
759 if (chan->direction == DMA_MEM_TO_DEV) {
760 hw->luma_plane_addr = xt->src_start;
761 if (xt->frame_size == 2)
762 hw->chroma_plane_addr =
764 xt->numf * hw->stride +
767 hw->luma_plane_addr = xt->dst_start;
768 if (xt->frame_size == 2)
769 hw->chroma_plane_addr =
771 xt->numf * hw->stride +
775 return &desc->async_tx;
778 dev_err(chan->xdev->dev,
779 "Invalid dma template or missing dma video fmt config\n");
/**
 * xilinx_frmbuf_terminate_all - Halt the channel and free descriptors
 * @dchan: Driver specific dma channel pointer
 *
 * Return: 0
 */
static int xilinx_frmbuf_terminate_all(struct dma_chan *dchan)
{
	struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);

	xilinx_frmbuf_halt(chan);
	xilinx_frmbuf_free_descriptors(chan);

	return 0;
}
800 * xilinx_frmbuf_synchronize - kill tasklet to stop further descr processing
801 * @dchan: Driver specific dma channel pointer
803 static void xilinx_frmbuf_synchronize(struct dma_chan *dchan)
805 struct xilinx_frmbuf_chan *chan = to_xilinx_chan(dchan);
807 tasklet_kill(&chan->tasklet);
810 /* -----------------------------------------------------------------------------
815 * xilinx_frmbuf_chan_remove - Per Channel remove function
816 * @chan: Driver specific dma channel
818 static void xilinx_frmbuf_chan_remove(struct xilinx_frmbuf_chan *chan)
820 /* Disable all interrupts */
821 frmbuf_clr(chan, XILINX_FRMBUF_IE_OFFSET,
822 XILINX_FRMBUF_ISR_ALL_IRQ_MASK);
824 tasklet_kill(&chan->tasklet);
825 list_del(&chan->common.device_node);
827 mutex_lock(&frmbuf_chan_list_lock);
828 list_del(&chan->chan_node);
829 mutex_unlock(&frmbuf_chan_list_lock);
833 * xilinx_frmbuf_chan_probe - Per Channel Probing
834 * It get channel features from the device tree entry and
835 * initialize special channel handling routines
837 * @xdev: Driver specific device structure
840 * Return: '0' on success and failure value on error
842 static int xilinx_frmbuf_chan_probe(struct xilinx_frmbuf_device *xdev,
843 struct device_node *node)
845 struct xilinx_frmbuf_chan *chan;
851 chan->dev = xdev->dev;
855 err = of_property_read_u32(node, "xlnx,dma-addr-width",
857 if (err || (dma_addr_size != 32 && dma_addr_size != 64)) {
858 dev_err(xdev->dev, "missing or invalid addr width dts prop\n");
862 if (dma_addr_size == 64 && sizeof(dma_addr_t) == sizeof(u64))
863 chan->write_addr = writeq_addr;
865 chan->write_addr = write_addr;
867 spin_lock_init(&chan->lock);
868 INIT_LIST_HEAD(&chan->pending_list);
869 INIT_LIST_HEAD(&chan->done_list);
871 chan->irq = irq_of_parse_and_map(node, 0);
872 err = devm_request_irq(xdev->dev, chan->irq, xilinx_frmbuf_irq_handler,
873 IRQF_SHARED, "xilinx_framebuffer", chan);
876 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq);
880 tasklet_init(&chan->tasklet, xilinx_frmbuf_do_tasklet,
881 (unsigned long)chan);
884 * Initialize the DMA channel and add it to the DMA engine channels
887 chan->common.device = &xdev->common;
889 list_add_tail(&chan->common.device_node, &xdev->common.channels);
891 mutex_lock(&frmbuf_chan_list_lock);
892 list_add_tail(&chan->chan_node, &frmbuf_chan_list);
893 mutex_unlock(&frmbuf_chan_list_lock);
895 xilinx_frmbuf_chan_reset(chan);
897 frmbuf_write(chan, XILINX_FRMBUF_IE_OFFSET, XILINX_FRMBUF_IE_AP_READY);
898 frmbuf_write(chan, XILINX_FRMBUF_GIE_OFFSET, XILINX_FRMBUF_GIE_EN);
904 * xilinx_frmbuf_probe - Driver probe function
905 * @pdev: Pointer to the platform_device structure
907 * Return: '0' on success and failure value on error
909 static int xilinx_frmbuf_probe(struct platform_device *pdev)
911 struct device_node *node = pdev->dev.of_node;
912 struct xilinx_frmbuf_device *xdev;
914 enum dma_transfer_direction dma_dir;
915 const struct of_device_id *match;
919 const char *vid_fmts[ARRAY_SIZE(xilinx_frmbuf_formats)];
921 xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL);
925 xdev->dev = &pdev->dev;
927 match = of_match_node(xilinx_frmbuf_of_ids, node);
931 dma_dir = (enum dma_transfer_direction)match->data;
933 xdev->rst_gpio = devm_gpiod_get(&pdev->dev, "reset",
935 if (IS_ERR(xdev->rst_gpio)) {
936 err = PTR_ERR(xdev->rst_gpio);
937 if (err == -EPROBE_DEFER)
939 "Probe deferred due to GPIO reset defer\n");
942 "Unable to locate reset property in dt\n");
946 gpiod_set_value_cansleep(xdev->rst_gpio, 0x0);
948 io = platform_get_resource(pdev, IORESOURCE_MEM, 0);
949 xdev->regs = devm_ioremap_resource(&pdev->dev, io);
950 if (IS_ERR(xdev->regs))
951 return PTR_ERR(xdev->regs);
953 /* Initialize the DMA engine */
954 xdev->common.dev = &pdev->dev;
956 INIT_LIST_HEAD(&xdev->common.channels);
957 dma_cap_set(DMA_SLAVE, xdev->common.cap_mask);
958 dma_cap_set(DMA_PRIVATE, xdev->common.cap_mask);
960 /* Initialize the channels */
961 err = xilinx_frmbuf_chan_probe(xdev, node);
965 xdev->chan.direction = dma_dir;
967 if (xdev->chan.direction == DMA_DEV_TO_MEM) {
968 xdev->common.directions = BIT(DMA_DEV_TO_MEM);
969 dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_DEV_TO_MEM\n");
970 } else if (xdev->chan.direction == DMA_MEM_TO_DEV) {
971 xdev->common.directions = BIT(DMA_MEM_TO_DEV);
972 dev_info(&pdev->dev, "Xilinx AXI frmbuf DMA_MEM_TO_DEV\n");
974 xilinx_frmbuf_chan_remove(&xdev->chan);
978 /* read supported video formats and update internal table */
979 hw_vid_fmt_cnt = of_property_count_strings(node, "xlnx,vid-formats");
981 err = of_property_read_string_array(node, "xlnx,vid-formats",
982 vid_fmts, hw_vid_fmt_cnt);
985 "Missing or invalid xlnx,vid-formats dts prop\n");
989 for (i = 0; i < hw_vid_fmt_cnt; i++) {
990 const char *vid_fmt_name = vid_fmts[i];
992 for (j = 0; j < ARRAY_SIZE(xilinx_frmbuf_formats); j++) {
993 const char *dts_name =
994 xilinx_frmbuf_formats[j].dts_name;
996 if (strcmp(vid_fmt_name, dts_name))
999 xdev->enabled_vid_fmts |=
1000 xilinx_frmbuf_formats[j].fmt_bitmask;
1004 xdev->common.device_alloc_chan_resources =
1005 xilinx_frmbuf_alloc_chan_resources;
1006 xdev->common.device_free_chan_resources =
1007 xilinx_frmbuf_free_chan_resources;
1008 xdev->common.device_prep_interleaved_dma =
1009 xilinx_frmbuf_dma_prep_interleaved;
1010 xdev->common.device_terminate_all = xilinx_frmbuf_terminate_all;
1011 xdev->common.device_synchronize = xilinx_frmbuf_synchronize;
1012 xdev->common.device_tx_status = xilinx_frmbuf_tx_status;
1013 xdev->common.device_issue_pending = xilinx_frmbuf_issue_pending;
1015 platform_set_drvdata(pdev, xdev);
1017 /* Register the DMA engine with the core */
1018 dma_async_device_register(&xdev->common);
1019 err = of_dma_controller_register(node, of_dma_xilinx_xlate, xdev);
1022 dev_err(&pdev->dev, "Unable to register DMA to DT\n");
1023 xilinx_frmbuf_chan_remove(&xdev->chan);
1024 dma_async_device_unregister(&xdev->common);
1028 dev_info(&pdev->dev, "Xilinx AXI FrameBuffer Engine Driver Probed!!\n");
1034 * xilinx_frmbuf_remove - Driver remove function
1035 * @pdev: Pointer to the platform_device structure
1037 * Return: Always '0'
1039 static int xilinx_frmbuf_remove(struct platform_device *pdev)
1041 struct xilinx_frmbuf_device *xdev = platform_get_drvdata(pdev);
1043 dma_async_device_unregister(&xdev->common);
1044 xilinx_frmbuf_chan_remove(&xdev->chan);
1049 MODULE_DEVICE_TABLE(of, xilinx_frmbuf_of_ids);
1051 static struct platform_driver xilinx_frmbuf_driver = {
1053 .name = "xilinx-frmbuf",
1054 .of_match_table = xilinx_frmbuf_of_ids,
1056 .probe = xilinx_frmbuf_probe,
1057 .remove = xilinx_frmbuf_remove,
1060 module_platform_driver(xilinx_frmbuf_driver);
1062 MODULE_AUTHOR("Xilinx, Inc.");
1063 MODULE_DESCRIPTION("Xilinx Framebuffer driver");
1064 MODULE_LICENSE("GPL v2");