// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx Scene Change Detection DMA driver
 *
 * Copyright (C) 2018 Xilinx, Inc.
 *
 * Authors: Anand Ashok Dumbre <anand.ashok.dumbre@xilinx.com>
 *          Satish Kumar Nagireddy <satish.nagireddy.nagireddy@xilinx.com>
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

#include "xilinx-scenechange.h"

/**
 * xscd_dma_irq_handler - scdma Interrupt handler
 * @irq: IRQ number
 * @data: Pointer to the Xilinx scdma device structure
 *
 * Return: IRQ_HANDLED/IRQ_NONE
 */
static irqreturn_t xscd_dma_irq_handler(int irq, void *data)
{
	struct xscd_dma_device *dev = data;
	struct xscd_dma_chan *chan;

	if (dev->memory_based) {
		u32 chan_en = 0, id;

		for (id = 0; id < dev->numchannels; id++) {
			chan = dev->chan[id];
			spin_lock(&chan->lock);

			if (chan->en && (!list_empty(&chan->pending_list))) {
				chan_en |= 1 << chan->id;
				chan->valid_interrupt = true;
			} else {
				chan->valid_interrupt = false;
			}

			xscd_dma_start_transfer(chan);
			spin_unlock(&chan->lock);
		}

		if (chan_en) {
			xscd_dma_reset(chan);
			xscd_dma_chan_enable(chan, chan_en);
			xscd_dma_start(chan);
		}

		for (id = 0; id < dev->numchannels; id++) {
			chan = dev->chan[id];
			tasklet_schedule(&chan->tasklet);
		}
	}

	return IRQ_HANDLED;
}

/* -----------------------------------------------------------------------------
 * Descriptors alloc and free
 */

/**
 * xscd_dma_alloc_tx_descriptor - Allocate transaction descriptor
 * @chan: Driver specific dma channel
 *
 * Return: The allocated descriptor on success and NULL on failure.
 */
struct xscd_dma_tx_descriptor *
xscd_dma_alloc_tx_descriptor(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc;

	desc = kzalloc(sizeof(*desc), GFP_KERNEL);
	if (!desc)
		return NULL;

	return desc;
}

/**
 * xscd_dma_tx_submit - Submit DMA transaction
 * @tx: Async transaction descriptor
 *
 * Return: cookie value on success and failure value on error
 */
dma_cookie_t xscd_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct xscd_dma_tx_descriptor *desc = to_xscd_dma_tx_descriptor(tx);
	struct xscd_dma_chan *chan = to_xscd_dma_chan(tx->chan);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);
	cookie = dma_cookie_assign(tx);
	list_add_tail(&desc->node, &chan->pending_list);
	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
}

/**
 * xscd_dma_chan_enable - Enable dma channel
 * @chan: Driver specific dma channel
 * @chan_en: Channels ready for transfer, it is a bitmap
 */
void xscd_dma_chan_enable(struct xscd_dma_chan *chan, int chan_en)
{
	xscd_write(chan->iomem, XSCD_CHAN_EN_OFFSET, chan_en);
}

/**
 * xscd_dma_complete_descriptor - Mark the active descriptor as complete
 * @chan: Driver specific dma channel
 *
 * This function is invoked with the channel lock held.
 */
static void xscd_dma_complete_descriptor(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc = chan->active_desc;

	dma_cookie_complete(&desc->async_tx);
	list_add_tail(&desc->node, &chan->done_list);
}

/**
 * xscd_dma_start_transfer - Starts dma transfer
 * @chan: Driver specific channel struct pointer
 */
void xscd_dma_start_transfer(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc;
	u32 chanoffset = chan->id * XSCD_CHAN_OFFSET;

	if (chan->active_desc) {
		xscd_dma_complete_descriptor(chan);
		chan->active_desc = NULL;
	}

	if (chan->staged_desc) {
		chan->active_desc = chan->staged_desc;
		chan->staged_desc = NULL;
	}

	if (list_empty(&chan->pending_list))
		return;

	desc = list_first_entry(&chan->pending_list,
				struct xscd_dma_tx_descriptor, node);

	/* Start the transfer */
	xscd_write(chan->iomem, XSCD_ADDR_OFFSET + chanoffset,
		   desc->sw.luma_plane_addr);

	list_del(&desc->node);
	chan->staged_desc = desc;
}
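
/*
 * Note on the descriptor flow implemented above: the channel address register
 * is programmed with the next buffer while the previous one may still be in
 * flight, so a descriptor moves from the pending list to "staged" and only
 * becomes "active" on the following invocation. The active descriptor is
 * completed one transfer later, once the frame it describes has been
 * processed.
 */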

/**
 * xscd_dma_free_desc_list - Free descriptors list
 * @chan: Driver specific dma channel
 * @list: List to parse and delete the descriptor
 */
void xscd_dma_free_desc_list(struct xscd_dma_chan *chan,
			     struct list_head *list)
{
	struct xscd_dma_tx_descriptor *desc, *next;

	list_for_each_entry_safe(desc, next, list, node) {
		list_del(&desc->node);
		kfree(desc);
	}
}

/**
 * xscd_dma_free_descriptors - Free channel descriptors
 * @chan: Driver specific dma channel
 */
void xscd_dma_free_descriptors(struct xscd_dma_chan *chan)
{
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	xscd_dma_free_desc_list(chan, &chan->pending_list);
	xscd_dma_free_desc_list(chan, &chan->done_list);
	kfree(chan->active_desc);
	kfree(chan->staged_desc);

	chan->staged_desc = NULL;
	chan->active_desc = NULL;
	INIT_LIST_HEAD(&chan->pending_list);
	INIT_LIST_HEAD(&chan->done_list);

	spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_chan_desc_cleanup - Clean channel descriptors
 * @chan: Driver specific dma channel
 */
void xscd_dma_chan_desc_cleanup(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc, *next;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	list_for_each_entry_safe(desc, next, &chan->done_list, node) {
		dma_async_tx_callback callback;
		void *callback_param;

		list_del(&desc->node);

		/* Run the link descriptor callback function */
		callback = desc->async_tx.callback;
		callback_param = desc->async_tx.callback_param;
		if (callback) {
			spin_unlock_irqrestore(&chan->lock, flags);
			callback(callback_param);
			spin_lock_irqsave(&chan->lock, flags);
		}

		kfree(desc);
	}

	spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_chan_remove - Per Channel remove function
 * @chan: Driver specific DMA channel
 */
void xscd_dma_chan_remove(struct xscd_dma_chan *chan)
{
	list_del(&chan->common.device_node);
}

/**
 * xscd_dma_prep_interleaved - prepare a descriptor for a
 *	DMA_SLAVE transaction
 * @dchan: DMA channel
 * @xt: Interleaved template pointer
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xscd_dma_prep_interleaved(struct dma_chan *dchan,
			  struct dma_interleaved_template *xt,
			  unsigned long flags)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
	struct xscd_dma_tx_descriptor *desc;
	struct xscd_dma_desc *sw;

	desc = xscd_dma_alloc_tx_descriptor(chan);
	if (!desc)
		return NULL;

	dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
	desc->async_tx.tx_submit = xscd_dma_tx_submit;
	async_tx_ack(&desc->async_tx);

	sw = &desc->sw;
	sw->vsize = xt->numf;
	sw->hsize = xt->sgl[0].size;
	sw->stride = xt->sgl[0].size + xt->sgl[0].icg;
	sw->luma_plane_addr = xt->src_start;

	return &desc->async_tx;
}
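
/*
 * Usage note: this callback is reached through the generic dmaengine API. A
 * minimal, illustrative client sketch (not part of this driver; buffer
 * geometry values such as "width", "height", "stride" and "luma_addr" are
 * assumed to come from the client) could look like:
 *
 *	struct dma_interleaved_template *xt;
 *	struct dma_async_tx_descriptor *tx;
 *
 *	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
 *	if (!xt)
 *		return -ENOMEM;
 *
 *	xt->src_start = luma_addr;
 *	xt->dir = DMA_MEM_TO_DEV;
 *	xt->numf = height;			// maps to sw->vsize
 *	xt->frame_size = 1;
 *	xt->sgl[0].size = width;		// maps to sw->hsize
 *	xt->sgl[0].icg = stride - width;	// sw->stride = size + icg
 *
 *	tx = dmaengine_prep_interleaved_dma(chan, xt, DMA_CTRL_ACK);
 *	if (tx) {
 *		dmaengine_submit(tx);
 *		dma_async_issue_pending(chan);
 *	}
 *	kfree(xt);
 *
 * Only the mapping of template fields to the software descriptor is defined
 * by the code above; everything else in this sketch is generic dmaengine
 * usage.
 */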

/**
 * xscd_dma_terminate_all - Halt the channel and free descriptors
 * @dchan: Driver specific dma channel pointer
 *
 * Return: Always '0'
 */
static int xscd_dma_terminate_all(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

	xscd_dma_halt(chan);
	xscd_dma_free_descriptors(chan);

	/* Worst case frame-to-frame boundary, ensure frame output complete */
	msleep(50);

	xscd_dma_reset(chan);

	return 0;
}

/**
 * xscd_dma_issue_pending - Issue pending transactions
 * @dchan: DMA channel
 */
static void xscd_dma_issue_pending(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
	struct xscd_dma_device *dev = chan->xdev;
	u32 chan_en = 0, id;

	for (id = 0; id < dev->numchannels; id++) {
		chan = dev->chan[id];
		spin_lock(&chan->lock);

		if (chan->en && (!list_empty(&chan->pending_list))) {
			chan_en |= 1 << chan->id;
			chan->valid_interrupt = true;
		} else {
			chan->valid_interrupt = false;
		}

		xscd_dma_start_transfer(chan);
		spin_unlock(&chan->lock);
	}

	if (chan_en) {
		xscd_dma_reset(chan);
		xscd_dma_chan_enable(chan, chan_en);
		xscd_dma_start(chan);
	}
}

static enum dma_status xscd_dma_tx_status(struct dma_chan *dchan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	return dma_cookie_status(dchan, cookie, txstate);
}

/**
 * xscd_dma_halt - Halt dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_halt(struct xscd_dma_chan *chan)
{
	struct xscd_dma_device *xdev = chan->xdev;

	if (xdev->memory_based)
		xscd_clr(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
	else
		/* Streaming based */
		xscd_clr(chan->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
}

/**
 * xscd_dma_start - Start dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_start(struct xscd_dma_chan *chan)
{
	struct xscd_dma_device *xdev = chan->xdev;

	if (xdev->memory_based)
		xscd_set(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
	else
		/* Streaming based */
		xscd_set(chan->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
}

/**
 * xscd_dma_reset - Reset dma channel and enable interrupts
 * @chan: Driver specific dma channel
 */
void xscd_dma_reset(struct xscd_dma_chan *chan)
{
	xscd_write(chan->iomem, XSCD_IE_OFFSET, XSCD_IE_AP_DONE);
	xscd_write(chan->iomem, XSCD_GIE_OFFSET, XSCD_GIE_EN);
}

/**
 * xscd_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xscd_dma_free_chan_resources(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

	xscd_dma_free_descriptors(chan);
}

/**
 * xscd_dma_do_tasklet - Schedule completion tasklet
 * @data: Pointer to the Xilinx scdma channel structure
 */
static void xscd_dma_do_tasklet(unsigned long data)
{
	struct xscd_dma_chan *chan = (struct xscd_dma_chan *)data;

	xscd_dma_chan_desc_cleanup(chan);
}

/**
 * xscd_dma_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xscd_dma_alloc_chan_resources(struct dma_chan *dchan)
{
	dma_cookie_init(dchan);

	return 0;
}

/**
 * of_scdma_xilinx_xlate - Translation function
 * @dma_spec: Pointer to DMA specifier as found in the device tree
 * @ofdma: Pointer to DMA controller data
 *
 * Return: DMA channel pointer on success and NULL on error
 */
static struct dma_chan *of_scdma_xilinx_xlate(struct of_phandle_args *dma_spec,
					      struct of_dma *ofdma)
{
	struct xscd_dma_device *xdev = ofdma->of_dma_data;
	u32 chan_id = dma_spec->args[0];

	if (chan_id >= xdev->numchannels)
		return NULL;

	if (!xdev->chan[chan_id])
		return NULL;

	return dma_get_slave_channel(&xdev->chan[chan_id]->common);
}
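
/*
 * The translation above uses a single specifier cell as the channel index.
 * An illustrative (not normative) consumer fragment, with placeholder node
 * and label names, might therefore look like:
 *
 *	video_in: video-node {
 *		dmas = <&scd_dma 0>;
 *		dma-names = "video";
 *	};
 *
 * Only the single index argument is implied by this driver; the property
 * names follow generic dmaengine/OF conventions.
 */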

static struct xscd_dma_chan *
xscd_dma_chan_probe(struct xscd_dma_device *xdev, int chan_id)
{
	struct xscd_dma_chan *chan;

	chan = xdev->chan[chan_id];
	chan->dev = xdev->dev;
	chan->xdev = xdev;

	spin_lock_init(&chan->lock);
	INIT_LIST_HEAD(&chan->pending_list);
	INIT_LIST_HEAD(&chan->done_list);
	tasklet_init(&chan->tasklet, xscd_dma_do_tasklet,
		     (unsigned long)chan);
	chan->common.device = &xdev->common;
	list_add_tail(&chan->common.device_node, &xdev->common.channels);

	return chan;
}

/**
 * xscd_dma_probe - Driver probe function
 * @pdev: Pointer to the device structure
 *
 * Return: '0' on success and failure value on error
 */
static int xscd_dma_probe(struct platform_device *pdev)
{
	struct xscd_dma_device *xdev;
	struct device_node *node;
	struct xscd_dma_chan *chan;
	struct dma_device *ddev;
	struct xscd_shared_data *shared_data;
	int ret, irq_num, chan_id = 0;

	/* Allocate and initialize the DMA engine structure */
	xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL);
	if (!xdev)
		return -ENOMEM;

	xdev->dev = &pdev->dev;
	ddev = &xdev->common;
	ddev->dev = &pdev->dev;
	node = xdev->dev->parent->of_node;
	xdev->dev->of_node = node;
	shared_data = (struct xscd_shared_data *)pdev->dev.parent->driver_data;
	xdev->regs = shared_data->iomem;
	xdev->chan = shared_data->dma_chan_list;
	xdev->memory_based = shared_data->memory_based;
	dma_set_mask(xdev->dev, DMA_BIT_MASK(32));

	/* Initialize the DMA engine */
	xdev->common.dev = &pdev->dev;
	ret = of_property_read_u32(node, "xlnx,numstreams",
				   &xdev->numchannels);
	if (ret < 0)
		return ret;

	irq_num = irq_of_parse_and_map(node, 0);
	if (!irq_num) {
		dev_err(xdev->dev, "No valid irq found\n");
		return -EINVAL;
	}

	/* TODO: Clean up multiple interrupt handlers as there is one device */
	ret = devm_request_irq(xdev->dev, irq_num, xscd_dma_irq_handler,
			       IRQF_SHARED, "xilinx_scenechange DMA", xdev);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&xdev->common.channels);
	dma_cap_set(DMA_SLAVE, ddev->cap_mask);
	dma_cap_set(DMA_PRIVATE, ddev->cap_mask);
	ddev->device_alloc_chan_resources = xscd_dma_alloc_chan_resources;
	ddev->device_free_chan_resources = xscd_dma_free_chan_resources;
	ddev->device_tx_status = xscd_dma_tx_status;
	ddev->device_issue_pending = xscd_dma_issue_pending;
	ddev->device_terminate_all = xscd_dma_terminate_all;
	ddev->device_prep_interleaved_dma = xscd_dma_prep_interleaved;
	platform_set_drvdata(pdev, xdev);

	for (chan_id = 0; chan_id < xdev->numchannels; chan_id++) {
		chan = xscd_dma_chan_probe(xdev, chan_id);
		if (IS_ERR(chan)) {
			dev_err(xdev->dev, "failed to probe a channel\n");
			ret = PTR_ERR(chan);
			goto error;
		}
	}

	ret = dma_async_device_register(ddev);
	if (ret) {
		dev_err(xdev->dev, "failed to register the dma device\n");
		goto error;
	}

	ret = of_dma_controller_register(xdev->dev->of_node,
					 of_scdma_xilinx_xlate, xdev);
	if (ret) {
		dev_err(xdev->dev, "failed to register DMA to DT DMA helper\n");
		goto error_of_dma;
	}

	dev_info(&pdev->dev, "Xilinx Scene Change DMA is probed!\n");

	return 0;

error_of_dma:
	dma_async_device_unregister(ddev);

error:
	for (chan_id = 0; chan_id < xdev->numchannels; chan_id++) {
		if (xdev->chan[chan_id])
			xscd_dma_chan_remove(xdev->chan[chan_id]);
	}

	return ret;
}

static int xscd_dma_remove(struct platform_device *pdev)
{
	return 0;
}

static struct platform_driver xscd_dma_driver = {
	.probe		= xscd_dma_probe,
	.remove		= xscd_dma_remove,
	.driver		= {
		.name	= "xlnx,scdma",
	},
};

module_platform_driver(xscd_dma_driver);

MODULE_AUTHOR("Xilinx, Inc.");
MODULE_DESCRIPTION("Xilinx Scene Change Detect DMA driver");
MODULE_LICENSE("GPL v2");