// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx Scene Change Detection DMA driver
 *
 * Copyright (C) 2018 Xilinx, Inc.
 *
 * Authors: Anand Ashok Dumbre <anand.ashok.dumbre@xilinx.com>
 *          Satish Kumar Nagireddy <satish.nagireddy.nagireddy@xilinx.com>
 */

#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/of_dma.h>
#include <linux/slab.h>
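
/*
 * The dma_cookie_*() helpers used throughout this file live in the
 * dmaengine core's private header, hence the relative include below.
 */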
#include "../../../dma/dmaengine.h"

#include "xilinx-scenechange.h"

/**
 * xscd_dma_irq_handler - scdma Interrupt handler
 * @xscd: Pointer to the SCD device structure
 */
void xscd_dma_irq_handler(struct xscd_device *xscd)
{
	struct xscd_dma_chan *chan;

	if (xscd->memory_based) {
		u32 chan_en = 0, id;

		for (id = 0; id < xscd->num_streams; id++) {
			chan = xscd->channels[id];
			spin_lock(&chan->lock);
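			/*
			 * A channel takes a valid interrupt only while it is
			 * enabled and has work queued; collect the enable
			 * bits of all such channels so the core can be
			 * re-armed with a single register write.
			 */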
			if (chan->en && (!list_empty(&chan->pending_list))) {
				chan_en |= 1 << chan->id;
				chan->valid_interrupt = true;
			} else {
				chan->valid_interrupt = false;
			}

			xscd_dma_start_transfer(chan);
			spin_unlock(&chan->lock);
		}

		xscd_dma_chan_enable(chan, chan_en);
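
		/*
		 * Completion callbacks run from the per-channel tasklet to
		 * keep this hard interrupt path short.
		 */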
		for (id = 0; id < xscd->num_streams; id++) {
			chan = xscd->channels[id];
			tasklet_schedule(&chan->tasklet);
		}
	}
}

/* -----------------------------------------------------------------------------
 * Descriptors alloc and free
 */

/**
 * xscd_dma_tx_submit - Submit DMA transaction
 * @tx: Async transaction descriptor
 *
 * Return: cookie value on success and failure value on error
 */
static dma_cookie_t xscd_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct xscd_dma_tx_descriptor *desc = to_xscd_dma_tx_descriptor(tx);
	struct xscd_dma_chan *chan = to_xscd_dma_chan(tx->chan);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);
	cookie = dma_cookie_assign(tx);
	list_add_tail(&desc->node, &chan->pending_list);
	spin_unlock_irqrestore(&chan->lock, flags);

	return cookie;
}

/**
 * xscd_dma_chan_enable - Enable dma channel
 * @chan: Driver specific dma channel
 * @chan_en: Bitmap of channels ready for transfer
 */
void xscd_dma_chan_enable(struct xscd_dma_chan *chan, int chan_en)
{
	xscd_write(chan->xscd->iomem, XSCD_CHAN_EN_OFFSET, chan_en);
}

/**
 * xscd_dma_complete_descriptor - Mark the active descriptor as complete
 * @chan: Driver specific dma channel
 *
 * This function is invoked with the channel spinlock held.
 */
static void xscd_dma_complete_descriptor(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc = chan->active_desc;

	dma_cookie_complete(&desc->async_tx);
	list_add_tail(&desc->node, &chan->done_list);
}

/**
 * xscd_dma_start_transfer - Starts dma transfer
 * @chan: Driver specific channel struct pointer
 */
void xscd_dma_start_transfer(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc;
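
	/*
	 * Descriptors move through a two-stage pipeline: a pending
	 * descriptor's buffer address is first staged into the hardware,
	 * and it becomes the active (in-flight) descriptor on the next
	 * transfer start, once the previous active descriptor completes.
	 */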
	if (chan->active_desc) {
		xscd_dma_complete_descriptor(chan);
		chan->active_desc = NULL;
	}

	if (chan->staged_desc) {
		chan->active_desc = chan->staged_desc;
		chan->staged_desc = NULL;
	}

	if (list_empty(&chan->pending_list))
		return;

	desc = list_first_entry(&chan->pending_list,
				struct xscd_dma_tx_descriptor, node);

	/* Start the transfer */
	xscd_write(chan->iomem, XSCD_ADDR_OFFSET, desc->sw.luma_plane_addr);

	list_del(&desc->node);
	chan->staged_desc = desc;
}

/**
 * xscd_dma_free_desc_list - Free descriptors list
 * @chan: Driver specific dma channel
 * @list: List to parse and delete the descriptor
 */
static void xscd_dma_free_desc_list(struct xscd_dma_chan *chan,
				    struct list_head *list)
{
	struct xscd_dma_tx_descriptor *desc, *next;

	list_for_each_entry_safe(desc, next, list, node) {
		list_del(&desc->node);
		kfree(desc);
	}
}

/**
 * xscd_dma_free_descriptors - Free channel descriptors
 * @chan: Driver specific dma channel
 */
static void xscd_dma_free_descriptors(struct xscd_dma_chan *chan)
{
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	xscd_dma_free_desc_list(chan, &chan->pending_list);
	xscd_dma_free_desc_list(chan, &chan->done_list);
	kfree(chan->active_desc);
	kfree(chan->staged_desc);
	chan->staged_desc = NULL;
	chan->active_desc = NULL;
	INIT_LIST_HEAD(&chan->pending_list);
	INIT_LIST_HEAD(&chan->done_list);

	spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_chan_desc_cleanup - Clean channel descriptors
 * @chan: Driver specific dma channel
 */
static void xscd_dma_chan_desc_cleanup(struct xscd_dma_chan *chan)
{
	struct xscd_dma_tx_descriptor *desc, *next;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	list_for_each_entry_safe(desc, next, &chan->done_list, node) {
		dma_async_tx_callback callback;
		void *callback_param;

		list_del(&desc->node);

		/* Run the link descriptor callback function */
		callback = desc->async_tx.callback;
		callback_param = desc->async_tx.callback_param;
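		/*
		 * Drop the channel lock while running the callback: it may
		 * call back into the dmaengine API (for instance to submit
		 * the next frame) and must not deadlock on this lock.
		 */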
		if (callback) {
			spin_unlock_irqrestore(&chan->lock, flags);
			callback(callback_param);
			spin_lock_irqsave(&chan->lock, flags);
		}

		kfree(desc);
	}

	spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_prep_interleaved - Prepare a descriptor for an interleaved
 * DMA transaction
 * @dchan: DMA channel
 * @xt: Interleaved template pointer
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xscd_dma_prep_interleaved(struct dma_chan *dchan,
			  struct dma_interleaved_template *xt,
			  unsigned long flags)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
	struct xscd_dma_tx_descriptor *desc;
	struct xscd_dma_desc *sw;

	desc = kzalloc(sizeof(*desc), GFP_KERNEL);
	if (!desc)
		return NULL;

	dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
	desc->async_tx.tx_submit = xscd_dma_tx_submit;
	async_tx_ack(&desc->async_tx);

	sw = &desc->sw;
	sw->vsize = xt->numf;
	sw->hsize = xt->sgl[0].size;
	sw->stride = xt->sgl[0].size + xt->sgl[0].icg;
	sw->luma_plane_addr = xt->src_start;

	return &desc->async_tx;
}
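
/*
 * For reference, a dmaengine client would describe one luma frame to this
 * driver roughly as follows. This is an illustrative sketch, not code from
 * this driver: luma_dma_addr, width, height and stride are placeholders,
 * and only the template fields actually read above are shown.
 *
 *	struct dma_interleaved_template *xt;
 *
 *	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
 *	xt->src_start = luma_dma_addr;   - DMA address of the luma plane
 *	xt->numf = height;               - number of lines, maps to sw->vsize
 *	xt->frame_size = 1;              - one chunk per line
 *	xt->sgl[0].size = width;         - bytes per line, maps to sw->hsize
 *	xt->sgl[0].icg = stride - width; - gap from end of line to next line
 *	desc = dmaengine_prep_interleaved_dma(dchan, xt, flags);
 */
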
/**
 * xscd_dma_terminate_all - Halt the channel and free descriptors
 * @dchan: Driver specific dma channel pointer
 *
 * Return: 0 always
 */
static int xscd_dma_terminate_all(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

	xscd_dma_halt(chan);
	xscd_dma_free_descriptors(chan);

	/* Worst case frame-to-frame boundary, ensure frame output complete */
	msleep(50);
	xscd_dma_reset(chan);

	return 0;
}

/**
 * xscd_dma_issue_pending - Issue pending transactions
 * @dchan: DMA channel
 */
static void xscd_dma_issue_pending(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
	struct xscd_device *xscd = chan->xscd;
	u32 chan_en = 0, id;
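
	/*
	 * This mirrors the memory-mapped half of xscd_dma_irq_handler():
	 * collect the enable bits of all channels with queued work, stage
	 * their transfers, then re-arm and start the engine.
	 */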
	for (id = 0; id < xscd->num_streams; id++) {
		chan = xscd->channels[id];
		spin_lock(&chan->lock);

		if (chan->en && (!list_empty(&chan->pending_list))) {
			chan_en |= 1 << chan->id;
			chan->valid_interrupt = true;
		} else {
			chan->valid_interrupt = false;
		}

		xscd_dma_start_transfer(chan);
		spin_unlock(&chan->lock);
	}

	xscd_dma_reset(chan);
	xscd_dma_chan_enable(chan, chan_en);
	xscd_dma_start(chan);
}

static enum dma_status xscd_dma_tx_status(struct dma_chan *dchan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	return dma_cookie_status(dchan, cookie, txstate);
}

/**
 * xscd_dma_halt - Halt dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_halt(struct xscd_dma_chan *chan)
{
	struct xscd_device *xscd = chan->xscd;
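
	/*
	 * In streaming mode the core re-arms itself through auto-restart,
	 * so stopping it means clearing AUTO_RESTART along with AP_START;
	 * memory mode only needs AP_START cleared.
	 */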
	if (xscd->memory_based)
		xscd_clr(chan->xscd->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START);
	else
		/* Streaming based */
		xscd_clr(chan->xscd->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
}

/**
 * xscd_dma_start - Start dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_start(struct xscd_dma_chan *chan)
{
	struct xscd_device *xscd = chan->xscd;

	if (xscd->memory_based)
		xscd_set(chan->xscd->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START);
	else
		/* Streaming based */
		xscd_set(chan->xscd->iomem, XSCD_CTRL_OFFSET,
			 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
}

/**
 * xscd_dma_reset - Reset dma channel and enable interrupts
 * @chan: Driver specific dma channel
 */
void xscd_dma_reset(struct xscd_dma_chan *chan)
{
	xscd_write(chan->xscd->iomem, XSCD_IE_OFFSET, XSCD_IE_AP_DONE);
	xscd_write(chan->xscd->iomem, XSCD_GIE_OFFSET, XSCD_GIE_EN);
}

/**
 * xscd_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xscd_dma_free_chan_resources(struct dma_chan *dchan)
{
	struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

	xscd_dma_free_descriptors(chan);
}

/**
 * xscd_dma_do_tasklet - Per channel completion tasklet
 * @data: Pointer to the Xilinx scdma channel structure
 */
static void xscd_dma_do_tasklet(unsigned long data)
{
	struct xscd_dma_chan *chan = (struct xscd_dma_chan *)data;

	xscd_dma_chan_desc_cleanup(chan);
}

/**
 * xscd_dma_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: '0' on success and failure value on error
 */
static int xscd_dma_alloc_chan_resources(struct dma_chan *dchan)
{
	dma_cookie_init(dchan);

	return 0;
}

/**
 * of_scdma_xilinx_xlate - Translation function
 * @dma_spec: Pointer to DMA specifier as found in the device tree
 * @ofdma: Pointer to DMA controller data
 *
 * Return: DMA channel pointer on success and NULL on error
 */
static struct dma_chan *of_scdma_xilinx_xlate(struct of_phandle_args *dma_spec,
					      struct of_dma *ofdma)
{
	struct xscd_device *xscd = ofdma->of_dma_data;
	u32 chan_id = dma_spec->args[0];

	if (chan_id >= xscd->num_streams)
		return NULL;

	if (!xscd->channels[chan_id])
		return NULL;

	return dma_get_slave_channel(&xscd->channels[chan_id]->common);
}
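
/*
 * The translation above consumes a single specifier cell holding the
 * stream index, so a client device tree node would reference a channel
 * along these lines (illustrative snippet; the "scd" label is a
 * placeholder):
 *
 *	dmas = <&scd 0>;
 */
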
static void xscd_dma_chan_init(struct xscd_device *xscd, int chan_id)
{
	struct xscd_dma_chan *chan = &xscd->chans[chan_id].dmachan;

	chan->id = chan_id;
	chan->iomem = xscd->iomem + chan->id * XSCD_CHAN_OFFSET;
	chan->xscd = xscd;
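
	/*
	 * chan->iomem above points at this channel's private register
	 * window, XSCD_CHAN_OFFSET bytes per channel; the rest of this
	 * function is generic dmaengine bookkeeping.
	 */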
	xscd->channels[chan->id] = chan;

	spin_lock_init(&chan->lock);
	INIT_LIST_HEAD(&chan->pending_list);
	INIT_LIST_HEAD(&chan->done_list);
	tasklet_init(&chan->tasklet, xscd_dma_do_tasklet,
		     (unsigned long)chan);
	chan->common.device = &xscd->dma_device;
	list_add_tail(&chan->common.device_node, &xscd->dma_device.channels);
}

/**
 * xscd_dma_chan_remove - Per Channel remove function
 * @chan: Driver specific DMA channel
 */
static void xscd_dma_chan_remove(struct xscd_dma_chan *chan)
{
	list_del(&chan->common.device_node);
}

/**
 * xscd_dma_init - Initialize the SCD DMA engine
 * @xscd: Pointer to the SCD device structure
 *
 * Return: '0' on success and failure value on error
 */
int xscd_dma_init(struct xscd_device *xscd)
{
	struct dma_device *ddev = &xscd->dma_device;
	unsigned int chan_id;
	int ret;

	/* Initialize the DMA engine */
	ddev->dev = xscd->dev;
	dma_set_mask(xscd->dev, DMA_BIT_MASK(32));

	INIT_LIST_HEAD(&ddev->channels);
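	/*
	 * DMA_PRIVATE keeps these channels out of the public channel pool;
	 * they are only handed out through the OF translation registered
	 * below.
	 */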
	dma_cap_set(DMA_SLAVE, ddev->cap_mask);
	dma_cap_set(DMA_PRIVATE, ddev->cap_mask);
	ddev->device_alloc_chan_resources = xscd_dma_alloc_chan_resources;
	ddev->device_free_chan_resources = xscd_dma_free_chan_resources;
	ddev->device_tx_status = xscd_dma_tx_status;
	ddev->device_issue_pending = xscd_dma_issue_pending;
	ddev->device_terminate_all = xscd_dma_terminate_all;
	ddev->device_prep_interleaved_dma = xscd_dma_prep_interleaved;

	for (chan_id = 0; chan_id < xscd->num_streams; chan_id++)
		xscd_dma_chan_init(xscd, chan_id);

	ret = dma_async_device_register(ddev);
	if (ret) {
		dev_err(xscd->dev, "failed to register the dma device\n");
		goto error;
	}

	ret = of_dma_controller_register(xscd->dev->of_node,
					 of_scdma_xilinx_xlate, xscd);
	if (ret) {
		dev_err(xscd->dev, "failed to register DMA to DT DMA helper\n");
		goto error_of_dma;
	}

	dev_info(xscd->dev, "Xilinx Scene Change DMA is initialized!\n");
	return 0;

error_of_dma:
	dma_async_device_unregister(ddev);

error:
	for (chan_id = 0; chan_id < xscd->num_streams; chan_id++)
		xscd_dma_chan_remove(xscd->channels[chan_id]);

	return ret;
}

/**
 * xscd_dma_cleanup - Clean up the SCD DMA engine
 * @xscd: Pointer to the SCD device structure
 *
 * This function is the counterpart of xscd_dma_init() and cleans up the
 * resources related to the DMA engine.
 */
void xscd_dma_cleanup(struct xscd_device *xscd)
{
	dma_async_device_unregister(&xscd->dma_device);
	of_dma_controller_free(xscd->dev->of_node);
}