// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx Scene Change Detection DMA driver
 *
 * Copyright (C) 2018 Xilinx, Inc.
 *
 * Authors: Anand Ashok Dumbre <anand.ashok.dumbre@xilinx.com>
 *          Satish Kumar Nagireddy <satish.nagireddy.nagireddy@xilinx.com>
 */
#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

#include "xilinx-scenechange.h"
30 * xscd_dma_irq_handler - scdma Interrupt handler
31 * @xscd: Pointer to the SCD device structure
33 void xscd_dma_irq_handler(struct xscd_device *xscd)
35 struct xscd_dma_chan *chan;
37 if (xscd->shared_data.memory_based) {
40 for (id = 0; id < xscd->numchannels; id++) {
41 chan = xscd->channels[id];
42 spin_lock(&chan->lock);
45 if (chan->en && (!list_empty(&chan->pending_list))) {
46 chan_en |= 1 << chan->id;
47 chan->valid_interrupt = true;
49 chan->valid_interrupt = false;
52 xscd_dma_start_transfer(chan);
53 spin_unlock(&chan->lock);
58 xscd_dma_chan_enable(chan, chan_en);
62 for (id = 0; id < xscd->numchannels; id++) {
63 chan = xscd->channels[id];
64 tasklet_schedule(&chan->tasklet);
/* -----------------------------------------------------------------------------
 * Descriptors alloc and free
 */
74 * xscd_dma_tx_descriptor - Allocate transaction descriptor
75 * @chan: Driver specific dma channel
77 * Return: The allocated descriptor on success and NULL on failure.
79 static struct xscd_dma_tx_descriptor *
80 xscd_dma_alloc_tx_descriptor(struct xscd_dma_chan *chan)
82 struct xscd_dma_tx_descriptor *desc;
84 desc = kzalloc(sizeof(*desc), GFP_KERNEL);
92 * xscd_dma_tx_submit - Submit DMA transaction
93 * @tx: Async transaction descriptor
95 * Return: cookie value on success and failure value on error
97 static dma_cookie_t xscd_dma_tx_submit(struct dma_async_tx_descriptor *tx)
99 struct xscd_dma_tx_descriptor *desc = to_xscd_dma_tx_descriptor(tx);
100 struct xscd_dma_chan *chan = to_xscd_dma_chan(tx->chan);
104 spin_lock_irqsave(&chan->lock, flags);
105 cookie = dma_cookie_assign(tx);
106 list_add_tail(&desc->node, &chan->pending_list);
107 spin_unlock_irqrestore(&chan->lock, flags);
113 * xscd_dma_chan_enable - Enable dma channel
114 * @chan: Driver specific dma channel
115 * @chan_en: Channels ready for transfer, it is a bitmap
117 void xscd_dma_chan_enable(struct xscd_dma_chan *chan, int chan_en)
119 xscd_write(chan->iomem, XSCD_CHAN_EN_OFFSET, chan_en);
123 * xscd_dma_complete_descriptor - Mark the active descriptor as complete
124 * This function is invoked with spinlock held
125 * @chan : xilinx dma channel
128 static void xscd_dma_complete_descriptor(struct xscd_dma_chan *chan)
130 struct xscd_dma_tx_descriptor *desc = chan->active_desc;
132 dma_cookie_complete(&desc->async_tx);
133 list_add_tail(&desc->node, &chan->done_list);
137 * xscd_dma_start_transfer - Starts dma transfer
138 * @chan: Driver specific channel struct pointer
140 void xscd_dma_start_transfer(struct xscd_dma_chan *chan)
142 struct xscd_dma_tx_descriptor *desc;
143 u32 chanoffset = chan->id * XSCD_CHAN_OFFSET;
151 if (chan->active_desc) {
152 xscd_dma_complete_descriptor(chan);
153 chan->active_desc = NULL;
156 if (chan->staged_desc) {
157 chan->active_desc = chan->staged_desc;
158 chan->staged_desc = NULL;
161 if (list_empty(&chan->pending_list))
164 desc = list_first_entry(&chan->pending_list,
165 struct xscd_dma_tx_descriptor, node);
167 /* Start the transfer */
168 xscd_write(chan->iomem, XSCD_ADDR_OFFSET + chanoffset,
169 desc->sw.luma_plane_addr);
171 list_del(&desc->node);
172 chan->staged_desc = desc;
176 * xscd_dma_free_desc_list - Free descriptors list
177 * @chan: Driver specific dma channel
178 * @list: List to parse and delete the descriptor
180 static void xscd_dma_free_desc_list(struct xscd_dma_chan *chan,
181 struct list_head *list)
183 struct xscd_dma_tx_descriptor *desc, *next;
185 list_for_each_entry_safe(desc, next, list, node) {
186 list_del(&desc->node);
192 * xscd_dma_free_descriptors - Free channel descriptors
193 * @chan: Driver specific dma channel
195 static void xscd_dma_free_descriptors(struct xscd_dma_chan *chan)
199 spin_lock_irqsave(&chan->lock, flags);
201 xscd_dma_free_desc_list(chan, &chan->pending_list);
202 xscd_dma_free_desc_list(chan, &chan->done_list);
203 kfree(chan->active_desc);
204 kfree(chan->staged_desc);
206 chan->staged_desc = NULL;
207 chan->active_desc = NULL;
208 INIT_LIST_HEAD(&chan->pending_list);
209 INIT_LIST_HEAD(&chan->done_list);
211 spin_unlock_irqrestore(&chan->lock, flags);
215 * scd_dma_chan_desc_cleanup - Clean channel descriptors
216 * @chan: Driver specific dma channel
218 static void xscd_dma_chan_desc_cleanup(struct xscd_dma_chan *chan)
220 struct xscd_dma_tx_descriptor *desc, *next;
223 spin_lock_irqsave(&chan->lock, flags);
225 list_for_each_entry_safe(desc, next, &chan->done_list, node) {
226 dma_async_tx_callback callback;
227 void *callback_param;
229 list_del(&desc->node);
231 /* Run the link descriptor callback function */
232 callback = desc->async_tx.callback;
233 callback_param = desc->async_tx.callback_param;
235 spin_unlock_irqrestore(&chan->lock, flags);
236 callback(callback_param);
237 spin_lock_irqsave(&chan->lock, flags);
243 spin_unlock_irqrestore(&chan->lock, flags);
247 * xscd_dma_chan_remove - Per Channel remove function
248 * @chan: Driver specific DMA channel
250 static void xscd_dma_chan_remove(struct xscd_dma_chan *chan)
252 list_del(&chan->common.device_node);
256 * xscd_dma_dma_prep_interleaved - prepare a descriptor for a
257 * DMA_SLAVE transaction
258 * @dchan: DMA channel
259 * @xt: Interleaved template pointer
260 * @flags: transfer ack flags
262 * Return: Async transaction descriptor on success and NULL on failure
264 static struct dma_async_tx_descriptor *
265 xscd_dma_prep_interleaved(struct dma_chan *dchan,
266 struct dma_interleaved_template *xt,
269 struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
270 struct xscd_dma_tx_descriptor *desc;
271 struct xscd_dma_desc *sw;
273 desc = xscd_dma_alloc_tx_descriptor(chan);
277 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
278 desc->async_tx.tx_submit = xscd_dma_tx_submit;
279 async_tx_ack(&desc->async_tx);
282 sw->vsize = xt->numf;
283 sw->hsize = xt->sgl[0].size;
284 sw->stride = xt->sgl[0].size + xt->sgl[0].icg;
285 sw->luma_plane_addr = xt->src_start;
287 return &desc->async_tx;
291 * xscd_dma_terminate_all - Halt the channel and free descriptors
292 * @dchan: Driver specific dma channel pointer
296 static int xscd_dma_terminate_all(struct dma_chan *dchan)
298 struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
301 xscd_dma_free_descriptors(chan);
303 /* Worst case frame-to-frame boundary, ensure frame output complete */
305 xscd_dma_reset(chan);
311 * xscd_dma_issue_pending - Issue pending transactions
312 * @dchan: DMA channel
314 static void xscd_dma_issue_pending(struct dma_chan *dchan)
316 struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
317 struct xscd_device *xscd = chan->xscd;
320 for (id = 0; id < xscd->numchannels; id++) {
321 chan = xscd->channels[id];
322 spin_lock(&chan->lock);
325 if (chan->en && (!list_empty(&chan->pending_list))) {
326 chan_en |= 1 << chan->id;
327 chan->valid_interrupt = true;
329 chan->valid_interrupt = false;
332 xscd_dma_start_transfer(chan);
333 spin_unlock(&chan->lock);
337 xscd_dma_reset(chan);
338 xscd_dma_chan_enable(chan, chan_en);
339 xscd_dma_start(chan);
343 static enum dma_status xscd_dma_tx_status(struct dma_chan *dchan,
345 struct dma_tx_state *txstate)
347 return dma_cookie_status(dchan, cookie, txstate);
351 * xscd_dma_halt - Halt dma channel
352 * @chan: Driver specific dma channel
354 void xscd_dma_halt(struct xscd_dma_chan *chan)
356 struct xscd_device *xscd = chan->xscd;
358 if (xscd->shared_data.memory_based)
359 xscd_clr(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
361 /* Streaming based */
362 xscd_clr(chan->iomem, XSCD_CTRL_OFFSET,
363 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
369 * xscd_dma_start - Start dma channel
370 * @chan: Driver specific dma channel
372 void xscd_dma_start(struct xscd_dma_chan *chan)
374 struct xscd_device *xscd = chan->xscd;
376 if (xscd->shared_data.memory_based)
377 xscd_set(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
379 /* Streaming based */
380 xscd_set(chan->iomem, XSCD_CTRL_OFFSET,
381 XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);
387 * xscd_dma_reset - Reset dma channel and enable interrupts
388 * @chan: Driver specific dma channel
390 void xscd_dma_reset(struct xscd_dma_chan *chan)
392 xscd_write(chan->iomem, XSCD_IE_OFFSET, XSCD_IE_AP_DONE);
393 xscd_write(chan->iomem, XSCD_GIE_OFFSET, XSCD_GIE_EN);
397 * xscd_dma_free_chan_resources - Free channel resources
398 * @dchan: DMA channel
400 static void xscd_dma_free_chan_resources(struct dma_chan *dchan)
402 struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
404 xscd_dma_free_descriptors(chan);
408 * xscd_dma_do_tasklet - Schedule completion tasklet
409 * @data: Pointer to the Xilinx scdma channel structure
411 static void xscd_dma_do_tasklet(unsigned long data)
413 struct xscd_dma_chan *chan = (struct xscd_dma_chan *)data;
415 xscd_dma_chan_desc_cleanup(chan);
419 * xscd_dma_alloc_chan_resources - Allocate channel resources
420 * @dchan: DMA channel
422 * Return: '0' on success and failure value on error
424 static int xscd_dma_alloc_chan_resources(struct dma_chan *dchan)
426 dma_cookie_init(dchan);
431 * of_scdma_xilinx_xlate - Translation function
432 * @dma_spec: Pointer to DMA specifier as found in the device tree
433 * @ofdma: Pointer to DMA controller data
435 * Return: DMA channel pointer on success and NULL on error
437 static struct dma_chan *of_scdma_xilinx_xlate(struct of_phandle_args *dma_spec,
438 struct of_dma *ofdma)
440 struct xscd_device *xscd = ofdma->of_dma_data;
441 u32 chan_id = dma_spec->args[0];
443 if (chan_id >= xscd->numchannels)
446 if (!xscd->channels[chan_id])
449 return dma_get_slave_channel(&xscd->channels[chan_id]->common);
452 static struct xscd_dma_chan *
453 xscd_dma_chan_probe(struct xscd_device *xscd, int chan_id)
455 struct xscd_dma_chan *chan = xscd->channels[chan_id];
460 spin_lock_init(&chan->lock);
461 INIT_LIST_HEAD(&chan->pending_list);
462 INIT_LIST_HEAD(&chan->done_list);
463 tasklet_init(&chan->tasklet, xscd_dma_do_tasklet,
464 (unsigned long)chan);
465 chan->common.device = &xscd->dma_device;
466 list_add_tail(&chan->common.device_node, &xscd->dma_device.channels);
472 * xscd_dma_init - Initialize the SCD DMA engine
473 * @xscd: Pointer to the SCD device structure
475 * Return: '0' on success and failure value on error
477 int xscd_dma_init(struct xscd_device *xscd)
479 struct device_node *node = xscd->dev->of_node;
480 struct dma_device *ddev = &xscd->dma_device;
481 struct xscd_dma_chan *chan;
482 unsigned int chan_id;
485 /* Initialize the DMA engine */
486 ddev->dev = xscd->dev;
487 dma_set_mask(xscd->dev, DMA_BIT_MASK(32));
489 ret = of_property_read_u32(node, "xlnx,numstreams",
492 INIT_LIST_HEAD(&ddev->channels);
493 dma_cap_set(DMA_SLAVE, ddev->cap_mask);
494 dma_cap_set(DMA_PRIVATE, ddev->cap_mask);
495 ddev->device_alloc_chan_resources = xscd_dma_alloc_chan_resources;
496 ddev->device_free_chan_resources = xscd_dma_free_chan_resources;
497 ddev->device_tx_status = xscd_dma_tx_status;
498 ddev->device_issue_pending = xscd_dma_issue_pending;
499 ddev->device_terminate_all = xscd_dma_terminate_all;
500 ddev->device_prep_interleaved_dma = xscd_dma_prep_interleaved;
502 for (chan_id = 0; chan_id < xscd->numchannels; chan_id++) {
503 chan = xscd_dma_chan_probe(xscd, chan_id);
505 dev_err(xscd->dev, "failed to probe a channel\n");
511 ret = dma_async_device_register(ddev);
513 dev_err(xscd->dev, "failed to register the dma device\n");
517 ret = of_dma_controller_register(xscd->dev->of_node,
518 of_scdma_xilinx_xlate, xscd);
520 dev_err(xscd->dev, "failed to register DMA to DT DMA helper\n");
524 dev_info(xscd->dev, "Xilinx Scene Change DMA is probed!\n");
528 dma_async_device_unregister(ddev);
531 for (chan_id = 0; chan_id < xscd->numchannels; chan_id++) {
532 if (xscd->channels[chan_id])
533 xscd_dma_chan_remove(xscd->channels[chan_id]);
539 * xscd_dma_cleanup - Clean up the SCD DMA engine
540 * @xscd: Pointer to the SCD device structure
542 * This function is the counterpart of xscd_dma_init() and cleans up the
543 * resources related to the DMA engine.
545 void xscd_dma_cleanup(struct xscd_device *xscd)
547 dma_async_device_unregister(&xscd->dma_device);
548 of_dma_controller_free(xscd->dev->of_node);