// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx Scene Change Detection DMA driver
 *
 * Copyright (C) 2018 Xilinx, Inc.
 *
 * Authors: Anand Ashok Dumbre <anand.ashok.dumbre@xilinx.com>
 *          Satish Kumar Nagireddy <satish.nagireddy.nagireddy@xilinx.com>
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

/* Private dmaengine header, needed for the dma_cookie_*() helpers */
#include "../../../dma/dmaengine.h"
#include "xilinx-scenechange.h"

/**
 * xscd_dma_irq_handler - SCD DMA interrupt handler
 * @xscd: Pointer to the SCD device structure
 *
 * Called from the main SCD interrupt handler to service the memory-based DMA
 * channels: completed transfers are retired, new transfers are staged and the
 * per-channel completion tasklets are scheduled.
 */
void xscd_dma_irq_handler(struct xscd_device *xscd)
{
        struct xscd_dma_chan *chan;

        if (xscd->shared_data.memory_based) {
                u32 chan_en = 0, id;

                for (id = 0; id < xscd->numchannels; id++) {
                        chan = xscd->channels[id];
                        spin_lock(&chan->lock);
                        chan->idle = true;

                        if (chan->en && !list_empty(&chan->pending_list)) {
                                chan_en |= BIT(chan->id);
                                chan->valid_interrupt = true;
                        } else {
                                chan->valid_interrupt = false;
                        }

                        xscd_dma_start_transfer(chan);
                        spin_unlock(&chan->lock);
                }

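                /*
                 * The reset/enable/start registers are shared by all
                 * channels, so the last channel visited by the loop above is
                 * used to address them. The same sequence appears in
                 * xscd_dma_issue_pending().
                 */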
                if (chan_en) {
                        xscd_dma_reset(chan);
                        xscd_dma_chan_enable(chan, chan_en);
                        xscd_dma_start(chan);
                }

                for (id = 0; id < xscd->numchannels; id++) {
                        chan = xscd->channels[id];
                        tasklet_schedule(&chan->tasklet);
                }
        }
}

/* -----------------------------------------------------------------------------
 * Descriptors alloc and free
 */

/**
 * xscd_dma_alloc_tx_descriptor - Allocate transaction descriptor
 * @chan: Driver specific dma channel
 *
 * Return: The allocated descriptor on success and NULL on failure.
 */
static struct xscd_dma_tx_descriptor *
xscd_dma_alloc_tx_descriptor(struct xscd_dma_chan *chan)
{
        struct xscd_dma_tx_descriptor *desc;

        desc = kzalloc(sizeof(*desc), GFP_KERNEL);
        if (!desc)
                return NULL;

        return desc;
}

/**
 * xscd_dma_tx_submit - Submit DMA transaction
 * @tx: Async transaction descriptor
 *
 * Return: cookie value on success and failure value on error
 */
static dma_cookie_t xscd_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
        struct xscd_dma_tx_descriptor *desc = to_xscd_dma_tx_descriptor(tx);
        struct xscd_dma_chan *chan = to_xscd_dma_chan(tx->chan);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);
        cookie = dma_cookie_assign(tx);
        list_add_tail(&desc->node, &chan->pending_list);
        spin_unlock_irqrestore(&chan->lock, flags);

        return cookie;
}

/**
 * xscd_dma_chan_enable - Enable dma channel
 * @chan: Driver specific dma channel
 * @chan_en: Bitmask of the channels ready for transfer
 */
void xscd_dma_chan_enable(struct xscd_dma_chan *chan, int chan_en)
{
        xscd_write(chan->iomem, XSCD_CHAN_EN_OFFSET, chan_en);
}

/**
 * xscd_dma_complete_descriptor - Mark the active descriptor as complete
 * @chan: Driver specific dma channel
 *
 * This function is invoked with the channel spinlock held.
 */
static void xscd_dma_complete_descriptor(struct xscd_dma_chan *chan)
{
        struct xscd_dma_tx_descriptor *desc = chan->active_desc;

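        /*
         * Move the completed descriptor to the done list; the completion
         * tasklet will run its callback and free it.
         */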
        dma_cookie_complete(&desc->async_tx);
        list_add_tail(&desc->node, &chan->done_list);
}

/**
 * xscd_dma_start_transfer - Start dma transfer
 * @chan: Driver specific channel struct pointer
 *
 * This function is invoked with the channel spinlock held.
 */
void xscd_dma_start_transfer(struct xscd_dma_chan *chan)
{
        struct xscd_dma_tx_descriptor *desc;
        u32 chanoffset = chan->id * XSCD_CHAN_OFFSET;

        if (!chan->en)
                return;

        if (!chan->idle)
                return;

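        /*
         * The hardware pipelines up to two descriptors: the "active" one,
         * whose frame is being processed, and the "staged" one, whose frame
         * address has already been programmed for the next run. Retire the
         * active descriptor, promote the staged one, then program the next
         * pending frame address.
         */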
        if (chan->active_desc) {
                xscd_dma_complete_descriptor(chan);
                chan->active_desc = NULL;
        }

        if (chan->staged_desc) {
                chan->active_desc = chan->staged_desc;
                chan->staged_desc = NULL;
        }

        if (list_empty(&chan->pending_list))
                return;

        desc = list_first_entry(&chan->pending_list,
                                struct xscd_dma_tx_descriptor, node);

        /* Program the address of the next frame and stage the descriptor */
        xscd_write(chan->iomem, XSCD_ADDR_OFFSET + chanoffset,
                   desc->sw.luma_plane_addr);

        list_del(&desc->node);
        chan->staged_desc = desc;
}

/**
 * xscd_dma_free_desc_list - Free descriptors list
 * @chan: Driver specific dma channel
 * @list: List of descriptors to free
 */
static void xscd_dma_free_desc_list(struct xscd_dma_chan *chan,
                                    struct list_head *list)
{
        struct xscd_dma_tx_descriptor *desc, *next;

        list_for_each_entry_safe(desc, next, list, node) {
                list_del(&desc->node);
                kfree(desc);
        }
}

/**
 * xscd_dma_free_descriptors - Free channel descriptors
 * @chan: Driver specific dma channel
 */
static void xscd_dma_free_descriptors(struct xscd_dma_chan *chan)
{
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);

        xscd_dma_free_desc_list(chan, &chan->pending_list);
        xscd_dma_free_desc_list(chan, &chan->done_list);
        kfree(chan->active_desc);
        kfree(chan->staged_desc);

        chan->staged_desc = NULL;
        chan->active_desc = NULL;
        INIT_LIST_HEAD(&chan->pending_list);
        INIT_LIST_HEAD(&chan->done_list);

        spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_chan_desc_cleanup - Clean channel descriptors
 * @chan: Driver specific dma channel
 */
static void xscd_dma_chan_desc_cleanup(struct xscd_dma_chan *chan)
{
        struct xscd_dma_tx_descriptor *desc, *next;
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);

        list_for_each_entry_safe(desc, next, &chan->done_list, node) {
                dma_async_tx_callback callback;
                void *callback_param;

                list_del(&desc->node);

                /* Run the descriptor's completion callback */
                callback = desc->async_tx.callback;
                callback_param = desc->async_tx.callback_param;
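                /*
                 * Drop the channel lock while invoking the callback, as it
                 * may resubmit a descriptor and take the lock again.
                 */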
                if (callback) {
                        spin_unlock_irqrestore(&chan->lock, flags);
                        callback(callback_param);
                        spin_lock_irqsave(&chan->lock, flags);
                }

                kfree(desc);
        }

        spin_unlock_irqrestore(&chan->lock, flags);
}

/**
 * xscd_dma_chan_remove - Per-channel remove function
 * @chan: Driver specific DMA channel
 */
static void xscd_dma_chan_remove(struct xscd_dma_chan *chan)
{
        list_del(&chan->common.device_node);
}

/**
 * xscd_dma_prep_interleaved - Prepare a descriptor for an interleaved
 * DMA transaction
 * @dchan: DMA channel
 * @xt: Interleaved template pointer
 * @flags: transfer ack flags
 *
 * Return: Async transaction descriptor on success and NULL on failure
 */
static struct dma_async_tx_descriptor *
xscd_dma_prep_interleaved(struct dma_chan *dchan,
                          struct dma_interleaved_template *xt,
                          unsigned long flags)
{
        struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
        struct xscd_dma_tx_descriptor *desc;
        struct xscd_dma_desc *sw;

        desc = xscd_dma_alloc_tx_descriptor(chan);
        if (!desc)
                return NULL;

        dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
        desc->async_tx.tx_submit = xscd_dma_tx_submit;
        async_tx_ack(&desc->async_tx);

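        /*
         * Only the luma plane is handed to the scene change detection core:
         * numf is the number of lines, sgl[0].size the line length in bytes
         * and size + icg the line stride of the source buffer.
         */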
        sw = &desc->sw;
        sw->vsize = xt->numf;
        sw->hsize = xt->sgl[0].size;
        sw->stride = xt->sgl[0].size + xt->sgl[0].icg;
        sw->luma_plane_addr = xt->src_start;

        return &desc->async_tx;
}

/**
 * xscd_dma_terminate_all - Halt the channel and free descriptors
 * @dchan: Driver specific dma channel pointer
 *
 * Return: 0
 */
static int xscd_dma_terminate_all(struct dma_chan *dchan)
{
        struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

        xscd_dma_halt(chan);
        xscd_dma_free_descriptors(chan);

        /* Worst case frame-to-frame boundary, ensure frame output complete */
        msleep(50);
        xscd_dma_reset(chan);

        return 0;
}

/**
 * xscd_dma_issue_pending - Issue pending transactions
 * @dchan: DMA channel
 *
 * Stage the pending transfers of all enabled channels and start the shared
 * SCD core, not only the channel given by @dchan.
 */
static void xscd_dma_issue_pending(struct dma_chan *dchan)
{
        struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);
        struct xscd_device *xscd = chan->xscd;
        u32 chan_en = 0, id;
        unsigned long flags;

        for (id = 0; id < xscd->numchannels; id++) {
                chan = xscd->channels[id];

                /*
                 * The lock is also taken from hard interrupt context, so
                 * interrupts must be disabled here.
                 */
                spin_lock_irqsave(&chan->lock, flags);
                chan->idle = true;

                if (chan->en && !list_empty(&chan->pending_list)) {
                        chan_en |= BIT(chan->id);
                        chan->valid_interrupt = true;
                } else {
                        chan->valid_interrupt = false;
                }

                xscd_dma_start_transfer(chan);
                spin_unlock_irqrestore(&chan->lock, flags);
        }

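        /*
         * As in xscd_dma_irq_handler(), the reset/enable/start registers are
         * shared by all channels and are addressed through the last channel
         * visited by the loop above.
         */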
        if (chan_en) {
                xscd_dma_reset(chan);
                xscd_dma_chan_enable(chan, chan_en);
                xscd_dma_start(chan);
        }
}

static enum dma_status xscd_dma_tx_status(struct dma_chan *dchan,
                                          dma_cookie_t cookie,
                                          struct dma_tx_state *txstate)
{
        return dma_cookie_status(dchan, cookie, txstate);
}

/**
 * xscd_dma_halt - Halt dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_halt(struct xscd_dma_chan *chan)
{
        struct xscd_device *xscd = chan->xscd;

        if (xscd->shared_data.memory_based)
                xscd_clr(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
        else
                /* Streaming based */
                xscd_clr(chan->iomem, XSCD_CTRL_OFFSET,
                         XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);

        chan->idle = true;
}

/**
 * xscd_dma_start - Start dma channel
 * @chan: Driver specific dma channel
 */
void xscd_dma_start(struct xscd_dma_chan *chan)
{
        struct xscd_device *xscd = chan->xscd;

        if (xscd->shared_data.memory_based)
                xscd_set(chan->iomem, XSCD_CTRL_OFFSET, XSCD_CTRL_AP_START);
        else
                /* Streaming based */
                xscd_set(chan->iomem, XSCD_CTRL_OFFSET,
                         XSCD_CTRL_AP_START | XSCD_CTRL_AUTO_RESTART);

        chan->idle = false;
}

/**
 * xscd_dma_reset - Reset dma channel and enable interrupts
 * @chan: Driver specific dma channel
 */
void xscd_dma_reset(struct xscd_dma_chan *chan)
{
        xscd_write(chan->iomem, XSCD_IE_OFFSET, XSCD_IE_AP_DONE);
        xscd_write(chan->iomem, XSCD_GIE_OFFSET, XSCD_GIE_EN);
}

/**
 * xscd_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void xscd_dma_free_chan_resources(struct dma_chan *dchan)
{
        struct xscd_dma_chan *chan = to_xscd_dma_chan(dchan);

        xscd_dma_free_descriptors(chan);
}

/**
 * xscd_dma_do_tasklet - Completion tasklet handler
 * @data: Pointer to the Xilinx scdma channel structure
 */
static void xscd_dma_do_tasklet(unsigned long data)
{
        struct xscd_dma_chan *chan = (struct xscd_dma_chan *)data;

        xscd_dma_chan_desc_cleanup(chan);
}

/**
 * xscd_dma_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Return: always 0, the function cannot fail
 */
static int xscd_dma_alloc_chan_resources(struct dma_chan *dchan)
{
        dma_cookie_init(dchan);
        return 0;
}

/**
 * of_scdma_xilinx_xlate - Translation function
 * @dma_spec: Pointer to DMA specifier as found in the device tree
 * @ofdma: Pointer to DMA controller data
 *
 * Return: DMA channel pointer on success and NULL on error
 */
static struct dma_chan *of_scdma_xilinx_xlate(struct of_phandle_args *dma_spec,
                                              struct of_dma *ofdma)
{
        struct xscd_device *xscd = ofdma->of_dma_data;
        u32 chan_id = dma_spec->args[0];

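        /* The DT specifier's single cell selects the SCD channel index. */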
        if (chan_id >= xscd->numchannels)
                return NULL;

        if (!xscd->channels[chan_id])
                return NULL;

        return dma_get_slave_channel(&xscd->channels[chan_id]->common);
}

static struct xscd_dma_chan *
xscd_dma_chan_probe(struct xscd_device *xscd, int chan_id)
{
        struct xscd_dma_chan *chan = xscd->channels[chan_id];

        chan->xscd = xscd;
        chan->idle = true;

        spin_lock_init(&chan->lock);
        INIT_LIST_HEAD(&chan->pending_list);
        INIT_LIST_HEAD(&chan->done_list);
        tasklet_init(&chan->tasklet, xscd_dma_do_tasklet,
                     (unsigned long)chan);
        chan->common.device = &xscd->dma_device;
        list_add_tail(&chan->common.device_node, &xscd->dma_device.channels);

        return chan;
}

/**
 * xscd_dma_init - Initialize the SCD DMA engine
 * @xscd: Pointer to the SCD device structure
 *
 * Return: '0' on success and failure value on error
 */
int xscd_dma_init(struct xscd_device *xscd)
{
        struct device_node *node = xscd->dev->of_node;
        struct dma_device *ddev = &xscd->dma_device;
        struct xscd_dma_chan *chan;
        unsigned int chan_id;
        int ret;

        /* Initialize the DMA engine */
        ddev->dev = xscd->dev;
        dma_set_mask(xscd->dev, DMA_BIT_MASK(32));

        ret = of_property_read_u32(node, "xlnx,numstreams",
                                   &xscd->numchannels);
        if (ret < 0)
                return ret;

        INIT_LIST_HEAD(&ddev->channels);
        dma_cap_set(DMA_SLAVE, ddev->cap_mask);
        dma_cap_set(DMA_PRIVATE, ddev->cap_mask);
        ddev->device_alloc_chan_resources = xscd_dma_alloc_chan_resources;
        ddev->device_free_chan_resources = xscd_dma_free_chan_resources;
        ddev->device_tx_status = xscd_dma_tx_status;
        ddev->device_issue_pending = xscd_dma_issue_pending;
        ddev->device_terminate_all = xscd_dma_terminate_all;
        ddev->device_prep_interleaved_dma = xscd_dma_prep_interleaved;

        for (chan_id = 0; chan_id < xscd->numchannels; chan_id++) {
                chan = xscd_dma_chan_probe(xscd, chan_id);
                if (IS_ERR(chan)) {
                        dev_err(xscd->dev, "failed to probe a channel\n");
                        ret = PTR_ERR(chan);
                        goto error;
                }
        }

        ret = dma_async_device_register(ddev);
        if (ret) {
                dev_err(xscd->dev, "failed to register the dma device\n");
                goto error;
        }

        ret = of_dma_controller_register(xscd->dev->of_node,
                                         of_scdma_xilinx_xlate, xscd);
        if (ret) {
                dev_err(xscd->dev, "failed to register DMA to DT DMA helper\n");
                goto error_of_dma;
        }

        dev_info(xscd->dev, "Xilinx Scene Change DMA is probed\n");
        return 0;

error_of_dma:
        dma_async_device_unregister(ddev);

error:
        for (chan_id = 0; chan_id < xscd->numchannels; chan_id++) {
                if (xscd->channels[chan_id])
                        xscd_dma_chan_remove(xscd->channels[chan_id]);
        }
        return ret;
}

/**
 * xscd_dma_cleanup - Clean up the SCD DMA engine
 * @xscd: Pointer to the SCD device structure
 *
 * This function is the counterpart of xscd_dma_init() and cleans up the
 * resources related to the DMA engine.
 */
void xscd_dma_cleanup(struct xscd_device *xscd)
{
        dma_async_device_unregister(&xscd->dma_device);
        of_dma_controller_free(xscd->dev->of_node);
}