]> rtime.felk.cvut.cz Git - linux-imx.git/blob - drivers/dma/pxp/pxp_dma.c
mx53: Enable building without PM_CONFIG, CONFIG_NO_HZ, CONFIG_PREEMPT etc.
[linux-imx.git] / drivers / dma / pxp / pxp_dma.c
1 /*
2  * Copyright (C) 2010-2011 Freescale Semiconductor, Inc.
3  *
4  * This program is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU General Public License as published by
6  * the Free Software Foundation; either version 2 of the License, or
7  * (at your option) any later version.
8  *
9  * This program is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12  * GNU General Public License for more details.
13  *
14  * You should have received a copy of the GNU General Public License
15  * along with this program; if not, write to the Free Software
16  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
17  *
18  */
19 /*
20  * Based on STMP378X PxP driver
21  * Copyright 2008-2009 Embedded Alley Solutions, Inc All Rights Reserved.
22  */
23
24 #include <linux/dma-mapping.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
27 #include <linux/io.h>
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mutex.h>
31 #include <linux/platform_device.h>
32 #include <linux/slab.h>
33 #include <linux/vmalloc.h>
34 #include <linux/dmaengine.h>
35 #include <linux/pxp_dma.h>
36 #include <linux/timer.h>
37 #include <linux/clk.h>
38 #include <linux/workqueue.h>
39 #include <linux/sched.h>
40
41 #include "regs-pxp.h"
42
/* Max downscale factor in S0SCALE fixed-point units (0x1000 == 1.0, so 4x) */
#define PXP_DOWNSCALE_THRESHOLD         0x4000

/* Global list of pxp_channel entries with pending work (see dostart_work) */
static LIST_HEAD(head);
/* Idle interval (ms) before the clock-off timer fires; see pxp_clkoff_timer */
static int timeout_in_ms = 600;
47
/* Thin wrapper embedding the dmaengine device; recovered via to_pxp_dma() */
struct pxp_dma {
        struct dma_device dma;
};
51
/* Per-device driver state for one PXP engine instance */
struct pxps {
        struct platform_device *pdev;
        struct clk *clk;
        void __iomem *base;     /* mapped PXP register window */
        int irq;                /* PXP IRQ to the CPU */

        spinlock_t lock;        /* guards pxp_ongoing and the channel list */
        struct mutex clk_mutex; /* serializes clock on/off transitions */
        int clk_stat;           /* CLK_STAT_ON/OFF, cached under clk_mutex */
#define CLK_STAT_OFF            0
#define CLK_STAT_ON             1
        int pxp_ongoing;        /* non-zero while the engine is processing */
        int lut_state;          /* last programmed LUT transform (cache) */

        struct device *dev;
        struct pxp_dma pxp_dma;
        struct pxp_channel channel[NR_PXP_VIRT_CHANNEL];
        /* completion wait queue — waker not visible in this chunk; confirm */
        wait_queue_head_t done;
        struct work_struct work;        /* deferred clock-off (clkoff_callback) */

        /* describes most recent processing configuration */
        struct pxp_config_data pxp_conf_state;

        /* to turn clock off when pxp is inactive */
        struct timer_list clk_timer;
};
78
/* container_of helpers to walk between the driver's nested structures */
#define to_pxp_dma(d) container_of(d, struct pxp_dma, dma)
#define to_tx_desc(tx) container_of(tx, struct pxp_tx_desc, txd)
#define to_pxp_channel(d) container_of(d, struct pxp_channel, dma_chan)
#define to_pxp(id) container_of(id, struct pxps, pxp_dma)

#define PXP_DEF_BUFS    2
#define PXP_MIN_PIX     8

/* True while the PXP completion IRQ status bit is still clear */
#define PXP_WAITCON     ((__raw_readl(pxp->base + HW_PXP_STAT) & \
                                BM_PXP_STAT_IRQ) != BM_PXP_STAT_IRQ)
89
/* Pixel formats accepted on the S0 (primary input) path */
static uint32_t pxp_s0_formats[] = {
        PXP_PIX_FMT_RGB24,
        PXP_PIX_FMT_RGB565,
        PXP_PIX_FMT_RGB555,
        PXP_PIX_FMT_YUV420P,
        PXP_PIX_FMT_YUV422P,
};
97
98 /*
99  * PXP common functions
100  */
101 static void dump_pxp_reg(struct pxps *pxp)
102 {
103         dev_dbg(pxp->dev, "PXP_CTRL 0x%x",
104                 __raw_readl(pxp->base + HW_PXP_CTRL));
105         dev_dbg(pxp->dev, "PXP_STAT 0x%x",
106                 __raw_readl(pxp->base + HW_PXP_STAT));
107         dev_dbg(pxp->dev, "PXP_OUTBUF 0x%x",
108                 __raw_readl(pxp->base + HW_PXP_OUTBUF));
109         dev_dbg(pxp->dev, "PXP_OUTBUF2 0x%x",
110                 __raw_readl(pxp->base + HW_PXP_OUTBUF2));
111         dev_dbg(pxp->dev, "PXP_OUTSIZE 0x%x",
112                 __raw_readl(pxp->base + HW_PXP_OUTSIZE));
113         dev_dbg(pxp->dev, "PXP_S0BUF 0x%x",
114                 __raw_readl(pxp->base + HW_PXP_S0BUF));
115         dev_dbg(pxp->dev, "PXP_S0UBUF 0x%x",
116                 __raw_readl(pxp->base + HW_PXP_S0UBUF));
117         dev_dbg(pxp->dev, "PXP_S0VBUF 0x%x",
118                 __raw_readl(pxp->base + HW_PXP_S0VBUF));
119         dev_dbg(pxp->dev, "PXP_S0PARAM 0x%x",
120                 __raw_readl(pxp->base + HW_PXP_S0PARAM));
121         dev_dbg(pxp->dev, "PXP_S0BACKGROUND 0x%x",
122                 __raw_readl(pxp->base + HW_PXP_S0BACKGROUND));
123         dev_dbg(pxp->dev, "PXP_S0CROP 0x%x",
124                 __raw_readl(pxp->base + HW_PXP_S0CROP));
125         dev_dbg(pxp->dev, "PXP_S0SCALE 0x%x",
126                 __raw_readl(pxp->base + HW_PXP_S0SCALE));
127         dev_dbg(pxp->dev, "PXP_OLn 0x%x",
128                 __raw_readl(pxp->base + HW_PXP_OLn(0)));
129         dev_dbg(pxp->dev, "PXP_OLnSIZE 0x%x",
130                 __raw_readl(pxp->base + HW_PXP_OLnSIZE(0)));
131         dev_dbg(pxp->dev, "PXP_OLnPARAM 0x%x",
132                 __raw_readl(pxp->base + HW_PXP_OLnPARAM(0)));
133         dev_dbg(pxp->dev, "PXP_CSCCOEF0 0x%x",
134                 __raw_readl(pxp->base + HW_PXP_CSCCOEF0));
135         dev_dbg(pxp->dev, "PXP_CSCCOEF1 0x%x",
136                 __raw_readl(pxp->base + HW_PXP_CSCCOEF1));
137         dev_dbg(pxp->dev, "PXP_CSCCOEF2 0x%x",
138                 __raw_readl(pxp->base + HW_PXP_CSCCOEF2));
139         dev_dbg(pxp->dev, "PXP_CSC2CTRL 0x%x",
140                 __raw_readl(pxp->base + HW_PXP_CSC2CTRL));
141         dev_dbg(pxp->dev, "PXP_CSC2COEF0 0x%x",
142                 __raw_readl(pxp->base + HW_PXP_CSC2COEF0));
143         dev_dbg(pxp->dev, "PXP_CSC2COEF1 0x%x",
144                 __raw_readl(pxp->base + HW_PXP_CSC2COEF1));
145         dev_dbg(pxp->dev, "PXP_CSC2COEF2 0x%x",
146                 __raw_readl(pxp->base + HW_PXP_CSC2COEF2));
147         dev_dbg(pxp->dev, "PXP_CSC2COEF3 0x%x",
148                 __raw_readl(pxp->base + HW_PXP_CSC2COEF3));
149         dev_dbg(pxp->dev, "PXP_CSC2COEF4 0x%x",
150                 __raw_readl(pxp->base + HW_PXP_CSC2COEF4));
151         dev_dbg(pxp->dev, "PXP_CSC2COEF5 0x%x",
152                 __raw_readl(pxp->base + HW_PXP_CSC2COEF5));
153         dev_dbg(pxp->dev, "PXP_LUT_CTRL 0x%x",
154                 __raw_readl(pxp->base + HW_PXP_LUT_CTRL));
155         dev_dbg(pxp->dev, "PXP_LUT 0x%x", __raw_readl(pxp->base + HW_PXP_LUT));
156         dev_dbg(pxp->dev, "PXP_HIST_CTRL 0x%x",
157                 __raw_readl(pxp->base + HW_PXP_HIST_CTRL));
158         dev_dbg(pxp->dev, "PXP_HIST2_PARAM 0x%x",
159                 __raw_readl(pxp->base + HW_PXP_HIST2_PARAM));
160         dev_dbg(pxp->dev, "PXP_HIST4_PARAM 0x%x",
161                 __raw_readl(pxp->base + HW_PXP_HIST4_PARAM));
162         dev_dbg(pxp->dev, "PXP_HIST8_PARAM0 0x%x",
163                 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM0));
164         dev_dbg(pxp->dev, "PXP_HIST8_PARAM1 0x%x",
165                 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM1));
166         dev_dbg(pxp->dev, "PXP_HIST16_PARAM0 0x%x",
167                 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM0));
168         dev_dbg(pxp->dev, "PXP_HIST16_PARAM1 0x%x",
169                 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM1));
170         dev_dbg(pxp->dev, "PXP_HIST16_PARAM2 0x%x",
171                 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM2));
172         dev_dbg(pxp->dev, "PXP_HIST16_PARAM3 0x%x",
173                 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM3));
174 }
175
176 static bool is_yuv(u32 pix_fmt)
177 {
178         if ((pix_fmt == PXP_PIX_FMT_YUYV) |
179             (pix_fmt == PXP_PIX_FMT_UYVY) |
180             (pix_fmt == PXP_PIX_FMT_Y41P) |
181             (pix_fmt == PXP_PIX_FMT_YUV444) |
182             (pix_fmt == PXP_PIX_FMT_NV12) |
183             (pix_fmt == PXP_PIX_FMT_GREY) |
184             (pix_fmt == PXP_PIX_FMT_YVU410P) |
185             (pix_fmt == PXP_PIX_FMT_YUV410P) |
186             (pix_fmt == PXP_PIX_FMT_YVU420P) |
187             (pix_fmt == PXP_PIX_FMT_YUV420P) |
188             (pix_fmt == PXP_PIX_FMT_YUV420P2) |
189             (pix_fmt == PXP_PIX_FMT_YVU422P) |
190             (pix_fmt == PXP_PIX_FMT_YUV422P)) {
191                 return true;
192         } else {
193                 return false;
194         }
195 }
196
197 static void pxp_set_ctrl(struct pxps *pxp)
198 {
199         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
200         struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
201         u32 ctrl;
202         u32 fmt_ctrl;
203
204         /* Configure S0 input format */
205         switch (pxp_conf->s0_param.pixel_fmt) {
206         case PXP_PIX_FMT_RGB24:
207                 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB888;
208                 break;
209         case PXP_PIX_FMT_RGB565:
210                 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB565;
211                 break;
212         case PXP_PIX_FMT_RGB555:
213                 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB555;
214                 break;
215         case PXP_PIX_FMT_YUV420P:
216         case PXP_PIX_FMT_GREY:
217                 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__YUV420;
218                 break;
219         case PXP_PIX_FMT_YUV422P:
220                 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__YUV422;
221                 break;
222         default:
223                 fmt_ctrl = 0;
224         }
225         ctrl = BF_PXP_CTRL_S0_FORMAT(fmt_ctrl);
226
227         /* Configure output format based on out_channel format */
228         switch (pxp_conf->out_param.pixel_fmt) {
229         case PXP_PIX_FMT_RGB24:
230                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB888;
231                 break;
232         case PXP_PIX_FMT_RGB565:
233                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB565;
234                 break;
235         case PXP_PIX_FMT_RGB555:
236                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB555;
237                 break;
238         case PXP_PIX_FMT_YUV420P:
239                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__YUV2P420;
240                 break;
241         case PXP_PIX_FMT_YUV422P:
242                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__YUV2P422;
243                 break;
244         case PXP_PIX_FMT_GREY:
245                 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__MONOC8;
246                 break;
247         default:
248                 fmt_ctrl = 0;
249         }
250         ctrl |= BF_PXP_CTRL_OUTBUF_FORMAT(fmt_ctrl);
251
252         ctrl |= BM_PXP_CTRL_CROP;
253
254         if (proc_data->scaling)
255                 ctrl |= BM_PXP_CTRL_SCALE;
256         if (proc_data->vflip)
257                 ctrl |= BM_PXP_CTRL_VFLIP;
258         if (proc_data->hflip)
259                 ctrl |= BM_PXP_CTRL_HFLIP;
260         if (proc_data->rotate)
261                 ctrl |= BF_PXP_CTRL_ROTATE(proc_data->rotate / 90);
262
263         __raw_writel(ctrl, pxp->base + HW_PXP_CTRL);
264 }
265
266 static int pxp_start(struct pxps *pxp)
267 {
268         __raw_writel(BM_PXP_CTRL_IRQ_ENABLE, pxp->base + HW_PXP_CTRL_SET);
269         __raw_writel(BM_PXP_CTRL_ENABLE, pxp->base + HW_PXP_CTRL_SET);
270
271         return 0;
272 }
273
274 static void pxp_set_outbuf(struct pxps *pxp)
275 {
276         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
277         struct pxp_layer_param *out_params = &pxp_conf->out_param;
278
279         __raw_writel(out_params->paddr, pxp->base + HW_PXP_OUTBUF);
280
281         __raw_writel(BF_PXP_OUTSIZE_WIDTH(out_params->width) |
282                      BF_PXP_OUTSIZE_HEIGHT(out_params->height),
283                      pxp->base + HW_PXP_OUTSIZE);
284 }
285
286 static void pxp_set_s0colorkey(struct pxps *pxp)
287 {
288         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
289         struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
290
291         /* Low and high are set equal. V4L does not allow a chromakey range */
292         if (s0_params->color_key == -1) {
293                 /* disable color key */
294                 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_S0COLORKEYLOW);
295                 __raw_writel(0, pxp->base + HW_PXP_S0COLORKEYHIGH);
296         } else {
297                 __raw_writel(s0_params->color_key,
298                              pxp->base + HW_PXP_S0COLORKEYLOW);
299                 __raw_writel(s0_params->color_key,
300                              pxp->base + HW_PXP_S0COLORKEYHIGH);
301         }
302 }
303
304 static void pxp_set_olcolorkey(int layer_no, struct pxps *pxp)
305 {
306         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
307         struct pxp_layer_param *ol_params = &pxp_conf->ol_param[layer_no];
308
309         /* Low and high are set equal. V4L does not allow a chromakey range */
310         if (ol_params->color_key_enable != 0 && ol_params->color_key != -1) {
311                 __raw_writel(ol_params->color_key,
312                              pxp->base + HW_PXP_OLCOLORKEYLOW);
313                 __raw_writel(ol_params->color_key,
314                              pxp->base + HW_PXP_OLCOLORKEYHIGH);
315         } else {
316                 /* disable color key */
317                 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_OLCOLORKEYLOW);
318                 __raw_writel(0, pxp->base + HW_PXP_OLCOLORKEYHIGH);
319         }
320 }
321
322 static void pxp_set_oln(int layer_no, struct pxps *pxp)
323 {
324         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
325         struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
326         dma_addr_t phys_addr = olparams_data->paddr;
327         __raw_writel(phys_addr, pxp->base + HW_PXP_OLn(layer_no));
328
329         /* Fixme */
330         __raw_writel(BF_PXP_OLnSIZE_WIDTH(olparams_data->width >> 3) |
331                      BF_PXP_OLnSIZE_HEIGHT(olparams_data->height >> 3),
332                      pxp->base + HW_PXP_OLnSIZE(layer_no));
333 }
334
335 static void pxp_set_olparam(int layer_no, struct pxps *pxp)
336 {
337         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
338         struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
339         u32 olparam;
340
341         olparam = BF_PXP_OLnPARAM_ALPHA(olparams_data->global_alpha);
342         if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
343                 olparam |=
344                     BF_PXP_OLnPARAM_FORMAT(BV_PXP_OLnPARAM_FORMAT__RGB888);
345         else
346                 olparam |=
347                     BF_PXP_OLnPARAM_FORMAT(BV_PXP_OLnPARAM_FORMAT__RGB565);
348         if (olparams_data->global_alpha_enable)
349                 olparam |=
350                     BF_PXP_OLnPARAM_ALPHA_CNTL
351                     (BV_PXP_OLnPARAM_ALPHA_CNTL__Override);
352         if (olparams_data->color_key_enable)
353                 olparam |= BM_PXP_OLnPARAM_ENABLE_COLORKEY;
354         if (olparams_data->combine_enable)
355                 olparam |= BM_PXP_OLnPARAM_ENABLE;
356         __raw_writel(olparam, pxp->base + HW_PXP_OLnPARAM(layer_no));
357 }
358
359 static void pxp_set_s0param(struct pxps *pxp)
360 {
361         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
362         struct pxp_layer_param *s0params_data = &pxp_conf->s0_param;
363         struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
364         u32 s0param;
365
366         s0param = BF_PXP_S0PARAM_XBASE(proc_data->drect.left >> 3);
367         s0param |= BF_PXP_S0PARAM_YBASE(proc_data->drect.top >> 3);
368         s0param |= BF_PXP_S0PARAM_WIDTH(s0params_data->width >> 3);
369         s0param |= BF_PXP_S0PARAM_HEIGHT(s0params_data->height >> 3);
370         __raw_writel(s0param, pxp->base + HW_PXP_S0PARAM);
371 }
372
373 static void pxp_set_s0crop(struct pxps *pxp)
374 {
375         u32 s0crop;
376         struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
377
378         s0crop = BF_PXP_S0CROP_XBASE(proc_data->srect.left >> 3);
379         s0crop |= BF_PXP_S0CROP_YBASE(proc_data->srect.top >> 3);
380         s0crop |= BF_PXP_S0CROP_WIDTH(proc_data->drect.width >> 3);
381         s0crop |= BF_PXP_S0CROP_HEIGHT(proc_data->drect.height >> 3);
382         __raw_writel(s0crop, pxp->base + HW_PXP_S0CROP);
383 }
384
385 static int pxp_set_scaling(struct pxps *pxp)
386 {
387         int ret = 0;
388         u32 xscale, yscale, s0scale;
389         struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
390         struct pxp_layer_param *s0params_data = &pxp->pxp_conf_state.s0_param;
391
392         if ((s0params_data->pixel_fmt != PXP_PIX_FMT_YUV420P) &&
393             (s0params_data->pixel_fmt != PXP_PIX_FMT_YUV422P)) {
394                 proc_data->scaling = 0;
395                 ret = -EINVAL;
396                 goto out;
397         }
398
399         if ((proc_data->srect.width == proc_data->drect.width) &&
400             (proc_data->srect.height == proc_data->drect.height)) {
401                 proc_data->scaling = 0;
402                 __raw_writel(0x10001000, pxp->base + HW_PXP_S0SCALE);
403                 goto out;
404         }
405
406         proc_data->scaling = 1;
407         xscale = proc_data->srect.width * 0x1000 / proc_data->drect.width;
408         yscale = proc_data->srect.height * 0x1000 / proc_data->drect.height;
409         if (xscale > PXP_DOWNSCALE_THRESHOLD)
410                 xscale = PXP_DOWNSCALE_THRESHOLD;
411         if (yscale > PXP_DOWNSCALE_THRESHOLD)
412                 yscale = PXP_DOWNSCALE_THRESHOLD;
413         s0scale = BF_PXP_S0SCALE_YSCALE(yscale) | BF_PXP_S0SCALE_XSCALE(xscale);
414         __raw_writel(s0scale, pxp->base + HW_PXP_S0SCALE);
415
416 out:
417         pxp_set_ctrl(pxp);
418
419         return ret;
420 }
421
422 static void pxp_set_bg(struct pxps *pxp)
423 {
424         __raw_writel(pxp->pxp_conf_state.proc_data.bgcolor,
425                      pxp->base + HW_PXP_S0BACKGROUND);
426 }
427
428 static void pxp_set_lut(struct pxps *pxp)
429 {
430         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
431         int lut_op = pxp_conf->proc_data.lut_transform;
432         u32 reg_val;
433         int i;
434         bool use_cmap = (lut_op & PXP_LUT_USE_CMAP) ? true : false;
435         u8 *cmap = pxp_conf->proc_data.lut_map;
436         u32 entry_src;
437
438         /*
439          * If LUT already configured as needed, return...
440          * Unless CMAP is needed and it has been updated.
441          */
442         if ((pxp->lut_state == lut_op) &&
443                 !(use_cmap && pxp_conf->proc_data.lut_map_updated))
444                 return;
445
446         if (lut_op == PXP_LUT_NONE) {
447                 __raw_writel(BM_PXP_LUT_CTRL_BYPASS,
448                              pxp->base + HW_PXP_LUT_CTRL);
449         } else if (((lut_op & PXP_LUT_INVERT) != 0)
450                 && ((lut_op & PXP_LUT_BLACK_WHITE) != 0)) {
451                 /* Fill out LUT table with inverted monochromized values */
452
453                 /* Initialize LUT address to 0 and clear bypass bit */
454                 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
455
456                 /* LUT address pointer auto-increments after each data write */
457                 for (i = 0; i < 256; i++) {
458                         reg_val =
459                             __raw_readl(pxp->base +
460                                         HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
461                         entry_src = use_cmap ? cmap[i] : reg_val;
462                         reg_val = (entry_src < 0x80) ? 0x00 : 0xFF;
463                         reg_val = ~reg_val & BM_PXP_LUT_DATA;
464                         __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
465                 }
466         } else if ((lut_op & PXP_LUT_INVERT) != 0) {
467                 /* Fill out LUT table with 8-bit inverted values */
468
469                 /* Initialize LUT address to 0 and clear bypass bit */
470                 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
471
472                 /* LUT address pointer auto-increments after each data write */
473                 for (i = 0; i < 256; i++) {
474                         reg_val =
475                             __raw_readl(pxp->base +
476                                         HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
477                         entry_src = use_cmap ? cmap[i] : reg_val;
478                         reg_val = ~entry_src & BM_PXP_LUT_DATA;
479                         __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
480                 }
481         } else if ((lut_op & PXP_LUT_BLACK_WHITE) != 0) {
482                 /* Fill out LUT table with 8-bit monochromized values */
483
484                 /* Initialize LUT address to 0 and clear bypass bit */
485                 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
486
487                 /* LUT address pointer auto-increments after each data write */
488                 for (i = 0; i < 256; i++) {
489                         reg_val =
490                             __raw_readl(pxp->base +
491                                         HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
492                         entry_src = use_cmap ? cmap[i] : reg_val;
493                         reg_val = (entry_src < 0x80) ? 0x00 : 0xFF;
494                         reg_val = reg_val & BM_PXP_LUT_DATA;
495                         __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
496                 }
497         } else if (use_cmap) {
498                 /* Fill out LUT table using colormap values */
499
500                 /* Initialize LUT address to 0 and clear bypass bit */
501                 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
502
503                 /* LUT address pointer auto-increments after each data write */
504                 for (i = 0; i < 256; i++) {
505                         reg_val = cmap[i] & BM_PXP_LUT_DATA;
506                         __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
507                 }
508         }
509
510         pxp->lut_state = lut_op;
511 }
512
513 static void pxp_set_csc(struct pxps *pxp)
514 {
515         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
516         struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
517         struct pxp_layer_param *ol_params = &pxp_conf->ol_param[0];
518         struct pxp_layer_param *out_params = &pxp_conf->out_param;
519
520         bool input_is_YUV = is_yuv(s0_params->pixel_fmt);
521         bool output_is_YUV = is_yuv(out_params->pixel_fmt);
522
523         if (input_is_YUV && output_is_YUV) {
524                 /*
525                  * Input = YUV, Output = YUV
526                  * No CSC unless we need to do combining
527                  */
528                 if (ol_params->combine_enable) {
529                         /* Must convert to RGB for combining with RGB overlay */
530
531                         /* CSC1 - YUV->RGB */
532                         __raw_writel(0x04030000, pxp->base + HW_PXP_CSCCOEF0);
533                         __raw_writel(0x01230208, pxp->base + HW_PXP_CSCCOEF1);
534                         __raw_writel(0x076b079c, pxp->base + HW_PXP_CSCCOEF2);
535
536                         /* CSC2 - RGB->YUV */
537                         __raw_writel(0x4, pxp->base + HW_PXP_CSC2CTRL);
538                         __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2COEF0);
539                         __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2COEF1);
540                         __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2COEF2);
541                         __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2COEF3);
542                         __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2COEF4);
543                         __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2COEF5);
544                 } else {
545                         /* Input & Output both YUV, so bypass both CSCs */
546
547                         /* CSC1 - Bypass */
548                         __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
549
550                         /* CSC2 - Bypass */
551                         __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
552                 }
553         } else if (input_is_YUV && !output_is_YUV) {
554                 /*
555                  * Input = YUV, Output = RGB
556                  * Use CSC1 to convert to RGB
557                  */
558
559                 /* CSC1 - YUV->RGB */
560                 __raw_writel(0x84ab01f0, pxp->base + HW_PXP_CSCCOEF0);
561                 __raw_writel(0x01230204, pxp->base + HW_PXP_CSCCOEF1);
562                 __raw_writel(0x0730079c, pxp->base + HW_PXP_CSCCOEF2);
563
564                 /* CSC2 - Bypass */
565                 __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
566         } else if (!input_is_YUV && output_is_YUV) {
567                 /*
568                  * Input = RGB, Output = YUV
569                  * Use CSC2 to convert to YUV
570                  */
571
572                 /* CSC1 - Bypass */
573                 __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
574
575                 /* CSC2 - RGB->YUV */
576                 __raw_writel(0x4, pxp->base + HW_PXP_CSC2CTRL);
577                 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2COEF0);
578                 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2COEF1);
579                 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2COEF2);
580                 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2COEF3);
581                 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2COEF4);
582                 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2COEF5);
583         } else {
584                 /*
585                  * Input = RGB, Output = RGB
586                  * Input & Output both RGB, so bypass both CSCs
587                  */
588
589                 /* CSC1 - Bypass */
590                 __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
591
592                 /* CSC2 - Bypass */
593                 __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
594         }
595
596         /* YCrCb colorspace */
597         /* Not sure when we use this...no YCrCb formats are defined for PxP */
598         /*
599            __raw_writel(0x84ab01f0, HW_PXP_CSCCOEFF0_ADDR);
600            __raw_writel(0x01230204, HW_PXP_CSCCOEFF1_ADDR);
601            __raw_writel(0x0730079c, HW_PXP_CSCCOEFF2_ADDR);
602          */
603
604 }
605
606 static void pxp_set_s0buf(struct pxps *pxp)
607 {
608         struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
609         struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
610         dma_addr_t Y, U, V;
611
612         Y = s0_params->paddr;
613         __raw_writel(Y, pxp->base + HW_PXP_S0BUF);
614         if ((s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P) ||
615             (s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P) ||
616             (s0_params->pixel_fmt == PXP_PIX_FMT_GREY)) {
617                 /* Set to 1 if YUV format is 4:2:2 rather than 4:2:0 */
618                 int s = 2;
619                 U = Y + (s0_params->width * s0_params->height);
620                 V = U + ((s0_params->width * s0_params->height) >> s);
621                 __raw_writel(U, pxp->base + HW_PXP_S0UBUF);
622                 __raw_writel(V, pxp->base + HW_PXP_S0VBUF);
623         }
624 }
625
626 /**
627  * pxp_config() - configure PxP for a processing task
628  * @pxps:       PXP context.
629  * @pxp_chan:   PXP channel.
630  * @return:     0 on success or negative error code on failure.
631  */
632 static int pxp_config(struct pxps *pxp, struct pxp_channel *pxp_chan)
633 {
634         struct pxp_config_data *pxp_conf_data = &pxp->pxp_conf_state;
635         int ol_nr;
636         int i;
637
638         /* Configure PxP regs */
639         pxp_set_ctrl(pxp);
640         pxp_set_s0param(pxp);
641         pxp_set_s0crop(pxp);
642         pxp_set_scaling(pxp);
643         ol_nr = pxp_conf_data->layer_nr - 2;
644         while (ol_nr > 0) {
645                 i = pxp_conf_data->layer_nr - 2 - ol_nr;
646                 pxp_set_oln(i, pxp);
647                 pxp_set_olparam(i, pxp);
648                 /* only the color key in higher overlay will take effect. */
649                 pxp_set_olcolorkey(i, pxp);
650                 ol_nr--;
651         }
652         pxp_set_s0colorkey(pxp);
653         pxp_set_csc(pxp);
654         pxp_set_bg(pxp);
655         pxp_set_lut(pxp);
656
657         pxp_set_s0buf(pxp);
658         pxp_set_outbuf(pxp);
659
660         return 0;
661 }
662
663 static void pxp_clk_enable(struct pxps *pxp)
664 {
665         mutex_lock(&pxp->clk_mutex);
666
667         if (pxp->clk_stat == CLK_STAT_ON) {
668                 mutex_unlock(&pxp->clk_mutex);
669                 return;
670         }
671
672         clk_enable(pxp->clk);
673         pxp->clk_stat = CLK_STAT_ON;
674
675         mutex_unlock(&pxp->clk_mutex);
676 }
677
678 static void pxp_clk_disable(struct pxps *pxp)
679 {
680         unsigned long flags;
681
682         mutex_lock(&pxp->clk_mutex);
683
684         if (pxp->clk_stat == CLK_STAT_OFF) {
685                 mutex_unlock(&pxp->clk_mutex);
686                 return;
687         }
688
689         spin_lock_irqsave(&pxp->lock, flags);
690         if ((pxp->pxp_ongoing == 0) && list_empty(&head)) {
691                 spin_unlock_irqrestore(&pxp->lock, flags);
692                 clk_disable(pxp->clk);
693                 pxp->clk_stat = CLK_STAT_OFF;
694         } else
695                 spin_unlock_irqrestore(&pxp->lock, flags);
696
697         mutex_unlock(&pxp->clk_mutex);
698 }
699
700 static inline void clkoff_callback(struct work_struct *w)
701 {
702         struct pxps *pxp = container_of(w, struct pxps, work);
703
704         pxp_clk_disable(pxp);
705 }
706
707 static void pxp_clkoff_timer(unsigned long arg)
708 {
709         struct pxps *pxp = (struct pxps *)arg;
710
711         if ((pxp->pxp_ongoing == 0) && list_empty(&head))
712                 schedule_work(&pxp->work);
713         else
714                 mod_timer(&pxp->clk_timer,
715                           jiffies + msecs_to_jiffies(timeout_in_ms));
716 }
717
718 static struct pxp_tx_desc *pxpdma_first_active(struct pxp_channel *pxp_chan)
719 {
720         return list_entry(pxp_chan->active_list.next, struct pxp_tx_desc, list);
721 }
722
723 static struct pxp_tx_desc *pxpdma_first_queued(struct pxp_channel *pxp_chan)
724 {
725         return list_entry(pxp_chan->queue.next, struct pxp_tx_desc, list);
726 }
727
728 /* called with pxp_chan->lock held */
729 static void __pxpdma_dostart(struct pxp_channel *pxp_chan)
730 {
731         struct pxp_dma *pxp_dma = to_pxp_dma(pxp_chan->dma_chan.device);
732         struct pxps *pxp = to_pxp(pxp_dma);
733         struct pxp_tx_desc *desc;
734         struct pxp_tx_desc *child;
735         int i = 0;
736
737         /* so far we presume only one transaction on active_list */
738         /* S0 */
739         desc = pxpdma_first_active(pxp_chan);
740         memcpy(&pxp->pxp_conf_state.s0_param,
741                &desc->layer_param.s0_param, sizeof(struct pxp_layer_param));
742         memcpy(&pxp->pxp_conf_state.proc_data,
743                &desc->proc_data, sizeof(struct pxp_proc_data));
744
745         /* Save PxP configuration */
746         list_for_each_entry(child, &desc->tx_list, list) {
747                 if (i == 0) {   /* Output */
748                         memcpy(&pxp->pxp_conf_state.out_param,
749                                &child->layer_param.out_param,
750                                sizeof(struct pxp_layer_param));
751                 } else {        /* Overlay */
752                         memcpy(&pxp->pxp_conf_state.ol_param[i - 1],
753                                &child->layer_param.ol_param,
754                                sizeof(struct pxp_layer_param));
755                 }
756
757                 i++;
758         }
759         pr_debug("%s:%d S0 w/h %d/%d paddr %08x\n", __func__, __LINE__,
760                  pxp->pxp_conf_state.s0_param.width,
761                  pxp->pxp_conf_state.s0_param.height,
762                  pxp->pxp_conf_state.s0_param.paddr);
763         pr_debug("%s:%d OUT w/h %d/%d paddr %08x\n", __func__, __LINE__,
764                  pxp->pxp_conf_state.out_param.width,
765                  pxp->pxp_conf_state.out_param.height,
766                  pxp->pxp_conf_state.out_param.paddr);
767 }
768
769 static void pxpdma_dostart_work(struct pxps *pxp)
770 {
771         struct pxp_channel *pxp_chan = NULL;
772         unsigned long flags, flags1;
773
774         while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
775                 ;
776
777         spin_lock_irqsave(&pxp->lock, flags);
778         if (list_empty(&head)) {
779                 pxp->pxp_ongoing = 0;
780                 spin_unlock_irqrestore(&pxp->lock, flags);
781                 return;
782         }
783
784         pxp_chan = list_entry(head.next, struct pxp_channel, list);
785
786         spin_lock_irqsave(&pxp_chan->lock, flags1);
787         if (!list_empty(&pxp_chan->active_list)) {
788                 struct pxp_tx_desc *desc;
789                 /* REVISIT */
790                 desc = pxpdma_first_active(pxp_chan);
791                 __pxpdma_dostart(pxp_chan);
792         }
793         spin_unlock_irqrestore(&pxp_chan->lock, flags1);
794
795         /* Configure PxP */
796         pxp_config(pxp, pxp_chan);
797
798         pxp_start(pxp);
799
800         spin_unlock_irqrestore(&pxp->lock, flags);
801 }
802
803 static void pxpdma_dequeue(struct pxp_channel *pxp_chan, struct list_head *list)
804 {
805         struct pxp_tx_desc *desc = NULL;
806         do {
807                 desc = pxpdma_first_queued(pxp_chan);
808                 list_move_tail(&desc->list, list);
809         } while (!list_empty(&pxp_chan->queue));
810 }
811
812 static dma_cookie_t pxp_tx_submit(struct dma_async_tx_descriptor *tx)
813 {
814         struct pxp_tx_desc *desc = to_tx_desc(tx);
815         struct pxp_channel *pxp_chan = to_pxp_channel(tx->chan);
816         dma_cookie_t cookie;
817         unsigned long flags;
818
819         dev_dbg(&pxp_chan->dma_chan.dev->device, "received TX\n");
820
821         mutex_lock(&pxp_chan->chan_mutex);
822
823         cookie = pxp_chan->dma_chan.cookie;
824
825         if (++cookie < 0)
826                 cookie = 1;
827
828         /* from dmaengine.h: "last cookie value returned to client" */
829         pxp_chan->dma_chan.cookie = cookie;
830         tx->cookie = cookie;
831
832         /* pxp_chan->lock can be taken under ichan->lock, but not v.v. */
833         spin_lock_irqsave(&pxp_chan->lock, flags);
834
835         /* Here we add the tx descriptor to our PxP task queue. */
836         list_add_tail(&desc->list, &pxp_chan->queue);
837
838         spin_unlock_irqrestore(&pxp_chan->lock, flags);
839
840         dev_dbg(&pxp_chan->dma_chan.dev->device, "done TX\n");
841
842         mutex_unlock(&pxp_chan->chan_mutex);
843         return cookie;
844 }
845
846 /* Called with pxp_chan->chan_mutex held */
847 static int pxp_desc_alloc(struct pxp_channel *pxp_chan, int n)
848 {
849         struct pxp_tx_desc *desc = vmalloc(n * sizeof(struct pxp_tx_desc));
850
851         if (!desc)
852                 return -ENOMEM;
853
854         pxp_chan->n_tx_desc = n;
855         pxp_chan->desc = desc;
856         INIT_LIST_HEAD(&pxp_chan->active_list);
857         INIT_LIST_HEAD(&pxp_chan->queue);
858         INIT_LIST_HEAD(&pxp_chan->free_list);
859
860         while (n--) {
861                 struct dma_async_tx_descriptor *txd = &desc->txd;
862
863                 memset(txd, 0, sizeof(*txd));
864                 INIT_LIST_HEAD(&desc->tx_list);
865                 dma_async_tx_descriptor_init(txd, &pxp_chan->dma_chan);
866                 txd->tx_submit = pxp_tx_submit;
867
868                 list_add(&desc->list, &pxp_chan->free_list);
869
870                 desc++;
871         }
872
873         return 0;
874 }
875
876 /**
877  * pxp_init_channel() - initialize a PXP channel.
878  * @pxp_dma:   PXP DMA context.
879  * @pchan:  pointer to the channel object.
880  * @return      0 on success or negative error code on failure.
881  */
882 static int pxp_init_channel(struct pxp_dma *pxp_dma,
883                             struct pxp_channel *pxp_chan)
884 {
885         unsigned long flags;
886         struct pxps *pxp = to_pxp(pxp_dma);
887         int ret = 0, n_desc = 0;
888
889         /*
890          * We are using _virtual_ channel here.
891          * Each channel contains all parameters of corresponding layers
892          * for one transaction; each layer is represented as one descriptor
893          * (i.e., pxp_tx_desc) here.
894          */
895
896         spin_lock_irqsave(&pxp->lock, flags);
897
898         /* max desc nr: S0+OL+OUT = 1+8+1 */
899         n_desc = 16;
900
901         spin_unlock_irqrestore(&pxp->lock, flags);
902
903         if (n_desc && !pxp_chan->desc)
904                 ret = pxp_desc_alloc(pxp_chan, n_desc);
905
906         return ret;
907 }
908
909 /**
910  * pxp_uninit_channel() - uninitialize a PXP channel.
911  * @pxp_dma:   PXP DMA context.
912  * @pchan:  pointer to the channel object.
913  * @return      0 on success or negative error code on failure.
914  */
915 static int pxp_uninit_channel(struct pxp_dma *pxp_dma,
916                               struct pxp_channel *pxp_chan)
917 {
918         int ret = 0;
919
920         if (pxp_chan->desc)
921                 vfree(pxp_chan->desc);
922
923         pxp_chan->desc = NULL;
924
925         return ret;
926 }
927
/*
 * PxP completion interrupt: ack the IRQ, complete the transaction of the
 * channel at the head of the global work list, recycle its descriptors
 * and arm the clock-off timer.
 */
static irqreturn_t pxp_irq(int irq, void *dev_id)
{
        struct pxps *pxp = dev_id;
        struct pxp_channel *pxp_chan;
        struct pxp_tx_desc *desc;
        dma_async_tx_callback callback;
        void *callback_param;
        unsigned long flags;
        u32 hist_status;

        dump_pxp_reg(pxp);

        /* Latch histogram status before acknowledging the interrupt. */
        hist_status =
            __raw_readl(pxp->base + HW_PXP_HIST_CTRL) & BM_PXP_HIST_CTRL_STATUS;

        /* Ack: clear the IRQ bit in the PxP status register. */
        __raw_writel(BM_PXP_STAT_IRQ, pxp->base + HW_PXP_STAT_CLR);

        spin_lock_irqsave(&pxp->lock, flags);

        /* No channel was waiting for completion: treat as spurious. */
        if (list_empty(&head)) {
                pxp->pxp_ongoing = 0;
                spin_unlock_irqrestore(&pxp->lock, flags);
                return IRQ_NONE;
        }

        /* The head of the global list owns this completion. */
        pxp_chan = list_entry(head.next, struct pxp_channel, list);
        list_del_init(&pxp_chan->list);

        if (list_empty(&pxp_chan->active_list)) {
                pr_debug("PXP_IRQ pxp_chan->active_list empty. chan_id %d\n",
                         pxp_chan->dma_chan.chan_id);
                pxp->pxp_ongoing = 0;
                spin_unlock_irqrestore(&pxp->lock, flags);
                return IRQ_NONE;
        }

        /* Get descriptor and call callback */
        desc = pxpdma_first_active(pxp_chan);

        pxp_chan->completed = desc->txd.cookie;

        callback = desc->txd.callback;
        callback_param = desc->txd.callback_param;

        /* Send histogram status back to caller */
        desc->hist_status = hist_status;

        /*
         * NOTE(review): the client callback runs with pxp->lock held and
         * interrupts disabled -- callbacks must not sleep or re-enter
         * this driver.
         */
        if ((desc->txd.flags & DMA_PREP_INTERRUPT) && callback)
                callback(callback_param);

        pxp_chan->status = PXP_CHANNEL_INITIALIZED;

        /* Return the whole descriptor chain to the channel's free list. */
        list_splice_init(&desc->tx_list, &pxp_chan->free_list);
        list_move(&desc->list, &pxp_chan->free_list);

        /* Wake pxp_issue_pending() waiters and schedule clock gating. */
        wake_up(&pxp->done);
        pxp->pxp_ongoing = 0;
        mod_timer(&pxp->clk_timer, jiffies + msecs_to_jiffies(timeout_in_ms));

        spin_unlock_irqrestore(&pxp->lock, flags);

        return IRQ_HANDLED;
}
991
992 /* called with pxp_chan->lock held */
993 static struct pxp_tx_desc *pxpdma_desc_get(struct pxp_channel *pxp_chan)
994 {
995         struct pxp_tx_desc *desc, *_desc;
996         struct pxp_tx_desc *ret = NULL;
997
998         list_for_each_entry_safe(desc, _desc, &pxp_chan->free_list, list) {
999                 list_del_init(&desc->list);
1000                 ret = desc;
1001                 break;
1002         }
1003
1004         return ret;
1005 }
1006
1007 /* called with pxp_chan->lock held */
1008 static void pxpdma_desc_put(struct pxp_channel *pxp_chan,
1009                             struct pxp_tx_desc *desc)
1010 {
1011         if (desc) {
1012                 struct device *dev = &pxp_chan->dma_chan.dev->device;
1013                 struct pxp_tx_desc *child;
1014
1015                 list_for_each_entry(child, &desc->tx_list, list)
1016                     dev_info(dev, "moving child desc %p to freelist\n", child);
1017                 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1018                 dev_info(dev, "moving desc %p to freelist\n", desc);
1019                 list_add(&desc->list, &pxp_chan->free_list);
1020         }
1021 }
1022
/* Allocate and initialise a transfer descriptor. */
/*
 * Builds one descriptor per scatterlist entry: entry 0 carries the S0
 * (source) layer address, entry 1 the output layer address, and every
 * further entry an overlay layer address.  Children are chained on the
 * head descriptor's tx_list; the head's txd is returned to the client.
 */
static struct dma_async_tx_descriptor *pxp_prep_slave_sg(struct dma_chan *chan,
                                                         struct scatterlist
                                                         *sgl,
                                                         unsigned int sg_len,
                                                         enum dma_data_direction
                                                         direction,
                                                         unsigned long tx_flags)
{
        struct pxp_channel *pxp_chan = to_pxp_channel(chan);
        struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
        struct pxps *pxp = to_pxp(pxp_dma);
        struct pxp_tx_desc *desc = NULL;
        struct pxp_tx_desc *first = NULL, *prev = NULL;
        struct scatterlist *sg;
        unsigned long flags;
        dma_addr_t phys_addr;
        int i;

        if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) {
                dev_err(chan->device->dev, "Invalid DMA direction %d!\n",
                        direction);
                return NULL;
        }

        /* At minimum an S0 entry and an output entry are required. */
        if (unlikely(sg_len < 2))
                return NULL;

        spin_lock_irqsave(&pxp_chan->lock, flags);
        for_each_sg(sgl, sg, sg_len, i) {
                desc = pxpdma_desc_get(pxp_chan);
                if (!desc) {
                        /* Roll back: return the partial chain to the pool. */
                        pxpdma_desc_put(pxp_chan, first);
                        dev_err(chan->device->dev, "Can't get DMA desc.\n");
                        spin_unlock_irqrestore(&pxp_chan->lock, flags);
                        return NULL;
                }

                phys_addr = sg_dma_address(sg);

                if (!first) {
                        /* Head descriptor: S0 (source) layer. */
                        first = desc;

                        desc->layer_param.s0_param.paddr = phys_addr;
                } else {
                        /* Child: output for i == 1, overlay otherwise. */
                        list_add_tail(&desc->list, &first->tx_list);
                        prev->next = desc;
                        desc->next = NULL;

                        if (i == 1)
                                desc->layer_param.out_param.paddr = phys_addr;
                        else
                                desc->layer_param.ol_param.paddr = phys_addr;
                }

                prev = desc;
        }
        spin_unlock_irqrestore(&pxp_chan->lock, flags);

        pxp->pxp_conf_state.layer_nr = sg_len;
        first->txd.flags = tx_flags;
        first->len = sg_len;
        pr_debug("%s:%d first %p, first->len %d, flags %08x\n",
                 __func__, __LINE__, first, first->len, first->txd.flags);

        return &first->txd;
}
1090
/*
 * dmaengine issue_pending hook: promote this channel's queued descriptors
 * to its active list, put the channel on the global work list, then wait
 * for the engine to become available and start the transfer.
 */
static void pxp_issue_pending(struct dma_chan *chan)
{
        struct pxp_channel *pxp_chan = to_pxp_channel(chan);
        struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
        struct pxps *pxp = to_pxp(pxp_dma);
        unsigned long flags0, flags;

        /* Lock order: pxp->lock first, then pxp_chan->lock. */
        spin_lock_irqsave(&pxp->lock, flags0);
        spin_lock_irqsave(&pxp_chan->lock, flags);

        if (!list_empty(&pxp_chan->queue)) {
                pxpdma_dequeue(pxp_chan, &pxp_chan->active_list);
                pxp_chan->status = PXP_CHANNEL_READY;
                list_add_tail(&pxp_chan->list, &head);
        } else {
                /* Nothing queued -- nothing to issue. */
                spin_unlock_irqrestore(&pxp_chan->lock, flags);
                spin_unlock_irqrestore(&pxp->lock, flags0);
                return;
        }
        spin_unlock_irqrestore(&pxp_chan->lock, flags);
        spin_unlock_irqrestore(&pxp->lock, flags0);

        pxp_clk_enable(pxp);
        /*
         * NOTE(review): PXP_WAITCON is defined elsewhere -- presumably
         * the "engine available" condition.  On a 2 s timeout or a
         * pending signal the transfer is abandoned (it stays on the
         * global list but is never started) -- confirm intended.
         */
        if (!wait_event_interruptible_timeout(pxp->done, PXP_WAITCON, 2 * HZ) ||
                signal_pending(current)) {
                pxp_clk_disable(pxp);
                return;
        }

        spin_lock_irqsave(&pxp->lock, flags);
        pxp->pxp_ongoing = 1;
        spin_unlock_irqrestore(&pxp->lock, flags);
        pxpdma_dostart_work(pxp);
}
1125
1126 static void __pxp_terminate_all(struct dma_chan *chan)
1127 {
1128         struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1129         unsigned long flags;
1130
1131         /* pchan->queue is modified in ISR, have to spinlock */
1132         spin_lock_irqsave(&pxp_chan->lock, flags);
1133         list_splice_init(&pxp_chan->queue, &pxp_chan->free_list);
1134         list_splice_init(&pxp_chan->active_list, &pxp_chan->free_list);
1135
1136         spin_unlock_irqrestore(&pxp_chan->lock, flags);
1137
1138         pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1139 }
1140
1141 static int pxp_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
1142                         unsigned long arg)
1143 {
1144         struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1145
1146         /* Only supports DMA_TERMINATE_ALL */
1147         if (cmd != DMA_TERMINATE_ALL)
1148                 return -ENXIO;
1149
1150         mutex_lock(&pxp_chan->chan_mutex);
1151         __pxp_terminate_all(chan);
1152         mutex_unlock(&pxp_chan->chan_mutex);
1153
1154         return 0;
1155 }
1156
1157 static int pxp_alloc_chan_resources(struct dma_chan *chan)
1158 {
1159         struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1160         struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1161         int ret;
1162
1163         /* dmaengine.c now guarantees to only offer free channels */
1164         BUG_ON(chan->client_count > 1);
1165         WARN_ON(pxp_chan->status != PXP_CHANNEL_FREE);
1166
1167         chan->cookie = 1;
1168         pxp_chan->completed = -ENXIO;
1169
1170         pr_debug("%s dma_chan.chan_id %d\n", __func__, chan->chan_id);
1171         ret = pxp_init_channel(pxp_dma, pxp_chan);
1172         if (ret < 0)
1173                 goto err_chan;
1174
1175         pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1176
1177         dev_dbg(&chan->dev->device, "Found channel 0x%x, irq %d\n",
1178                 chan->chan_id, pxp_chan->eof_irq);
1179
1180         return ret;
1181
1182 err_chan:
1183         return ret;
1184 }
1185
1186 static void pxp_free_chan_resources(struct dma_chan *chan)
1187 {
1188         struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1189         struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1190
1191         mutex_lock(&pxp_chan->chan_mutex);
1192
1193         __pxp_terminate_all(chan);
1194
1195         pxp_chan->status = PXP_CHANNEL_FREE;
1196
1197         pxp_uninit_channel(pxp_dma, pxp_chan);
1198
1199         mutex_unlock(&pxp_chan->chan_mutex);
1200 }
1201
1202 static enum dma_status pxp_tx_status(struct dma_chan *chan,
1203                                      dma_cookie_t cookie,
1204                                      struct dma_tx_state *txstate)
1205 {
1206         struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1207
1208         if (cookie != chan->cookie)
1209                 return DMA_ERROR;
1210
1211         if (txstate) {
1212                 txstate->last = pxp_chan->completed;
1213                 txstate->used = chan->cookie;
1214                 txstate->residue = 0;
1215         }
1216         return DMA_SUCCESS;
1217 }
1218
/*
 * One-time hardware bring-up: take the block out of reset, load default
 * values into the driver's shadow configuration, write that default
 * config to the hardware, and program the fixed histogram thresholds.
 * The register write order follows the hardware programming sequence --
 * do not reorder.
 */
static int pxp_hw_init(struct pxps *pxp)
{
        struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
        struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
        u32 reg_val;
        int i;

        /* Pull PxP out of reset */
        __raw_writel(0, pxp->base + HW_PXP_CTRL);

        /* Config defaults */

        /* Initialize non-channel-specific PxP parameters */
        proc_data->drect.left = proc_data->srect.left = 0;
        proc_data->drect.top = proc_data->srect.top = 0;
        proc_data->drect.width = proc_data->srect.width = 0;
        proc_data->drect.height = proc_data->srect.height = 0;
        proc_data->scaling = 0;
        proc_data->hflip = 0;
        proc_data->vflip = 0;
        proc_data->rotate = 0;
        proc_data->bgcolor = 0;

        /* Initialize S0 channel parameters */
        pxp_conf->s0_param.pixel_fmt = pxp_s0_formats[0];
        pxp_conf->s0_param.width = 0;
        pxp_conf->s0_param.height = 0;
        /* -1: color keying disabled by default (all-ones key). */
        pxp_conf->s0_param.color_key = -1;
        pxp_conf->s0_param.color_key_enable = false;

        /* Initialize OL channel parameters */
        for (i = 0; i < 8; i++) {
                pxp_conf->ol_param[i].combine_enable = false;
                pxp_conf->ol_param[i].width = 0;
                pxp_conf->ol_param[i].height = 0;
                pxp_conf->ol_param[i].pixel_fmt = PXP_PIX_FMT_RGB565;
                pxp_conf->ol_param[i].color_key_enable = false;
                pxp_conf->ol_param[i].color_key = -1;
                pxp_conf->ol_param[i].global_alpha_enable = false;
                pxp_conf->ol_param[i].global_alpha = 0;
                pxp_conf->ol_param[i].local_alpha_enable = false;
        }

        /* Initialize Output channel parameters */
        pxp_conf->out_param.width = 0;
        pxp_conf->out_param.height = 0;
        pxp_conf->out_param.pixel_fmt = PXP_PIX_FMT_RGB565;

        proc_data->overlay_state = 0;

        /* Write default h/w config */
        pxp_set_ctrl(pxp);
        pxp_set_s0param(pxp);
        pxp_set_s0crop(pxp);
        for (i = 0; i < 8; i++) {
                pxp_set_oln(i, pxp);
                pxp_set_olparam(i, pxp);
                pxp_set_olcolorkey(i, pxp);
        }
        pxp_set_s0colorkey(pxp);
        pxp_set_csc(pxp);
        pxp_set_bg(pxp);
        pxp_set_lut(pxp);

        /* One-time histogram configuration */
        reg_val =
            BF_PXP_HIST_CTRL_PANEL_MODE(BV_PXP_HIST_CTRL_PANEL_MODE__GRAY16);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST_CTRL);

        /* Fixed gray-level thresholds for the 2/4/8/16-bin histograms. */
        reg_val = BF_PXP_HIST2_PARAM_VALUE0(0x00) |
            BF_PXP_HIST2_PARAM_VALUE1(0x00F);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST2_PARAM);

        reg_val = BF_PXP_HIST4_PARAM_VALUE0(0x00) |
            BF_PXP_HIST4_PARAM_VALUE1(0x05) |
            BF_PXP_HIST4_PARAM_VALUE2(0x0A) | BF_PXP_HIST4_PARAM_VALUE3(0x0F);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST4_PARAM);

        reg_val = BF_PXP_HIST8_PARAM0_VALUE0(0x00) |
            BF_PXP_HIST8_PARAM0_VALUE1(0x02) |
            BF_PXP_HIST8_PARAM0_VALUE2(0x04) | BF_PXP_HIST8_PARAM0_VALUE3(0x06);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM0);
        reg_val = BF_PXP_HIST8_PARAM1_VALUE4(0x09) |
            BF_PXP_HIST8_PARAM1_VALUE5(0x0B) |
            BF_PXP_HIST8_PARAM1_VALUE6(0x0D) | BF_PXP_HIST8_PARAM1_VALUE7(0x0F);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM1);

        reg_val = BF_PXP_HIST16_PARAM0_VALUE0(0x00) |
            BF_PXP_HIST16_PARAM0_VALUE1(0x01) |
            BF_PXP_HIST16_PARAM0_VALUE2(0x02) |
            BF_PXP_HIST16_PARAM0_VALUE3(0x03);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM0);
        reg_val = BF_PXP_HIST16_PARAM1_VALUE4(0x04) |
            BF_PXP_HIST16_PARAM1_VALUE5(0x05) |
            BF_PXP_HIST16_PARAM1_VALUE6(0x06) |
            BF_PXP_HIST16_PARAM1_VALUE7(0x07);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM1);
        reg_val = BF_PXP_HIST16_PARAM2_VALUE8(0x08) |
            BF_PXP_HIST16_PARAM2_VALUE9(0x09) |
            BF_PXP_HIST16_PARAM2_VALUE10(0x0A) |
            BF_PXP_HIST16_PARAM2_VALUE11(0x0B);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM2);
        reg_val = BF_PXP_HIST16_PARAM3_VALUE12(0x0C) |
            BF_PXP_HIST16_PARAM3_VALUE13(0x0D) |
            BF_PXP_HIST16_PARAM3_VALUE14(0x0E) |
            BF_PXP_HIST16_PARAM3_VALUE15(0x0F);
        __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM3);

        return 0;
}
1329
1330 static int pxp_dma_init(struct pxps *pxp)
1331 {
1332         struct pxp_dma *pxp_dma = &pxp->pxp_dma;
1333         struct dma_device *dma = &pxp_dma->dma;
1334         int i;
1335
1336         dma_cap_set(DMA_SLAVE, dma->cap_mask);
1337         dma_cap_set(DMA_PRIVATE, dma->cap_mask);
1338
1339         /* Compulsory common fields */
1340         dma->dev = pxp->dev;
1341         dma->device_alloc_chan_resources = pxp_alloc_chan_resources;
1342         dma->device_free_chan_resources = pxp_free_chan_resources;
1343         dma->device_tx_status = pxp_tx_status;
1344         dma->device_issue_pending = pxp_issue_pending;
1345
1346         /* Compulsory for DMA_SLAVE fields */
1347         dma->device_prep_slave_sg = pxp_prep_slave_sg;
1348         dma->device_control = pxp_control;
1349
1350         /* Initialize PxP Channels */
1351         INIT_LIST_HEAD(&dma->channels);
1352         for (i = 0; i < NR_PXP_VIRT_CHANNEL; i++) {
1353                 struct pxp_channel *pxp_chan = pxp->channel + i;
1354                 struct dma_chan *dma_chan = &pxp_chan->dma_chan;
1355
1356                 spin_lock_init(&pxp_chan->lock);
1357                 mutex_init(&pxp_chan->chan_mutex);
1358
1359                 /* Only one EOF IRQ for PxP, shared by all channels */
1360                 pxp_chan->eof_irq = pxp->irq;
1361                 pxp_chan->status = PXP_CHANNEL_FREE;
1362                 pxp_chan->completed = -ENXIO;
1363                 snprintf(pxp_chan->eof_name, sizeof(pxp_chan->eof_name),
1364                          "PXP EOF %d", i);
1365
1366                 dma_chan->device = &pxp_dma->dma;
1367                 dma_chan->cookie = 1;
1368                 dma_chan->chan_id = i;
1369                 list_add_tail(&dma_chan->device_node, &dma->channels);
1370         }
1371
1372         return dma_async_device_register(&pxp_dma->dma);
1373 }
1374
/* sysfs read: report the clock auto-gating timeout in milliseconds. */
static ssize_t clk_off_timeout_show(struct device *dev,
                                    struct device_attribute *attr, char *buf)
{
        return sprintf(buf, "%d\n", timeout_in_ms);
}
1380
1381 static ssize_t clk_off_timeout_store(struct device *dev,
1382                                      struct device_attribute *attr,
1383                                      const char *buf, size_t count)
1384 {
1385         int val;
1386         if (sscanf(buf, "%d", &val) > 0) {
1387                 timeout_in_ms = val;
1388                 return count;
1389         }
1390         return -EINVAL;
1391 }
1392
/* sysfs attribute "clk_off_timeout" (rw, 0644): clock-gating delay in ms. */
static DEVICE_ATTR(clk_off_timeout, 0644, clk_off_timeout_show,
                   clk_off_timeout_store);
1395
1396 static int pxp_probe(struct platform_device *pdev)
1397 {
1398         struct pxps *pxp;
1399         struct resource *res;
1400         int irq;
1401         int err = 0;
1402
1403         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1404         irq = platform_get_irq(pdev, 0);
1405         if (!res || irq < 0) {
1406                 err = -ENODEV;
1407                 goto exit;
1408         }
1409
1410         pxp = kzalloc(sizeof(*pxp), GFP_KERNEL);
1411         if (!pxp) {
1412                 dev_err(&pdev->dev, "failed to allocate control object\n");
1413                 err = -ENOMEM;
1414                 goto exit;
1415         }
1416
1417         pxp->dev = &pdev->dev;
1418
1419         platform_set_drvdata(pdev, pxp);
1420         pxp->irq = irq;
1421
1422         pxp->pxp_ongoing = 0;
1423         pxp->lut_state = 0;
1424
1425         spin_lock_init(&pxp->lock);
1426         mutex_init(&pxp->clk_mutex);
1427
1428         if (!request_mem_region(res->start, resource_size(res), "pxp-mem")) {
1429                 err = -EBUSY;
1430                 goto freepxp;
1431         }
1432
1433         pxp->base = ioremap(res->start, SZ_4K);
1434         pxp->pdev = pdev;
1435
1436         pxp->clk = clk_get(NULL, "pxp_axi");
1437         clk_enable(pxp->clk);
1438
1439         err = pxp_hw_init(pxp);
1440         if (err) {
1441                 dev_err(&pdev->dev, "failed to initialize hardware\n");
1442                 goto release;
1443         }
1444         clk_disable(pxp->clk);
1445
1446         err = request_irq(pxp->irq, pxp_irq, 0, "pxp-irq", pxp);
1447         if (err)
1448                 goto release;
1449         /* Initialize DMA engine */
1450         err = pxp_dma_init(pxp);
1451         if (err < 0)
1452                 goto err_dma_init;
1453
1454         if (device_create_file(&pdev->dev, &dev_attr_clk_off_timeout)) {
1455                 dev_err(&pdev->dev,
1456                         "Unable to create file from clk_off_timeout\n");
1457                 goto err_dma_init;
1458         }
1459
1460         INIT_WORK(&pxp->work, clkoff_callback);
1461         init_waitqueue_head(&pxp->done);
1462         init_timer(&pxp->clk_timer);
1463         pxp->clk_timer.function = pxp_clkoff_timer;
1464         pxp->clk_timer.data = (unsigned long)pxp;
1465 exit:
1466         return err;
1467 err_dma_init:
1468         free_irq(pxp->irq, pxp);
1469 release:
1470         release_mem_region(res->start, resource_size(res));
1471 freepxp:
1472         kfree(pxp);
1473         dev_err(&pdev->dev, "Exiting (unsuccessfully) pxp_probe function\n");
1474         return err;
1475 }
1476
1477 static int __devexit pxp_remove(struct platform_device *pdev)
1478 {
1479         struct pxps *pxp = platform_get_drvdata(pdev);
1480
1481         cancel_work_sync(&pxp->work);
1482         del_timer_sync(&pxp->clk_timer);
1483         free_irq(pxp->irq, pxp);
1484         clk_disable(pxp->clk);
1485         clk_put(pxp->clk);
1486         iounmap(pxp->base);
1487         device_remove_file(&pdev->dev, &dev_attr_clk_off_timeout);
1488
1489         kfree(pxp);
1490
1491         return 0;
1492 }
1493
1494 #ifdef CONFIG_PM
1495 static int pxp_suspend(struct platform_device *pdev, pm_message_t state)
1496 {
1497         struct pxps *pxp = platform_get_drvdata(pdev);
1498
1499         pxp_clk_enable(pxp);
1500         while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
1501                 ;
1502
1503         __raw_writel(BM_PXP_CTRL_SFTRST, pxp->base + HW_PXP_CTRL);
1504         pxp_clk_disable(pxp);
1505
1506         return 0;
1507 }
1508
/* Resume: release the soft reset asserted in pxp_suspend(). */
static int pxp_resume(struct platform_device *pdev)
{
        struct pxps *pxp = platform_get_drvdata(pdev);

        pxp_clk_enable(pxp);
        /* Pull PxP out of reset */
        __raw_writel(0, pxp->base + HW_PXP_CTRL);
        /*
         * NOTE(review): only the reset bit is cleared; the defaults from
         * pxp_hw_init() are not re-applied here -- presumably the next
         * transfer reprograms the block via pxp_config().  Confirm.
         */
        pxp_clk_disable(pxp);

        return 0;
}
1520 #else
1521 #define pxp_suspend     NULL
1522 #define pxp_resume      NULL
1523 #endif
1524
1525 static struct platform_driver pxp_driver = {
1526         .driver = {
1527                    .name = "mxc-pxp",
1528                    },
1529         .probe = pxp_probe,
1530         .remove = __exit_p(pxp_remove),
1531         .suspend = pxp_suspend,
1532         .resume = pxp_resume,
1533 };
1534
/*
 * Module init: register the platform driver.  Registered at subsys
 * initcall level -- presumably so client drivers probing later can
 * already find the PxP DMA channels; confirm against board code.
 */
static int __init pxp_init(void)
{
        return platform_driver_register(&pxp_driver);
}

subsys_initcall(pxp_init);
1541
/* Module exit: unregister the platform driver. */
static void __exit pxp_exit(void)
{
        platform_driver_unregister(&pxp_driver);
}

module_exit(pxp_exit);
1548
1549 MODULE_DESCRIPTION("i.MX PxP driver");
1550 MODULE_AUTHOR("Freescale Semiconductor, Inc.");
1551 MODULE_LICENSE("GPL");