2 * Copyright (C) 2010-2011 Freescale Semiconductor, Inc.
4 * This program is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License as published by
6 * the Free Software Foundation; either version 2 of the License, or
7 * (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write to the Free Software
16 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 * Based on STMP378X PxP driver
21 * Copyright 2008-2009 Embedded Alley Solutions, Inc All Rights Reserved.
24 #include <linux/dma-mapping.h>
25 #include <linux/init.h>
26 #include <linux/interrupt.h>
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mutex.h>
31 #include <linux/platform_device.h>
32 #include <linux/slab.h>
33 #include <linux/vmalloc.h>
34 #include <linux/dmaengine.h>
35 #include <linux/pxp_dma.h>
36 #include <linux/timer.h>
37 #include <linux/clk.h>
38 #include <linux/workqueue.h>
39 #include <linux/sched.h>
43 #define PXP_DOWNSCALE_THRESHOLD 0x4000
45 static LIST_HEAD(head);
46 static int timeout_in_ms = 600;
49 struct dma_device dma;
53 struct platform_device *pdev;
56 int irq; /* PXP IRQ to the CPU */
59 struct mutex clk_mutex;
61 #define CLK_STAT_OFF 0
67 struct pxp_dma pxp_dma;
68 struct pxp_channel channel[NR_PXP_VIRT_CHANNEL];
69 wait_queue_head_t done;
70 struct work_struct work;
72 /* describes most recent processing configuration */
73 struct pxp_config_data pxp_conf_state;
75 /* to turn clock off when pxp is inactive */
76 struct timer_list clk_timer;
79 #define to_pxp_dma(d) container_of(d, struct pxp_dma, dma)
80 #define to_tx_desc(tx) container_of(tx, struct pxp_tx_desc, txd)
81 #define to_pxp_channel(d) container_of(d, struct pxp_channel, dma_chan)
82 #define to_pxp(id) container_of(id, struct pxps, pxp_dma)
84 #define PXP_DEF_BUFS 2
87 #define PXP_WAITCON ((__raw_readl(pxp->base + HW_PXP_STAT) & \
88 BM_PXP_STAT_IRQ) != BM_PXP_STAT_IRQ)
90 static uint32_t pxp_s0_formats[] = {
99 * PXP common functions
101 static void dump_pxp_reg(struct pxps *pxp)
103 dev_dbg(pxp->dev, "PXP_CTRL 0x%x",
104 __raw_readl(pxp->base + HW_PXP_CTRL));
105 dev_dbg(pxp->dev, "PXP_STAT 0x%x",
106 __raw_readl(pxp->base + HW_PXP_STAT));
107 dev_dbg(pxp->dev, "PXP_OUTBUF 0x%x",
108 __raw_readl(pxp->base + HW_PXP_OUTBUF));
109 dev_dbg(pxp->dev, "PXP_OUTBUF2 0x%x",
110 __raw_readl(pxp->base + HW_PXP_OUTBUF2));
111 dev_dbg(pxp->dev, "PXP_OUTSIZE 0x%x",
112 __raw_readl(pxp->base + HW_PXP_OUTSIZE));
113 dev_dbg(pxp->dev, "PXP_S0BUF 0x%x",
114 __raw_readl(pxp->base + HW_PXP_S0BUF));
115 dev_dbg(pxp->dev, "PXP_S0UBUF 0x%x",
116 __raw_readl(pxp->base + HW_PXP_S0UBUF));
117 dev_dbg(pxp->dev, "PXP_S0VBUF 0x%x",
118 __raw_readl(pxp->base + HW_PXP_S0VBUF));
119 dev_dbg(pxp->dev, "PXP_S0PARAM 0x%x",
120 __raw_readl(pxp->base + HW_PXP_S0PARAM));
121 dev_dbg(pxp->dev, "PXP_S0BACKGROUND 0x%x",
122 __raw_readl(pxp->base + HW_PXP_S0BACKGROUND));
123 dev_dbg(pxp->dev, "PXP_S0CROP 0x%x",
124 __raw_readl(pxp->base + HW_PXP_S0CROP));
125 dev_dbg(pxp->dev, "PXP_S0SCALE 0x%x",
126 __raw_readl(pxp->base + HW_PXP_S0SCALE));
127 dev_dbg(pxp->dev, "PXP_OLn 0x%x",
128 __raw_readl(pxp->base + HW_PXP_OLn(0)));
129 dev_dbg(pxp->dev, "PXP_OLnSIZE 0x%x",
130 __raw_readl(pxp->base + HW_PXP_OLnSIZE(0)));
131 dev_dbg(pxp->dev, "PXP_OLnPARAM 0x%x",
132 __raw_readl(pxp->base + HW_PXP_OLnPARAM(0)));
133 dev_dbg(pxp->dev, "PXP_CSCCOEF0 0x%x",
134 __raw_readl(pxp->base + HW_PXP_CSCCOEF0));
135 dev_dbg(pxp->dev, "PXP_CSCCOEF1 0x%x",
136 __raw_readl(pxp->base + HW_PXP_CSCCOEF1));
137 dev_dbg(pxp->dev, "PXP_CSCCOEF2 0x%x",
138 __raw_readl(pxp->base + HW_PXP_CSCCOEF2));
139 dev_dbg(pxp->dev, "PXP_CSC2CTRL 0x%x",
140 __raw_readl(pxp->base + HW_PXP_CSC2CTRL));
141 dev_dbg(pxp->dev, "PXP_CSC2COEF0 0x%x",
142 __raw_readl(pxp->base + HW_PXP_CSC2COEF0));
143 dev_dbg(pxp->dev, "PXP_CSC2COEF1 0x%x",
144 __raw_readl(pxp->base + HW_PXP_CSC2COEF1));
145 dev_dbg(pxp->dev, "PXP_CSC2COEF2 0x%x",
146 __raw_readl(pxp->base + HW_PXP_CSC2COEF2));
147 dev_dbg(pxp->dev, "PXP_CSC2COEF3 0x%x",
148 __raw_readl(pxp->base + HW_PXP_CSC2COEF3));
149 dev_dbg(pxp->dev, "PXP_CSC2COEF4 0x%x",
150 __raw_readl(pxp->base + HW_PXP_CSC2COEF4));
151 dev_dbg(pxp->dev, "PXP_CSC2COEF5 0x%x",
152 __raw_readl(pxp->base + HW_PXP_CSC2COEF5));
153 dev_dbg(pxp->dev, "PXP_LUT_CTRL 0x%x",
154 __raw_readl(pxp->base + HW_PXP_LUT_CTRL));
155 dev_dbg(pxp->dev, "PXP_LUT 0x%x", __raw_readl(pxp->base + HW_PXP_LUT));
156 dev_dbg(pxp->dev, "PXP_HIST_CTRL 0x%x",
157 __raw_readl(pxp->base + HW_PXP_HIST_CTRL));
158 dev_dbg(pxp->dev, "PXP_HIST2_PARAM 0x%x",
159 __raw_readl(pxp->base + HW_PXP_HIST2_PARAM));
160 dev_dbg(pxp->dev, "PXP_HIST4_PARAM 0x%x",
161 __raw_readl(pxp->base + HW_PXP_HIST4_PARAM));
162 dev_dbg(pxp->dev, "PXP_HIST8_PARAM0 0x%x",
163 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM0));
164 dev_dbg(pxp->dev, "PXP_HIST8_PARAM1 0x%x",
165 __raw_readl(pxp->base + HW_PXP_HIST8_PARAM1));
166 dev_dbg(pxp->dev, "PXP_HIST16_PARAM0 0x%x",
167 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM0));
168 dev_dbg(pxp->dev, "PXP_HIST16_PARAM1 0x%x",
169 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM1));
170 dev_dbg(pxp->dev, "PXP_HIST16_PARAM2 0x%x",
171 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM2));
172 dev_dbg(pxp->dev, "PXP_HIST16_PARAM3 0x%x",
173 __raw_readl(pxp->base + HW_PXP_HIST16_PARAM3));
/*
 * is_yuv() - return true if @pix_fmt is one of the PxP YUV-family formats.
 *
 * NOTE(review): the comparisons are combined with bitwise '|' rather than
 * logical '||'. Functionally equivalent here since each operand is 0/1,
 * but '||' is the conventional form — confirm against upstream before
 * changing. Also GREY is classified as YUV (treated as luma-only) —
 * presumably intentional; verify with the S0 format handling below.
 */
176 static bool is_yuv(u32 pix_fmt)
178 if ((pix_fmt == PXP_PIX_FMT_YUYV) |
179 (pix_fmt == PXP_PIX_FMT_UYVY) |
180 (pix_fmt == PXP_PIX_FMT_Y41P) |
181 (pix_fmt == PXP_PIX_FMT_YUV444) |
182 (pix_fmt == PXP_PIX_FMT_NV12) |
183 (pix_fmt == PXP_PIX_FMT_GREY) |
184 (pix_fmt == PXP_PIX_FMT_YVU410P) |
185 (pix_fmt == PXP_PIX_FMT_YUV410P) |
186 (pix_fmt == PXP_PIX_FMT_YVU420P) |
187 (pix_fmt == PXP_PIX_FMT_YUV420P) |
188 (pix_fmt == PXP_PIX_FMT_YUV420P2) |
189 (pix_fmt == PXP_PIX_FMT_YVU422P) |
190 (pix_fmt == PXP_PIX_FMT_YUV422P)) {
/*
 * pxp_set_ctrl() - program the HW_PXP_CTRL register from the cached
 * configuration (pxp->pxp_conf_state).
 *
 * Maps the S0 input pixel format and the output pixel format to their
 * BV_PXP_CTRL_*_FORMAT field values, then ORs in crop, scale, flip and
 * rotate control bits before a single write to HW_PXP_CTRL.
 */
197 static void pxp_set_ctrl(struct pxps *pxp)
199 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
200 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
204 /* Configure S0 input format */
205 switch (pxp_conf->s0_param.pixel_fmt) {
206 case PXP_PIX_FMT_RGB24:
207 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB888;
209 case PXP_PIX_FMT_RGB565:
210 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB565;
212 case PXP_PIX_FMT_RGB555:
213 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__RGB555;
/* GREY input shares the YUV420 S0 format setting (luma-only plane). */
215 case PXP_PIX_FMT_YUV420P:
216 case PXP_PIX_FMT_GREY:
217 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__YUV420;
219 case PXP_PIX_FMT_YUV422P:
220 fmt_ctrl = BV_PXP_CTRL_S0_FORMAT__YUV422;
225 ctrl = BF_PXP_CTRL_S0_FORMAT(fmt_ctrl);
227 /* Configure output format based on out_channel format */
228 switch (pxp_conf->out_param.pixel_fmt) {
229 case PXP_PIX_FMT_RGB24:
230 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB888;
232 case PXP_PIX_FMT_RGB565:
233 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB565;
235 case PXP_PIX_FMT_RGB555:
236 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__RGB555;
238 case PXP_PIX_FMT_YUV420P:
239 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__YUV2P420;
241 case PXP_PIX_FMT_YUV422P:
242 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__YUV2P422;
244 case PXP_PIX_FMT_GREY:
245 fmt_ctrl = BV_PXP_CTRL_OUTBUF_FORMAT__MONOC8;
250 ctrl |= BF_PXP_CTRL_OUTBUF_FORMAT(fmt_ctrl);
/* Cropping is always enabled; the crop window is set in pxp_set_s0crop(). */
252 ctrl |= BM_PXP_CTRL_CROP;
254 if (proc_data->scaling)
255 ctrl |= BM_PXP_CTRL_SCALE;
256 if (proc_data->vflip)
257 ctrl |= BM_PXP_CTRL_VFLIP;
258 if (proc_data->hflip)
259 ctrl |= BM_PXP_CTRL_HFLIP;
/* rotate is given in degrees; hardware field encodes multiples of 90. */
260 if (proc_data->rotate)
261 ctrl |= BF_PXP_CTRL_ROTATE(proc_data->rotate / 90);
263 __raw_writel(ctrl, pxp->base + HW_PXP_CTRL);
/*
 * pxp_start() - kick off a PxP operation: enable the completion IRQ and
 * set the block ENABLE bit via the CTRL_SET (write-1-to-set) register.
 */
266 static int pxp_start(struct pxps *pxp)
268 __raw_writel(BM_PXP_CTRL_IRQ_ENABLE, pxp->base + HW_PXP_CTRL_SET);
269 __raw_writel(BM_PXP_CTRL_ENABLE, pxp->base + HW_PXP_CTRL_SET);
/*
 * pxp_set_outbuf() - program the output buffer physical address and the
 * output frame width/height from the cached out_param configuration.
 */
274 static void pxp_set_outbuf(struct pxps *pxp)
276 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
277 struct pxp_layer_param *out_params = &pxp_conf->out_param;
279 __raw_writel(out_params->paddr, pxp->base + HW_PXP_OUTBUF);
281 __raw_writel(BF_PXP_OUTSIZE_WIDTH(out_params->width) |
282 BF_PXP_OUTSIZE_HEIGHT(out_params->height),
283 pxp->base + HW_PXP_OUTSIZE);
/*
 * pxp_set_s0colorkey() - program the S0 (video plane) color key.
 * A color_key of -1 disables keying by setting LOW > HIGH
 * (LOW=0xFFFFFF, HIGH=0) so no pixel can ever match.
 */
286 static void pxp_set_s0colorkey(struct pxps *pxp)
288 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
289 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
291 /* Low and high are set equal. V4L does not allow a chromakey range */
292 if (s0_params->color_key == -1) {
293 /* disable color key */
294 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_S0COLORKEYLOW);
295 __raw_writel(0, pxp->base + HW_PXP_S0COLORKEYHIGH);
297 __raw_writel(s0_params->color_key,
298 pxp->base + HW_PXP_S0COLORKEYLOW);
299 __raw_writel(s0_params->color_key,
300 pxp->base + HW_PXP_S0COLORKEYHIGH);
/*
 * pxp_set_olcolorkey() - program the overlay color key for @layer_no.
 * Enabled only when both color_key_enable is set and color_key != -1;
 * otherwise keying is disabled by writing LOW=0xFFFFFF / HIGH=0.
 * Note the OLCOLORKEY registers are shared, not per-layer — only the
 * last layer configured takes effect (see comment at the call site).
 */
304 static void pxp_set_olcolorkey(int layer_no, struct pxps *pxp)
306 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
307 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[layer_no];
309 /* Low and high are set equal. V4L does not allow a chromakey range */
310 if (ol_params->color_key_enable != 0 && ol_params->color_key != -1) {
311 __raw_writel(ol_params->color_key,
312 pxp->base + HW_PXP_OLCOLORKEYLOW);
313 __raw_writel(ol_params->color_key,
314 pxp->base + HW_PXP_OLCOLORKEYHIGH);
316 /* disable color key */
317 __raw_writel(0xFFFFFF, pxp->base + HW_PXP_OLCOLORKEYLOW);
318 __raw_writel(0, pxp->base + HW_PXP_OLCOLORKEYHIGH);
/*
 * pxp_set_oln() - program overlay @layer_no buffer address and size.
 * Width/height are written in units of 8-pixel blocks (>> 3), which is
 * the granularity the OLnSIZE register expects.
 */
322 static void pxp_set_oln(int layer_no, struct pxps *pxp)
324 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
325 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
326 dma_addr_t phys_addr = olparams_data->paddr;
327 __raw_writel(phys_addr, pxp->base + HW_PXP_OLn(layer_no));
330 __raw_writel(BF_PXP_OLnSIZE_WIDTH(olparams_data->width >> 3) |
331 BF_PXP_OLnSIZE_HEIGHT(olparams_data->height >> 3),
332 pxp->base + HW_PXP_OLnSIZE(layer_no));
/*
 * pxp_set_olparam() - program the OLnPARAM register for overlay
 * @layer_no: global alpha value, pixel format (RGB888 or RGB565),
 * alpha-override mode, color-key enable and overall layer enable.
 */
335 static void pxp_set_olparam(int layer_no, struct pxps *pxp)
337 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
338 struct pxp_layer_param *olparams_data = &pxp_conf->ol_param[layer_no];
341 olparam = BF_PXP_OLnPARAM_ALPHA(olparams_data->global_alpha);
/* Only RGB888 and RGB565 overlay formats are handled; anything else
 * falls through to RGB565 — presumably the intended default. */
342 if (olparams_data->pixel_fmt == PXP_PIX_FMT_RGB24)
344 BF_PXP_OLnPARAM_FORMAT(BV_PXP_OLnPARAM_FORMAT__RGB888);
347 BF_PXP_OLnPARAM_FORMAT(BV_PXP_OLnPARAM_FORMAT__RGB565);
/* "Override" makes the global alpha replace any per-pixel alpha. */
348 if (olparams_data->global_alpha_enable)
350 BF_PXP_OLnPARAM_ALPHA_CNTL
351 (BV_PXP_OLnPARAM_ALPHA_CNTL__Override);
352 if (olparams_data->color_key_enable)
353 olparam |= BM_PXP_OLnPARAM_ENABLE_COLORKEY;
354 if (olparams_data->combine_enable)
355 olparam |= BM_PXP_OLnPARAM_ENABLE;
356 __raw_writel(olparam, pxp->base + HW_PXP_OLnPARAM(layer_no));
/*
 * pxp_set_s0param() - program S0PARAM: position of the S0 frame inside
 * the output (drect left/top) and the S0 frame dimensions. All four
 * fields are in 8-pixel block units (>> 3).
 */
359 static void pxp_set_s0param(struct pxps *pxp)
361 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
362 struct pxp_layer_param *s0params_data = &pxp_conf->s0_param;
363 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
366 s0param = BF_PXP_S0PARAM_XBASE(proc_data->drect.left >> 3);
367 s0param |= BF_PXP_S0PARAM_YBASE(proc_data->drect.top >> 3);
368 s0param |= BF_PXP_S0PARAM_WIDTH(s0params_data->width >> 3);
369 s0param |= BF_PXP_S0PARAM_HEIGHT(s0params_data->height >> 3);
370 __raw_writel(s0param, pxp->base + HW_PXP_S0PARAM);
/*
 * pxp_set_s0crop() - program the S0 crop window: origin from the source
 * rect (srect), size from the destination rect (drect), all in 8-pixel
 * block units. Note the mixed srect/drect use — the crop output size
 * matches the destination after scaling.
 */
373 static void pxp_set_s0crop(struct pxps *pxp)
376 struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
378 s0crop = BF_PXP_S0CROP_XBASE(proc_data->srect.left >> 3);
379 s0crop |= BF_PXP_S0CROP_YBASE(proc_data->srect.top >> 3);
380 s0crop |= BF_PXP_S0CROP_WIDTH(proc_data->drect.width >> 3);
381 s0crop |= BF_PXP_S0CROP_HEIGHT(proc_data->drect.height >> 3);
382 __raw_writel(s0crop, pxp->base + HW_PXP_S0CROP);
/*
 * pxp_set_scaling() - compute and program the S0SCALE register, and set
 * proc_data->scaling accordingly.
 *
 * Scaling is supported only for YUV420P/YUV422P input; other formats
 * force scaling off. A 1:1 src/dst size also disables scaling and
 * writes the identity factor 0x1000 (4.12 fixed point) for both axes.
 * Downscale factors are clamped to PXP_DOWNSCALE_THRESHOLD (0x4000,
 * i.e. at most 4:1 reduction per axis).
 */
385 static int pxp_set_scaling(struct pxps *pxp)
388 u32 xscale, yscale, s0scale;
389 struct pxp_proc_data *proc_data = &pxp->pxp_conf_state.proc_data;
390 struct pxp_layer_param *s0params_data = &pxp->pxp_conf_state.s0_param;
392 if ((s0params_data->pixel_fmt != PXP_PIX_FMT_YUV420P) &&
393 (s0params_data->pixel_fmt != PXP_PIX_FMT_YUV422P)) {
394 proc_data->scaling = 0;
399 if ((proc_data->srect.width == proc_data->drect.width) &&
400 (proc_data->srect.height == proc_data->drect.height)) {
401 proc_data->scaling = 0;
/* 0x1000 in each 16-bit half = 1.0 scale factor for X and Y. */
402 __raw_writel(0x10001000, pxp->base + HW_PXP_S0SCALE);
406 proc_data->scaling = 1;
/* Ratio in 4.12 fixed point: src * 0x1000 / dst. */
407 xscale = proc_data->srect.width * 0x1000 / proc_data->drect.width;
408 yscale = proc_data->srect.height * 0x1000 / proc_data->drect.height;
409 if (xscale > PXP_DOWNSCALE_THRESHOLD)
410 xscale = PXP_DOWNSCALE_THRESHOLD;
411 if (yscale > PXP_DOWNSCALE_THRESHOLD)
412 yscale = PXP_DOWNSCALE_THRESHOLD;
413 s0scale = BF_PXP_S0SCALE_YSCALE(yscale) | BF_PXP_S0SCALE_XSCALE(xscale);
414 __raw_writel(s0scale, pxp->base + HW_PXP_S0SCALE);
/*
 * pxp_set_bg() - program the S0 background color, shown wherever the
 * (possibly cropped/scaled) S0 frame does not cover the output.
 */
422 static void pxp_set_bg(struct pxps *pxp)
424 __raw_writel(pxp->pxp_conf_state.proc_data.bgcolor,
425 pxp->base + HW_PXP_S0BACKGROUND);
/*
 * pxp_set_lut() - (re)program the 256-entry PxP lookup table from the
 * requested transform (proc_data->lut_transform).
 *
 * Supported transforms, checked in priority order:
 *   INVERT+BLACK_WHITE - inverted 1-bit threshold (monochromize, invert)
 *   INVERT             - 8-bit inversion
 *   BLACK_WHITE        - 1-bit threshold at 0x80
 *   USE_CMAP only      - raw colormap passthrough
 *   NONE               - LUT bypassed entirely
 * When USE_CMAP is set, proc_data->lut_map supplies the input values;
 * otherwise the LUT's own auto-incrementing address is used as identity
 * input. The result is cached in pxp->lut_state so an unchanged
 * configuration is skipped (unless the cmap itself was updated).
 */
428 static void pxp_set_lut(struct pxps *pxp)
430 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
431 int lut_op = pxp_conf->proc_data.lut_transform;
434 bool use_cmap = (lut_op & PXP_LUT_USE_CMAP) ? true : false;
435 u8 *cmap = pxp_conf->proc_data.lut_map;
439 * If LUT already configured as needed, return...
440 * Unless CMAP is needed and it has been updated.
442 if ((pxp->lut_state == lut_op) &&
443 !(use_cmap && pxp_conf->proc_data.lut_map_updated))
446 if (lut_op == PXP_LUT_NONE) {
447 __raw_writel(BM_PXP_LUT_CTRL_BYPASS,
448 pxp->base + HW_PXP_LUT_CTRL);
449 } else if (((lut_op & PXP_LUT_INVERT) != 0)
450 && ((lut_op & PXP_LUT_BLACK_WHITE) != 0)) {
451 /* Fill out LUT table with inverted monochromized values */
453 /* Initialize LUT address to 0 and clear bypass bit */
454 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
456 /* LUT address pointer auto-increments after each data write */
457 for (i = 0; i < 256; i++) {
459 __raw_readl(pxp->base +
460 HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
461 entry_src = use_cmap ? cmap[i] : reg_val;
/* threshold at mid-scale, then invert: <0x80 -> 0xFF, else 0x00 */
462 reg_val = (entry_src < 0x80) ? 0x00 : 0xFF;
463 reg_val = ~reg_val & BM_PXP_LUT_DATA;
464 __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
466 } else if ((lut_op & PXP_LUT_INVERT) != 0) {
467 /* Fill out LUT table with 8-bit inverted values */
469 /* Initialize LUT address to 0 and clear bypass bit */
470 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
472 /* LUT address pointer auto-increments after each data write */
473 for (i = 0; i < 256; i++) {
475 __raw_readl(pxp->base +
476 HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
477 entry_src = use_cmap ? cmap[i] : reg_val;
478 reg_val = ~entry_src & BM_PXP_LUT_DATA;
479 __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
481 } else if ((lut_op & PXP_LUT_BLACK_WHITE) != 0) {
482 /* Fill out LUT table with 8-bit monochromized values */
484 /* Initialize LUT address to 0 and clear bypass bit */
485 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
487 /* LUT address pointer auto-increments after each data write */
488 for (i = 0; i < 256; i++) {
490 __raw_readl(pxp->base +
491 HW_PXP_LUT_CTRL) & BM_PXP_LUT_CTRL_ADDR;
492 entry_src = use_cmap ? cmap[i] : reg_val;
493 reg_val = (entry_src < 0x80) ? 0x00 : 0xFF;
494 reg_val = reg_val & BM_PXP_LUT_DATA;
495 __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
497 } else if (use_cmap) {
498 /* Fill out LUT table using colormap values */
500 /* Initialize LUT address to 0 and clear bypass bit */
501 __raw_writel(0, pxp->base + HW_PXP_LUT_CTRL);
503 /* LUT address pointer auto-increments after each data write */
504 for (i = 0; i < 256; i++) {
505 reg_val = cmap[i] & BM_PXP_LUT_DATA;
506 __raw_writel(reg_val, pxp->base + HW_PXP_LUT);
/* Remember what was programmed so unchanged configs are skipped. */
510 pxp->lut_state = lut_op;
/*
 * pxp_set_csc() - configure the two color-space converter stages based
 * on whether S0 input and the output are YUV or RGB:
 *
 *   YUV -> YUV : bypass both CSCs, UNLESS an RGB overlay must be
 *                combined, in which case CSC1 does YUV->RGB and CSC2
 *                converts back RGB->YUV.
 *   YUV -> RGB : CSC1 does YUV->RGB, CSC2 bypassed.
 *   RGB -> YUV : CSC1 bypassed, CSC2 does RGB->YUV.
 *   RGB -> RGB : both bypassed.
 *
 * The magic coefficient constants are the fixed-point conversion
 * matrices for the PxP CSCCOEF/CSC2COEF register layout (see the i.MX
 * reference manual). Writing CSC2CTRL=0x1 sets its bypass bit; 0x4
 * selects the RGB->YUV mode. CSCCOEF0=0x40000000 sets CSC1 bypass.
 */
513 static void pxp_set_csc(struct pxps *pxp)
515 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
516 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
517 struct pxp_layer_param *ol_params = &pxp_conf->ol_param[0];
518 struct pxp_layer_param *out_params = &pxp_conf->out_param;
520 bool input_is_YUV = is_yuv(s0_params->pixel_fmt);
521 bool output_is_YUV = is_yuv(out_params->pixel_fmt);
523 if (input_is_YUV && output_is_YUV) {
525 * Input = YUV, Output = YUV
526 * No CSC unless we need to do combining
528 if (ol_params->combine_enable) {
529 /* Must convert to RGB for combining with RGB overlay */
531 /* CSC1 - YUV->RGB */
532 __raw_writel(0x04030000, pxp->base + HW_PXP_CSCCOEF0);
533 __raw_writel(0x01230208, pxp->base + HW_PXP_CSCCOEF1);
534 __raw_writel(0x076b079c, pxp->base + HW_PXP_CSCCOEF2);
536 /* CSC2 - RGB->YUV */
537 __raw_writel(0x4, pxp->base + HW_PXP_CSC2CTRL);
538 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2COEF0);
539 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2COEF1);
540 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2COEF2);
541 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2COEF3);
542 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2COEF4);
543 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2COEF5);
545 /* Input & Output both YUV, so bypass both CSCs */
548 __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
551 __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
553 } else if (input_is_YUV && !output_is_YUV) {
555 * Input = YUV, Output = RGB
556 * Use CSC1 to convert to RGB
559 /* CSC1 - YUV->RGB */
560 __raw_writel(0x84ab01f0, pxp->base + HW_PXP_CSCCOEF0);
561 __raw_writel(0x01230204, pxp->base + HW_PXP_CSCCOEF1);
562 __raw_writel(0x0730079c, pxp->base + HW_PXP_CSCCOEF2);
565 __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
566 } else if (!input_is_YUV && output_is_YUV) {
568 * Input = RGB, Output = YUV
569 * Use CSC2 to convert to YUV
573 __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
575 /* CSC2 - RGB->YUV */
576 __raw_writel(0x4, pxp->base + HW_PXP_CSC2CTRL);
577 __raw_writel(0x0096004D, pxp->base + HW_PXP_CSC2COEF0);
578 __raw_writel(0x05DA001D, pxp->base + HW_PXP_CSC2COEF1);
579 __raw_writel(0x007005B6, pxp->base + HW_PXP_CSC2COEF2);
580 __raw_writel(0x057C009E, pxp->base + HW_PXP_CSC2COEF3);
581 __raw_writel(0x000005E6, pxp->base + HW_PXP_CSC2COEF4);
582 __raw_writel(0x00000000, pxp->base + HW_PXP_CSC2COEF5);
585 * Input = RGB, Output = RGB
586 * Input & Output both RGB, so bypass both CSCs
590 __raw_writel(0x40000000, pxp->base + HW_PXP_CSCCOEF0);
593 __raw_writel(0x1, pxp->base + HW_PXP_CSC2CTRL);
/* NOTE(review): the block below uses HW_PXP_CSCCOEFF*_ADDR absolute
 * addresses, unlike every other write (pxp->base + offset) — it looks
 * like dead/disabled reference code; confirm against upstream. */
596 /* YCrCb colorspace */
597 /* Not sure when we use this...no YCrCb formats are defined for PxP */
599 __raw_writel(0x84ab01f0, HW_PXP_CSCCOEFF0_ADDR);
600 __raw_writel(0x01230204, HW_PXP_CSCCOEFF1_ADDR);
601 __raw_writel(0x0730079c, HW_PXP_CSCCOEFF2_ADDR);
/*
 * pxp_set_s0buf() - program the S0 plane base addresses. The Y (luma)
 * address is always written; for planar 4:2:0 formats (and GREY, which
 * shares the path) the U and V plane addresses are derived from the
 * frame dimensions assuming contiguous Y/U/V planes in one buffer.
 */
606 static void pxp_set_s0buf(struct pxps *pxp)
608 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
609 struct pxp_layer_param *s0_params = &pxp_conf->s0_param;
612 Y = s0_params->paddr;
613 __raw_writel(Y, pxp->base + HW_PXP_S0BUF);
614 if ((s0_params->pixel_fmt == PXP_PIX_FMT_YUV420P) ||
615 (s0_params->pixel_fmt == PXP_PIX_FMT_YVU420P) ||
616 (s0_params->pixel_fmt == PXP_PIX_FMT_GREY)) {
617 /* Set to 1 if YUV format is 4:2:2 rather than 4:2:0 */
/* U plane follows Y plane; V plane follows the (subsampled) U plane.
 * 's' is the chroma shift (2 for 4:2:0) — declared on an elided line. */
619 U = Y + (s0_params->width * s0_params->height);
620 V = U + ((s0_params->width * s0_params->height) >> s);
621 __raw_writel(U, pxp->base + HW_PXP_S0UBUF);
622 __raw_writel(V, pxp->base + HW_PXP_S0VBUF);
627 * pxp_config() - configure PxP for a processing task
628 * @pxps: PXP context.
629 * @pxp_chan: PXP channel.
630 * @return: 0 on success or negative error code on failure.
632 static int pxp_config(struct pxps *pxp, struct pxp_channel *pxp_chan)
634 struct pxp_config_data *pxp_conf_data = &pxp->pxp_conf_state;
638 /* Configure PxP regs */
640 pxp_set_s0param(pxp);
642 pxp_set_scaling(pxp);
/* Overlay layer count = total layers minus S0 and output. */
643 ol_nr = pxp_conf_data->layer_nr - 2;
/* Iterate overlays highest-index first (ol_nr is decremented in an
 * elided loop line); i recovers the 0-based overlay index. */
645 i = pxp_conf_data->layer_nr - 2 - ol_nr;
647 pxp_set_olparam(i, pxp);
648 /* only the color key in higher overlay will take effect. */
649 pxp_set_olcolorkey(i, pxp);
652 pxp_set_s0colorkey(pxp);
/*
 * pxp_clk_enable() - turn the PxP clock on if it is not already on.
 * clk_mutex serializes against pxp_clk_disable(); clk_stat tracks the
 * current state so clk_enable() is never called twice in a row.
 */
663 static void pxp_clk_enable(struct pxps *pxp)
665 mutex_lock(&pxp->clk_mutex);
667 if (pxp->clk_stat == CLK_STAT_ON) {
668 mutex_unlock(&pxp->clk_mutex);
672 clk_enable(pxp->clk);
673 pxp->clk_stat = CLK_STAT_ON;
675 mutex_unlock(&pxp->clk_mutex);
/*
 * pxp_clk_disable() - turn the PxP clock off, but only when no
 * operation is ongoing and no channel is queued on the global list.
 * The spinlock-protected check prevents racing with pxp_issue_pending()
 * / the IRQ handler, which modify pxp_ongoing and the 'head' list.
 */
678 static void pxp_clk_disable(struct pxps *pxp)
682 mutex_lock(&pxp->clk_mutex);
684 if (pxp->clk_stat == CLK_STAT_OFF) {
685 mutex_unlock(&pxp->clk_mutex);
689 spin_lock_irqsave(&pxp->lock, flags);
690 if ((pxp->pxp_ongoing == 0) && list_empty(&head)) {
691 spin_unlock_irqrestore(&pxp->lock, flags);
692 clk_disable(pxp->clk);
693 pxp->clk_stat = CLK_STAT_OFF;
695 spin_unlock_irqrestore(&pxp->lock, flags);
697 mutex_unlock(&pxp->clk_mutex);
/*
 * clkoff_callback() - workqueue callback scheduled by the clock-off
 * timer; runs pxp_clk_disable() in process context (it takes a mutex,
 * which cannot be done from the timer's atomic context).
 */
700 static inline void clkoff_callback(struct work_struct *w)
702 struct pxps *pxp = container_of(w, struct pxps, work);
704 pxp_clk_disable(pxp);
/*
 * pxp_clkoff_timer() - periodic inactivity timer. If the PxP is idle
 * (nothing ongoing, queue empty) schedule the clock-off work; otherwise
 * re-arm the timer for another timeout_in_ms interval.
 */
707 static void pxp_clkoff_timer(unsigned long arg)
709 struct pxps *pxp = (struct pxps *)arg;
711 if ((pxp->pxp_ongoing == 0) && list_empty(&head))
712 schedule_work(&pxp->work);
714 mod_timer(&pxp->clk_timer,
715 jiffies + msecs_to_jiffies(timeout_in_ms));
/* Return the first descriptor on the channel's active list (caller must
 * ensure the list is non-empty and hold the appropriate lock). */
718 static struct pxp_tx_desc *pxpdma_first_active(struct pxp_channel *pxp_chan)
720 return list_entry(pxp_chan->active_list.next, struct pxp_tx_desc, list);
/* Return the first descriptor on the channel's submit queue (same
 * preconditions as above). */
723 static struct pxp_tx_desc *pxpdma_first_queued(struct pxp_channel *pxp_chan)
725 return list_entry(pxp_chan->queue.next, struct pxp_tx_desc, list);
728 /* called with pxp_chan->lock held */
/*
 * __pxpdma_dostart() - snapshot the first active transaction's layer
 * parameters into the device-wide pxp_conf_state. The head descriptor
 * carries S0 + proc_data; its tx_list children carry the output layer
 * (first child, i == 0) followed by overlay layers (i - 1 indexes
 * ol_param[]). 'i' is incremented on an elided line in the loop.
 */
729 static void __pxpdma_dostart(struct pxp_channel *pxp_chan)
731 struct pxp_dma *pxp_dma = to_pxp_dma(pxp_chan->dma_chan.device);
732 struct pxps *pxp = to_pxp(pxp_dma);
733 struct pxp_tx_desc *desc;
734 struct pxp_tx_desc *child;
737 /* so far we presume only one transaction on active_list */
739 desc = pxpdma_first_active(pxp_chan);
740 memcpy(&pxp->pxp_conf_state.s0_param,
741 &desc->layer_param.s0_param, sizeof(struct pxp_layer_param));
742 memcpy(&pxp->pxp_conf_state.proc_data,
743 &desc->proc_data, sizeof(struct pxp_proc_data));
745 /* Save PxP configuration */
746 list_for_each_entry(child, &desc->tx_list, list) {
747 if (i == 0) { /* Output */
748 memcpy(&pxp->pxp_conf_state.out_param,
749 &child->layer_param.out_param,
750 sizeof(struct pxp_layer_param));
751 } else { /* Overlay */
752 memcpy(&pxp->pxp_conf_state.ol_param[i - 1],
753 &child->layer_param.ol_param,
754 sizeof(struct pxp_layer_param));
759 pr_debug("%s:%d S0 w/h %d/%d paddr %08x\n", __func__, __LINE__,
760 pxp->pxp_conf_state.s0_param.width,
761 pxp->pxp_conf_state.s0_param.height,
762 pxp->pxp_conf_state.s0_param.paddr);
763 pr_debug("%s:%d OUT w/h %d/%d paddr %08x\n", __func__, __LINE__,
764 pxp->pxp_conf_state.out_param.width,
765 pxp->pxp_conf_state.out_param.height,
766 pxp->pxp_conf_state.out_param.paddr);
/*
 * pxpdma_dostart_work() - start the next queued PxP task.
 * Busy-waits for the hardware ENABLE bit to clear, then takes the head
 * channel off the global list, loads its configuration into hardware
 * (pxp_config) and starts it. If no channel is pending, clears
 * pxp_ongoing and returns. Called with pxp_ongoing already set by
 * pxp_issue_pending().
 */
769 static void pxpdma_dostart_work(struct pxps *pxp)
771 struct pxp_channel *pxp_chan = NULL;
772 unsigned long flags, flags1;
/* Spin until the previous operation's ENABLE bit is deasserted. */
774 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
777 spin_lock_irqsave(&pxp->lock, flags);
778 if (list_empty(&head)) {
779 pxp->pxp_ongoing = 0;
780 spin_unlock_irqrestore(&pxp->lock, flags);
784 pxp_chan = list_entry(head.next, struct pxp_channel, list);
786 spin_lock_irqsave(&pxp_chan->lock, flags1);
787 if (!list_empty(&pxp_chan->active_list)) {
788 struct pxp_tx_desc *desc;
790 desc = pxpdma_first_active(pxp_chan);
791 __pxpdma_dostart(pxp_chan);
793 spin_unlock_irqrestore(&pxp_chan->lock, flags1);
796 pxp_config(pxp, pxp_chan);
800 spin_unlock_irqrestore(&pxp->lock, flags);
/*
 * pxpdma_dequeue() - move every descriptor from the channel's submit
 * queue onto @list (typically the active list). Caller holds
 * pxp_chan->lock; the queue must be non-empty on entry (do/while).
 */
803 static void pxpdma_dequeue(struct pxp_channel *pxp_chan, struct list_head *list)
805 struct pxp_tx_desc *desc = NULL;
807 desc = pxpdma_first_queued(pxp_chan);
808 list_move_tail(&desc->list, list);
809 } while (!list_empty(&pxp_chan->queue));
/*
 * pxp_tx_submit() - dmaengine .tx_submit hook: assign the next cookie
 * to this transaction and append its descriptor to the channel's
 * submit queue. Returns the assigned cookie. Cookie increment/wrap
 * handling sits on lines elided from this excerpt.
 */
812 static dma_cookie_t pxp_tx_submit(struct dma_async_tx_descriptor *tx)
814 struct pxp_tx_desc *desc = to_tx_desc(tx);
815 struct pxp_channel *pxp_chan = to_pxp_channel(tx->chan);
819 dev_dbg(&pxp_chan->dma_chan.dev->device, "received TX\n");
821 mutex_lock(&pxp_chan->chan_mutex);
823 cookie = pxp_chan->dma_chan.cookie;
828 /* from dmaengine.h: "last cookie value returned to client" */
829 pxp_chan->dma_chan.cookie = cookie;
832 /* pxp_chan->lock can be taken under ichan->lock, but not v.v. */
833 spin_lock_irqsave(&pxp_chan->lock, flags);
835 /* Here we add the tx descriptor to our PxP task queue. */
836 list_add_tail(&desc->list, &pxp_chan->queue);
838 spin_unlock_irqrestore(&pxp_chan->lock, flags);
840 dev_dbg(&pxp_chan->dma_chan.dev->device, "done TX\n");
842 mutex_unlock(&pxp_chan->chan_mutex);
846 /* Called with pxp_chan->chan_mutex held */
/*
 * pxp_desc_alloc() - allocate @n tx descriptors for a channel in one
 * vmalloc block, initialize each as a dmaengine descriptor with
 * pxp_tx_submit as its submit hook, and place all on the free list.
 * Returns 0 on success (error path for a failed vmalloc is on elided
 * lines). Freed as a single block by pxp_uninit_channel().
 */
847 static int pxp_desc_alloc(struct pxp_channel *pxp_chan, int n)
849 struct pxp_tx_desc *desc = vmalloc(n * sizeof(struct pxp_tx_desc));
854 pxp_chan->n_tx_desc = n;
855 pxp_chan->desc = desc;
856 INIT_LIST_HEAD(&pxp_chan->active_list);
857 INIT_LIST_HEAD(&pxp_chan->queue);
858 INIT_LIST_HEAD(&pxp_chan->free_list);
861 struct dma_async_tx_descriptor *txd = &desc->txd;
863 memset(txd, 0, sizeof(*txd));
864 INIT_LIST_HEAD(&desc->tx_list);
865 dma_async_tx_descriptor_init(txd, &pxp_chan->dma_chan);
866 txd->tx_submit = pxp_tx_submit;
868 list_add(&desc->list, &pxp_chan->free_list);
877 * pxp_init_channel() - initialize a PXP channel.
878 * @pxp_dma: PXP DMA context.
879 * @pchan: pointer to the channel object.
880 * @return 0 on success or negative error code on failure.
882 static int pxp_init_channel(struct pxp_dma *pxp_dma,
883 struct pxp_channel *pxp_chan)
886 struct pxps *pxp = to_pxp(pxp_dma);
887 int ret = 0, n_desc = 0;
890 * We are using _virtual_ channel here.
891 * Each channel contains all parameters of corresponding layers
892 * for one transaction; each layer is represented as one descriptor
893 * (i.e., pxp_tx_desc) here.
/* n_desc is set under the lock on an elided line (max S0+OL+OUT = 10). */
896 spin_lock_irqsave(&pxp->lock, flags);
898 /* max desc nr: S0+OL+OUT = 1+8+1 */
901 spin_unlock_irqrestore(&pxp->lock, flags);
/* Allocate descriptors only once per channel lifetime. */
903 if (n_desc && !pxp_chan->desc)
904 ret = pxp_desc_alloc(pxp_chan, n_desc);
910 * pxp_uninit_channel() - uninitialize a PXP channel.
911 * @pxp_dma: PXP DMA context.
912 * @pchan: pointer to the channel object.
913 * @return 0 on success or negative error code on failure.
915 static int pxp_uninit_channel(struct pxp_dma *pxp_dma,
916 struct pxp_channel *pxp_chan)
/* Frees the single vmalloc block from pxp_desc_alloc() and clears the
 * pointer so pxp_init_channel() can safely allocate again. */
921 vfree(pxp_chan->desc);
923 pxp_chan->desc = NULL;
/*
 * pxp_irq() - PxP completion interrupt handler.
 * Captures the histogram status, acknowledges the IRQ (write-1-to-clear
 * on STAT_CLR), then completes the head channel's active transaction:
 * records the cookie, stores hist_status into the descriptor, invokes
 * the client callback if requested, and recycles all descriptors to
 * the free list. Finally clears pxp_ongoing and re-arms the clock-off
 * timer. (The wake of pxp->done and IRQ_HANDLED return are on elided
 * lines.)
 */
928 static irqreturn_t pxp_irq(int irq, void *dev_id)
930 struct pxps *pxp = dev_id;
931 struct pxp_channel *pxp_chan;
932 struct pxp_tx_desc *desc;
933 dma_async_tx_callback callback;
934 void *callback_param;
941 __raw_readl(pxp->base + HW_PXP_HIST_CTRL) & BM_PXP_HIST_CTRL_STATUS;
943 __raw_writel(BM_PXP_STAT_IRQ, pxp->base + HW_PXP_STAT_CLR);
945 spin_lock_irqsave(&pxp->lock, flags);
947 if (list_empty(&head)) {
948 pxp->pxp_ongoing = 0;
949 spin_unlock_irqrestore(&pxp->lock, flags);
953 pxp_chan = list_entry(head.next, struct pxp_channel, list);
954 list_del_init(&pxp_chan->list);
956 if (list_empty(&pxp_chan->active_list)) {
957 pr_debug("PXP_IRQ pxp_chan->active_list empty. chan_id %d\n",
958 pxp_chan->dma_chan.chan_id);
959 pxp->pxp_ongoing = 0;
960 spin_unlock_irqrestore(&pxp->lock, flags);
964 /* Get descriptor and call callback */
965 desc = pxpdma_first_active(pxp_chan);
967 pxp_chan->completed = desc->txd.cookie;
969 callback = desc->txd.callback;
970 callback_param = desc->txd.callback_param;
972 /* Send histogram status back to caller */
973 desc->hist_status = hist_status;
975 if ((desc->txd.flags & DMA_PREP_INTERRUPT) && callback)
976 callback(callback_param);
978 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/* Return the head descriptor and all its children to the free list. */
980 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
981 list_move(&desc->list, &pxp_chan->free_list);
984 pxp->pxp_ongoing = 0;
985 mod_timer(&pxp->clk_timer, jiffies + msecs_to_jiffies(timeout_in_ms));
987 spin_unlock_irqrestore(&pxp->lock, flags);
992 /* called with pxp_chan->lock held */
/*
 * pxpdma_desc_get() - pop one descriptor off the channel's free list,
 * or NULL if the list is empty. The returned descriptor is detached
 * (list_del_init). The break/return after the first hit is on elided
 * lines.
 */
993 static struct pxp_tx_desc *pxpdma_desc_get(struct pxp_channel *pxp_chan)
995 struct pxp_tx_desc *desc, *_desc;
996 struct pxp_tx_desc *ret = NULL;
998 list_for_each_entry_safe(desc, _desc, &pxp_chan->free_list, list) {
999 list_del_init(&desc->list);
1007 /* called with pxp_chan->lock held */
/*
 * pxpdma_desc_put() - return @desc and all of its tx_list children to
 * the channel's free list (used on allocation-failure cleanup in
 * pxp_prep_slave_sg). The dev_info logging of each move looks noisy
 * for a non-error path — presumably left over from debugging.
 */
1008 static void pxpdma_desc_put(struct pxp_channel *pxp_chan,
1009 struct pxp_tx_desc *desc)
1012 struct device *dev = &pxp_chan->dma_chan.dev->device;
1013 struct pxp_tx_desc *child;
1015 list_for_each_entry(child, &desc->tx_list, list)
1016 dev_info(dev, "moving child desc %p to freelist\n", child);
1017 list_splice_init(&desc->tx_list, &pxp_chan->free_list);
1018 dev_info(dev, "moving desc %p to freelist\n", desc);
1019 list_add(&desc->list, &pxp_chan->free_list);
1023 /* Allocate and initialise a transfer descriptor. */
/*
 * pxp_prep_slave_sg() - dmaengine .device_prep_slave_sg hook.
 * Builds one pxp_tx_desc per scatterlist entry: the first entry is the
 * S0 (video) layer and becomes the transaction head; the second is the
 * output layer; any further entries are overlays, all chained on the
 * head's tx_list. Requires sg_len >= 2 (S0 + output minimum) and a
 * valid DMA direction. Returns &first->txd, or NULL on bad input /
 * descriptor exhaustion (error returns are on elided lines).
 */
1024 static struct dma_async_tx_descriptor *pxp_prep_slave_sg(struct dma_chan *chan,
1027 unsigned int sg_len,
1028 enum dma_data_direction
1030 unsigned long tx_flags)
1032 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1033 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1034 struct pxps *pxp = to_pxp(pxp_dma);
1035 struct pxp_tx_desc *desc = NULL;
1036 struct pxp_tx_desc *first = NULL, *prev = NULL;
1037 struct scatterlist *sg;
1038 unsigned long flags;
1039 dma_addr_t phys_addr;
1042 if (direction != DMA_FROM_DEVICE && direction != DMA_TO_DEVICE) {
1043 dev_err(chan->device->dev, "Invalid DMA direction %d!\n",
1048 if (unlikely(sg_len < 2))
1051 spin_lock_irqsave(&pxp_chan->lock, flags);
1052 for_each_sg(sgl, sg, sg_len, i) {
1053 desc = pxpdma_desc_get(pxp_chan);
/* Out of descriptors: return everything taken so far and bail out. */
1055 pxpdma_desc_put(pxp_chan, first);
1056 dev_err(chan->device->dev, "Can't get DMA desc.\n");
1057 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1061 phys_addr = sg_dma_address(sg);
1066 desc->layer_param.s0_param.paddr = phys_addr;
1068 list_add_tail(&desc->list, &first->tx_list);
1073 desc->layer_param.out_param.paddr = phys_addr;
1075 desc->layer_param.ol_param.paddr = phys_addr;
1080 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1082 pxp->pxp_conf_state.layer_nr = sg_len;
1083 first->txd.flags = tx_flags;
1084 first->len = sg_len;
1085 pr_debug("%s:%d first %p, first->len %d, flags %08x\n",
1086 __func__, __LINE__, first, first->len, first->txd.flags);
/*
 * pxp_issue_pending() - dmaengine .device_issue_pending hook.
 * Moves the channel's queued descriptors to its active list, links the
 * channel onto the global 'head' list, then (with both locks dropped)
 * enables the clock, waits for any in-flight operation to finish
 * (PXP_WAITCON polls the STAT IRQ bit), marks pxp_ongoing and starts
 * the next task via pxpdma_dostart_work(). If the queue is empty the
 * function returns early without touching the hardware.
 */
1091 static void pxp_issue_pending(struct dma_chan *chan)
1093 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1094 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1095 struct pxps *pxp = to_pxp(pxp_dma);
1096 unsigned long flags0, flags;
/* Lock order: device lock first, then channel lock (cf. pxp_tx_submit). */
1098 spin_lock_irqsave(&pxp->lock, flags0);
1099 spin_lock_irqsave(&pxp_chan->lock, flags);
1101 if (!list_empty(&pxp_chan->queue)) {
1102 pxpdma_dequeue(pxp_chan, &pxp_chan->active_list);
1103 pxp_chan->status = PXP_CHANNEL_READY;
1104 list_add_tail(&pxp_chan->list, &head);
1106 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1107 spin_unlock_irqrestore(&pxp->lock, flags0);
1110 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1111 spin_unlock_irqrestore(&pxp->lock, flags0);
1113 pxp_clk_enable(pxp);
/* Give up (leaving the clock off) on a 2 s timeout or pending signal. */
1114 if (!wait_event_interruptible_timeout(pxp->done, PXP_WAITCON, 2 * HZ) ||
1115 signal_pending(current)) {
1116 pxp_clk_disable(pxp);
1120 spin_lock_irqsave(&pxp->lock, flags);
1121 pxp->pxp_ongoing = 1;
1122 spin_unlock_irqrestore(&pxp->lock, flags);
1123 pxpdma_dostart_work(pxp);
/*
 * __pxp_terminate_all() - abort all pending work on a channel: splice
 * both the submit queue and the active list back onto the free list
 * and reset the channel status. Caller holds chan_mutex; the channel
 * spinlock guards against the ISR touching the same lists.
 */
1126 static void __pxp_terminate_all(struct dma_chan *chan)
1128 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1129 unsigned long flags;
1131 /* pchan->queue is modified in ISR, have to spinlock */
1132 spin_lock_irqsave(&pxp_chan->lock, flags);
1133 list_splice_init(&pxp_chan->queue, &pxp_chan->free_list);
1134 list_splice_init(&pxp_chan->active_list, &pxp_chan->free_list);
1136 spin_unlock_irqrestore(&pxp_chan->lock, flags);
1138 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
/*
 * pxp_control() - dmaengine .device_control hook. Only
 * DMA_TERMINATE_ALL is supported; any other command is rejected
 * (error return on an elided line). Serializes the terminate against
 * submit/alloc paths with chan_mutex.
 */
1141 static int pxp_control(struct dma_chan *chan, enum dma_ctrl_cmd cmd,
1144 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1146 /* Only supports DMA_TERMINATE_ALL */
1147 if (cmd != DMA_TERMINATE_ALL)
1150 mutex_lock(&pxp_chan->chan_mutex);
1151 __pxp_terminate_all(chan);
1152 mutex_unlock(&pxp_chan->chan_mutex);
/*
 * pxp_alloc_chan_resources() - dmaengine .device_alloc_chan_resources
 * hook: initializes the virtual channel (descriptor pool) and marks it
 * INITIALIZED. 'completed' starts at -ENXIO meaning "no transaction
 * has completed yet" for pxp_tx_status().
 */
1157 static int pxp_alloc_chan_resources(struct dma_chan *chan)
1159 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1160 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1163 /* dmaengine.c now guarantees to only offer free channels */
1164 BUG_ON(chan->client_count > 1);
1165 WARN_ON(pxp_chan->status != PXP_CHANNEL_FREE);
1168 pxp_chan->completed = -ENXIO;
1170 pr_debug("%s dma_chan.chan_id %d\n", __func__, chan->chan_id);
1171 ret = pxp_init_channel(pxp_dma, pxp_chan);
1175 pxp_chan->status = PXP_CHANNEL_INITIALIZED;
1177 dev_dbg(&chan->dev->device, "Found channel 0x%x, irq %d\n",
1178 chan->chan_id, pxp_chan->eof_irq);
/*
 * pxp_free_chan_resources() - dmaengine .device_free_chan_resources.
 *
 * Aborts any outstanding work, marks the channel FREE, and releases the
 * channel's resources -- all under chan_mutex so it cannot race with
 * pxp_control()'s terminate path.
 */
1186 static void pxp_free_chan_resources(struct dma_chan *chan)
1188 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1189 struct pxp_dma *pxp_dma = to_pxp_dma(chan->device);
1191 mutex_lock(&pxp_chan->chan_mutex);
1193 __pxp_terminate_all(chan);
1195 pxp_chan->status = PXP_CHANNEL_FREE;
1197 pxp_uninit_channel(pxp_dma, pxp_chan);
1199 mutex_unlock(&pxp_chan->chan_mutex);
/*
 * pxp_tx_status() - dmaengine .device_tx_status callback.
 *
 * For a cookie other than the most recently issued one this presumably
 * returns early (the branch body is missing from this extract);
 * otherwise it reports the channel bookkeeping: last completed cookie,
 * last used cookie, and zero residue (transfers are not partial).
 *
 * NOTE(review): txstate is dereferenced unconditionally here, but
 * dmaengine callers may pass a NULL txstate -- confirm against the full
 * file / target kernel version.
 */
1202 static enum dma_status pxp_tx_status(struct dma_chan *chan,
1203 dma_cookie_t cookie,
1204 struct dma_tx_state *txstate)
1206 struct pxp_channel *pxp_chan = to_pxp_channel(chan);
1208 if (cookie != chan->cookie)
1212 txstate->last = pxp_chan->completed;
1213 txstate->used = chan->cookie;
1214 txstate->residue = 0;
/*
 * pxp_hw_init() - one-time hardware and default-state initialization.
 *
 * Pulls the PxP block out of reset, resets the cached configuration in
 * pxp->pxp_conf_state to benign defaults (no scaling/flip/rotate,
 * RGB565 output, overlays and color-keying disabled), writes that
 * default configuration to the hardware, and programs the HIST2/4/8/16
 * comparator parameter registers (used with the GRAY16 panel mode set in
 * HW_PXP_HIST_CTRL -- presumably for grayscale panel histograms; verify
 * against the PxP reference manual).
 *
 * NOTE(review): the local declarations (i, reg_val) and the return
 * statement are missing from this extract.
 */
1219 static int pxp_hw_init(struct pxps *pxp)
1221 struct pxp_config_data *pxp_conf = &pxp->pxp_conf_state;
1222 struct pxp_proc_data *proc_data = &pxp_conf->proc_data;
1226 /* Pull PxP out of reset */
1227 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1229 /* Config defaults */
1231 /* Initialize non-channel-specific PxP parameters */
1232 proc_data->drect.left = proc_data->srect.left = 0;
1233 proc_data->drect.top = proc_data->srect.top = 0;
1234 proc_data->drect.width = proc_data->srect.width = 0;
1235 proc_data->drect.height = proc_data->srect.height = 0;
1236 proc_data->scaling = 0;
1237 proc_data->hflip = 0;
1238 proc_data->vflip = 0;
1239 proc_data->rotate = 0;
1240 proc_data->bgcolor = 0;
1242 /* Initialize S0 channel parameters */
1243 pxp_conf->s0_param.pixel_fmt = pxp_s0_formats[0];
1244 pxp_conf->s0_param.width = 0;
1245 pxp_conf->s0_param.height = 0;
1246 pxp_conf->s0_param.color_key = -1;
1247 pxp_conf->s0_param.color_key_enable = false;
1249 /* Initialize OL channel parameters */
1250 for (i = 0; i < 8; i++) {
1251 pxp_conf->ol_param[i].combine_enable = false;
1252 pxp_conf->ol_param[i].width = 0;
1253 pxp_conf->ol_param[i].height = 0;
1254 pxp_conf->ol_param[i].pixel_fmt = PXP_PIX_FMT_RGB565;
1255 pxp_conf->ol_param[i].color_key_enable = false;
1256 pxp_conf->ol_param[i].color_key = -1;
1257 pxp_conf->ol_param[i].global_alpha_enable = false;
1258 pxp_conf->ol_param[i].global_alpha = 0;
1259 pxp_conf->ol_param[i].local_alpha_enable = false;
1262 /* Initialize Output channel parameters */
1263 pxp_conf->out_param.width = 0;
1264 pxp_conf->out_param.height = 0;
1265 pxp_conf->out_param.pixel_fmt = PXP_PIX_FMT_RGB565;
1267 proc_data->overlay_state = 0;
1269 /* Write default h/w config */
1271 pxp_set_s0param(pxp);
1272 pxp_set_s0crop(pxp);
/* Program all 8 overlay channels with the defaults set above. */
1273 for (i = 0; i < 8; i++) {
1274 pxp_set_oln(i, pxp);
1275 pxp_set_olparam(i, pxp);
1276 pxp_set_olcolorkey(i, pxp);
1278 pxp_set_s0colorkey(pxp);
1283 /* One-time histogram configuration */
1285 BF_PXP_HIST_CTRL_PANEL_MODE(BV_PXP_HIST_CTRL_PANEL_MODE__GRAY16);
1286 __raw_writel(reg_val, pxp->base + HW_PXP_HIST_CTRL);
/* 2-value histogram comparator thresholds. */
1288 reg_val = BF_PXP_HIST2_PARAM_VALUE0(0x00) |
1289 BF_PXP_HIST2_PARAM_VALUE1(0x00F);
1290 __raw_writel(reg_val, pxp->base + HW_PXP_HIST2_PARAM);
/* 4-value histogram comparator thresholds. */
1292 reg_val = BF_PXP_HIST4_PARAM_VALUE0(0x00) |
1293 BF_PXP_HIST4_PARAM_VALUE1(0x05) |
1294 BF_PXP_HIST4_PARAM_VALUE2(0x0A) | BF_PXP_HIST4_PARAM_VALUE3(0x0F);
1295 __raw_writel(reg_val, pxp->base + HW_PXP_HIST4_PARAM);
/* 8-value histogram comparator thresholds (two registers). */
1297 reg_val = BF_PXP_HIST8_PARAM0_VALUE0(0x00) |
1298 BF_PXP_HIST8_PARAM0_VALUE1(0x02) |
1299 BF_PXP_HIST8_PARAM0_VALUE2(0x04) | BF_PXP_HIST8_PARAM0_VALUE3(0x06);
1300 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM0);
1301 reg_val = BF_PXP_HIST8_PARAM1_VALUE4(0x09) |
1302 BF_PXP_HIST8_PARAM1_VALUE5(0x0B) |
1303 BF_PXP_HIST8_PARAM1_VALUE6(0x0D) | BF_PXP_HIST8_PARAM1_VALUE7(0x0F);
1304 __raw_writel(reg_val, pxp->base + HW_PXP_HIST8_PARAM1);
/* 16-value histogram comparator thresholds (four registers, 0x0..0xF). */
1306 reg_val = BF_PXP_HIST16_PARAM0_VALUE0(0x00) |
1307 BF_PXP_HIST16_PARAM0_VALUE1(0x01) |
1308 BF_PXP_HIST16_PARAM0_VALUE2(0x02) |
1309 BF_PXP_HIST16_PARAM0_VALUE3(0x03);
1310 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM0);
1311 reg_val = BF_PXP_HIST16_PARAM1_VALUE4(0x04) |
1312 BF_PXP_HIST16_PARAM1_VALUE5(0x05) |
1313 BF_PXP_HIST16_PARAM1_VALUE6(0x06) |
1314 BF_PXP_HIST16_PARAM1_VALUE7(0x07);
1315 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM1);
1316 reg_val = BF_PXP_HIST16_PARAM2_VALUE8(0x08) |
1317 BF_PXP_HIST16_PARAM2_VALUE9(0x09) |
1318 BF_PXP_HIST16_PARAM2_VALUE10(0x0A) |
1319 BF_PXP_HIST16_PARAM2_VALUE11(0x0B);
1320 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM2);
1321 reg_val = BF_PXP_HIST16_PARAM3_VALUE12(0x0C) |
1322 BF_PXP_HIST16_PARAM3_VALUE13(0x0D) |
1323 BF_PXP_HIST16_PARAM3_VALUE14(0x0E) |
1324 BF_PXP_HIST16_PARAM3_VALUE15(0x0F);
1325 __raw_writel(reg_val, pxp->base + HW_PXP_HIST16_PARAM3);
/*
 * pxp_dma_init() - register the PxP with the dmaengine core.
 *
 * Advertises DMA_SLAVE | DMA_PRIVATE capabilities, wires up the device
 * callbacks, then creates NR_PXP_VIRT_CHANNEL virtual channels that all
 * share the single PxP EOF interrupt. Returns the result of
 * dma_async_device_register().
 */
1330 static int pxp_dma_init(struct pxps *pxp)
1332 struct pxp_dma *pxp_dma = &pxp->pxp_dma;
1333 struct dma_device *dma = &pxp_dma->dma;
1336 dma_cap_set(DMA_SLAVE, dma->cap_mask);
1337 dma_cap_set(DMA_PRIVATE, dma->cap_mask);
1339 /* Compulsory common fields */
1340 dma->dev = pxp->dev;
1341 dma->device_alloc_chan_resources = pxp_alloc_chan_resources;
1342 dma->device_free_chan_resources = pxp_free_chan_resources;
1343 dma->device_tx_status = pxp_tx_status;
1344 dma->device_issue_pending = pxp_issue_pending;
1346 /* Compulsory for DMA_SLAVE fields */
1347 dma->device_prep_slave_sg = pxp_prep_slave_sg;
1348 dma->device_control = pxp_control;
1350 /* Initialize PxP Channels */
1351 INIT_LIST_HEAD(&dma->channels);
1352 for (i = 0; i < NR_PXP_VIRT_CHANNEL; i++) {
1353 struct pxp_channel *pxp_chan = pxp->channel + i;
1354 struct dma_chan *dma_chan = &pxp_chan->dma_chan;
1356 spin_lock_init(&pxp_chan->lock);
1357 mutex_init(&pxp_chan->chan_mutex);
1359 /* Only one EOF IRQ for PxP, shared by all channels */
1360 pxp_chan->eof_irq = pxp->irq;
1361 pxp_chan->status = PXP_CHANNEL_FREE;
/* -ENXIO sentinel: no cookie has completed yet (see pxp_tx_status). */
1362 pxp_chan->completed = -ENXIO;
/* NOTE(review): the format string / args of this snprintf are missing from this extract. */
1363 snprintf(pxp_chan->eof_name, sizeof(pxp_chan->eof_name),
1366 dma_chan->device = &pxp_dma->dma;
/* Cookies start at 1; 0 and negatives are reserved for errors. */
1367 dma_chan->cookie = 1;
1368 dma_chan->chan_id = i;
1369 list_add_tail(&dma_chan->device_node, &dma->channels);
1372 return dma_async_device_register(&pxp_dma->dma);
/* sysfs read handler: report the clock-gating timeout (ms) as decimal. */
1375 static ssize_t clk_off_timeout_show(struct device *dev,
1376 struct device_attribute *attr, char *buf)
1378 return sprintf(buf, "%d\n", timeout_in_ms);
/*
 * sysfs write handler: parse a decimal value into timeout_in_ms.
 * Input that sscanf cannot parse is silently ignored.
 *
 * NOTE(review): the declaration of 'val' and the return statement
 * (presumably 'return count;') are missing from this extract.
 */
1381 static ssize_t clk_off_timeout_store(struct device *dev,
1382 struct device_attribute *attr,
1383 const char *buf, size_t count)
1386 if (sscanf(buf, "%d", &val) > 0) {
1387 timeout_in_ms = val;
/*
 * Declares the 'clk_off_timeout' sysfs attribute (mode 0644), created in
 * pxp_probe() and removed in pxp_remove().
 */
1393 static DEVICE_ATTR(clk_off_timeout, 0644, clk_off_timeout_show,
1394 clk_off_timeout_store);
/*
 * pxp_probe() - platform driver probe.
 *
 * Acquires the MMIO and IRQ resources, allocates the driver state,
 * claims and maps the register region, enables the AXI clock for the
 * one-time hardware init, installs the IRQ handler, registers the
 * dmaengine device, creates the clk_off_timeout sysfs file, and sets up
 * the clock-gating work item and timer.
 *
 * NOTE(review): many physical lines are missing from this extract
 * (local declarations, error checks/gotos between steps, returns, and
 * the error-unwind labels near the end). In the visible lines the
 * result of clk_get() is never checked before clk_enable() -- verify
 * whether the full file handles a failed clk_get().
 */
1396 static int pxp_probe(struct platform_device *pdev)
1399 struct resource *res;
1403 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1404 irq = platform_get_irq(pdev, 0);
1405 if (!res || irq < 0) {
1410 pxp = kzalloc(sizeof(*pxp), GFP_KERNEL);
1412 dev_err(&pdev->dev, "failed to allocate control object\n");
1417 pxp->dev = &pdev->dev;
1419 platform_set_drvdata(pdev, pxp);
1422 pxp->pxp_ongoing = 0;
1425 spin_lock_init(&pxp->lock);
1426 mutex_init(&pxp->clk_mutex);
1428 if (!request_mem_region(res->start, resource_size(res), "pxp-mem")) {
/* NOTE(review): only SZ_4K is mapped regardless of resource_size(res). */
1433 pxp->base = ioremap(res->start, SZ_4K);
1436 pxp->clk = clk_get(NULL, "pxp_axi");
1437 clk_enable(pxp->clk);
1439 err = pxp_hw_init(pxp);
1441 dev_err(&pdev->dev, "failed to initialize hardware\n");
/* Clock is gated back off; it is re-enabled on demand (pxp_clk_enable). */
1444 clk_disable(pxp->clk);
1446 err = request_irq(pxp->irq, pxp_irq, 0, "pxp-irq", pxp);
1449 /* Initialize DMA engine */
1450 err = pxp_dma_init(pxp);
1454 if (device_create_file(&pdev->dev, &dev_attr_clk_off_timeout)) {
1456 "Unable to create file from clk_off_timeout\n");
/* Deferred clock-off machinery: work item + timer armed after activity. */
1460 INIT_WORK(&pxp->work, clkoff_callback);
1461 init_waitqueue_head(&pxp->done);
1462 init_timer(&pxp->clk_timer);
1463 pxp->clk_timer.function = pxp_clkoff_timer;
1464 pxp->clk_timer.data = (unsigned long)pxp;
/* Error-unwind path (labels dropped from this extract). */
1468 free_irq(pxp->irq, pxp);
1470 release_mem_region(res->start, resource_size(res));
1473 dev_err(&pdev->dev, "Exiting (unsuccessfully) pxp_probe function\n");
/*
 * pxp_remove() - platform driver remove; tears down what probe set up.
 *
 * Cancels the deferred clock-off work and timer, releases the IRQ,
 * gates the clock, and removes the sysfs attribute.
 *
 * NOTE(review): lines are missing from this extract (presumably
 * dmaengine unregistration, iounmap/release_mem_region, kfree and the
 * return) -- verify the full teardown mirrors pxp_probe().
 */
1477 static int __devexit pxp_remove(struct platform_device *pdev)
1479 struct pxps *pxp = platform_get_drvdata(pdev);
1481 cancel_work_sync(&pxp->work);
1482 del_timer_sync(&pxp->clk_timer);
1483 free_irq(pxp->irq, pxp);
1484 clk_disable(pxp->clk);
1487 device_remove_file(&pdev->dev, &dev_attr_clk_off_timeout);
/*
 * pxp_suspend() - legacy platform PM suspend hook.
 *
 * Spins until the engine's ENABLE bit clears (no timeout in the visible
 * lines -- the loop body at original line 1501-1502 is missing from this
 * extract), then asserts soft reset (SFTRST) and gates the clock.
 */
1495 static int pxp_suspend(struct platform_device *pdev, pm_message_t state)
1497 struct pxps *pxp = platform_get_drvdata(pdev);
1499 pxp_clk_enable(pxp);
1500 while (__raw_readl(pxp->base + HW_PXP_CTRL) & BM_PXP_CTRL_ENABLE)
1503 __raw_writel(BM_PXP_CTRL_SFTRST, pxp->base + HW_PXP_CTRL);
1504 pxp_clk_disable(pxp);
/*
 * pxp_resume() - legacy platform PM resume hook.
 *
 * Releases the soft reset asserted by pxp_suspend() by clearing
 * HW_PXP_CTRL, then gates the clock back off until next use.
 */
1509 static int pxp_resume(struct platform_device *pdev)
1511 struct pxps *pxp = platform_get_drvdata(pdev);
1513 pxp_clk_enable(pxp);
1514 /* Pull PxP out of reset */
1515 __raw_writel(0, pxp->base + HW_PXP_CTRL);
1516 pxp_clk_disable(pxp);
/*
 * PM callbacks stubbed out -- presumably the #else branch of an
 * #ifdef CONFIG_PM that is not visible in this extract.
 */
1521 #define pxp_suspend NULL
1522 #define pxp_resume NULL
/*
 * Platform driver definition (the .driver/.name initializer lines are
 * missing from this extract).
 *
 * NOTE(review): pxp_remove is annotated __devexit (hotplug-remove
 * section) but is wrapped here with __exit_p rather than __devexit_p --
 * with hotplug enabled and modules disabled this can leave .remove NULL
 * while the function still exists; verify the intended annotation pair.
 */
1525 static struct platform_driver pxp_driver = {
1530 .remove = __exit_p(pxp_remove),
1531 .suspend = pxp_suspend,
1532 .resume = pxp_resume,
/*
 * pxp_init() - module entry: register the platform driver.
 * Registered via subsys_initcall so the PxP is available before
 * ordinary device_initcall-level drivers that may depend on it.
 */
1535 static int __init pxp_init(void)
1537 return platform_driver_register(&pxp_driver);
1540 subsys_initcall(pxp_init);
/* pxp_exit() - module exit: unregister the platform driver. */
1542 static void __exit pxp_exit(void)
1544 platform_driver_unregister(&pxp_driver);
1547 module_exit(pxp_exit);
1549 MODULE_DESCRIPTION("i.MX PxP driver");
1550 MODULE_AUTHOR("Freescale Semiconductor, Inc.");
1551 MODULE_LICENSE("GPL");