// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * V4L2 deinterlacing support.
 *
 * Copyright (c) 2012 Vista Silicon S.L.
 * Javier Martin <javier.martin@vista-silicon.com>
 */
9 #include <linux/module.h>
10 #include <linux/slab.h>
11 #include <linux/interrupt.h>
12 #include <linux/dmaengine.h>
13 #include <linux/platform_device.h>
15 #include <media/v4l2-mem2mem.h>
16 #include <media/v4l2-device.h>
17 #include <media/v4l2-ioctl.h>
18 #include <media/videobuf2-dma-contig.h>
20 #define MEM2MEM_TEST_MODULE_NAME "mem2mem-deinterlace"
22 MODULE_DESCRIPTION("mem2mem device which supports deinterlacing using dmaengine");
23 MODULE_AUTHOR("Javier Martin <javier.martin@vista-silicon.com");
24 MODULE_LICENSE("GPL");
25 MODULE_VERSION("0.0.1");
28 module_param(debug, bool, 0644);
30 /* Flags that indicate a format can be used for capture/output */
31 #define MEM2MEM_CAPTURE (1 << 0)
32 #define MEM2MEM_OUTPUT (1 << 1)
34 #define MEM2MEM_NAME "m2m-deinterlace"
36 #define dprintk(dev, fmt, arg...) \
37 v4l2_dbg(1, debug, &dev->v4l2_dev, "%s: " fmt, __func__, ## arg)
39 struct deinterlace_fmt {
42 /* Types the format can be used for */
46 static struct deinterlace_fmt formats[] = {
48 .name = "YUV 4:2:0 Planar",
49 .fourcc = V4L2_PIX_FMT_YUV420,
50 .types = MEM2MEM_CAPTURE | MEM2MEM_OUTPUT,
54 .fourcc = V4L2_PIX_FMT_YUYV,
55 .types = MEM2MEM_CAPTURE | MEM2MEM_OUTPUT,
59 #define NUM_FORMATS ARRAY_SIZE(formats)
61 /* Per-queue, driver-specific private data */
62 struct deinterlace_q_data {
65 unsigned int sizeimage;
66 struct deinterlace_fmt *fmt;
67 enum v4l2_field field;
82 YUV420_DMA_Y_ODD_DOUBLING,
83 YUV420_DMA_U_ODD_DOUBLING,
84 YUV420_DMA_V_ODD_DOUBLING,
87 YUYV_DMA_EVEN_DOUBLING,
90 /* Source and destination queue data */
91 static struct deinterlace_q_data q_data[2];
93 static struct deinterlace_q_data *get_q_data(enum v4l2_buf_type type)
96 case V4L2_BUF_TYPE_VIDEO_OUTPUT:
97 return &q_data[V4L2_M2M_SRC];
98 case V4L2_BUF_TYPE_VIDEO_CAPTURE:
99 return &q_data[V4L2_M2M_DST];
106 static struct deinterlace_fmt *find_format(struct v4l2_format *f)
108 struct deinterlace_fmt *fmt;
111 for (k = 0; k < NUM_FORMATS; k++) {
113 if ((fmt->types & f->type) &&
114 (fmt->fourcc == f->fmt.pix.pixelformat))
118 if (k == NUM_FORMATS)
124 struct deinterlace_dev {
125 struct v4l2_device v4l2_dev;
126 struct video_device vfd;
129 struct mutex dev_mutex;
132 struct dma_chan *dma_chan;
134 struct v4l2_m2m_dev *m2m_dev;
137 struct deinterlace_ctx {
138 struct deinterlace_dev *dev;
140 /* Abort requested by m2m */
142 enum v4l2_colorspace colorspace;
144 struct v4l2_m2m_ctx *m2m_ctx;
145 struct dma_interleaved_template *xt;
151 static int deinterlace_job_ready(void *priv)
153 struct deinterlace_ctx *ctx = priv;
154 struct deinterlace_dev *pcdev = ctx->dev;
156 if ((v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0)
157 && (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0)
158 && (atomic_read(&ctx->dev->busy) == 0)) {
159 dprintk(pcdev, "Task ready\n");
163 dprintk(pcdev, "Task not ready to run\n");
168 static void deinterlace_job_abort(void *priv)
170 struct deinterlace_ctx *ctx = priv;
171 struct deinterlace_dev *pcdev = ctx->dev;
175 dprintk(pcdev, "Aborting task\n");
177 v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->m2m_ctx);
180 static void dma_callback(void *data)
182 struct deinterlace_ctx *curr_ctx = data;
183 struct deinterlace_dev *pcdev = curr_ctx->dev;
184 struct vb2_v4l2_buffer *src_vb, *dst_vb;
186 atomic_set(&pcdev->busy, 0);
188 src_vb = v4l2_m2m_src_buf_remove(curr_ctx->m2m_ctx);
189 dst_vb = v4l2_m2m_dst_buf_remove(curr_ctx->m2m_ctx);
191 dst_vb->vb2_buf.timestamp = src_vb->vb2_buf.timestamp;
192 dst_vb->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
194 src_vb->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
195 dst_vb->timecode = src_vb->timecode;
197 v4l2_m2m_buf_done(src_vb, VB2_BUF_STATE_DONE);
198 v4l2_m2m_buf_done(dst_vb, VB2_BUF_STATE_DONE);
200 v4l2_m2m_job_finish(pcdev->m2m_dev, curr_ctx->m2m_ctx);
202 dprintk(pcdev, "dma transfers completed.\n");
205 static void deinterlace_issue_dma(struct deinterlace_ctx *ctx, int op,
208 struct deinterlace_q_data *s_q_data;
209 struct vb2_v4l2_buffer *src_buf, *dst_buf;
210 struct deinterlace_dev *pcdev = ctx->dev;
211 struct dma_chan *chan = pcdev->dma_chan;
212 struct dma_device *dmadev = chan->device;
213 struct dma_async_tx_descriptor *tx;
214 unsigned int s_width, s_height;
216 dma_addr_t p_in, p_out;
217 enum dma_ctrl_flags flags;
219 src_buf = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
220 dst_buf = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
222 s_q_data = get_q_data(V4L2_BUF_TYPE_VIDEO_OUTPUT);
223 s_width = s_q_data->width;
224 s_height = s_q_data->height;
225 s_size = s_width * s_height;
227 p_in = (dma_addr_t)vb2_dma_contig_plane_dma_addr(&src_buf->vb2_buf, 0);
228 p_out = (dma_addr_t)vb2_dma_contig_plane_dma_addr(&dst_buf->vb2_buf,
230 if (!p_in || !p_out) {
231 v4l2_err(&pcdev->v4l2_dev,
232 "Acquiring kernel pointers to buffers failed\n");
237 case YUV420_DMA_Y_ODD:
238 ctx->xt->numf = s_height / 2;
239 ctx->xt->sgl[0].size = s_width;
240 ctx->xt->sgl[0].icg = s_width;
241 ctx->xt->src_start = p_in;
242 ctx->xt->dst_start = p_out;
244 case YUV420_DMA_Y_EVEN:
245 ctx->xt->numf = s_height / 2;
246 ctx->xt->sgl[0].size = s_width;
247 ctx->xt->sgl[0].icg = s_width;
248 ctx->xt->src_start = p_in + s_size / 2;
249 ctx->xt->dst_start = p_out + s_width;
251 case YUV420_DMA_U_ODD:
252 ctx->xt->numf = s_height / 4;
253 ctx->xt->sgl[0].size = s_width / 2;
254 ctx->xt->sgl[0].icg = s_width / 2;
255 ctx->xt->src_start = p_in + s_size;
256 ctx->xt->dst_start = p_out + s_size;
258 case YUV420_DMA_U_EVEN:
259 ctx->xt->numf = s_height / 4;
260 ctx->xt->sgl[0].size = s_width / 2;
261 ctx->xt->sgl[0].icg = s_width / 2;
262 ctx->xt->src_start = p_in + (9 * s_size) / 8;
263 ctx->xt->dst_start = p_out + s_size + s_width / 2;
265 case YUV420_DMA_V_ODD:
266 ctx->xt->numf = s_height / 4;
267 ctx->xt->sgl[0].size = s_width / 2;
268 ctx->xt->sgl[0].icg = s_width / 2;
269 ctx->xt->src_start = p_in + (5 * s_size) / 4;
270 ctx->xt->dst_start = p_out + (5 * s_size) / 4;
272 case YUV420_DMA_V_EVEN:
273 ctx->xt->numf = s_height / 4;
274 ctx->xt->sgl[0].size = s_width / 2;
275 ctx->xt->sgl[0].icg = s_width / 2;
276 ctx->xt->src_start = p_in + (11 * s_size) / 8;
277 ctx->xt->dst_start = p_out + (5 * s_size) / 4 + s_width / 2;
279 case YUV420_DMA_Y_ODD_DOUBLING:
280 ctx->xt->numf = s_height / 2;
281 ctx->xt->sgl[0].size = s_width;
282 ctx->xt->sgl[0].icg = s_width;
283 ctx->xt->src_start = p_in;
284 ctx->xt->dst_start = p_out + s_width;
286 case YUV420_DMA_U_ODD_DOUBLING:
287 ctx->xt->numf = s_height / 4;
288 ctx->xt->sgl[0].size = s_width / 2;
289 ctx->xt->sgl[0].icg = s_width / 2;
290 ctx->xt->src_start = p_in + s_size;
291 ctx->xt->dst_start = p_out + s_size + s_width / 2;
293 case YUV420_DMA_V_ODD_DOUBLING:
294 ctx->xt->numf = s_height / 4;
295 ctx->xt->sgl[0].size = s_width / 2;
296 ctx->xt->sgl[0].icg = s_width / 2;
297 ctx->xt->src_start = p_in + (5 * s_size) / 4;
298 ctx->xt->dst_start = p_out + (5 * s_size) / 4 + s_width / 2;
301 ctx->xt->numf = s_height / 2;
302 ctx->xt->sgl[0].size = s_width * 2;
303 ctx->xt->sgl[0].icg = s_width * 2;
304 ctx->xt->src_start = p_in;
305 ctx->xt->dst_start = p_out;
308 ctx->xt->numf = s_height / 2;
309 ctx->xt->sgl[0].size = s_width * 2;
310 ctx->xt->sgl[0].icg = s_width * 2;
311 ctx->xt->src_start = p_in + s_size;
312 ctx->xt->dst_start = p_out + s_width * 2;
314 case YUYV_DMA_EVEN_DOUBLING:
316 ctx->xt->numf = s_height / 2;
317 ctx->xt->sgl[0].size = s_width * 2;
318 ctx->xt->sgl[0].icg = s_width * 2;
319 ctx->xt->src_start = p_in;
320 ctx->xt->dst_start = p_out + s_width * 2;
324 /* Common parameters for al transfers */
325 ctx->xt->frame_size = 1;
326 ctx->xt->dir = DMA_MEM_TO_MEM;
327 ctx->xt->src_sgl = false;
328 ctx->xt->dst_sgl = true;
329 flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;
331 tx = dmadev->device_prep_interleaved_dma(chan, ctx->xt, flags);
333 v4l2_warn(&pcdev->v4l2_dev, "DMA interleaved prep error\n");
338 tx->callback = dma_callback;
339 tx->callback_param = ctx;
342 ctx->cookie = dmaengine_submit(tx);
343 if (dma_submit_error(ctx->cookie)) {
344 v4l2_warn(&pcdev->v4l2_dev,
345 "DMA submit error %d with src=0x%x dst=0x%x len=0x%x\n",
346 ctx->cookie, (unsigned)p_in, (unsigned)p_out,
351 dma_async_issue_pending(chan);
354 static void deinterlace_device_run(void *priv)
356 struct deinterlace_ctx *ctx = priv;
357 struct deinterlace_q_data *dst_q_data;
359 atomic_set(&ctx->dev->busy, 1);
361 dprintk(ctx->dev, "%s: DMA try issue.\n", __func__);
363 dst_q_data = get_q_data(V4L2_BUF_TYPE_VIDEO_CAPTURE);
366 * 4 possible field conversions are possible at the moment:
367 * V4L2_FIELD_SEQ_TB --> V4L2_FIELD_INTERLACED_TB:
368 * two separate fields in the same input buffer are interlaced
369 * in the output buffer using weaving. Top field comes first.
370 * V4L2_FIELD_SEQ_TB --> V4L2_FIELD_NONE:
371 * top field from the input buffer is copied to the output buffer
372 * using line doubling. Bottom field from the input buffer is discarded.
373 * V4L2_FIELD_SEQ_BT --> V4L2_FIELD_INTERLACED_BT:
374 * two separate fields in the same input buffer are interlaced
375 * in the output buffer using weaving. Bottom field comes first.
376 * V4L2_FIELD_SEQ_BT --> V4L2_FIELD_NONE:
377 * bottom field from the input buffer is copied to the output buffer
378 * using line doubling. Top field from the input buffer is discarded.
380 switch (dst_q_data->fmt->fourcc) {
381 case V4L2_PIX_FMT_YUV420:
382 switch (dst_q_data->field) {
383 case V4L2_FIELD_INTERLACED_TB:
384 case V4L2_FIELD_INTERLACED_BT:
385 dprintk(ctx->dev, "%s: yuv420 interlaced tb.\n",
387 deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD, 0);
388 deinterlace_issue_dma(ctx, YUV420_DMA_Y_EVEN, 0);
389 deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD, 0);
390 deinterlace_issue_dma(ctx, YUV420_DMA_U_EVEN, 0);
391 deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD, 0);
392 deinterlace_issue_dma(ctx, YUV420_DMA_V_EVEN, 1);
394 case V4L2_FIELD_NONE:
396 dprintk(ctx->dev, "%s: yuv420 interlaced line doubling.\n",
398 deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD, 0);
399 deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD_DOUBLING, 0);
400 deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD, 0);
401 deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD_DOUBLING, 0);
402 deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD, 0);
403 deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD_DOUBLING, 1);
407 case V4L2_PIX_FMT_YUYV:
409 switch (dst_q_data->field) {
410 case V4L2_FIELD_INTERLACED_TB:
411 case V4L2_FIELD_INTERLACED_BT:
412 dprintk(ctx->dev, "%s: yuyv interlaced_tb.\n",
414 deinterlace_issue_dma(ctx, YUYV_DMA_ODD, 0);
415 deinterlace_issue_dma(ctx, YUYV_DMA_EVEN, 1);
417 case V4L2_FIELD_NONE:
419 dprintk(ctx->dev, "%s: yuyv interlaced line doubling.\n",
421 deinterlace_issue_dma(ctx, YUYV_DMA_ODD, 0);
422 deinterlace_issue_dma(ctx, YUYV_DMA_EVEN_DOUBLING, 1);
428 dprintk(ctx->dev, "%s: DMA issue done.\n", __func__);
434 static int vidioc_querycap(struct file *file, void *priv,
435 struct v4l2_capability *cap)
437 strscpy(cap->driver, MEM2MEM_NAME, sizeof(cap->driver));
438 strscpy(cap->card, MEM2MEM_NAME, sizeof(cap->card));
439 strscpy(cap->bus_info, MEM2MEM_NAME, sizeof(cap->card));
441 * This is only a mem-to-mem video device. The capture and output
442 * device capability flags are left only for backward compatibility
443 * and are scheduled for removal.
445 cap->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT |
446 V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;
447 cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
452 static int enum_fmt(struct v4l2_fmtdesc *f, u32 type)
455 struct deinterlace_fmt *fmt;
459 for (i = 0; i < NUM_FORMATS; ++i) {
460 if (formats[i].types & type) {
461 /* index-th format of type type found ? */
464 /* Correct type but haven't reached our index yet,
465 * just increment per-type index */
470 if (i < NUM_FORMATS) {
473 strscpy(f->description, fmt->name, sizeof(f->description));
474 f->pixelformat = fmt->fourcc;
478 /* Format not found */
482 static int vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
483 struct v4l2_fmtdesc *f)
485 return enum_fmt(f, MEM2MEM_CAPTURE);
488 static int vidioc_enum_fmt_vid_out(struct file *file, void *priv,
489 struct v4l2_fmtdesc *f)
491 return enum_fmt(f, MEM2MEM_OUTPUT);
494 static int vidioc_g_fmt(struct deinterlace_ctx *ctx, struct v4l2_format *f)
496 struct vb2_queue *vq;
497 struct deinterlace_q_data *q_data;
499 vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
503 q_data = get_q_data(f->type);
505 f->fmt.pix.width = q_data->width;
506 f->fmt.pix.height = q_data->height;
507 f->fmt.pix.field = q_data->field;
508 f->fmt.pix.pixelformat = q_data->fmt->fourcc;
510 switch (q_data->fmt->fourcc) {
511 case V4L2_PIX_FMT_YUV420:
512 f->fmt.pix.bytesperline = q_data->width * 3 / 2;
514 case V4L2_PIX_FMT_YUYV:
516 f->fmt.pix.bytesperline = q_data->width * 2;
519 f->fmt.pix.sizeimage = q_data->sizeimage;
520 f->fmt.pix.colorspace = ctx->colorspace;
/* VIDIOC_G_FMT (output): priv is the deinterlace_ctx. */
static int vidioc_g_fmt_vid_out(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return vidioc_g_fmt(priv, f);
}
/* VIDIOC_G_FMT (capture): priv is the deinterlace_ctx. */
static int vidioc_g_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	return vidioc_g_fmt(priv, f);
}
537 static int vidioc_try_fmt(struct v4l2_format *f, struct deinterlace_fmt *fmt)
539 switch (f->fmt.pix.pixelformat) {
540 case V4L2_PIX_FMT_YUV420:
541 f->fmt.pix.bytesperline = f->fmt.pix.width * 3 / 2;
543 case V4L2_PIX_FMT_YUYV:
545 f->fmt.pix.bytesperline = f->fmt.pix.width * 2;
547 f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
552 static int vidioc_try_fmt_vid_cap(struct file *file, void *priv,
553 struct v4l2_format *f)
555 struct deinterlace_fmt *fmt;
556 struct deinterlace_ctx *ctx = priv;
558 fmt = find_format(f);
559 if (!fmt || !(fmt->types & MEM2MEM_CAPTURE))
560 f->fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
562 f->fmt.pix.colorspace = ctx->colorspace;
564 if (f->fmt.pix.field != V4L2_FIELD_INTERLACED_TB &&
565 f->fmt.pix.field != V4L2_FIELD_INTERLACED_BT &&
566 f->fmt.pix.field != V4L2_FIELD_NONE)
567 f->fmt.pix.field = V4L2_FIELD_INTERLACED_TB;
569 return vidioc_try_fmt(f, fmt);
572 static int vidioc_try_fmt_vid_out(struct file *file, void *priv,
573 struct v4l2_format *f)
575 struct deinterlace_fmt *fmt;
577 fmt = find_format(f);
578 if (!fmt || !(fmt->types & MEM2MEM_OUTPUT))
579 f->fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
581 if (!f->fmt.pix.colorspace)
582 f->fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
584 if (f->fmt.pix.field != V4L2_FIELD_SEQ_TB &&
585 f->fmt.pix.field != V4L2_FIELD_SEQ_BT)
586 f->fmt.pix.field = V4L2_FIELD_SEQ_TB;
588 return vidioc_try_fmt(f, fmt);
591 static int vidioc_s_fmt(struct deinterlace_ctx *ctx, struct v4l2_format *f)
593 struct deinterlace_q_data *q_data;
594 struct vb2_queue *vq;
596 vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
600 q_data = get_q_data(f->type);
604 if (vb2_is_busy(vq)) {
605 v4l2_err(&ctx->dev->v4l2_dev, "%s queue busy\n", __func__);
609 q_data->fmt = find_format(f);
611 v4l2_err(&ctx->dev->v4l2_dev,
612 "Couldn't set format type %d, wxh: %dx%d. fmt: %d, field: %d\n",
613 f->type, f->fmt.pix.width, f->fmt.pix.height,
614 f->fmt.pix.pixelformat, f->fmt.pix.field);
618 q_data->width = f->fmt.pix.width;
619 q_data->height = f->fmt.pix.height;
620 q_data->field = f->fmt.pix.field;
622 switch (f->fmt.pix.pixelformat) {
623 case V4L2_PIX_FMT_YUV420:
624 f->fmt.pix.bytesperline = f->fmt.pix.width * 3 / 2;
625 q_data->sizeimage = (q_data->width * q_data->height * 3) / 2;
627 case V4L2_PIX_FMT_YUYV:
629 f->fmt.pix.bytesperline = f->fmt.pix.width * 2;
630 q_data->sizeimage = q_data->width * q_data->height * 2;
634 "Setting format for type %d, wxh: %dx%d, fmt: %d, field: %d\n",
635 f->type, q_data->width, q_data->height, q_data->fmt->fourcc,
/* VIDIOC_S_FMT (capture): try-then-set. */
static int vidioc_s_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
{
	int ret;

	ret = vidioc_try_fmt_vid_cap(file, priv, f);
	if (ret)
		return ret;
	return vidioc_s_fmt(priv, f);
}
652 static int vidioc_s_fmt_vid_out(struct file *file, void *priv,
653 struct v4l2_format *f)
655 struct deinterlace_ctx *ctx = priv;
658 ret = vidioc_try_fmt_vid_out(file, priv, f);
662 ret = vidioc_s_fmt(priv, f);
664 ctx->colorspace = f->fmt.pix.colorspace;
669 static int vidioc_reqbufs(struct file *file, void *priv,
670 struct v4l2_requestbuffers *reqbufs)
672 struct deinterlace_ctx *ctx = priv;
674 return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
677 static int vidioc_querybuf(struct file *file, void *priv,
678 struct v4l2_buffer *buf)
680 struct deinterlace_ctx *ctx = priv;
682 return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
685 static int vidioc_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
687 struct deinterlace_ctx *ctx = priv;
689 return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
692 static int vidioc_dqbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
694 struct deinterlace_ctx *ctx = priv;
696 return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
699 static int vidioc_streamon(struct file *file, void *priv,
700 enum v4l2_buf_type type)
702 struct deinterlace_q_data *s_q_data, *d_q_data;
703 struct deinterlace_ctx *ctx = priv;
705 s_q_data = get_q_data(V4L2_BUF_TYPE_VIDEO_OUTPUT);
706 d_q_data = get_q_data(V4L2_BUF_TYPE_VIDEO_CAPTURE);
708 /* Check that src and dst queues have the same pix format */
709 if (s_q_data->fmt->fourcc != d_q_data->fmt->fourcc) {
710 v4l2_err(&ctx->dev->v4l2_dev,
711 "src and dst formats don't match.\n");
715 /* Check that input and output deinterlacing types are compatible */
716 switch (s_q_data->field) {
717 case V4L2_FIELD_SEQ_BT:
718 if (d_q_data->field != V4L2_FIELD_NONE &&
719 d_q_data->field != V4L2_FIELD_INTERLACED_BT) {
720 v4l2_err(&ctx->dev->v4l2_dev,
721 "src and dst field conversion [(%d)->(%d)] not supported.\n",
722 s_q_data->field, d_q_data->field);
726 case V4L2_FIELD_SEQ_TB:
727 if (d_q_data->field != V4L2_FIELD_NONE &&
728 d_q_data->field != V4L2_FIELD_INTERLACED_TB) {
729 v4l2_err(&ctx->dev->v4l2_dev,
730 "src and dst field conversion [(%d)->(%d)] not supported.\n",
731 s_q_data->field, d_q_data->field);
739 return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
742 static int vidioc_streamoff(struct file *file, void *priv,
743 enum v4l2_buf_type type)
745 struct deinterlace_ctx *ctx = priv;
747 return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
750 static const struct v4l2_ioctl_ops deinterlace_ioctl_ops = {
751 .vidioc_querycap = vidioc_querycap,
753 .vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,
754 .vidioc_g_fmt_vid_cap = vidioc_g_fmt_vid_cap,
755 .vidioc_try_fmt_vid_cap = vidioc_try_fmt_vid_cap,
756 .vidioc_s_fmt_vid_cap = vidioc_s_fmt_vid_cap,
758 .vidioc_enum_fmt_vid_out = vidioc_enum_fmt_vid_out,
759 .vidioc_g_fmt_vid_out = vidioc_g_fmt_vid_out,
760 .vidioc_try_fmt_vid_out = vidioc_try_fmt_vid_out,
761 .vidioc_s_fmt_vid_out = vidioc_s_fmt_vid_out,
763 .vidioc_reqbufs = vidioc_reqbufs,
764 .vidioc_querybuf = vidioc_querybuf,
766 .vidioc_qbuf = vidioc_qbuf,
767 .vidioc_dqbuf = vidioc_dqbuf,
769 .vidioc_streamon = vidioc_streamon,
770 .vidioc_streamoff = vidioc_streamoff,
781 static int deinterlace_queue_setup(struct vb2_queue *vq,
782 unsigned int *nbuffers, unsigned int *nplanes,
783 unsigned int sizes[], struct device *alloc_devs[])
785 struct deinterlace_ctx *ctx = vb2_get_drv_priv(vq);
786 struct deinterlace_q_data *q_data;
787 unsigned int size, count = *nbuffers;
789 q_data = get_q_data(vq->type);
791 switch (q_data->fmt->fourcc) {
792 case V4L2_PIX_FMT_YUV420:
793 size = q_data->width * q_data->height * 3 / 2;
795 case V4L2_PIX_FMT_YUYV:
797 size = q_data->width * q_data->height * 2;
804 dprintk(ctx->dev, "get %d buffer(s) of size %d each.\n", count, size);
809 static int deinterlace_buf_prepare(struct vb2_buffer *vb)
811 struct deinterlace_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
812 struct deinterlace_q_data *q_data;
814 dprintk(ctx->dev, "type: %d\n", vb->vb2_queue->type);
816 q_data = get_q_data(vb->vb2_queue->type);
818 if (vb2_plane_size(vb, 0) < q_data->sizeimage) {
819 dprintk(ctx->dev, "%s data will not fit into plane (%lu < %lu)\n",
820 __func__, vb2_plane_size(vb, 0), (long)q_data->sizeimage);
824 vb2_set_plane_payload(vb, 0, q_data->sizeimage);
829 static void deinterlace_buf_queue(struct vb2_buffer *vb)
831 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
832 struct deinterlace_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
834 v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
837 static const struct vb2_ops deinterlace_qops = {
838 .queue_setup = deinterlace_queue_setup,
839 .buf_prepare = deinterlace_buf_prepare,
840 .buf_queue = deinterlace_buf_queue,
841 .wait_prepare = vb2_ops_wait_prepare,
842 .wait_finish = vb2_ops_wait_finish,
845 static int queue_init(void *priv, struct vb2_queue *src_vq,
846 struct vb2_queue *dst_vq)
848 struct deinterlace_ctx *ctx = priv;
851 src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
852 src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
853 src_vq->drv_priv = ctx;
854 src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
855 src_vq->ops = &deinterlace_qops;
856 src_vq->mem_ops = &vb2_dma_contig_memops;
857 src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
858 src_vq->dev = ctx->dev->v4l2_dev.dev;
859 src_vq->lock = &ctx->dev->dev_mutex;
860 q_data[V4L2_M2M_SRC].fmt = &formats[0];
861 q_data[V4L2_M2M_SRC].width = 640;
862 q_data[V4L2_M2M_SRC].height = 480;
863 q_data[V4L2_M2M_SRC].sizeimage = (640 * 480 * 3) / 2;
864 q_data[V4L2_M2M_SRC].field = V4L2_FIELD_SEQ_TB;
866 ret = vb2_queue_init(src_vq);
870 dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
871 dst_vq->io_modes = VB2_MMAP | VB2_USERPTR;
872 dst_vq->drv_priv = ctx;
873 dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
874 dst_vq->ops = &deinterlace_qops;
875 dst_vq->mem_ops = &vb2_dma_contig_memops;
876 dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
877 dst_vq->dev = ctx->dev->v4l2_dev.dev;
878 dst_vq->lock = &ctx->dev->dev_mutex;
879 q_data[V4L2_M2M_DST].fmt = &formats[0];
880 q_data[V4L2_M2M_DST].width = 640;
881 q_data[V4L2_M2M_DST].height = 480;
882 q_data[V4L2_M2M_DST].sizeimage = (640 * 480 * 3) / 2;
883 q_data[V4L2_M2M_SRC].field = V4L2_FIELD_INTERLACED_TB;
885 return vb2_queue_init(dst_vq);
891 static int deinterlace_open(struct file *file)
893 struct deinterlace_dev *pcdev = video_drvdata(file);
894 struct deinterlace_ctx *ctx = NULL;
896 ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
900 file->private_data = ctx;
903 ctx->m2m_ctx = v4l2_m2m_ctx_init(pcdev->m2m_dev, ctx, &queue_init);
904 if (IS_ERR(ctx->m2m_ctx)) {
905 int ret = PTR_ERR(ctx->m2m_ctx);
911 ctx->xt = kzalloc(sizeof(struct dma_interleaved_template) +
912 sizeof(struct data_chunk), GFP_KERNEL);
918 ctx->colorspace = V4L2_COLORSPACE_REC709;
920 dprintk(pcdev, "Created instance %p, m2m_ctx: %p\n", ctx, ctx->m2m_ctx);
925 static int deinterlace_release(struct file *file)
927 struct deinterlace_dev *pcdev = video_drvdata(file);
928 struct deinterlace_ctx *ctx = file->private_data;
930 dprintk(pcdev, "Releasing instance %p\n", ctx);
932 v4l2_m2m_ctx_release(ctx->m2m_ctx);
939 static __poll_t deinterlace_poll(struct file *file,
940 struct poll_table_struct *wait)
942 struct deinterlace_ctx *ctx = file->private_data;
945 mutex_lock(&ctx->dev->dev_mutex);
946 ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
947 mutex_unlock(&ctx->dev->dev_mutex);
952 static int deinterlace_mmap(struct file *file, struct vm_area_struct *vma)
954 struct deinterlace_ctx *ctx = file->private_data;
956 return v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
959 static const struct v4l2_file_operations deinterlace_fops = {
960 .owner = THIS_MODULE,
961 .open = deinterlace_open,
962 .release = deinterlace_release,
963 .poll = deinterlace_poll,
964 .unlocked_ioctl = video_ioctl2,
965 .mmap = deinterlace_mmap,
968 static const struct video_device deinterlace_videodev = {
969 .name = MEM2MEM_NAME,
970 .fops = &deinterlace_fops,
971 .ioctl_ops = &deinterlace_ioctl_ops,
973 .release = video_device_release_empty,
974 .vfl_dir = VFL_DIR_M2M,
977 static const struct v4l2_m2m_ops m2m_ops = {
978 .device_run = deinterlace_device_run,
979 .job_ready = deinterlace_job_ready,
980 .job_abort = deinterlace_job_abort,
983 static int deinterlace_probe(struct platform_device *pdev)
985 struct deinterlace_dev *pcdev;
986 struct video_device *vfd;
990 pcdev = devm_kzalloc(&pdev->dev, sizeof(*pcdev), GFP_KERNEL);
994 spin_lock_init(&pcdev->irqlock);
997 dma_cap_set(DMA_INTERLEAVE, mask);
998 pcdev->dma_chan = dma_request_channel(mask, NULL, pcdev);
999 if (!pcdev->dma_chan)
1002 if (!dma_has_cap(DMA_INTERLEAVE, pcdev->dma_chan->device->cap_mask)) {
1003 dev_err(&pdev->dev, "DMA does not support INTERLEAVE\n");
1008 ret = v4l2_device_register(&pdev->dev, &pcdev->v4l2_dev);
1012 atomic_set(&pcdev->busy, 0);
1013 mutex_init(&pcdev->dev_mutex);
1016 *vfd = deinterlace_videodev;
1017 vfd->lock = &pcdev->dev_mutex;
1018 vfd->v4l2_dev = &pcdev->v4l2_dev;
1020 ret = video_register_device(vfd, VFL_TYPE_GRABBER, 0);
1022 v4l2_err(&pcdev->v4l2_dev, "Failed to register video device\n");
1026 video_set_drvdata(vfd, pcdev);
1027 v4l2_info(&pcdev->v4l2_dev, MEM2MEM_TEST_MODULE_NAME
1028 " Device registered as /dev/video%d\n", vfd->num);
1030 platform_set_drvdata(pdev, pcdev);
1032 pcdev->m2m_dev = v4l2_m2m_init(&m2m_ops);
1033 if (IS_ERR(pcdev->m2m_dev)) {
1034 v4l2_err(&pcdev->v4l2_dev, "Failed to init mem2mem device\n");
1035 ret = PTR_ERR(pcdev->m2m_dev);
1042 video_unregister_device(&pcdev->vfd);
1044 v4l2_device_unregister(&pcdev->v4l2_dev);
1046 dma_release_channel(pcdev->dma_chan);
1051 static int deinterlace_remove(struct platform_device *pdev)
1053 struct deinterlace_dev *pcdev = platform_get_drvdata(pdev);
1055 v4l2_info(&pcdev->v4l2_dev, "Removing " MEM2MEM_TEST_MODULE_NAME);
1056 v4l2_m2m_release(pcdev->m2m_dev);
1057 video_unregister_device(&pcdev->vfd);
1058 v4l2_device_unregister(&pcdev->v4l2_dev);
1059 dma_release_channel(pcdev->dma_chan);
1064 static struct platform_driver deinterlace_pdrv = {
1065 .probe = deinterlace_probe,
1066 .remove = deinterlace_remove,
1068 .name = MEM2MEM_NAME,
1071 module_platform_driver(deinterlace_pdrv);