// SPDX-License-Identifier: GPL-2.0+
/*
 * vsp1_video.c -- R-Car VSP1 Video Node
 *
 * Copyright (C) 2013-2015 Renesas Electronics Corporation
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 */

#include <linux/list.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/slab.h>
#include <linux/v4l2-mediabus.h>
#include <linux/videodev2.h>
#include <linux/wait.h>

#include <media/media-entity.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "vsp1.h"
#include "vsp1_brx.h"
#include "vsp1_dl.h"
#include "vsp1_entity.h"
#include "vsp1_hgo.h"
#include "vsp1_hgt.h"
#include "vsp1_pipe.h"
#include "vsp1_rwpf.h"
#include "vsp1_uds.h"
#include "vsp1_video.h"

#define VSP1_VIDEO_DEF_FORMAT		V4L2_PIX_FMT_YUYV
#define VSP1_VIDEO_DEF_WIDTH		1024
#define VSP1_VIDEO_DEF_HEIGHT		768

#define VSP1_VIDEO_MAX_WIDTH		8190U
#define VSP1_VIDEO_MAX_HEIGHT		8190U

/* -----------------------------------------------------------------------------
 * Helper functions
 */

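/*
 * Return the subdev connected to @local through its first enabled link, or
 * NULL if the remote entity isn't a V4L2 subdevice. If @pad is non-NULL,
 * store the index of the connected remote pad in it.
 */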
static struct v4l2_subdev *
vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
{
	struct media_pad *remote;

	remote = media_pad_remote_pad_first(local);
	if (!remote || !is_media_entity_v4l2_subdev(remote->entity))
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}

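/*
 * Verify that the active format on the connected subdev pad matches the video
 * node format in media bus code, width and height. Return -EPIPE on mismatch.
 */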
static int vsp1_video_verify_format(struct vsp1_video *video)
{
	struct v4l2_subdev_format fmt = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	struct v4l2_subdev *subdev;
	int ret;

	subdev = vsp1_video_remote_subdev(&video->pad, &fmt.pad);
	if (subdev == NULL)
		return -EINVAL;

	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret < 0)
		return ret == -ENOIOCTLCMD ? -EINVAL : ret;

	if (video->rwpf->fmtinfo->mbus != fmt.format.code ||
	    video->rwpf->format.height != fmt.format.height ||
	    video->rwpf->format.width != fmt.format.width) {
		dev_dbg(video->vsp1->dev,
			"Format mismatch: 0x%04x/%ux%u != 0x%04x/%ux%u\n",
			video->rwpf->fmtinfo->mbus, video->rwpf->format.width,
			video->rwpf->format.height, fmt.format.code,
			fmt.format.width, fmt.format.height);
		return -EPIPE;
	}

	return 0;
}

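/*
 * Adjust the requested pixel format to the device constraints: supported
 * pixel formats, width/height alignment and limits, and the 128-byte stride
 * alignment. Optionally return the matching format information in @fmtinfo.
 */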
static int __vsp1_video_try_format(struct vsp1_video *video,
				   struct v4l2_pix_format_mplane *pix,
				   const struct vsp1_format_info **fmtinfo)
{
	static const u32 xrgb_formats[][2] = {
		{ V4L2_PIX_FMT_RGB444, V4L2_PIX_FMT_XRGB444 },
		{ V4L2_PIX_FMT_RGB555, V4L2_PIX_FMT_XRGB555 },
		{ V4L2_PIX_FMT_BGR32, V4L2_PIX_FMT_XBGR32 },
		{ V4L2_PIX_FMT_RGB32, V4L2_PIX_FMT_XRGB32 },
	};

	const struct vsp1_format_info *info;
	unsigned int width = pix->width;
	unsigned int height = pix->height;
	unsigned int i;

	/*
	 * Backward compatibility: replace deprecated RGB formats by their XRGB
	 * equivalent. This selects the format older userspace applications want
	 * while still exposing the new format.
	 */
	for (i = 0; i < ARRAY_SIZE(xrgb_formats); ++i) {
		if (xrgb_formats[i][0] == pix->pixelformat) {
			pix->pixelformat = xrgb_formats[i][1];
			break;
		}
	}

	/*
	 * Retrieve format information and select the default format if the
	 * requested format isn't supported.
	 */
	info = vsp1_get_format_info(video->vsp1, pix->pixelformat);
	if (info == NULL)
		info = vsp1_get_format_info(video->vsp1, VSP1_VIDEO_DEF_FORMAT);

	pix->pixelformat = info->fourcc;
	pix->field = V4L2_FIELD_NONE;

	/*
	 * Adjust the colour space fields. On capture devices, userspace needs
	 * to set the V4L2_PIX_FMT_FLAG_SET_CSC to override the defaults. Reset
	 * all fields to *_DEFAULT if the flag isn't set, to then handle
	 * capture and output devices in the same way.
	 */
	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
	    !(pix->flags & V4L2_PIX_FMT_FLAG_SET_CSC)) {
		pix->colorspace = V4L2_COLORSPACE_DEFAULT;
		pix->xfer_func = V4L2_XFER_FUNC_DEFAULT;
		pix->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
		pix->quantization = V4L2_QUANTIZATION_DEFAULT;
	}

	vsp1_adjust_color_space(info->mbus, &pix->colorspace, &pix->xfer_func,
				&pix->ycbcr_enc, &pix->quantization);

	memset(pix->reserved, 0, sizeof(pix->reserved));

	/* Align the width and height for YUV 4:2:2 and 4:2:0 formats. */
	width = round_down(width, info->hsub);
	height = round_down(height, info->vsub);

	/* Clamp the width and height. */
	pix->width = clamp(width, info->hsub, VSP1_VIDEO_MAX_WIDTH);
	pix->height = clamp(height, info->vsub, VSP1_VIDEO_MAX_HEIGHT);

	/*
	 * Compute and clamp the stride and image size. While not documented in
	 * the datasheet, strides not aligned to a multiple of 128 bytes result
	 * in image corruption.
	 */
	for (i = 0; i < min(info->planes, 2U); ++i) {
		unsigned int hsub = i > 0 ? info->hsub : 1;
		unsigned int vsub = i > 0 ? info->vsub : 1;
		unsigned int align = 128;
		unsigned int bpl;

		bpl = clamp_t(unsigned int, pix->plane_fmt[i].bytesperline,
			      pix->width / hsub * info->bpp[i] / 8,
			      round_down(65535U, align));

		pix->plane_fmt[i].bytesperline = round_up(bpl, align);
		pix->plane_fmt[i].sizeimage = pix->plane_fmt[i].bytesperline
					    * pix->height / vsub;
	}

	if (info->planes == 3) {
		/* The second and third planes must have the same stride. */
		pix->plane_fmt[2].bytesperline = pix->plane_fmt[1].bytesperline;
		pix->plane_fmt[2].sizeimage = pix->plane_fmt[1].sizeimage;
	}

	pix->num_planes = info->planes;

	if (fmtinfo)
		*fmtinfo = info;

	return 0;
}

/* -----------------------------------------------------------------------------
 * Pipeline Management
 */

/*
 * vsp1_video_complete_buffer - Complete the current buffer
 * @video: the video node
 *
 * This function completes the current buffer by filling its sequence number,
 * time stamp and payload size, and hands it back to the vb2 core.
 *
 * Return the next queued buffer or NULL if the queue is empty.
 */
static struct vsp1_vb2_buffer *
vsp1_video_complete_buffer(struct vsp1_video *video)
{
	struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
	struct vsp1_vb2_buffer *next = NULL;
	struct vsp1_vb2_buffer *done;
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&video->irqlock, flags);

	if (list_empty(&video->irqqueue)) {
		spin_unlock_irqrestore(&video->irqlock, flags);
		return NULL;
	}

	done = list_first_entry(&video->irqqueue,
				struct vsp1_vb2_buffer, queue);

	list_del(&done->queue);

	if (!list_empty(&video->irqqueue))
		next = list_first_entry(&video->irqqueue,
					struct vsp1_vb2_buffer, queue);

	spin_unlock_irqrestore(&video->irqlock, flags);

	done->buf.sequence = pipe->sequence;
	done->buf.vb2_buf.timestamp = ktime_get_ns();
	for (i = 0; i < done->buf.vb2_buf.num_planes; ++i)
		vb2_set_plane_payload(&done->buf.vb2_buf, i,
				      vb2_plane_size(&done->buf.vb2_buf, i));
	vb2_buffer_done(&done->buf.vb2_buf, VB2_BUF_STATE_DONE);

	return next;
}

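/*
 * Complete the current buffer on the video node attached to @rwpf. If another
 * buffer is already queued, use its memory for the next frame and flag the
 * video node as having a buffer ready.
 */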
static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
				 struct vsp1_rwpf *rwpf)
{
	struct vsp1_video *video = rwpf->video;
	struct vsp1_vb2_buffer *buf;

	buf = vsp1_video_complete_buffer(video);
	if (buf == NULL)
		return;

	video->rwpf->mem = buf->mem;
	pipe->buffers_ready |= 1 << video->pipe_index;
}

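/* Configure all entities in the pipeline for image partition @partition. */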
static void vsp1_video_pipeline_run_partition(struct vsp1_pipeline *pipe,
					      struct vsp1_dl_list *dl,
					      unsigned int partition)
{
	struct vsp1_partition *part = &pipe->part_table[partition];
	struct vsp1_dl_body *dlb = vsp1_dl_list_get_body0(dl);
	struct vsp1_entity *entity;

	list_for_each_entry(entity, &pipe->entities, list_pipe)
		vsp1_entity_configure_partition(entity, pipe, part, dl, dlb);
}

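/*
 * Fill a display list with the per-frame and per-partition configuration of
 * all entities, chaining one list per additional partition, then commit it
 * and start the pipeline.
 */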
static void vsp1_video_pipeline_run(struct vsp1_pipeline *pipe)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
	struct vsp1_entity *entity;
	struct vsp1_dl_body *dlb;
	struct vsp1_dl_list *dl;
	unsigned int partition;

	dl = vsp1_dl_list_get(pipe->output->dlm);

	/*
	 * If the VSP hardware isn't configured yet (which occurs either when
	 * processing the first frame or after a system suspend/resume), add the
	 * cached stream configuration to the display list to perform a full
	 * initialisation.
	 */
	if (!pipe->configured)
		vsp1_dl_list_add_body(dl, pipe->stream_config);

	dlb = vsp1_dl_list_get_body0(dl);

	list_for_each_entry(entity, &pipe->entities, list_pipe)
		vsp1_entity_configure_frame(entity, pipe, dl, dlb);

	/* Run the first partition. */
	vsp1_video_pipeline_run_partition(pipe, dl, 0);

	/* Process consecutive partitions as necessary. */
	for (partition = 1; partition < pipe->partitions; ++partition) {
		struct vsp1_dl_list *dl_next;

		dl_next = vsp1_dl_list_get(pipe->output->dlm);

		/*
		 * An incomplete chain will still function, but output only
		 * the partitions that had a dl available. The frame end
		 * interrupt will be marked on the last dl in the chain.
		 */
		if (!dl_next) {
			dev_err(vsp1->dev, "Failed to obtain a dl list. Frame will be incomplete\n");
			break;
		}

		vsp1_video_pipeline_run_partition(pipe, dl_next, partition);
		vsp1_dl_list_add_chain(dl, dl_next);
	}

	/* Complete, and commit the head display list. */
	vsp1_dl_list_commit(dl, 0);
	pipe->configured = true;

	vsp1_pipeline_run(pipe);
}

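/*
 * Frame end handler: complete buffers on all video nodes, then either wake up
 * a pending stop request or restart the pipeline if more buffers are ready.
 */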
static void vsp1_video_pipeline_frame_end(struct vsp1_pipeline *pipe,
					  unsigned int completion)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
	enum vsp1_pipeline_state state;
	unsigned long flags;
	unsigned int i;

	/* M2M Pipelines should never call here with an incomplete frame. */
	WARN_ON_ONCE(!(completion & VSP1_DL_FRAME_END_COMPLETED));

	spin_lock_irqsave(&pipe->irqlock, flags);

	/* Complete buffers on all video nodes. */
	for (i = 0; i < vsp1->info->rpf_count; ++i) {
		if (!pipe->inputs[i])
			continue;

		vsp1_video_frame_end(pipe, pipe->inputs[i]);
	}

	vsp1_video_frame_end(pipe, pipe->output);

	state = pipe->state;
	pipe->state = VSP1_PIPELINE_STOPPED;

	/*
	 * If a stop has been requested, mark the pipeline as stopped and
	 * return. Otherwise restart the pipeline if ready.
	 */
	if (state == VSP1_PIPELINE_STOPPING)
		wake_up(&pipe->wq);
	else if (vsp1_pipeline_ready(pipe))
		vsp1_video_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

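/*
 * Follow the data path from the @input RPF to the @output WPF, recording BRU,
 * BRS and UDS usage and checking that the branch contains no loop and ends at
 * the output WPF.
 */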
static int vsp1_video_pipeline_build_branch(struct vsp1_pipeline *pipe,
					    struct vsp1_rwpf *input,
					    struct vsp1_rwpf *output)
{
	struct media_entity_enum ent_enum;
	struct vsp1_entity *entity;
	struct media_pad *pad;
	struct vsp1_brx *brx = NULL;
	int ret;

	ret = media_entity_enum_init(&ent_enum, &input->entity.vsp1->media_dev);
	if (ret < 0)
		return ret;

	/*
	 * The main data path doesn't include the HGO or HGT, use
	 * vsp1_entity_remote_pad() to traverse the graph.
	 */

	pad = vsp1_entity_remote_pad(&input->entity.pads[RWPF_PAD_SOURCE]);

	while (1) {
		if (pad == NULL) {
			ret = -EPIPE;
			goto out;
		}

		/* We've reached a video node, that shouldn't have happened. */
		if (!is_media_entity_v4l2_subdev(pad->entity)) {
			ret = -EPIPE;
			goto out;
		}

		entity = to_vsp1_entity(
			media_entity_to_v4l2_subdev(pad->entity));

		/*
		 * A BRU or BRS is present in the pipeline, store its input pad
		 * number in the input RPF for use when configuring the RPF.
		 */
		if (entity->type == VSP1_ENTITY_BRU ||
		    entity->type == VSP1_ENTITY_BRS) {
			/* BRU and BRS can't be chained. */
			if (brx) {
				ret = -EPIPE;
				goto out;
			}

			brx = to_brx(&entity->subdev);
			brx->inputs[pad->index].rpf = input;
			input->brx_input = pad->index;
		}

		/* We've reached the WPF, we're done. */
		if (entity->type == VSP1_ENTITY_WPF)
			break;

		/* Ensure the branch has no loop. */
		if (media_entity_enum_test_and_set(&ent_enum,
						   &entity->subdev.entity)) {
			ret = -EPIPE;
			goto out;
		}

		/* UDS can't be chained. */
		if (entity->type == VSP1_ENTITY_UDS) {
			if (pipe->uds) {
				ret = -EPIPE;
				goto out;
			}

			pipe->uds = entity;
			pipe->uds_input = brx ? &brx->entity : &input->entity;
		}

		/* Follow the source link, ignoring any HGO or HGT. */
		pad = &entity->pads[entity->source_pad];
		pad = vsp1_entity_remote_pad(pad);
	}

	/* The last entity must be the output WPF. */
	if (entity != &output->entity)
		ret = -EPIPE;

out:
	media_entity_enum_cleanup(&ent_enum);

	return ret;
}

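/*
 * Walk the media graph starting at the video node to locate all entities
 * connected to it and populate the pipeline. The pipeline needs one output
 * WPF and at least one input RPF.
 */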
static int vsp1_video_pipeline_build(struct vsp1_pipeline *pipe,
				     struct vsp1_video *video)
{
	struct media_graph graph;
	struct media_entity *entity = &video->video.entity;
	struct media_device *mdev = entity->graph_obj.mdev;
	unsigned int i;
	int ret;

	/* Walk the graph to locate the entities and video nodes. */
	ret = media_graph_walk_init(&graph, mdev);
	if (ret)
		return ret;

	media_graph_walk_start(&graph, entity);

	while ((entity = media_graph_walk_next(&graph))) {
		struct v4l2_subdev *subdev;
		struct vsp1_rwpf *rwpf;
		struct vsp1_entity *e;

		if (!is_media_entity_v4l2_subdev(entity))
			continue;

		subdev = media_entity_to_v4l2_subdev(entity);
		e = to_vsp1_entity(subdev);
		list_add_tail(&e->list_pipe, &pipe->entities);
		e->pipe = pipe;

		switch (e->type) {
		case VSP1_ENTITY_RPF:
			rwpf = to_rwpf(subdev);
			pipe->inputs[rwpf->entity.index] = rwpf;
			rwpf->video->pipe_index = ++pipe->num_inputs;
			break;

		case VSP1_ENTITY_WPF:
			rwpf = to_rwpf(subdev);
			pipe->output = rwpf;
			rwpf->video->pipe_index = 0;
			break;

		case VSP1_ENTITY_LIF:
			pipe->lif = e;
			break;

		case VSP1_ENTITY_BRU:
		case VSP1_ENTITY_BRS:
			pipe->brx = e;
			break;

		case VSP1_ENTITY_HGO:
			pipe->hgo = e;
			break;

		case VSP1_ENTITY_HGT:
			pipe->hgt = e;
			break;

		default:
			break;
		}
	}

	media_graph_walk_cleanup(&graph);

	/* We need one output and at least one input. */
	if (pipe->num_inputs == 0 || !pipe->output)
		return -EPIPE;

	/*
	 * Follow links downstream for each input and make sure the graph
	 * contains no loop and that all branches end at the output WPF.
	 */
	for (i = 0; i < video->vsp1->info->rpf_count; ++i) {
		if (!pipe->inputs[i])
			continue;

		ret = vsp1_video_pipeline_build_branch(pipe, pipe->inputs[i],
						       pipe->output);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static int vsp1_video_pipeline_init(struct vsp1_pipeline *pipe,
				    struct vsp1_video *video)
{
	int ret;

	vsp1_pipeline_init(pipe);

	pipe->frame_end = vsp1_video_pipeline_frame_end;

	ret = vsp1_video_pipeline_build(pipe, video);
	if (ret)
		return ret;

	vsp1_pipeline_dump(pipe, "video");

	return 0;
}

static struct vsp1_pipeline *vsp1_video_pipeline_get(struct vsp1_video *video)
{
	struct vsp1_pipeline *pipe;
	int ret;

	/*
	 * Get a pipeline object for the video node. If a pipeline has already
	 * been allocated just increment its reference count and return it.
	 * Otherwise allocate a new pipeline and initialize it, it will be freed
	 * when the last reference is released.
	 */
	if (!video->rwpf->entity.pipe) {
		pipe = kzalloc(sizeof(*pipe), GFP_KERNEL);
		if (!pipe)
			return ERR_PTR(-ENOMEM);

		ret = vsp1_video_pipeline_init(pipe, video);
		if (ret < 0) {
			vsp1_pipeline_reset(pipe);
			kfree(pipe);
			return ERR_PTR(ret);
		}
	} else {
		pipe = video->rwpf->entity.pipe;
		kref_get(&pipe->kref);
	}

	return pipe;
}

static void vsp1_video_pipeline_release(struct kref *kref)
{
	struct vsp1_pipeline *pipe = container_of(kref, typeof(*pipe), kref);

	vsp1_pipeline_reset(pipe);
	kfree(pipe);
}

static void vsp1_video_pipeline_put(struct vsp1_pipeline *pipe)
{
	struct media_device *mdev = &pipe->output->entity.vsp1->media_dev;

	mutex_lock(&mdev->graph_mutex);
	kref_put(&pipe->kref, vsp1_video_pipeline_release);
	mutex_unlock(&mdev->graph_mutex);
}

/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */

static int
vsp1_video_queue_setup(struct vb2_queue *vq,
		       unsigned int *nbuffers, unsigned int *nplanes,
		       unsigned int sizes[], struct device *alloc_devs[])
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
	unsigned int i;

	if (*nplanes) {
		if (*nplanes != format->num_planes)
			return -EINVAL;

		for (i = 0; i < *nplanes; i++)
			if (sizes[i] < format->plane_fmt[i].sizeimage)
				return -EINVAL;
		return 0;
	}

	*nplanes = format->num_planes;

	for (i = 0; i < format->num_planes; ++i)
		sizes[i] = format->plane_fmt[i].sizeimage;

	return 0;
}

static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
	const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
	unsigned int i;

	if (vb->num_planes < format->num_planes)
		return -EINVAL;

	for (i = 0; i < vb->num_planes; ++i) {
		buf->mem.addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);

		if (vb2_plane_size(vb, i) < format->plane_fmt[i].sizeimage)
			return -EINVAL;
	}

	for ( ; i < 3; ++i)
		buf->mem.addr[i] = 0;

	return 0;
}

static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
	struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->irqlock, flags);
	empty = list_empty(&video->irqqueue);
	list_add_tail(&buf->queue, &video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);

	if (!empty)
		return;

	spin_lock_irqsave(&pipe->irqlock, flags);

	video->rwpf->mem = buf->mem;
	pipe->buffers_ready |= 1 << video->pipe_index;

	if (vb2_start_streaming_called(&video->queue) &&
	    vsp1_pipeline_ready(pipe))
		vsp1_video_pipeline_run(pipe);

	spin_unlock_irqrestore(&pipe->irqlock, flags);
}

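/*
 * Compute the number of image partitions from the maximum partition width
 * supported by the pipeline entities and pre-calculate the partition table.
 */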
static int vsp1_video_pipeline_setup_partitions(struct vsp1_pipeline *pipe)
{
	struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
	const struct v4l2_mbus_framefmt *format;
	struct vsp1_entity *entity;
	unsigned int div_size;
	unsigned int i;

	/*
	 * Partitions are computed on the size before rotation, use the format
	 * at the WPF sink.
	 */
	format = v4l2_subdev_state_get_format(pipe->output->entity.state,
					      RWPF_PAD_SINK);
	div_size = format->width;

	/*
	 * Only Gen3+ hardware requires image partitioning, Gen2 will operate
	 * with a single partition that covers the whole output.
	 */
	if (vsp1->info->gen >= 3) {
		list_for_each_entry(entity, &pipe->entities, list_pipe) {
			unsigned int entity_max;

			if (!entity->ops->max_width)
				continue;

			entity_max = entity->ops->max_width(entity,
							    entity->state,
							    pipe);
			if (entity_max)
				div_size = min(div_size, entity_max);
		}
	}

	pipe->partitions = DIV_ROUND_UP(format->width, div_size);
	pipe->part_table = kcalloc(pipe->partitions, sizeof(*pipe->part_table),
				   GFP_KERNEL);
	if (!pipe->part_table)
		return -ENOMEM;

	for (i = 0; i < pipe->partitions; ++i)
		vsp1_pipeline_calculate_partition(pipe, &pipe->part_table[i],
						  div_size, i);

	return 0;
}

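/*
 * Per-stream setup: compute the partition table, configure UDS alpha scaling,
 * and cache the stream configuration in a display list body.
 */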
static int vsp1_video_setup_pipeline(struct vsp1_pipeline *pipe)
{
	struct vsp1_entity *entity;
	int ret;

	/* Determine this pipeline's sizes for image partitioning support. */
	ret = vsp1_video_pipeline_setup_partitions(pipe);
	if (ret < 0)
		return ret;

	if (pipe->uds) {
		struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);

		/*
		 * If a BRU or BRS is present in the pipeline before the UDS,
		 * the alpha component doesn't need to be scaled as the BRU and
		 * BRS output alpha value is fixed to 255. Otherwise we need to
		 * scale the alpha component only when available at the input
		 * RPF.
		 */
		if (pipe->uds_input->type == VSP1_ENTITY_BRU ||
		    pipe->uds_input->type == VSP1_ENTITY_BRS) {
			uds->scale_alpha = false;
		} else {
			struct vsp1_rwpf *rpf =
				to_rwpf(&pipe->uds_input->subdev);

			uds->scale_alpha = rpf->fmtinfo->alpha;
		}
	}

	/*
	 * Compute and cache the stream configuration into a body. The cached
	 * body will be added to the display list by vsp1_video_pipeline_run()
	 * whenever the pipeline needs to be fully reconfigured.
	 */
	pipe->stream_config = vsp1_dlm_dl_body_get(pipe->output->dlm);
	if (!pipe->stream_config)
		return -ENOMEM;

	list_for_each_entry(entity, &pipe->entities, list_pipe) {
		vsp1_entity_route_setup(entity, pipe, pipe->stream_config);
		vsp1_entity_configure_stream(entity, entity->state, pipe, NULL,
					     pipe->stream_config);
	}

	return 0;
}

static void vsp1_video_release_buffers(struct vsp1_video *video)
{
	struct vsp1_vb2_buffer *buffer;
	unsigned long flags;

	/* Remove all buffers from the IRQ queue. */
	spin_lock_irqsave(&video->irqlock, flags);
	list_for_each_entry(buffer, &video->irqqueue, queue)
		vb2_buffer_done(&buffer->buf.vb2_buf, VB2_BUF_STATE_ERROR);
	INIT_LIST_HEAD(&video->irqqueue);
	spin_unlock_irqrestore(&video->irqlock, flags);
}

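/*
 * Release the per-stream resources (cached stream configuration and partition
 * table). Must be called with the pipeline lock held.
 */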
static void vsp1_video_cleanup_pipeline(struct vsp1_pipeline *pipe)
{
	lockdep_assert_held(&pipe->lock);

	/* Release any cached configuration from our output video. */
	vsp1_dl_body_put(pipe->stream_config);
	pipe->stream_config = NULL;
	pipe->configured = false;

	/* Release our partition table allocation. */
	kfree(pipe->part_table);
	pipe->part_table = NULL;
}

static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
	bool start_pipeline = false;
	unsigned long flags;
	int ret;

	mutex_lock(&pipe->lock);
	if (pipe->stream_count == pipe->num_inputs) {
		ret = vsp1_video_setup_pipeline(pipe);
		if (ret < 0) {
			vsp1_video_release_buffers(video);
			vsp1_video_cleanup_pipeline(pipe);
			mutex_unlock(&pipe->lock);
			return ret;
		}

		start_pipeline = true;
	}

	pipe->stream_count++;
	mutex_unlock(&pipe->lock);

	/*
	 * vsp1_pipeline_ready() is not sufficient to establish that all streams
	 * are prepared and the pipeline is configured, as multiple streams
	 * can race through streamon with buffers already queued; therefore we
	 * don't even attempt to start the pipeline until the last stream has
	 * called through here.
	 */
	if (!start_pipeline)
		return 0;

	spin_lock_irqsave(&pipe->irqlock, flags);
	if (vsp1_pipeline_ready(pipe))
		vsp1_video_pipeline_run(pipe);
	spin_unlock_irqrestore(&pipe->irqlock, flags);

	return 0;
}

static void vsp1_video_stop_streaming(struct vb2_queue *vq)
{
	struct vsp1_video *video = vb2_get_drv_priv(vq);
	struct vsp1_pipeline *pipe = video->rwpf->entity.pipe;
	unsigned long flags;
	int ret;

	/*
	 * Clear the buffers ready flag to make sure the device won't be started
	 * by a QBUF on the video node on the other side of the pipeline.
	 */
	spin_lock_irqsave(&video->irqlock, flags);
	pipe->buffers_ready &= ~(1 << video->pipe_index);
	spin_unlock_irqrestore(&video->irqlock, flags);

	mutex_lock(&pipe->lock);
	if (--pipe->stream_count == pipe->num_inputs) {
		/* Stop the pipeline. */
		ret = vsp1_pipeline_stop(pipe);
		if (ret == -ETIMEDOUT)
			dev_err(video->vsp1->dev, "pipeline stop timeout\n");

		vsp1_video_cleanup_pipeline(pipe);
	}
	mutex_unlock(&pipe->lock);

	video_device_pipeline_stop(&video->video);
	vsp1_video_release_buffers(video);
	vsp1_video_pipeline_put(pipe);
}

static const struct vb2_ops vsp1_video_queue_qops = {
	.queue_setup = vsp1_video_queue_setup,
	.buf_prepare = vsp1_video_buffer_prepare,
	.buf_queue = vsp1_video_buffer_queue,
	.start_streaming = vsp1_video_start_streaming,
	.stop_streaming = vsp1_video_stop_streaming,
};

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

static int
vsp1_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_IO_MC | V4L2_CAP_VIDEO_CAPTURE_MPLANE
			  | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	strscpy(cap->driver, "vsp1", sizeof(cap->driver));
	strscpy(cap->card, video->video.name, sizeof(cap->card));

	return 0;
}

static int vsp1_video_enum_format(struct file *file, void *fh,
				  struct v4l2_fmtdesc *f)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	const struct vsp1_format_info *info;

	info = vsp1_get_format_info_by_index(video->vsp1, f->index, f->mbus_code);
	if (!info)
		return -EINVAL;

	f->pixelformat = info->fourcc;

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
	    info->mbus == MEDIA_BUS_FMT_AYUV8_1X32)
		f->flags = V4L2_FMT_FLAG_CSC_YCBCR_ENC
			 | V4L2_FMT_FLAG_CSC_QUANTIZATION;

	return 0;
}

static int
vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	mutex_lock(&video->lock);
	format->fmt.pix_mp = video->rwpf->format;
	mutex_unlock(&video->lock);

	return 0;
}

static int
vsp1_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);

	if (format->type != video->queue.type)
		return -EINVAL;

	return __vsp1_video_try_format(video, &format->fmt.pix_mp, NULL);
}

static int
vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	const struct vsp1_format_info *info;
	int ret;

	if (format->type != video->queue.type)
		return -EINVAL;

	ret = __vsp1_video_try_format(video, &format->fmt.pix_mp, &info);
	if (ret < 0)
		return ret;

	mutex_lock(&video->lock);

	if (vb2_is_busy(&video->queue)) {
		ret = -EBUSY;
		goto done;
	}

	video->rwpf->format = format->fmt.pix_mp;
	video->rwpf->fmtinfo = info;

done:
	mutex_unlock(&video->lock);
	return ret;
}

static int
vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_video *video = to_vsp1_video(vfh->vdev);
	struct media_device *mdev = &video->vsp1->media_dev;
	struct vsp1_pipeline *pipe;
	int ret;

	if (vb2_queue_is_busy(&video->queue, file))
		return -EBUSY;

	/*
	 * Get a pipeline for the video node and start streaming on it. No link
	 * touching an entity in the pipeline can be activated or deactivated
	 * once streaming is started.
	 */
	mutex_lock(&mdev->graph_mutex);

	pipe = vsp1_video_pipeline_get(video);
	if (IS_ERR(pipe)) {
		mutex_unlock(&mdev->graph_mutex);
		return PTR_ERR(pipe);
	}

	ret = __video_device_pipeline_start(&video->video, &pipe->pipe);
	if (ret < 0) {
		mutex_unlock(&mdev->graph_mutex);
		goto err_pipe;
	}

	mutex_unlock(&mdev->graph_mutex);

	/*
	 * Verify that the configured format matches the output of the connected
	 * subdev.
	 */
	ret = vsp1_video_verify_format(video);
	if (ret < 0)
		goto err_stop;

	/* Start the queue. */
	ret = vb2_streamon(&video->queue, type);
	if (ret < 0)
		goto err_stop;

	return 0;

err_stop:
	video_device_pipeline_stop(&video->video);
err_pipe:
	vsp1_video_pipeline_put(pipe);
	return ret;
}

static const struct v4l2_ioctl_ops vsp1_video_ioctl_ops = {
	.vidioc_querycap		= vsp1_video_querycap,
	.vidioc_enum_fmt_vid_cap	= vsp1_video_enum_format,
	.vidioc_enum_fmt_vid_out	= vsp1_video_enum_format,
	.vidioc_g_fmt_vid_cap_mplane	= vsp1_video_get_format,
	.vidioc_s_fmt_vid_cap_mplane	= vsp1_video_set_format,
	.vidioc_try_fmt_vid_cap_mplane	= vsp1_video_try_format,
	.vidioc_g_fmt_vid_out_mplane	= vsp1_video_get_format,
	.vidioc_s_fmt_vid_out_mplane	= vsp1_video_set_format,
	.vidioc_try_fmt_vid_out_mplane	= vsp1_video_try_format,
	.vidioc_reqbufs			= vb2_ioctl_reqbufs,
	.vidioc_querybuf		= vb2_ioctl_querybuf,
	.vidioc_qbuf			= vb2_ioctl_qbuf,
	.vidioc_dqbuf			= vb2_ioctl_dqbuf,
	.vidioc_expbuf			= vb2_ioctl_expbuf,
	.vidioc_create_bufs		= vb2_ioctl_create_bufs,
	.vidioc_prepare_buf		= vb2_ioctl_prepare_buf,
	.vidioc_streamon		= vsp1_video_streamon,
	.vidioc_streamoff		= vb2_ioctl_streamoff,
};

/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */

static int vsp1_video_open(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);
	struct v4l2_fh *vfh;
	int ret = 0;

	vfh = kzalloc(sizeof(*vfh), GFP_KERNEL);
	if (vfh == NULL)
		return -ENOMEM;

	v4l2_fh_init(vfh, &video->video);
	v4l2_fh_add(vfh);

	file->private_data = vfh;

	ret = vsp1_device_get(video->vsp1);
	if (ret < 0) {
		v4l2_fh_del(vfh);
		v4l2_fh_exit(vfh);
		kfree(vfh);
	}

	return ret;
}

static int vsp1_video_release(struct file *file)
{
	struct vsp1_video *video = video_drvdata(file);

	vb2_fop_release(file);

	vsp1_device_put(video->vsp1);

	return 0;
}

static const struct v4l2_file_operations vsp1_video_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = vsp1_video_open,
	.release = vsp1_video_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};

/* -----------------------------------------------------------------------------
 * Media entity operations
 */

static int vsp1_video_link_validate(struct media_link *link)
{
	/*
	 * Ideally, link validation should be implemented here instead of
	 * calling vsp1_video_verify_format() in vsp1_video_streamon()
	 * manually. That would however break userspace that starts one video
	 * device before configuring formats on other video devices in the
	 * pipeline. This operation is just a no-op to silence the warnings
	 * from v4l2_subdev_link_validate().
	 */
	return 0;
}

static const struct media_entity_operations vsp1_video_media_ops = {
	.link_validate = vsp1_video_link_validate,
};

/* -----------------------------------------------------------------------------
 * Suspend and Resume
 */

void vsp1_video_suspend(struct vsp1_device *vsp1)
{
	unsigned long flags;
	unsigned int i;
	int ret;

	/*
	 * To avoid increasing the system suspend time needlessly, loop over the
	 * pipelines twice, first to set them all to the stopping state, and
	 * then to wait for the stop to complete.
	 */
	for (i = 0; i < vsp1->info->wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = wpf->entity.pipe;
		if (pipe == NULL)
			continue;

		spin_lock_irqsave(&pipe->irqlock, flags);
		if (pipe->state == VSP1_PIPELINE_RUNNING)
			pipe->state = VSP1_PIPELINE_STOPPING;
		spin_unlock_irqrestore(&pipe->irqlock, flags);
	}

	for (i = 0; i < vsp1->info->wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = wpf->entity.pipe;
		if (pipe == NULL)
			continue;

		ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_warn(vsp1->dev, "pipeline %u stop timeout\n",
				 wpf->entity.index);
	}
}

void vsp1_video_resume(struct vsp1_device *vsp1)
{
	unsigned long flags;
	unsigned int i;

	/* Resume all running pipelines. */
	for (i = 0; i < vsp1->info->wpf_count; ++i) {
		struct vsp1_rwpf *wpf = vsp1->wpf[i];
		struct vsp1_pipeline *pipe;

		if (wpf == NULL)
			continue;

		pipe = wpf->entity.pipe;
		if (pipe == NULL)
			continue;

		/*
		 * The hardware may have been reset during a suspend and will
		 * need a full reconfiguration.
		 */
		pipe->configured = false;

		spin_lock_irqsave(&pipe->irqlock, flags);
		if (vsp1_pipeline_ready(pipe))
			vsp1_video_pipeline_run(pipe);
		spin_unlock_irqrestore(&pipe->irqlock, flags);
	}
}

/* -----------------------------------------------------------------------------
 * Initialization and Cleanup
 */

struct vsp1_video *vsp1_video_create(struct vsp1_device *vsp1,
				     struct vsp1_rwpf *rwpf)
{
	struct vsp1_video *video;
	const char *direction;
	int ret;

	video = devm_kzalloc(vsp1->dev, sizeof(*video), GFP_KERNEL);
	if (!video)
		return ERR_PTR(-ENOMEM);

	rwpf->video = video;

	video->vsp1 = vsp1;
	video->rwpf = rwpf;

	if (rwpf->entity.type == VSP1_ENTITY_RPF) {
		direction = "input";
		video->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		video->pad.flags = MEDIA_PAD_FL_SOURCE;
		video->video.vfl_dir = VFL_DIR_TX;
		video->video.device_caps = V4L2_CAP_VIDEO_OUTPUT_MPLANE |
					   V4L2_CAP_STREAMING | V4L2_CAP_IO_MC;
	} else {
		direction = "output";
		video->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		video->pad.flags = MEDIA_PAD_FL_SINK;
		video->video.vfl_dir = VFL_DIR_RX;
		video->video.device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
					   V4L2_CAP_STREAMING | V4L2_CAP_IO_MC;
	}

	mutex_init(&video->lock);
	spin_lock_init(&video->irqlock);
	INIT_LIST_HEAD(&video->irqqueue);

	/* Initialize the media entity... */
	ret = media_entity_pads_init(&video->video.entity, 1, &video->pad);
	if (ret < 0)
		return ERR_PTR(ret);

	/* ... and the format ... */
	rwpf->format.pixelformat = VSP1_VIDEO_DEF_FORMAT;
	rwpf->format.width = VSP1_VIDEO_DEF_WIDTH;
	rwpf->format.height = VSP1_VIDEO_DEF_HEIGHT;
	__vsp1_video_try_format(video, &rwpf->format, &rwpf->fmtinfo);

	/* ... and the video node... */
	video->video.v4l2_dev = &video->vsp1->v4l2_dev;
	video->video.entity.ops = &vsp1_video_media_ops;
	video->video.fops = &vsp1_video_fops;
	snprintf(video->video.name, sizeof(video->video.name), "%s %s",
		 rwpf->entity.subdev.name, direction);
	video->video.vfl_type = VFL_TYPE_VIDEO;
	video->video.release = video_device_release_empty;
	video->video.ioctl_ops = &vsp1_video_ioctl_ops;

	video_set_drvdata(&video->video, video);

	video->queue.type = video->type;
	video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	video->queue.lock = &video->lock;
	video->queue.drv_priv = video;
	video->queue.buf_struct_size = sizeof(struct vsp1_vb2_buffer);
	video->queue.ops = &vsp1_video_queue_qops;
	video->queue.mem_ops = &vb2_dma_contig_memops;
	video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	video->queue.dev = video->vsp1->bus_master;
	ret = vb2_queue_init(&video->queue);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to initialize vb2 queue\n");
		goto error;
	}

	/* ... and register the video device. */
	video->video.queue = &video->queue;
	ret = video_register_device(&video->video, VFL_TYPE_VIDEO, -1);
	if (ret < 0) {
		dev_err(video->vsp1->dev, "failed to register video device\n");
		goto error;
	}

	return video;

error:
	vsp1_video_cleanup(video);
	return ERR_PTR(ret);
}

void vsp1_video_cleanup(struct vsp1_video *video)
{
	if (video_is_registered(&video->video))
		video_unregister_device(&video->video);

	media_entity_cleanup(&video->video.entity);
}