Diffstat (limited to 'drivers/media/platform/vsp1/vsp1_video.c')
-rw-r--r--   drivers/media/platform/vsp1/vsp1_video.c   | 518
1 file changed, 100 insertions, 418 deletions
diff --git a/drivers/media/platform/vsp1/vsp1_video.c b/drivers/media/platform/vsp1/vsp1_video.c
index b4dca57d1ae3..61ee0f92c1e5 100644
--- a/drivers/media/platform/vsp1/vsp1_video.c
+++ b/drivers/media/platform/vsp1/vsp1_video.c
@@ -14,10 +14,10 @@
 #include <linux/list.h>
 #include <linux/module.h>
 #include <linux/mutex.h>
-#include <linux/sched.h>
 #include <linux/slab.h>
 #include <linux/v4l2-mediabus.h>
 #include <linux/videodev2.h>
+#include <linux/wait.h>
 
 #include <media/media-entity.h>
 #include <media/v4l2-dev.h>
@@ -30,6 +30,7 @@
 #include "vsp1.h"
 #include "vsp1_bru.h"
 #include "vsp1_entity.h"
+#include "vsp1_pipe.h"
 #include "vsp1_rwpf.h"
 #include "vsp1_uds.h"
 #include "vsp1_video.h"
@@ -47,113 +48,6 @@
  * Helper functions
  */
 
-static const struct vsp1_format_info vsp1_video_formats[] = {
-        { V4L2_PIX_FMT_RGB332, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_RGB_332, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 8, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_ARGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS,
-          1, { 16, 0, 0 }, false, false, 1, 1, true },
-        { V4L2_PIX_FMT_XRGB444, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_XRGB_4444, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS,
-          1, { 16, 0, 0 }, false, false, 1, 1, true },
-        { V4L2_PIX_FMT_ARGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS,
-          1, { 16, 0, 0 }, false, false, 1, 1, true },
-        { V4L2_PIX_FMT_XRGB555, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_XRGB_1555, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS,
-          1, { 16, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_RGB565, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_RGB_565, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS,
-          1, { 16, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_BGR24, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_BGR_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 24, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_RGB24, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_RGB_888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 24, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_ABGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
-          1, { 32, 0, 0 }, false, false, 1, 1, true },
-        { V4L2_PIX_FMT_XBGR32, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS,
-          1, { 32, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_ARGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 32, 0, 0 }, false, false, 1, 1, true },
-        { V4L2_PIX_FMT_XRGB32, MEDIA_BUS_FMT_ARGB8888_1X32,
-          VI6_FMT_ARGB_8888, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 32, 0, 0 }, false, false, 1, 1, false },
-        { V4L2_PIX_FMT_UYVY, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 16, 0, 0 }, false, false, 2, 1, false },
-        { V4L2_PIX_FMT_VYUY, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 16, 0, 0 }, false, true, 2, 1, false },
-        { V4L2_PIX_FMT_YUYV, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 16, 0, 0 }, true, false, 2, 1, false },
-        { V4L2_PIX_FMT_YVYU, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_YUYV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          1, { 16, 0, 0 }, true, true, 2, 1, false },
-        { V4L2_PIX_FMT_NV12M, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          2, { 8, 16, 0 }, false, false, 2, 2, false },
-        { V4L2_PIX_FMT_NV21M, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_Y_UV_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          2, { 8, 16, 0 }, false, true, 2, 2, false },
-        { V4L2_PIX_FMT_NV16M, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          2, { 8, 16, 0 }, false, false, 2, 1, false },
-        { V4L2_PIX_FMT_NV61M, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_Y_UV_422, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          2, { 8, 16, 0 }, false, true, 2, 1, false },
-        { V4L2_PIX_FMT_YUV420M, MEDIA_BUS_FMT_AYUV8_1X32,
-          VI6_FMT_Y_U_V_420, VI6_RPF_DSWAP_P_LLS | VI6_RPF_DSWAP_P_LWS |
-          VI6_RPF_DSWAP_P_WDS | VI6_RPF_DSWAP_P_BTS,
-          3, { 8, 8, 8 }, false, false, 2, 2, false },
-};
-
-/*
- * vsp1_get_format_info - Retrieve format information for a 4CC
- * @fourcc: the format 4CC
- *
- * Return a pointer to the format information structure corresponding to the
- * given V4L2 format 4CC, or NULL if no corresponding format can be found.
- */
-static const struct vsp1_format_info *vsp1_get_format_info(u32 fourcc)
-{
-        unsigned int i;
-
-        for (i = 0; i < ARRAY_SIZE(vsp1_video_formats); ++i) {
-                const struct vsp1_format_info *info = &vsp1_video_formats[i];
-
-                if (info->fourcc == fourcc)
-                        return info;
-        }
-
-        return NULL;
-}
-
-
 static struct v4l2_subdev *
 vsp1_video_remote_subdev(struct media_pad *local, u32 *pad)
 {
@@ -184,9 +78,9 @@ static int vsp1_video_verify_format(struct vsp1_video *video)
         if (ret < 0)
                 return ret == -ENOIOCTLCMD ? -EINVAL : ret;
 
-        if (video->fmtinfo->mbus != fmt.format.code ||
-            video->format.height != fmt.format.height ||
-            video->format.width != fmt.format.width)
+        if (video->rwpf->fmtinfo->mbus != fmt.format.code ||
+            video->rwpf->format.height != fmt.format.height ||
+            video->rwpf->format.width != fmt.format.width)
                 return -EINVAL;
 
         return 0;
@@ -277,9 +171,9 @@ static int __vsp1_video_try_format(struct vsp1_video *video,
  * Pipeline Management
  */
 
-static int vsp1_pipeline_validate_branch(struct vsp1_pipeline *pipe,
-                                         struct vsp1_rwpf *input,
-                                         struct vsp1_rwpf *output)
+static int vsp1_video_pipeline_validate_branch(struct vsp1_pipeline *pipe,
+                                               struct vsp1_rwpf *input,
+                                               struct vsp1_rwpf *output)
 {
         struct vsp1_entity *entity;
         struct media_entity_enum ent_enum;
@@ -370,29 +264,8 @@ out:
         return rval;
 }
 
-static void __vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
-{
-        if (pipe->bru) {
-                struct vsp1_bru *bru = to_bru(&pipe->bru->subdev);
-                unsigned int i;
-
-                for (i = 0; i < ARRAY_SIZE(bru->inputs); ++i)
-                        bru->inputs[i].rpf = NULL;
-        }
-
-        INIT_LIST_HEAD(&pipe->entities);
-        pipe->state = VSP1_PIPELINE_STOPPED;
-        pipe->buffers_ready = 0;
-        pipe->num_video = 0;
-        pipe->num_inputs = 0;
-        pipe->output = NULL;
-        pipe->bru = NULL;
-        pipe->lif = NULL;
-        pipe->uds = NULL;
-}
-
-static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
-                                  struct vsp1_video *video)
+static int vsp1_video_pipeline_validate(struct vsp1_pipeline *pipe,
+                                        struct vsp1_video *video)
 {
         struct media_entity_graph graph;
         struct media_entity *entity = &video->video.entity;
@@ -416,10 +289,8 @@ static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
                 struct vsp1_rwpf *rwpf;
                 struct vsp1_entity *e;
 
-                if (is_media_entity_v4l2_io(entity)) {
-                        pipe->num_video++;
+                if (is_media_entity_v4l2_io(entity))
                         continue;
-                }
 
                 subdev = media_entity_to_v4l2_subdev(entity);
                 e = to_vsp1_entity(subdev);
@@ -427,12 +298,12 @@ static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
 
                 if (e->type == VSP1_ENTITY_RPF) {
                         rwpf = to_rwpf(subdev);
-                        pipe->inputs[pipe->num_inputs++] = rwpf;
-                        rwpf->video.pipe_index = pipe->num_inputs;
+                        pipe->inputs[rwpf->entity.index] = rwpf;
+                        rwpf->video->pipe_index = ++pipe->num_inputs;
                 } else if (e->type == VSP1_ENTITY_WPF) {
                         rwpf = to_rwpf(subdev);
-                        pipe->output = to_rwpf(subdev);
-                        rwpf->video.pipe_index = 0;
+                        pipe->output = rwpf;
+                        rwpf->video->pipe_index = 0;
                 } else if (e->type == VSP1_ENTITY_LIF) {
                         pipe->lif = e;
                 } else if (e->type == VSP1_ENTITY_BRU) {
@@ -453,9 +324,12 @@ static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
         /* Follow links downstream for each input and make sure the graph
          * contains no loop and that all branches end at the output WPF.
          */
-        for (i = 0; i < pipe->num_inputs; ++i) {
-                ret = vsp1_pipeline_validate_branch(pipe, pipe->inputs[i],
-                                                    pipe->output);
+        for (i = 0; i < video->vsp1->info->rpf_count; ++i) {
+                if (!pipe->inputs[i])
+                        continue;
+
+                ret = vsp1_video_pipeline_validate_branch(pipe, pipe->inputs[i],
+                                                          pipe->output);
                 if (ret < 0)
                         goto error;
         }
@@ -463,12 +337,12 @@ static int vsp1_pipeline_validate(struct vsp1_pipeline *pipe,
 
         return 0;
 
 error:
-        __vsp1_pipeline_cleanup(pipe);
+        vsp1_pipeline_reset(pipe);
         return ret;
 }
 
-static int vsp1_pipeline_init(struct vsp1_pipeline *pipe,
-                              struct vsp1_video *video)
+static int vsp1_video_pipeline_init(struct vsp1_pipeline *pipe,
+                                    struct vsp1_video *video)
 {
         int ret;
@@ -476,7 +350,7 @@ static int vsp1_pipeline_init(struct vsp1_pipeline *pipe,
 
         /* If we're the first user validate and initialize the pipeline. */
         if (pipe->use_count == 0) {
-                ret = vsp1_pipeline_validate(pipe, video);
+                ret = vsp1_video_pipeline_validate(pipe, video);
                 if (ret < 0)
                         goto done;
         }
@@ -489,75 +363,17 @@ done:
         return ret;
 }
 
-static void vsp1_pipeline_cleanup(struct vsp1_pipeline *pipe)
+static void vsp1_video_pipeline_cleanup(struct vsp1_pipeline *pipe)
 {
         mutex_lock(&pipe->lock);
 
         /* If we're the last user clean up the pipeline. */
         if (--pipe->use_count == 0)
-                __vsp1_pipeline_cleanup(pipe);
+                vsp1_pipeline_reset(pipe);
 
         mutex_unlock(&pipe->lock);
 }
 
-static void vsp1_pipeline_run(struct vsp1_pipeline *pipe)
-{
-        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
-
-        vsp1_write(vsp1, VI6_CMD(pipe->output->entity.index), VI6_CMD_STRCMD);
-        pipe->state = VSP1_PIPELINE_RUNNING;
-        pipe->buffers_ready = 0;
-}
-
-static bool vsp1_pipeline_stopped(struct vsp1_pipeline *pipe)
-{
-        unsigned long flags;
-        bool stopped;
-
-        spin_lock_irqsave(&pipe->irqlock, flags);
-        stopped = pipe->state == VSP1_PIPELINE_STOPPED;
-        spin_unlock_irqrestore(&pipe->irqlock, flags);
-
-        return stopped;
-}
-
-static int vsp1_pipeline_stop(struct vsp1_pipeline *pipe)
-{
-        struct vsp1_entity *entity;
-        unsigned long flags;
-        int ret;
-
-        spin_lock_irqsave(&pipe->irqlock, flags);
-        if (pipe->state == VSP1_PIPELINE_RUNNING)
-                pipe->state = VSP1_PIPELINE_STOPPING;
-        spin_unlock_irqrestore(&pipe->irqlock, flags);
-
-        ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
-                                 msecs_to_jiffies(500));
-        ret = ret == 0 ? -ETIMEDOUT : 0;
-
-        list_for_each_entry(entity, &pipe->entities, list_pipe) {
-                if (entity->route && entity->route->reg)
-                        vsp1_write(entity->vsp1, entity->route->reg,
-                                   VI6_DPR_NODE_UNUSED);
-
-                v4l2_subdev_call(&entity->subdev, video, s_stream, 0);
-        }
-
-        return ret;
-}
-
-static bool vsp1_pipeline_ready(struct vsp1_pipeline *pipe)
-{
-        unsigned int mask;
-
-        mask = ((1 << pipe->num_inputs) - 1) << 1;
-        if (!pipe->lif)
-                mask |= 1 << 0;
-
-        return pipe->buffers_ready == mask;
-}
-
 /*
  * vsp1_video_complete_buffer - Complete the current buffer
  * @video: the video node
@@ -572,12 +388,12 @@ static bool vsp1_pipeline_ready(struct vsp1_pipeline *pipe)
  *
  * Return the next queued buffer or NULL if the queue is empty.
  */
-static struct vsp1_video_buffer *
+static struct vsp1_vb2_buffer *
 vsp1_video_complete_buffer(struct vsp1_video *video)
 {
         struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
-        struct vsp1_video_buffer *next = NULL;
-        struct vsp1_video_buffer *done;
+        struct vsp1_vb2_buffer *next = NULL;
+        struct vsp1_vb2_buffer *done;
         unsigned long flags;
         unsigned int i;
 
@@ -589,7 +405,7 @@ vsp1_video_complete_buffer(struct vsp1_video *video)
         }
 
         done = list_first_entry(&video->irqqueue,
-                                struct vsp1_video_buffer, queue);
+                                struct vsp1_vb2_buffer, queue);
 
         /* In DU output mode reuse the buffer if the list is singular. */
         if (pipe->lif && list_is_singular(&video->irqqueue)) {
@@ -601,23 +417,25 @@ vsp1_video_complete_buffer(struct vsp1_video *video)
 
         if (!list_empty(&video->irqqueue))
                 next = list_first_entry(&video->irqqueue,
-                                        struct vsp1_video_buffer, queue);
+                                        struct vsp1_vb2_buffer, queue);
 
         spin_unlock_irqrestore(&video->irqlock, flags);
 
         done->buf.sequence = video->sequence++;
         done->buf.vb2_buf.timestamp = ktime_get_ns();
         for (i = 0; i < done->buf.vb2_buf.num_planes; ++i)
-                vb2_set_plane_payload(&done->buf.vb2_buf, i, done->length[i]);
+                vb2_set_plane_payload(&done->buf.vb2_buf, i,
+                                      done->mem.length[i]);
         vb2_buffer_done(&done->buf.vb2_buf, VB2_BUF_STATE_DONE);
 
         return next;
 }
 
 static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
-                                 struct vsp1_video *video)
+                                 struct vsp1_rwpf *rwpf)
 {
-        struct vsp1_video_buffer *buf;
+        struct vsp1_video *video = rwpf->video;
+        struct vsp1_vb2_buffer *buf;
         unsigned long flags;
 
         buf = vsp1_video_complete_buffer(video);
@@ -626,155 +444,27 @@ static void vsp1_video_frame_end(struct vsp1_pipeline *pipe,
 
         spin_lock_irqsave(&pipe->irqlock, flags);
 
-        video->ops->queue(video, buf);
+        video->rwpf->ops->set_memory(video->rwpf, &buf->mem);
         pipe->buffers_ready |= 1 << video->pipe_index;
 
         spin_unlock_irqrestore(&pipe->irqlock, flags);
 }
 
-void vsp1_pipeline_frame_end(struct vsp1_pipeline *pipe)
+static void vsp1_video_pipeline_frame_end(struct vsp1_pipeline *pipe)
 {
-        enum vsp1_pipeline_state state;
-        unsigned long flags;
+        struct vsp1_device *vsp1 = pipe->output->entity.vsp1;
         unsigned int i;
 
-        if (pipe == NULL)
-                return;
-
         /* Complete buffers on all video nodes. */
-        for (i = 0; i < pipe->num_inputs; ++i)
-                vsp1_video_frame_end(pipe, &pipe->inputs[i]->video);
-
-        if (!pipe->lif)
-                vsp1_video_frame_end(pipe, &pipe->output->video);
-
-        spin_lock_irqsave(&pipe->irqlock, flags);
-
-        state = pipe->state;
-        pipe->state = VSP1_PIPELINE_STOPPED;
-
-        /* If a stop has been requested, mark the pipeline as stopped and
-         * return.
-         */
-        if (state == VSP1_PIPELINE_STOPPING) {
-                wake_up(&pipe->wq);
-                goto done;
-        }
-
-        /* Restart the pipeline if ready. */
-        if (vsp1_pipeline_ready(pipe))
-                vsp1_pipeline_run(pipe);
-
-done:
-        spin_unlock_irqrestore(&pipe->irqlock, flags);
-}
-
-/*
- * Propagate the alpha value through the pipeline.
- *
- * As the UDS has restricted scaling capabilities when the alpha component needs
- * to be scaled, we disable alpha scaling when the UDS input has a fixed alpha
- * value. The UDS then outputs a fixed alpha value which needs to be programmed
- * from the input RPF alpha.
- */
-void vsp1_pipeline_propagate_alpha(struct vsp1_pipeline *pipe,
-                                   struct vsp1_entity *input,
-                                   unsigned int alpha)
-{
-        struct vsp1_entity *entity;
-        struct media_pad *pad;
-
-        pad = media_entity_remote_pad(&input->pads[RWPF_PAD_SOURCE]);
-
-        while (pad) {
-                if (!is_media_entity_v4l2_subdev(pad->entity))
-                        break;
-
-                entity = to_vsp1_entity(media_entity_to_v4l2_subdev(pad->entity));
-
-                /* The BRU background color has a fixed alpha value set to 255,
-                 * the output alpha value is thus always equal to 255.
-                 */
-                if (entity->type == VSP1_ENTITY_BRU)
-                        alpha = 255;
-
-                if (entity->type == VSP1_ENTITY_UDS) {
-                        struct vsp1_uds *uds = to_uds(&entity->subdev);
-
-                        vsp1_uds_set_alpha(uds, alpha);
-                        break;
-                }
-
-                pad = &entity->pads[entity->source_pad];
-                pad = media_entity_remote_pad(pad);
-        }
-}
-
-void vsp1_pipelines_suspend(struct vsp1_device *vsp1)
-{
-        unsigned long flags;
-        unsigned int i;
-        int ret;
-
-        /* To avoid increasing the system suspend time needlessly, loop over the
-         * pipelines twice, first to set them all to the stopping state, and then
-         * to wait for the stop to complete.
-         */
-        for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
-                struct vsp1_rwpf *wpf = vsp1->wpf[i];
-                struct vsp1_pipeline *pipe;
-
-                if (wpf == NULL)
-                        continue;
-
-                pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
-                if (pipe == NULL)
-                        continue;
-
-                spin_lock_irqsave(&pipe->irqlock, flags);
-                if (pipe->state == VSP1_PIPELINE_RUNNING)
-                        pipe->state = VSP1_PIPELINE_STOPPING;
-                spin_unlock_irqrestore(&pipe->irqlock, flags);
-        }
-
-        for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
-                struct vsp1_rwpf *wpf = vsp1->wpf[i];
-                struct vsp1_pipeline *pipe;
-
-                if (wpf == NULL)
-                        continue;
-
-                pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
-                if (pipe == NULL)
+        for (i = 0; i < vsp1->info->rpf_count; ++i) {
+                if (!pipe->inputs[i])
                         continue;
 
-                ret = wait_event_timeout(pipe->wq, vsp1_pipeline_stopped(pipe),
-                                         msecs_to_jiffies(500));
-                if (ret == 0)
-                        dev_warn(vsp1->dev, "pipeline %u stop timeout\n",
-                                 wpf->entity.index);
+                vsp1_video_frame_end(pipe, pipe->inputs[i]);
         }
-}
-
-void vsp1_pipelines_resume(struct vsp1_device *vsp1)
-{
-        unsigned int i;
-
-        /* Resume pipeline all running pipelines. */
-        for (i = 0; i < vsp1->pdata.wpf_count; ++i) {
-                struct vsp1_rwpf *wpf = vsp1->wpf[i];
-                struct vsp1_pipeline *pipe;
 
-                if (wpf == NULL)
-                        continue;
-
-                pipe = to_vsp1_pipeline(&wpf->entity.subdev.entity);
-                if (pipe == NULL)
-                        continue;
-
-                if (vsp1_pipeline_ready(pipe))
-                        vsp1_pipeline_run(pipe);
-        }
+        if (!pipe->lif)
+                vsp1_video_frame_end(pipe, pipe->output);
 }
 
 /* -----------------------------------------------------------------------------
@@ -787,7 +477,7 @@ vsp1_video_queue_setup(struct vb2_queue *vq,
                        unsigned int sizes[], void *alloc_ctxs[])
 {
         struct vsp1_video *video = vb2_get_drv_priv(vq);
-        const struct v4l2_pix_format_mplane *format = &video->format;
+        const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
         unsigned int i;
 
         if (*nplanes) {
@@ -816,18 +506,20 @@ static int vsp1_video_buffer_prepare(struct vb2_buffer *vb)
 {
         struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
         struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
-        struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vbuf);
-        const struct v4l2_pix_format_mplane *format = &video->format;
+        struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
+        const struct v4l2_pix_format_mplane *format = &video->rwpf->format;
         unsigned int i;
 
         if (vb->num_planes < format->num_planes)
                 return -EINVAL;
 
+        buf->mem.num_planes = vb->num_planes;
+
         for (i = 0; i < vb->num_planes; ++i) {
-                buf->addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
-                buf->length[i] = vb2_plane_size(vb, i);
+                buf->mem.addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
+                buf->mem.length[i] = vb2_plane_size(vb, i);
 
-                if (buf->length[i] < format->plane_fmt[i].sizeimage)
+                if (buf->mem.length[i] < format->plane_fmt[i].sizeimage)
                         return -EINVAL;
         }
 
@@ -839,7 +531,7 @@ static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
 {
         struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
         struct vsp1_video *video = vb2_get_drv_priv(vb->vb2_queue);
         struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
-        struct vsp1_video_buffer *buf = to_vsp1_video_buffer(vbuf);
+        struct vsp1_vb2_buffer *buf = to_vsp1_vb2_buffer(vbuf);
         unsigned long flags;
         bool empty;
@@ -853,7 +545,7 @@ static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
 
         spin_lock_irqsave(&pipe->irqlock, flags);
 
-        video->ops->queue(video, buf);
+        video->rwpf->ops->set_memory(video->rwpf, &buf->mem);
         pipe->buffers_ready |= 1 << video->pipe_index;
 
         if (vb2_is_streaming(&video->queue) &&
@@ -863,18 +555,6 @@ static void vsp1_video_buffer_queue(struct vb2_buffer *vb)
         spin_unlock_irqrestore(&pipe->irqlock, flags);
 }
 
-static void vsp1_entity_route_setup(struct vsp1_entity *source)
-{
-        struct vsp1_entity *sink;
-
-        if (source->route->reg == 0)
-                return;
-
-        sink = container_of(source->sink, struct vsp1_entity, subdev.entity);
-        vsp1_write(source->vsp1, source->route->reg,
-                   sink->route->inputs[source->sink_pad]);
-}
-
 static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
 {
         struct vsp1_video *video = vb2_get_drv_priv(vq);
@@ -884,7 +564,7 @@ static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
         int ret;
 
         mutex_lock(&pipe->lock);
-        if (pipe->stream_count == pipe->num_video - 1) {
+        if (pipe->stream_count == pipe->num_inputs) {
                 if (pipe->uds) {
                         struct vsp1_uds *uds = to_uds(&pipe->uds->subdev);
 
@@ -900,7 +580,7 @@ static int vsp1_video_start_streaming(struct vb2_queue *vq, unsigned int count)
                         struct vsp1_rwpf *rpf =
                                 to_rwpf(&pipe->uds_input->subdev);
 
-                        uds->scale_alpha = rpf->video.fmtinfo->alpha;
+                        uds->scale_alpha = rpf->fmtinfo->alpha;
                 }
         }
 
@@ -931,7 +611,7 @@ static void vsp1_video_stop_streaming(struct vb2_queue *vq)
 {
         struct vsp1_video *video = vb2_get_drv_priv(vq);
         struct vsp1_pipeline *pipe = to_vsp1_pipeline(&video->video.entity);
-        struct vsp1_video_buffer *buffer;
+        struct vsp1_vb2_buffer *buffer;
         unsigned long flags;
         int ret;
 
@@ -944,7 +624,7 @@ static void vsp1_video_stop_streaming(struct vb2_queue *vq)
         }
         mutex_unlock(&pipe->lock);
 
-        vsp1_pipeline_cleanup(pipe);
+        vsp1_video_pipeline_cleanup(pipe);
         media_entity_pipeline_stop(&video->video.entity);
 
         /* Remove all buffers from the IRQ queue. */
@@ -1004,7 +684,7 @@ vsp1_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
                 return -EINVAL;
 
         mutex_lock(&video->lock);
-        format->fmt.pix_mp = video->format;
+        format->fmt.pix_mp = video->rwpf->format;
         mutex_unlock(&video->lock);
 
         return 0;
@@ -1044,8 +724,8 @@ vsp1_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
                 goto done;
         }
 
-        video->format = format->fmt.pix_mp;
-        video->fmtinfo = info;
+        video->rwpf->format = format->fmt.pix_mp;
+        video->rwpf->fmtinfo = info;
 
 done:
         mutex_unlock(&video->lock);
@@ -1085,7 +765,7 @@ vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
         if (ret < 0)
                 goto err_stop;
 
-        ret = vsp1_pipeline_init(pipe, video);
+        ret = vsp1_video_pipeline_init(pipe, video);
         if (ret < 0)
                 goto err_stop;
 
@@ -1097,7 +777,7 @@ vsp1_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
         return 0;
 
 err_cleanup:
-        vsp1_pipeline_cleanup(pipe);
+        vsp1_video_pipeline_cleanup(pipe);
 err_stop:
         media_entity_pipeline_stop(&video->video.entity);
         return ret;
@@ -1183,62 +863,64 @@ static struct v4l2_file_operations vsp1_video_fops = {
  * Initialization and Cleanup
  */
 
-int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
+struct vsp1_video *vsp1_video_create(struct vsp1_device *vsp1,
+                                     struct vsp1_rwpf *rwpf)
 {
+        struct vsp1_video *video;
         const char *direction;
         int ret;
 
-        switch (video->type) {
-        case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
-                direction = "output";
-                video->pad.flags = MEDIA_PAD_FL_SINK;
-                break;
+        video = devm_kzalloc(vsp1->dev, sizeof(*video), GFP_KERNEL);
+        if (!video)
+                return ERR_PTR(-ENOMEM);
+
+        rwpf->video = video;
+
+        video->vsp1 = vsp1;
+        video->rwpf = rwpf;
 
-        case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+        if (rwpf->entity.type == VSP1_ENTITY_RPF) {
                 direction = "input";
+                video->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
                 video->pad.flags = MEDIA_PAD_FL_SOURCE;
                 video->video.vfl_dir = VFL_DIR_TX;
-                break;
-
-        default:
-                return -EINVAL;
+        } else {
+                direction = "output";
+                video->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+                video->pad.flags = MEDIA_PAD_FL_SINK;
+                video->video.vfl_dir = VFL_DIR_RX;
         }
 
-        video->rwpf = rwpf;
-
         mutex_init(&video->lock);
         spin_lock_init(&video->irqlock);
         INIT_LIST_HEAD(&video->irqqueue);
 
-        mutex_init(&video->pipe.lock);
-        spin_lock_init(&video->pipe.irqlock);
-        INIT_LIST_HEAD(&video->pipe.entities);
-        init_waitqueue_head(&video->pipe.wq);
-        video->pipe.state = VSP1_PIPELINE_STOPPED;
+        vsp1_pipeline_init(&video->pipe);
+        video->pipe.frame_end = vsp1_video_pipeline_frame_end;
 
         /* Initialize the media entity... */
         ret = media_entity_pads_init(&video->video.entity, 1, &video->pad);
         if (ret < 0)
-                return ret;
+                return ERR_PTR(ret);
 
         /* ... and the format ... */
-        video->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
-        video->format.pixelformat = video->fmtinfo->fourcc;
-        video->format.colorspace = V4L2_COLORSPACE_SRGB;
-        video->format.field = V4L2_FIELD_NONE;
-        video->format.width = VSP1_VIDEO_DEF_WIDTH;
-        video->format.height = VSP1_VIDEO_DEF_HEIGHT;
-        video->format.num_planes = 1;
-        video->format.plane_fmt[0].bytesperline =
-                video->format.width * video->fmtinfo->bpp[0] / 8;
-        video->format.plane_fmt[0].sizeimage =
-                video->format.plane_fmt[0].bytesperline * video->format.height;
+        rwpf->fmtinfo = vsp1_get_format_info(VSP1_VIDEO_DEF_FORMAT);
+        rwpf->format.pixelformat = rwpf->fmtinfo->fourcc;
+        rwpf->format.colorspace = V4L2_COLORSPACE_SRGB;
+        rwpf->format.field = V4L2_FIELD_NONE;
+        rwpf->format.width = VSP1_VIDEO_DEF_WIDTH;
+        rwpf->format.height = VSP1_VIDEO_DEF_HEIGHT;
+        rwpf->format.num_planes = 1;
+        rwpf->format.plane_fmt[0].bytesperline =
+                rwpf->format.width * rwpf->fmtinfo->bpp[0] / 8;
+        rwpf->format.plane_fmt[0].sizeimage =
+                rwpf->format.plane_fmt[0].bytesperline * rwpf->format.height;
 
         /* ... and the video node... */
         video->video.v4l2_dev = &video->vsp1->v4l2_dev;
         video->video.fops = &vsp1_video_fops;
         snprintf(video->video.name, sizeof(video->video.name), "%s %s",
-                 rwpf->subdev.name, direction);
+                 rwpf->entity.subdev.name, direction);
         video->video.vfl_type = VFL_TYPE_GRABBER;
         video->video.release = video_device_release_empty;
         video->video.ioctl_ops = &vsp1_video_ioctl_ops;
@@ -1256,7 +938,7 @@ int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
         video->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
         video->queue.lock = &video->lock;
         video->queue.drv_priv = video;
-        video->queue.buf_struct_size = sizeof(struct vsp1_video_buffer);
+        video->queue.buf_struct_size = sizeof(struct vsp1_vb2_buffer);
         video->queue.ops = &vsp1_video_queue_qops;
         video->queue.mem_ops = &vb2_dma_contig_memops;
         video->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
@@ -1274,12 +956,12 @@ int vsp1_video_init(struct vsp1_video *video, struct vsp1_entity *rwpf)
                 goto error;
         }
 
-        return 0;
+        return video;
 
 error:
         vb2_dma_contig_cleanup_ctx(video->alloc_ctx);
         vsp1_video_cleanup(video);
-        return ret;
+        return ERR_PTR(ret);
 }
 
 void vsp1_video_cleanup(struct vsp1_video *video)
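Note on the interface change in the last hunks: vsp1_video_init(), which returned an int, is replaced by vsp1_video_create(), which allocates the video node itself and returns either a valid pointer or an ERR_PTR() value. The call site lives outside this file and is not part of this diff; as a rough, illustrative sketch only (the surrounding variable names are assumptions), a caller would follow the usual kernel ERR_PTR pattern:

        struct vsp1_video *video;

        /* Allocate and set up the video node for this RPF or WPF. */
        video = vsp1_video_create(vsp1, rwpf);
        if (IS_ERR(video))
                return PTR_ERR(video);

        /* vsp1_video_create() already links the node back through
         * rwpf->video, so no further wiring is needed here.
         */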