| Message ID | 20241105-qcom-video-iris-v5-10-a88e7c220f78@quicinc.com |
|---|---|
| State | Superseded |
| Series | Qualcomm iris video decoder driver |
On 11/12/2024 3:48 PM, Hans Verkuil wrote:
> On 05/11/2024 07:55, Dikshita Agarwal wrote:
>> From: Vedang Nagar <quic_vnagar@quicinc.com>
>>
>> Implement s_fmt, g_fmt and try_fmt ioctl ops with necessary hooks.
>>
>> Signed-off-by: Vedang Nagar <quic_vnagar@quicinc.com>
>> Signed-off-by: Dikshita Agarwal <quic_dikshita@quicinc.com>
>> ---
>>  drivers/media/platform/qcom/iris/iris_vdec.c | 131 +++++++++++++++++++++++++++
>>  drivers/media/platform/qcom/iris/iris_vdec.h | 2 +
>>  drivers/media/platform/qcom/iris/iris_vidc.c | 48 ++++++++++
>>  3 files changed, 181 insertions(+)
>>
>> diff --git a/drivers/media/platform/qcom/iris/iris_vdec.c b/drivers/media/platform/qcom/iris/iris_vdec.c
>> index 7d1ef31c7c44..e807decdda2b 100644
>> --- a/drivers/media/platform/qcom/iris/iris_vdec.c
>> +++ b/drivers/media/platform/qcom/iris/iris_vdec.c
>> @@ -3,6 +3,8 @@
>>   * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
>>   */
>>
>> +#include <media/v4l2-mem2mem.h>
>> +
>>  #include "iris_buffer.h"
>>  #include "iris_instance.h"
>>  #include "iris_vdec.h"
>> @@ -10,6 +12,7 @@
>>
>>  #define DEFAULT_WIDTH 320
>>  #define DEFAULT_HEIGHT 240
>> +#define DEFAULT_CODEC_ALIGNMENT 16
>>
>>  void iris_vdec_inst_init(struct iris_inst *inst)
>>  {
>> @@ -56,3 +59,131 @@ void iris_vdec_inst_deinit(struct iris_inst *inst)
>>  	kfree(inst->fmt_dst);
>>  	kfree(inst->fmt_src);
>>  }
>> +
>> +int iris_vdec_try_fmt(struct iris_inst *inst, struct v4l2_format *f)
>> +{
>> +	struct v4l2_pix_format_mplane *pixmp = &f->fmt.pix_mp;
>> +	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
>> +	struct v4l2_format *f_inst;
>> +	struct vb2_queue *src_q;
>> +
>> +	memset(pixmp->reserved, 0, sizeof(pixmp->reserved));
>> +	switch (f->type) {
>> +	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
>> +		if (f->fmt.pix_mp.pixelformat != V4L2_PIX_FMT_H264) {
>> +			f_inst = inst->fmt_src;
>> +			f->fmt.pix_mp.width = f_inst->fmt.pix_mp.width;
>> +			f->fmt.pix_mp.height = f_inst->fmt.pix_mp.height;
>> +			f->fmt.pix_mp.pixelformat = f_inst->fmt.pix_mp.pixelformat;
>> +		}
>> +		break;
>> +	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
>> +		if (f->fmt.pix_mp.pixelformat != V4L2_PIX_FMT_NV12) {
>> +			f_inst = inst->fmt_dst;
>> +			f->fmt.pix_mp.pixelformat = f_inst->fmt.pix_mp.pixelformat;
>> +			f->fmt.pix_mp.width = f_inst->fmt.pix_mp.width;
>> +			f->fmt.pix_mp.height = f_inst->fmt.pix_mp.height;
>> +		}
>> +
>> +		src_q = v4l2_m2m_get_src_vq(m2m_ctx);
>> +		if (vb2_is_streaming(src_q)) {
>> +			f_inst = inst->fmt_src;
>> +			f->fmt.pix_mp.height = f_inst->fmt.pix_mp.height;
>> +			f->fmt.pix_mp.width = f_inst->fmt.pix_mp.width;
>> +		}
>> +		break;
>> +	default:
>> +		return -EINVAL;
>> +	}
>> +
>> +	if (pixmp->field == V4L2_FIELD_ANY)
>> +		pixmp->field = V4L2_FIELD_NONE;
>> +
>> +	pixmp->num_planes = 1;
>> +
>> +	return 0;
>> +}
>> +
>> +int iris_vdec_s_fmt(struct iris_inst *inst, struct v4l2_format *f)
>> +{
>> +	struct v4l2_format *fmt, *output_fmt;
>> +	struct vb2_queue *q;
>> +	u32 codec_align;
>> +
>> +	iris_vdec_try_fmt(inst, f);
>> +
>> +	switch (f->type) {
>> +	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
>> +		if (f->fmt.pix_mp.pixelformat != V4L2_PIX_FMT_H264)
>> +			return -EINVAL;
>> +
>> +		fmt = inst->fmt_src;
>> +		fmt->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
>> +
>> +		codec_align = DEFAULT_CODEC_ALIGNMENT;
>> +		fmt->fmt.pix_mp.width = ALIGN(f->fmt.pix_mp.width, codec_align);
>> +		fmt->fmt.pix_mp.height = ALIGN(f->fmt.pix_mp.height, codec_align);
>> +		fmt->fmt.pix_mp.num_planes = 1;
>> +		fmt->fmt.pix_mp.plane_fmt[0].bytesperline = 0;
>> +		fmt->fmt.pix_mp.plane_fmt[0].sizeimage = iris_get_buffer_size(inst, BUF_INPUT);
>> +		inst->buffers[BUF_INPUT].min_count = iris_vpu_buf_count(inst, BUF_INPUT);
>> +		if (inst->buffers[BUF_INPUT].actual_count < inst->buffers[BUF_INPUT].min_count)
>> +			inst->buffers[BUF_INPUT].actual_count = inst->buffers[BUF_INPUT].min_count;
>> +
>> +		inst->buffers[BUF_INPUT].size = fmt->fmt.pix_mp.plane_fmt[0].sizeimage;
>> +
>> +		fmt->fmt.pix_mp.colorspace = f->fmt.pix_mp.colorspace;
>> +		fmt->fmt.pix_mp.xfer_func = f->fmt.pix_mp.xfer_func;
>> +		fmt->fmt.pix_mp.ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
>> +		fmt->fmt.pix_mp.quantization = f->fmt.pix_mp.quantization;
>> +
>> +		output_fmt = inst->fmt_dst;
>> +		output_fmt->fmt.pix_mp.colorspace = f->fmt.pix_mp.colorspace;
>> +		output_fmt->fmt.pix_mp.xfer_func = f->fmt.pix_mp.xfer_func;
>> +		output_fmt->fmt.pix_mp.ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
>> +		output_fmt->fmt.pix_mp.quantization = f->fmt.pix_mp.quantization;
>> +
>> +		inst->crop.left = 0;
>> +		inst->crop.top = 0;
>> +		inst->crop.width = f->fmt.pix_mp.width;
>> +		inst->crop.height = f->fmt.pix_mp.height;
>> +		break;
>> +	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
>> +		fmt = inst->fmt_dst;
>> +		fmt->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
>> +		q = v4l2_m2m_get_vq(inst->m2m_ctx, f->type);
>> +		if (q->streaming) {
>> +			f->fmt.pix_mp.height = inst->fmt_src->fmt.pix_mp.height;
>> +			f->fmt.pix_mp.width = inst->fmt_src->fmt.pix_mp.width;
>> +		}
>> +		if (fmt->fmt.pix_mp.pixelformat != V4L2_PIX_FMT_NV12)
>> +			return -EINVAL;
>> +		fmt->fmt.pix_mp.pixelformat = f->fmt.pix_mp.pixelformat;
>> +		fmt->fmt.pix_mp.width = ALIGN(f->fmt.pix_mp.width, 128);
>> +		fmt->fmt.pix_mp.height = ALIGN(f->fmt.pix_mp.height, 32);
>> +		fmt->fmt.pix_mp.num_planes = 1;
>> +		fmt->fmt.pix_mp.plane_fmt[0].bytesperline = ALIGN(f->fmt.pix_mp.width, 128);
>> +		fmt->fmt.pix_mp.plane_fmt[0].sizeimage = iris_get_buffer_size(inst, BUF_OUTPUT);
>> +
>> +		if (!q->streaming)
>> +			inst->buffers[BUF_OUTPUT].min_count = iris_vpu_buf_count(inst, BUF_INPUT);
>> +		if (inst->buffers[BUF_OUTPUT].actual_count < inst->buffers[BUF_OUTPUT].min_count)
>> +			inst->buffers[BUF_OUTPUT].actual_count =
>> +				inst->buffers[BUF_OUTPUT].min_count;
>> +
>> +		inst->buffers[BUF_OUTPUT].size = fmt->fmt.pix_mp.plane_fmt[0].sizeimage;
>> +
>> +		if (!q->streaming) {
>> +			inst->crop.top = 0;
>> +			inst->crop.left = 0;
>> +			inst->crop.width = f->fmt.pix_mp.width;
>> +			inst->crop.height = f->fmt.pix_mp.height;
>> +		}
>> +		break;
>> +	default:
>> +		return -EINVAL;
>> +	}
>> +	memcpy(f, fmt, sizeof(*fmt));
>> +
>> +	return 0;
>> +}
>> diff --git a/drivers/media/platform/qcom/iris/iris_vdec.h b/drivers/media/platform/qcom/iris/iris_vdec.h
>> index 0324d7f796dd..4f2557d15ca2 100644
>> --- a/drivers/media/platform/qcom/iris/iris_vdec.h
>> +++ b/drivers/media/platform/qcom/iris/iris_vdec.h
>> @@ -10,5 +10,7 @@ struct iris_inst;
>>
>>  void iris_vdec_inst_init(struct iris_inst *inst);
>>  void iris_vdec_inst_deinit(struct iris_inst *inst);
>> +int iris_vdec_try_fmt(struct iris_inst *inst, struct v4l2_format *f);
>> +int iris_vdec_s_fmt(struct iris_inst *inst, struct v4l2_format *f);
>>
>>  #endif
>> diff --git a/drivers/media/platform/qcom/iris/iris_vidc.c b/drivers/media/platform/qcom/iris/iris_vidc.c
>> index ab3b63171c1d..6707eb9917fe 100644
>> --- a/drivers/media/platform/qcom/iris/iris_vidc.c
>> +++ b/drivers/media/platform/qcom/iris/iris_vidc.c
>> @@ -217,6 +217,48 @@ int iris_close(struct file *filp)
>>  	return 0;
>>  }
>>
>> +static int iris_try_fmt_vid_mplane(struct file *filp, void *fh, struct v4l2_format *f)
>> +{
>> +	struct iris_inst *inst = iris_get_inst(filp, NULL);
>> +	int ret;
>> +
>> +	mutex_lock(&inst->lock);
>
> This is a bit weird. Normally the ioctls are serialized through the
> lock specified in struct video_device. Only queuing related ioctls
> can use a different lock (and they do in this driver).
>
> So I would expect that vdev->lock is set to &inst->lock in the probe
> function, and that these wrapper functions for these ioctls would
> disappear, since there is no longer a need for them.
>
> Drivers should not, in principle, serialize ioctls themselves, and
> instead they should set the lock in video_device. Unless there are
> very good reasons for doing otherwise.
>
The intention behind using inst->lock is not to serialize the ioctls, but to
serialize the forward (driver) and reverse (firmware) threads. We are taking
the lock here because the reverse thread can also update the members of the
inst structure.

Thanks,
Dikshita

>> +	ret = iris_vdec_try_fmt(inst, f);
>> +	mutex_unlock(&inst->lock);
>> +
>> +	return ret;
>> +}
>> +
>> +static int iris_s_fmt_vid_mplane(struct file *filp, void *fh, struct v4l2_format *f)
>> +{
>> +	struct iris_inst *inst = iris_get_inst(filp, NULL);
>> +	int ret;
>> +
>> +	mutex_lock(&inst->lock);
>> +	ret = iris_vdec_s_fmt(inst, f);
>> +	mutex_unlock(&inst->lock);
>> +
>> +	return ret;
>> +}
>> +
>> +static int iris_g_fmt_vid_mplane(struct file *filp, void *fh, struct v4l2_format *f)
>> +{
>> +	struct iris_inst *inst = iris_get_inst(filp, NULL);
>> +	int ret = 0;
>> +
>> +	mutex_lock(&inst->lock);
>> +	if (V4L2_TYPE_IS_OUTPUT(f->type))
>> +		memcpy(f, inst->fmt_src, sizeof(*f));
>
> Just do: *f = inst->fmt_src, and do the same below.
>
>> +	else if (V4L2_TYPE_IS_CAPTURE(f->type))
>> +		memcpy(f, inst->fmt_dst, sizeof(*f));
>> +	else
>> +		ret = -EINVAL;
>> +
>> +	mutex_unlock(&inst->lock);
>> +
>> +	return ret;
>> +}
>> +
>>  static struct v4l2_file_operations iris_v4l2_file_ops = {
>>  	.owner = THIS_MODULE,
>>  	.open = iris_open,
>> @@ -231,6 +273,12 @@ static const struct vb2_ops iris_vb2_ops = {
>>  };
>>
>>  static const struct v4l2_ioctl_ops iris_v4l2_ioctl_ops = {
>> +	.vidioc_try_fmt_vid_cap_mplane = iris_try_fmt_vid_mplane,
>> +	.vidioc_try_fmt_vid_out_mplane = iris_try_fmt_vid_mplane,
>> +	.vidioc_s_fmt_vid_cap_mplane = iris_s_fmt_vid_mplane,
>> +	.vidioc_s_fmt_vid_out_mplane = iris_s_fmt_vid_mplane,
>> +	.vidioc_g_fmt_vid_cap_mplane = iris_g_fmt_vid_mplane,
>> +	.vidioc_g_fmt_vid_out_mplane = iris_g_fmt_vid_mplane,
>>  	.vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
>>  };
>>
>
> Regards,
>
> Hans
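For reference, the serialization model Hans describes looks roughly like the sketch below: the mutex is handed to the V4L2 core through video_device->lock before registration, the core then takes it around every ioctl, and the per-ioctl wrapper functions become unnecessary. The helper name, struct iris_core and the core->vdev_dec / core->lock fields are illustrative assumptions; the actual registration code is not part of this patch.

/*
 * Sketch only (names are assumptions, not taken from the posted driver):
 * point video_device->lock at the mutex that should serialize ioctls and
 * let v4l2-ioctl.c take it around every ioctl issued on this node.
 */
#include <media/v4l2-dev.h>

static int iris_register_video_device(struct iris_core *core)
{
	struct video_device *vdev = core->vdev_dec;

	vdev->fops = &iris_v4l2_file_ops;
	vdev->ioctl_ops = &iris_v4l2_ioctl_ops;
	vdev->lock = &core->lock;	/* V4L2 core serializes ioctls with this */

	return video_register_device(vdev, VFL_TYPE_VIDEO, -1);
}

Dikshita's reply raises a separate concern: inst->lock also guards instance state against the firmware (reverse) thread, which vdev->lock alone would not cover, so some instance-level locking may still be needed even if ioctl serialization moves to the core.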
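On the memcpy() comment: since inst->fmt_src and inst->fmt_dst are struct v4l2_format pointers in the posted code, the suggested struct assignment needs a dereference. A minimal sketch of the handler with only that change applied, keeping the locking as posted:

static int iris_g_fmt_vid_mplane(struct file *filp, void *fh, struct v4l2_format *f)
{
	struct iris_inst *inst = iris_get_inst(filp, NULL);
	int ret = 0;

	mutex_lock(&inst->lock);
	if (V4L2_TYPE_IS_OUTPUT(f->type))
		*f = *inst->fmt_src;	/* struct assignment instead of memcpy() */
	else if (V4L2_TYPE_IS_CAPTURE(f->type))
		*f = *inst->fmt_dst;
	else
		ret = -EINVAL;
	mutex_unlock(&inst->lock);

	return ret;
}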
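For context, the handlers added by this patch back the standard V4L2 format negotiation of a stateful decoder. A small userspace sketch (not part of the patch, fd assumed to be an open decoder node) of how they are exercised:

/*
 * Negotiate the compressed OUTPUT format, then read back the decoder's
 * CAPTURE format. Error handling is reduced to a bare minimum.
 */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int negotiate_formats(int fd, unsigned int width, unsigned int height)
{
	struct v4l2_format fmt;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
	fmt.fmt.pix_mp.width = width;
	fmt.fmt.pix_mp.height = height;
	/* dispatched to iris_s_fmt_vid_mplane via vidioc_s_fmt_vid_out_mplane */
	if (ioctl(fd, VIDIOC_S_FMT, &fmt))
		return -1;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	/* dispatched to iris_g_fmt_vid_mplane via vidioc_g_fmt_vid_cap_mplane */
	if (ioctl(fd, VIDIOC_G_FMT, &fmt))
		return -1;

	/* fmt.fmt.pix_mp now reports the decoder's current CAPTURE format (NV12) */
	return 0;
}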