#include <poll.h>
#include "libavcodec/avcodec.h"
#include "libavcodec/internal.h"
+#include "libavutil/pixdesc.h"
#include "v4l2_context.h"
#include "v4l2_buffers.h"
#include "v4l2_m2m.h"
#define USEC_PER_SEC 1000000
/* V4L2 expresses buffer timestamps as a struct timeval (microseconds),
 * so all pts conversions go through this fixed 1/1000000 timebase. */
static AVRational v4l2_timebase = { 1, USEC_PER_SEC };
/**
 * Map a V4L2Buffer back to its owning V4L2m2mContext.
 *
 * The buffer's context pointer refers to either the output or the capture
 * V4L2Context embedded in the m2m context, so recover the parent with
 * container_of on the appropriate member.
 *
 * NOTE(review): the previous body returned buf_to_m2mctx(buf)->avctx —
 * unconditional infinite recursion, and an AVCodecContext* where a
 * V4L2m2mContext* is declared.
 */
static inline V4L2m2mContext *buf_to_m2mctx(V4L2Buffer *buf)
{
    return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
        container_of(buf->context, V4L2m2mContext, output) :
        container_of(buf->context, V4L2m2mContext, capture);
}
+static inline AVRational v4l2_get_timebase(V4L2Buffer *avbuf)
+{
+ V4L2m2mContext *s = buf_to_m2mctx(avbuf);
+
+ if (s->avctx->pkt_timebase.num)
+ return s->avctx->pkt_timebase;
+ return s->avctx->time_base;
+}
+
/* Store an FFmpeg pts in the driver buffer as a struct timeval.
 * AV_NOPTS_VALUE is mapped to 0 since V4L2 has no "no timestamp" marker. */
static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
{
    int64_t usecs;

    /* convert pts to the v4l2 microsecond timebase */
    usecs = av_rescale_q(pts == AV_NOPTS_VALUE ? 0 : pts,
                         v4l2_get_timebase(out), v4l2_timebase);

    out->buf.timestamp.tv_sec  = usecs / USEC_PER_SEC;
    out->buf.timestamp.tv_usec = usecs % USEC_PER_SEC;
}
-static inline uint64_t v4l2_get_pts(V4L2Buffer *avbuf)
+static inline int64_t v4l2_get_pts(V4L2Buffer *avbuf)
{
- V4L2m2mContext *s = buf_to_m2mctx(avbuf);
- AVRational v4l2_timebase = { 1, USEC_PER_SEC };
int64_t v4l2_pts;
/* convert pts back to encoder timebase */
v4l2_pts = (int64_t)avbuf->buf.timestamp.tv_sec * USEC_PER_SEC +
avbuf->buf.timestamp.tv_usec;
- return av_rescale_q(v4l2_pts, v4l2_timebase, s->avctx->time_base);
+ return av_rescale_q(v4l2_pts, v4l2_timebase, v4l2_get_timebase(avbuf));
}
static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
if (!atomic_load(&s->refcount))
sem_post(&s->refsync);
} else {
- if (s->draining) {
+ if (s->draining && V4L2_TYPE_IS_OUTPUT(avbuf->context->type)) {
/* no need to queue more buffers to the driver */
avbuf->status = V4L2BUF_AVAILABLE;
}
}
}
-static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
+static int v4l2_buf_increase_ref(V4L2Buffer *in)
{
V4L2m2mContext *s = buf_to_m2mctx(in);
- if (plane >= in->num_planes)
- return AVERROR(EINVAL);
-
- /* even though most encoders return 0 in data_offset encoding vp8 does require this value */
- *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
- in->plane_info[plane].length, v4l2_free_buffer, in, 0);
- if (!*buf)
- return AVERROR(ENOMEM);
-
if (in->context_ref)
atomic_fetch_add(&in->context_refcount, 1);
else {
in->context_ref = av_buffer_ref(s->self_ref);
- if (!in->context_ref) {
- av_buffer_unref(buf);
+ if (!in->context_ref)
return AVERROR(ENOMEM);
- }
+
in->context_refcount = 1;
}
return 0;
}
/* Export plane `plane` of buffer `in` as an AVBufferRef without copying:
 * the ref wraps the mmap'ed plane memory directly and v4l2_free_buffer runs
 * when the last ref is released. On success *buf owns one context reference
 * (see v4l2_buf_increase_ref); on failure *buf is left NULL/unref'ed. */
static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
{
    int ret;

    if (plane >= in->num_planes)
        return AVERROR(EINVAL);

    /* even though most encoders return 0 in data_offset encoding vp8 does require this value */
    *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
                            in->plane_info[plane].length, v4l2_free_buffer, in, 0);
    if (!*buf)
        return AVERROR(ENOMEM);

    /* must happen after av_buffer_create succeeds: the unref below would
     * otherwise fire v4l2_free_buffer on a buffer with no context ref */
    ret = v4l2_buf_increase_ref(in);
    if (ret)
        av_buffer_unref(buf);

    return ret;
}
+
+static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, int offset)
{
unsigned int bytesused, length;
if (plane >= out->num_planes)
return AVERROR(EINVAL);
- bytesused = FFMIN(size, out->plane_info[plane].length);
length = out->plane_info[plane].length;
+ bytesused = FFMIN(size+offset, length);
- memcpy(out->plane_info[plane].mm_addr, data, FFMIN(size, out->plane_info[plane].length));
+ memcpy((uint8_t*)out->plane_info[plane].mm_addr+offset, data, FFMIN(size, length-offset));
if (V4L2_TYPE_IS_MULTIPLANAR(out->buf.type)) {
out->planes[plane].bytesused = bytesused;
return 0;
}
-/******************************************************************************
- *
- * V4L2uffer interface
- *
- ******************************************************************************/
-
-int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer* out)
+static int v4l2_buffer_buf_to_swframe(AVFrame *frame, V4L2Buffer *avbuf)
{
int i, ret;
- for(i = 0; i < out->num_planes; i++) {
- ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, frame->buf[i]);
- if (ret)
- return ret;
- }
-
- v4l2_set_pts(out, frame->pts);
-
- return 0;
-}
-
-int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
-{
- V4L2m2mContext *s = buf_to_m2mctx(avbuf);
- int i, ret;
-
- av_frame_unref(frame);
+ frame->format = avbuf->context->av_pix_fmt;
- /* 1. get references to the actual data */
for (i = 0; i < avbuf->num_planes; i++) {
ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
if (ret)
frame->data[i] = frame->buf[i]->data;
}
- /* 1.1 fixup special cases */
+ /* fixup special cases */
switch (avbuf->context->av_pix_fmt) {
case AV_PIX_FMT_NV12:
+ case AV_PIX_FMT_NV21:
if (avbuf->num_planes > 1)
break;
frame->linesize[1] = avbuf->plane_info[0].bytesperline;
frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
break;
+
+ case AV_PIX_FMT_YUV420P:
+ if (avbuf->num_planes > 1)
+ break;
+ frame->linesize[1] = avbuf->plane_info[0].bytesperline >> 1;
+ frame->linesize[2] = avbuf->plane_info[0].bytesperline >> 1;
+ frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
+ frame->data[2] = frame->data[1] + ((avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height) >> 2);
+ break;
+
default:
break;
}
+ return 0;
+}
+
/* Copy the pixel data of `frame` into driver buffer `out`.
 *
 * Two layouts are handled:
 *  - V4L2 multi-plane pixel formats (NV12M, YUV420M, ...): one
 *    v4l2_bufref_to_buf copy per AVFrame buffer into the matching plane.
 *  - single-plane formats: every AVFrame plane is packed back-to-back into
 *    plane 0 at increasing offsets, with chroma heights derived from the
 *    pixel format descriptor.
 *
 * NOTE(review): the non-planar path assumes frame->linesize[i] matches the
 * driver's expected stride — TODO confirm against the queue setup code. */
static int v4l2_buffer_swframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    int i, ret;
    struct v4l2_format fmt = out->context->format;
    int pixel_format = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                       fmt.fmt.pix_mp.pixelformat : fmt.fmt.pix.pixelformat;
    int height       = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                       fmt.fmt.pix_mp.height : fmt.fmt.pix.height;
    int is_planar_format = 0;

    /* some of these fourccs only exist in newer kernel headers */
    switch (pixel_format) {
    case V4L2_PIX_FMT_YUV420M:
    case V4L2_PIX_FMT_YVU420M:
#ifdef V4L2_PIX_FMT_YUV422M
    case V4L2_PIX_FMT_YUV422M:
#endif
#ifdef V4L2_PIX_FMT_YVU422M
    case V4L2_PIX_FMT_YVU422M:
#endif
#ifdef V4L2_PIX_FMT_YUV444M
    case V4L2_PIX_FMT_YUV444M:
#endif
#ifdef V4L2_PIX_FMT_YVU444M
    case V4L2_PIX_FMT_YVU444M:
#endif
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV21M:
    case V4L2_PIX_FMT_NV12MT_16X16:
    case V4L2_PIX_FMT_NV12MT:
    case V4L2_PIX_FMT_NV16M:
    case V4L2_PIX_FMT_NV61M:
        is_planar_format = 1;
    }

    if (!is_planar_format) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
        int planes_nb = 0;
        int offset = 0;

        /* the number of planes is the highest plane index used + 1 */
        for (i = 0; i < desc->nb_components; i++)
            planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);

        for (i = 0; i < planes_nb; i++) {
            int size, h = height;
            /* chroma planes (1 and 2) are vertically subsampled */
            if (i == 1 || i == 2) {
                h = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
            }
            size = frame->linesize[i] * h;
            /* pack all planes into driver plane 0 at increasing offsets */
            ret = v4l2_bufref_to_buf(out, 0, frame->data[i], size, offset);
            if (ret)
                return ret;
            offset += size;
        }
        return 0;
    }

    /* multi-plane format: 1:1 mapping between frame and driver planes */
    for (i = 0; i < out->num_planes; i++) {
        ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, 0);
        if (ret)
            return ret;
    }

    return 0;
}
+
+/******************************************************************************
+ *
+ * V4L2Buffer interface
+ *
+ ******************************************************************************/
+
+int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
+{
+ v4l2_set_pts(out, frame->pts);
+
+ return v4l2_buffer_swframe_to_buf(frame, out);
+}
+
+int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
+{
+ int ret;
+
+ av_frame_unref(frame);
+
+ /* 1. get references to the actual data */
+ ret = v4l2_buffer_buf_to_swframe(frame, avbuf);
+ if (ret)
+ return ret;
+
/* 2. get frame information */
frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
- frame->format = avbuf->context->av_pix_fmt;
frame->color_primaries = v4l2_get_color_primaries(avbuf);
frame->colorspace = v4l2_get_color_space(avbuf);
frame->color_range = v4l2_get_color_range(avbuf);
frame->color_trc = v4l2_get_color_trc(avbuf);
frame->pts = v4l2_get_pts(avbuf);
+ frame->pkt_dts = AV_NOPTS_VALUE;
- /* these two values are updated also during re-init in v4l2_process_driver_event */
- frame->height = s->output.height;
- frame->width = s->output.width;
+ /* these values are updated also during re-init in v4l2_process_driver_event */
+ frame->height = avbuf->context->height;
+ frame->width = avbuf->context->width;
+ frame->sample_aspect_ratio = avbuf->context->sample_aspect_ratio;
/* 3. report errors upstream */
if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
{
int ret;
- ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, pkt->buf);
+ ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, 0);
if (ret)
return ret;
if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
avbuf->num_planes = 0;
- for (;;) {
- /* in MP, the V4L2 API states that buf.length means num_planes */
- if (avbuf->num_planes >= avbuf->buf.length)
- break;
- if (avbuf->buf.m.planes[avbuf->num_planes].length)
+ /* in MP, the V4L2 API states that buf.length means num_planes */
+ for (i = 0; i < avbuf->buf.length; i++) {
+ if (avbuf->buf.m.planes[i].length)
avbuf->num_planes++;
}
} else