/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
25 #include <sys/types.h>
27 #include <mfx/mfxvideo.h>
29 #include "libavutil/common.h"
30 #include "libavutil/mem.h"
31 #include "libavutil/log.h"
32 #include "libavutil/pixfmt.h"
33 #include "libavutil/time.h"
38 #include "qsv_internal.h"
41 int ff_qsv_map_pixfmt(enum AVPixelFormat format)
44 case AV_PIX_FMT_YUV420P:
45 case AV_PIX_FMT_YUVJ420P:
46 return AV_PIX_FMT_NV12;
48 return AVERROR(ENOSYS);
52 int ff_qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
54 mfxVideoParam param = { { 0 } };
55 mfxBitstream bs = { { { 0 } } };
57 enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
61 q->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
63 if (avctx->hwaccel_context) {
64 AVQSVContext *qsv = avctx->hwaccel_context;
66 q->session = qsv->session;
67 q->iopattern = qsv->iopattern;
68 q->ext_buffers = qsv->ext_buffers;
69 q->nb_ext_buffers = qsv->nb_ext_buffers;
72 ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
77 q->session = q->internal_qs.session;
82 bs.Data = avpkt->data;
83 bs.DataLength = avpkt->size;
84 bs.MaxLength = bs.DataLength;
85 bs.TimeStamp = avpkt->pts;
87 return AVERROR_INVALIDDATA;
89 ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
91 av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
95 param.mfx.CodecId = ret;
97 ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, ¶m);
98 if (MFX_ERR_MORE_DATA==ret) {
99 /* this code means that header not found so we return packet size to skip
103 } else if (ret < 0) {
104 av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
105 return ff_qsv_error(ret);
107 param.IOPattern = q->iopattern;
108 param.AsyncDepth = q->async_depth;
109 param.ExtParam = q->ext_buffers;
110 param.NumExtParam = q->nb_ext_buffers;
111 param.mfx.FrameInfo.BitDepthLuma = 8;
112 param.mfx.FrameInfo.BitDepthChroma = 8;
114 ret = MFXVideoDECODE_Init(q->session, ¶m);
116 if (MFX_ERR_INVALID_VIDEO_PARAM==ret) {
117 av_log(avctx, AV_LOG_ERROR,
118 "Error initializing the MFX video decoder, unsupported video\n");
120 av_log(avctx, AV_LOG_ERROR,
121 "Error initializing the MFX video decoder %d\n", ret);
123 return ff_qsv_error(ret);
126 ret = ff_get_format(avctx, pix_fmts);
130 avctx->pix_fmt = ret;
131 avctx->profile = param.mfx.CodecProfile;
132 avctx->level = param.mfx.CodecLevel;
133 avctx->coded_width = param.mfx.FrameInfo.Width;
134 avctx->coded_height = param.mfx.FrameInfo.Height;
135 avctx->width = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
136 avctx->height = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;
138 /* maximum decoder latency should be not exceed max DPB size for h.264 and
139 HEVC which is 16 for both cases.
140 So weare pre-allocating fifo big enough for 17 elements:
142 if (!q->async_fifo) {
143 q->async_fifo = av_fifo_alloc((1 + 16) *
144 (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
146 return AVERROR(ENOMEM);
149 q->input_fifo = av_fifo_alloc(1024*16);
151 return AVERROR(ENOMEM);
158 static int alloc_frame(AVCodecContext *avctx, QSVFrame *frame)
162 ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
166 if (frame->frame->format == AV_PIX_FMT_QSV) {
167 frame->surface = (mfxFrameSurface1*)frame->frame->data[3];
169 frame->surface_internal.Info.BitDepthLuma = 8;
170 frame->surface_internal.Info.BitDepthChroma = 8;
171 frame->surface_internal.Info.FourCC = MFX_FOURCC_NV12;
172 frame->surface_internal.Info.Width = avctx->coded_width;
173 frame->surface_internal.Info.Height = avctx->coded_height;
174 frame->surface_internal.Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
176 frame->surface_internal.Data.PitchLow = frame->frame->linesize[0];
177 frame->surface_internal.Data.Y = frame->frame->data[0];
178 frame->surface_internal.Data.UV = frame->frame->data[1];
180 frame->surface = &frame->surface_internal;
186 static void qsv_clear_unused_frames(QSVContext *q)
188 QSVFrame *cur = q->work_frames;
190 if (cur->surface && !cur->surface->Data.Locked && !cur->queued) {
192 av_frame_unref(cur->frame);
198 static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
200 QSVFrame *frame, **last;
203 qsv_clear_unused_frames(q);
205 frame = q->work_frames;
206 last = &q->work_frames;
208 if (!frame->surface) {
209 ret = alloc_frame(avctx, frame);
212 *surf = frame->surface;
220 frame = av_mallocz(sizeof(*frame));
222 return AVERROR(ENOMEM);
223 frame->frame = av_frame_alloc();
226 return AVERROR(ENOMEM);
230 ret = alloc_frame(avctx, frame);
234 *surf = frame->surface;
239 static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
241 QSVFrame *cur = q->work_frames;
243 if (surf == cur->surface)
/* This function is used for 'smart' releasing of consumed data
   from the input bitstream fifo.
   Since the input fifo is mapped onto an mfxBitstream, which does not
   understand wrapping of data over the fifo end, we must also relocate any
   remaining data to the beginning of the fifo. If no data remains, we just
   reset the fifo's pointers to their initial positions.
   NOTE: the case where the fifo contains unconsumed data is rare, and the
   typical amount of such data is 1..4 bytes.
*/
259 static void qsv_fifo_relocate(AVFifoBuffer *f, int bytes_to_free)
264 av_fifo_drain(f, bytes_to_free);
266 data_size = av_fifo_size(f);
268 if (f->buffer!=f->rptr) {
269 if ( (f->end - f->rptr) < data_size) {
270 data_rest = data_size - (f->end - f->rptr);
271 data_size-=data_rest;
272 memmove(f->buffer+data_size, f->buffer, data_rest);
274 memmove(f->buffer, f->rptr, data_size);
275 data_size+= data_rest;
279 f->wptr = f->buffer + data_size;
284 int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
285 AVFrame *frame, int *got_frame,
289 mfxFrameSurface1 *insurf;
290 mfxFrameSurface1 *outsurf;
292 mfxBitstream bs = { { { 0 } } };
297 if (!q->engine_ready) {
298 ret = ff_qsv_decode_init(avctx, q, avpkt);
304 if (av_fifo_size(q->input_fifo)) {
305 /* we have got rest of previous packet into buffer */
306 if (av_fifo_space(q->input_fifo) < avpkt->size) {
307 ret = av_fifo_grow(q->input_fifo, avpkt->size);
311 av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
312 bs.Data = q->input_fifo->rptr;
313 bs.DataLength = av_fifo_size(q->input_fifo);
316 bs.Data = avpkt->data;
317 bs.DataLength = avpkt->size;
319 bs.MaxLength = bs.DataLength;
320 bs.TimeStamp = avpkt->pts;
324 ret = get_surface(avctx, q, &insurf);
328 ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
329 insurf, &outsurf, &sync);
330 if (ret != MFX_WRN_DEVICE_BUSY)
335 if (MFX_WRN_VIDEO_PARAM_CHANGED==ret) {
336 /* TODO: handle here sequence header changing */
340 QSVFrame *out_frame = find_frame(q, outsurf);
343 av_log(avctx, AV_LOG_ERROR,
344 "The returned surface does not correspond to any frame\n");
348 out_frame->queued = 1;
349 av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
350 av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
354 if (MFX_ERR_MORE_SURFACE != ret && ret < 0)
358 /* make sure we do not enter an infinite loop if the SDK
359 * did not consume any data and did not return anything */
360 if (!sync && !bs.DataOffset) {
361 av_log(avctx, AV_LOG_WARNING, "A decode call did not consume any data\n");
362 bs.DataOffset = avpkt->size;
366 qsv_fifo_relocate(q->input_fifo, bs.DataOffset);
367 } else if (bs.DataOffset!=avpkt->size) {
368 /* some data of packet was not consumed. store it to local buffer */
369 av_fifo_generic_write(q->input_fifo, avpkt->data+bs.DataOffset,
370 avpkt->size - bs.DataOffset, NULL);
373 if (MFX_ERR_MORE_DATA!=ret && ret < 0) {
374 av_log(avctx, AV_LOG_ERROR, "Error %d during QSV decoding.\n", ret);
375 return ff_qsv_error(ret);
377 n_out_frames = av_fifo_size(q->async_fifo) / (sizeof(out_frame)+sizeof(sync));
379 if (n_out_frames > q->async_depth || (!avpkt->size && n_out_frames) ) {
382 av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
383 av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
384 out_frame->queued = 0;
386 MFXVideoCORE_SyncOperation(q->session, sync, 60000);
388 src_frame = out_frame->frame;
390 ret = av_frame_ref(frame, src_frame);
394 outsurf = out_frame->surface;
396 frame->pkt_pts = frame->pts = outsurf->Data.TimeStamp;
399 outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
400 outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
401 outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
402 frame->top_field_first =
403 outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
404 frame->interlaced_frame =
405 !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);
413 int ff_qsv_decode_close(QSVContext *q)
415 QSVFrame *cur = q->work_frames;
418 q->work_frames = cur->next;
419 av_frame_free(&cur->frame);
421 cur = q->work_frames;
424 av_fifo_free(q->async_fifo);
425 q->async_fifo = NULL;
427 av_fifo_free(q->input_fifo);
428 q->input_fifo = NULL;
430 MFXVideoDECODE_Close(q->session);
433 ff_qsv_close_internal_session(&q->internal_qs);