return 1;
}
+/**
+ * Fill q->param.mfx for MJPEG encoding.
+ *
+ * MJPEG uses a different branch of the mfxInfoMFX union than the other
+ * codecs (Interleaved/Quality/RestartInterval instead of the rate-control
+ * fields), so it gets its own init path instead of init_video_param().
+ *
+ * @param avctx  codec context supplying dimensions, quality, framerate, etc.
+ * @param q      QSV encoder context whose param.mfx is populated
+ * @return 0 on success, AVERROR_BUG if the codec id or pixel format
+ *         cannot be mapped
+ */
+static int init_video_param_jpeg(AVCodecContext *avctx, QSVEncContext *q)
+{
+ // For hw frames the real software format is in sw_pix_fmt
+ enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
+ avctx->sw_pix_fmt : avctx->pix_fmt;
+ const AVPixFmtDescriptor *desc;
+ int ret;
+
+ // Map the FFmpeg codec id to the MFX codec id (MFX_CODEC_JPEG here)
+ ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
+ if (ret < 0)
+ return AVERROR_BUG;
+ q->param.mfx.CodecId = ret;
+
+ if (avctx->level > 0)
+ q->param.mfx.CodecLevel = avctx->level;
+ q->param.mfx.CodecProfile = q->profile;
+
+ desc = av_pix_fmt_desc_get(sw_format);
+ if (!desc)
+ return AVERROR_BUG;
+
+ ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);
+
+ // Crop rectangle describes the visible picture inside the aligned surface
+ q->param.mfx.FrameInfo.CropX = 0;
+ q->param.mfx.FrameInfo.CropY = 0;
+ q->param.mfx.FrameInfo.CropW = avctx->width;
+ q->param.mfx.FrameInfo.CropH = avctx->height;
+ q->param.mfx.FrameInfo.AspectRatioW = avctx->sample_aspect_ratio.num;
+ q->param.mfx.FrameInfo.AspectRatioH = avctx->sample_aspect_ratio.den;
+ // NOTE(review): chroma format is hard-coded to 4:2:0 regardless of
+ // sw_format; confirm the supported input formats enforce this upstream
+ q->param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+ q->param.mfx.FrameInfo.BitDepthLuma = desc->comp[0].depth;
+ q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
+ // Shift=1 tells the runtime that >8-bit samples are MSB-aligned
+ q->param.mfx.FrameInfo.Shift = desc->comp[0].depth > 8;
+
+ // Surface dimensions must be 16-aligned for the MFX JPEG encoder
+ q->param.mfx.FrameInfo.Width = FFALIGN(avctx->width, 16);
+ q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, 16);
+
+ if (avctx->hw_frames_ctx) {
+ // When frames are pre-allocated in a hw frames context, the encoder
+ // must use the exact dimensions of those surfaces
+ AVHWFramesContext *frames_ctx = (AVHWFramesContext *)avctx->hw_frames_ctx->data;
+ AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+ q->param.mfx.FrameInfo.Width = frames_hwctx->surfaces[0].Info.Width;
+ q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
+ }
+
+ if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
+ q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
+ q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
+ } else {
+ // No explicit framerate: derive it as the inverse of the time base
+ q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
+ q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
+ }
+
+ // JPEG-specific fields (union members aliasing the rate-control fields):
+ // interleaved scan, quality factor clipped to the JPEG 1..100 range,
+ // and no restart markers
+ q->param.mfx.Interleaved = 1;
+ q->param.mfx.Quality = av_clip(avctx->global_quality, 1, 100);
+ q->param.mfx.RestartInterval = 0;
+
+ return 0;
+}
+
+
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
return 0;
}
+/**
+ * Query the negotiated MJPEG encoding parameters back from the MFX session
+ * and derive the output packet buffer size.
+ *
+ * @param avctx  codec context (used for logging)
+ * @param q      QSV encoder context; q->param is updated and
+ *               q->packet_size is set
+ * @return 0 on success, a negative error code if GetVideoParam fails
+ */
+static int qsv_retrieve_enc_jpeg_params(AVCodecContext *avctx, QSVEncContext *q)
+{
+ int ret = 0;
+
+ ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
+ if (ret < 0)
+ return ff_qsv_print_error(avctx, ret,
+ "Error calling GetVideoParam");
+
+ q->packet_size = q->param.mfx.BufferSizeInKB * 1000;
+
+ // The MFX runtime may report BufferSizeInKB == 0 for MJPEG; fall back
+ // to a generous worst-case estimate (4 bytes per pixel) in that case
+ if (q->packet_size == 0)
+ q->packet_size = q->param.mfx.FrameInfo.Height * q->param.mfx.FrameInfo.Width * 4;
+
+ return 0;
+}
+
+
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
AVCPBProperties *cpb_props;
if (ret < 0)
return ret;
- ret = init_video_param(avctx, q);
+ // in the mfxInfoMFX struct, JPEG is different from other codecs
+ switch (avctx->codec_id) {
+ case AV_CODEC_ID_MJPEG:
+ ret = init_video_param_jpeg(avctx, q);
+ break;
+ default:
+ ret = init_video_param(avctx, q);
+ break;
+ }
if (ret < 0)
return ret;
ff_qsv_print_warning(avctx, ret,
"Warning in encoder initialization");
- ret = qsv_retrieve_enc_params(avctx, q);
+ switch (avctx->codec_id) {
+ case AV_CODEC_ID_MJPEG:
+ ret = qsv_retrieve_enc_jpeg_params(avctx, q);
+ break;
+ default:
+ ret = qsv_retrieve_enc_params(avctx, q);
+ break;
+ }
if (ret < 0) {
av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
return ret;