#include "libavutil/internal.h"
#include "libavutil/intmath.h"
#include "libavutil/mathematics.h"
+#include "libavutil/mem_internal.h"
#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
-#include "libavutil/timer.h"
+#include "libavutil/thread.h"
#include "avcodec.h"
#include "dct.h"
#include "idctdsp.h"
#include "mathops.h"
#include "mpegutils.h"
#include "mjpegenc.h"
+#include "speedhqenc.h"
#include "msmpeg4.h"
#include "pixblockdsp.h"
#include "qpeldsp.h"
#include "bytestream.h"
#include "wmv2.h"
#include "rv10.h"
-#include "libxvid.h"
+#include "packet_internal.h"
#include <limits.h>
#include "sp5x.h"
const AVOption ff_mpv_generic_options[] = {
FF_MPV_COMMON_OPTS
+#if FF_API_MPEGVIDEO_OPTS
+    /* Deprecated option entries kept only while FF_API_MPEGVIDEO_OPTS is
+     * defined, so that existing user option strings still parse across the
+     * deprecation period — presumably no-ops; confirm in the FF_MPV_*
+     * macro definitions (mpegvideo.h). */
+    FF_MPV_DEPRECATED_MPEG_QUANT_OPT
+    FF_MPV_DEPRECATED_A53_CC_OPT
+    FF_MPV_DEPRECATED_MATRIX_OPT
+    FF_MPV_DEPRECATED_BFRAME_OPTS
+#endif
{ NULL },
};
}
}
if (shift) {
- av_log(NULL, AV_LOG_INFO,
+ av_log(s->avctx, AV_LOG_INFO,
"Warning, QMAT_SHIFT is larger than %d, overflows possible\n",
QMAT_SHIFT - shift);
}
#undef COPY
}
+/**
+ * One-time initialization of static encoder tables shared by all
+ * MpegEncContext instances: sets fcode 1 for the central |mv| < 16 band
+ * of default_fcode_tab. Invoked via ff_thread_once() from
+ * mpv_encode_defaults(), so concurrent encoder initialization is safe.
+ */
+static void mpv_encode_init_static(void)
+{
+    for (int i = -16; i < 16; i++)
+        default_fcode_tab[i + MAX_MV] = 1;
+}
+
/**
* Set the given MpegEncContext to defaults for encoding.
* the changed fields will not depend upon the prior state of the MpegEncContext.
*/
static void mpv_encode_defaults(MpegEncContext *s)
{
- int i;
+ static AVOnce init_static_once = AV_ONCE_INIT;
+
ff_mpv_common_defaults(s);
- for (i = -16; i < 16; i++) {
- default_fcode_tab[i + MAX_MV] = 1;
- }
+ ff_thread_once(&init_static_once, mpv_encode_init_static);
+
s->me.mv_penalty = default_mv_penalty;
s->fcode_tab = default_fcode_tab;
{
MpegEncContext *s = avctx->priv_data;
AVCPBProperties *cpb_props;
- int i, ret, format_supported;
+ int i, ret;
mpv_encode_defaults(s);
- switch (avctx->codec_id) {
- case AV_CODEC_ID_MPEG2VIDEO:
- if (avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
- avctx->pix_fmt != AV_PIX_FMT_YUV422P) {
- av_log(avctx, AV_LOG_ERROR,
- "only YUV420 and YUV422 are supported\n");
- return -1;
- }
- break;
- case AV_CODEC_ID_MJPEG:
- case AV_CODEC_ID_AMV:
- format_supported = 0;
- /* JPEG color space */
- if (avctx->pix_fmt == AV_PIX_FMT_YUVJ420P ||
- avctx->pix_fmt == AV_PIX_FMT_YUVJ422P ||
- avctx->pix_fmt == AV_PIX_FMT_YUVJ444P ||
- (avctx->color_range == AVCOL_RANGE_JPEG &&
- (avctx->pix_fmt == AV_PIX_FMT_YUV420P ||
- avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
- avctx->pix_fmt == AV_PIX_FMT_YUV444P)))
- format_supported = 1;
- /* MPEG color space */
- else if (avctx->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL &&
- (avctx->pix_fmt == AV_PIX_FMT_YUV420P ||
- avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
- avctx->pix_fmt == AV_PIX_FMT_YUV444P))
- format_supported = 1;
-
- if (!format_supported) {
- av_log(avctx, AV_LOG_ERROR, "colorspace not supported in jpeg\n");
- return -1;
- }
- break;
- default:
- if (avctx->pix_fmt != AV_PIX_FMT_YUV420P) {
- av_log(avctx, AV_LOG_ERROR, "only YUV420 is supported\n");
- return -1;
- }
- }
-
switch (avctx->pix_fmt) {
case AV_PIX_FMT_YUVJ444P:
case AV_PIX_FMT_YUV444P:
avctx->bits_per_raw_sample = av_clip(avctx->bits_per_raw_sample, 0, 8);
-#if FF_API_PRIVATE_OPT
-FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->rtp_payload_size)
- s->rtp_payload_size = avctx->rtp_payload_size;
- if (avctx->me_penalty_compensation)
- s->me_penalty_compensation = avctx->me_penalty_compensation;
- if (avctx->pre_me)
- s->me_pre = avctx->pre_me;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
s->bit_rate = avctx->bit_rate;
s->width = avctx->width;
s->height = avctx->height;
/* Fixed QSCALE */
s->fixed_qscale = !!(avctx->flags & AV_CODEC_FLAG_QSCALE);
- s->adaptive_quant = (s->avctx->lumi_masking ||
- s->avctx->dark_masking ||
- s->avctx->temporal_cplx_masking ||
- s->avctx->spatial_cplx_masking ||
- s->avctx->p_masking ||
+ s->adaptive_quant = (avctx->lumi_masking ||
+ avctx->dark_masking ||
+ avctx->temporal_cplx_masking ||
+ avctx->spatial_cplx_masking ||
+ avctx->p_masking ||
s->border_masking ||
(s->mpv_flags & FF_MPV_FLAG_QP_RD)) &&
!s->fixed_qscale;
- s->loop_filter = !!(s->avctx->flags & AV_CODEC_FLAG_LOOP_FILTER);
+ s->loop_filter = !!(avctx->flags & AV_CODEC_FLAG_LOOP_FILTER);
if (avctx->rc_max_rate && !avctx->rc_buffer_size) {
switch(avctx->codec_id) {
if ((!avctx->rc_max_rate) != (!avctx->rc_buffer_size)) {
av_log(avctx, AV_LOG_ERROR, "Either both buffer size and max rate or neither must be specified\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (avctx->rc_min_rate && avctx->rc_max_rate != avctx->rc_min_rate) {
if (avctx->rc_min_rate && avctx->rc_min_rate > avctx->bit_rate) {
av_log(avctx, AV_LOG_ERROR, "bitrate below min bitrate\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (avctx->rc_max_rate && avctx->rc_max_rate < avctx->bit_rate) {
av_log(avctx, AV_LOG_ERROR, "bitrate above max bitrate\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (avctx->rc_max_rate &&
avctx->bit_rate * (int64_t)avctx->time_base.num >
avctx->rc_buffer_size * (int64_t)avctx->time_base.den) {
av_log(avctx, AV_LOG_ERROR, "VBV buffer too small for bitrate\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (!s->fixed_qscale &&
avctx->bit_rate_tolerance = 5 * avctx->bit_rate * av_q2d(avctx->time_base);
}
- if (s->avctx->rc_max_rate &&
- s->avctx->rc_min_rate == s->avctx->rc_max_rate &&
+ if (avctx->rc_max_rate &&
+ avctx->rc_min_rate == avctx->rc_max_rate &&
(s->codec_id == AV_CODEC_ID_MPEG1VIDEO ||
s->codec_id == AV_CODEC_ID_MPEG2VIDEO) &&
90000LL * (avctx->rc_buffer_size - 1) >
- s->avctx->rc_max_rate * 0xFFFFLL) {
+ avctx->rc_max_rate * 0xFFFFLL) {
av_log(avctx, AV_LOG_INFO,
"Warning vbv_delay will be set to 0xFFFF (=VBR) as the "
"specified vbv buffer is too large for the given bitrate!\n");
}
- if ((s->avctx->flags & AV_CODEC_FLAG_4MV) && s->codec_id != AV_CODEC_ID_MPEG4 &&
+ if ((avctx->flags & AV_CODEC_FLAG_4MV) && s->codec_id != AV_CODEC_ID_MPEG4 &&
s->codec_id != AV_CODEC_ID_H263 && s->codec_id != AV_CODEC_ID_H263P &&
s->codec_id != AV_CODEC_ID_FLV1) {
av_log(avctx, AV_LOG_ERROR, "4MV not supported by codec\n");
- return -1;
+ return AVERROR(EINVAL);
}
- if (s->obmc && s->avctx->mb_decision != FF_MB_DECISION_SIMPLE) {
+ if (s->obmc && avctx->mb_decision != FF_MB_DECISION_SIMPLE) {
av_log(avctx, AV_LOG_ERROR,
"OBMC is only supported with simple mb decision\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (s->quarter_sample && s->codec_id != AV_CODEC_ID_MPEG4) {
av_log(avctx, AV_LOG_ERROR, "qpel not supported by codec\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (s->max_b_frames &&
s->codec_id != AV_CODEC_ID_MPEG1VIDEO &&
s->codec_id != AV_CODEC_ID_MPEG2VIDEO) {
av_log(avctx, AV_LOG_ERROR, "B-frames not supported by codec\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (s->max_b_frames < 0) {
av_log(avctx, AV_LOG_ERROR,
"max b frames must be 0 or positive for mpegvideo based encoders\n");
- return -1;
+ return AVERROR(EINVAL);
}
if ((s->codec_id == AV_CODEC_ID_MPEG4 ||
(avctx->width > 2048 ||
avctx->height > 1152 )) {
av_log(avctx, AV_LOG_ERROR, "H.263 does not support resolutions above 2048x1152\n");
- return -1;
+ return AVERROR(EINVAL);
}
if ((s->codec_id == AV_CODEC_ID_H263 ||
- s->codec_id == AV_CODEC_ID_H263P) &&
+ s->codec_id == AV_CODEC_ID_H263P ||
+ s->codec_id == AV_CODEC_ID_RV20) &&
((avctx->width &3) ||
(avctx->height&3) )) {
- av_log(avctx, AV_LOG_ERROR, "w/h must be a multiple of 4\n");
- return -1;
- }
-
- if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO &&
- (avctx->width > 4095 ||
- avctx->height > 4095 )) {
- av_log(avctx, AV_LOG_ERROR, "MPEG-1 does not support resolutions above 4095x4095\n");
- return -1;
- }
-
- if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO &&
- (avctx->width > 16383 ||
- avctx->height > 16383 )) {
- av_log(avctx, AV_LOG_ERROR, "MPEG-2 does not support resolutions above 16383x16383\n");
- return -1;
+ av_log(avctx, AV_LOG_ERROR, "width and height must be a multiple of 4\n");
+ return AVERROR(EINVAL);
}
if (s->codec_id == AV_CODEC_ID_RV10 &&
return AVERROR(EINVAL);
}
- if (s->codec_id == AV_CODEC_ID_RV20 &&
- (avctx->width &3 ||
- avctx->height&3 )) {
- av_log(avctx, AV_LOG_ERROR, "width and height must be a multiple of 4\n");
- return AVERROR(EINVAL);
- }
-
if ((s->codec_id == AV_CODEC_ID_WMV1 ||
s->codec_id == AV_CODEC_ID_WMV2) &&
avctx->width & 1) {
- av_log(avctx, AV_LOG_ERROR, "width must be multiple of 2\n");
- return -1;
+ av_log(avctx, AV_LOG_ERROR, "width must be multiple of 2\n");
+ return AVERROR(EINVAL);
}
- if ((s->avctx->flags & (AV_CODEC_FLAG_INTERLACED_DCT | AV_CODEC_FLAG_INTERLACED_ME)) &&
+ if ((avctx->flags & (AV_CODEC_FLAG_INTERLACED_DCT | AV_CODEC_FLAG_INTERLACED_ME)) &&
s->codec_id != AV_CODEC_ID_MPEG4 && s->codec_id != AV_CODEC_ID_MPEG2VIDEO) {
av_log(avctx, AV_LOG_ERROR, "interlacing not supported by codec\n");
- return -1;
- }
-
-#if FF_API_PRIVATE_OPT
- FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->mpeg_quant)
- s->mpeg_quant = avctx->mpeg_quant;
- FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
- // FIXME mpeg2 uses that too
- if (s->mpeg_quant && ( s->codec_id != AV_CODEC_ID_MPEG4
- && s->codec_id != AV_CODEC_ID_MPEG2VIDEO)) {
- av_log(avctx, AV_LOG_ERROR,
- "mpeg2 style quantization not supported by codec\n");
- return -1;
+ return AVERROR(EINVAL);
}
if ((s->mpv_flags & FF_MPV_FLAG_CBP_RD) && !avctx->trellis) {
av_log(avctx, AV_LOG_ERROR, "CBP RD needs trellis quant\n");
- return -1;
+ return AVERROR(EINVAL);
}
if ((s->mpv_flags & FF_MPV_FLAG_QP_RD) &&
- s->avctx->mb_decision != FF_MB_DECISION_RD) {
+ avctx->mb_decision != FF_MB_DECISION_RD) {
av_log(avctx, AV_LOG_ERROR, "QP RD needs mbd=2\n");
- return -1;
+ return AVERROR(EINVAL);
}
if ((s->mpv_flags & FF_MPV_FLAG_QP_RD) &&
// Used to produce garbage with MJPEG.
av_log(avctx, AV_LOG_ERROR,
"QP RD is no longer compatible with MJPEG or AMV\n");
- return -1;
+ return AVERROR(EINVAL);
}
-#if FF_API_PRIVATE_OPT
-FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->scenechange_threshold)
- s->scenechange_threshold = avctx->scenechange_threshold;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
if (s->scenechange_threshold < 1000000000 &&
- (s->avctx->flags & AV_CODEC_FLAG_CLOSED_GOP)) {
+ (avctx->flags & AV_CODEC_FLAG_CLOSED_GOP)) {
av_log(avctx, AV_LOG_ERROR,
"closed gop with scene change detection are not supported yet, "
"set threshold to 1000000000\n");
- return -1;
+ return AVERROR_PATCHWELCOME;
}
- if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY) {
+ if (avctx->flags & AV_CODEC_FLAG_LOW_DELAY) {
if (s->codec_id != AV_CODEC_ID_MPEG2VIDEO &&
s->strict_std_compliance >= FF_COMPLIANCE_NORMAL) {
av_log(avctx, AV_LOG_ERROR,
"low delay forcing is only available for mpeg2, "
"set strict_std_compliance to 'unofficial' or lower in order to allow it\n");
- return -1;
+ return AVERROR(EINVAL);
}
if (s->max_b_frames != 0) {
av_log(avctx, AV_LOG_ERROR,
"B-frames cannot be used with low delay\n");
- return -1;
+ return AVERROR(EINVAL);
}
}
if (avctx->qmax > 28) {
av_log(avctx, AV_LOG_ERROR,
"non linear quant only supports qmax <= 28 currently\n");
- return -1;
+ return AVERROR_PATCHWELCOME;
}
}
return AVERROR(EINVAL);
}
- if (s->avctx->thread_count > 1 &&
+ if (avctx->thread_count > 1 &&
s->codec_id != AV_CODEC_ID_MPEG4 &&
s->codec_id != AV_CODEC_ID_MPEG1VIDEO &&
s->codec_id != AV_CODEC_ID_MPEG2VIDEO &&
(s->codec_id != AV_CODEC_ID_H263P)) {
av_log(avctx, AV_LOG_ERROR,
"multi threaded encoding not supported by codec\n");
- return -1;
+ return AVERROR_PATCHWELCOME;
}
- if (s->avctx->thread_count < 1) {
+ if (avctx->thread_count < 1) {
av_log(avctx, AV_LOG_ERROR,
"automatic thread number detection not supported by codec, "
"patch welcome\n");
- return -1;
- }
-
- if (!avctx->time_base.den || !avctx->time_base.num) {
- av_log(avctx, AV_LOG_ERROR, "framerate not set\n");
- return -1;
+ return AVERROR_PATCHWELCOME;
}
-#if FF_API_PRIVATE_OPT
-FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->b_frame_strategy)
- s->b_frame_strategy = avctx->b_frame_strategy;
- if (avctx->b_sensitivity != 40)
- s->b_sensitivity = avctx->b_sensitivity;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
if (s->b_frame_strategy && (avctx->flags & AV_CODEC_FLAG_PASS2)) {
av_log(avctx, AV_LOG_INFO,
"notice: b_frame_strategy only affects the first pass\n");
//return -1;
}
- if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG1VIDEO || s->codec_id == AV_CODEC_ID_MPEG2VIDEO || s->codec_id == AV_CODEC_ID_MJPEG || s->codec_id==AV_CODEC_ID_AMV) {
+ if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG1VIDEO || s->codec_id == AV_CODEC_ID_MPEG2VIDEO || s->codec_id == AV_CODEC_ID_MJPEG || s->codec_id == AV_CODEC_ID_AMV || s->codec_id == AV_CODEC_ID_SPEEDHQ) {
// (a + x * 3 / 8) / x
s->intra_quant_bias = 3 << (QUANT_BIAS_SHIFT - 3);
s->inter_quant_bias = 0;
av_log(avctx, AV_LOG_DEBUG, "intra_quant_bias = %d inter_quant_bias = %d\n",s->intra_quant_bias,s->inter_quant_bias);
if (avctx->codec_id == AV_CODEC_ID_MPEG4 &&
- s->avctx->time_base.den > (1 << 16) - 1) {
+ avctx->time_base.den > (1 << 16) - 1) {
av_log(avctx, AV_LOG_ERROR,
"timebase %d/%d not supported by MPEG 4 standard, "
"the maximum admitted value for the timebase denominator "
- "is %d\n", s->avctx->time_base.num, s->avctx->time_base.den,
+ "is %d\n", avctx->time_base.num, avctx->time_base.den,
(1 << 16) - 1);
- return -1;
+ return AVERROR(EINVAL);
}
- s->time_increment_bits = av_log2(s->avctx->time_base.den - 1) + 1;
+ s->time_increment_bits = av_log2(avctx->time_base.den - 1) + 1;
switch (avctx->codec->id) {
case AV_CODEC_ID_MPEG1VIDEO:
s->out_format = FMT_MPEG1;
- s->low_delay = !!(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY);
+ s->low_delay = !!(avctx->flags & AV_CODEC_FLAG_LOW_DELAY);
avctx->delay = s->low_delay ? 0 : (s->max_b_frames + 1);
break;
case AV_CODEC_ID_MPEG2VIDEO:
s->out_format = FMT_MPEG1;
- s->low_delay = !!(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY);
+ s->low_delay = !!(avctx->flags & AV_CODEC_FLAG_LOW_DELAY);
avctx->delay = s->low_delay ? 0 : (s->max_b_frames + 1);
s->rtp_mode = 1;
break;
+#if CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER
case AV_CODEC_ID_MJPEG:
case AV_CODEC_ID_AMV:
s->out_format = FMT_MJPEG;
s->intra_only = 1; /* force intra only for jpeg */
- if (!CONFIG_MJPEG_ENCODER ||
- ff_mjpeg_encode_init(s) < 0)
- return -1;
+ if ((ret = ff_mjpeg_encode_init(s)) < 0)
+ return ret;
+ avctx->delay = 0;
+ s->low_delay = 1;
+ break;
+#endif
+ case AV_CODEC_ID_SPEEDHQ:
+ s->out_format = FMT_SPEEDHQ;
+ s->intra_only = 1; /* force intra only for SHQ */
+ if (!CONFIG_SPEEDHQ_ENCODER)
+ return AVERROR_ENCODER_NOT_FOUND;
+ if ((ret = ff_speedhq_encode_init(s)) < 0)
+ return ret;
avctx->delay = 0;
s->low_delay = 1;
break;
case AV_CODEC_ID_H261:
if (!CONFIG_H261_ENCODER)
- return -1;
+ return AVERROR_ENCODER_NOT_FOUND;
if (ff_h261_get_picture_format(s->width, s->height) < 0) {
av_log(avctx, AV_LOG_ERROR,
"The specified picture size of %dx%d is not valid for the "
"H.261 codec.\nValid sizes are 176x144, 352x288\n",
s->width, s->height);
- return -1;
+ return AVERROR(EINVAL);
}
s->out_format = FMT_H261;
avctx->delay = 0;
break;
case AV_CODEC_ID_H263:
if (!CONFIG_H263_ENCODER)
- return -1;
+ return AVERROR_ENCODER_NOT_FOUND;
if (ff_match_2uint16(ff_h263_format, FF_ARRAY_ELEMS(ff_h263_format),
s->width, s->height) == 8) {
av_log(avctx, AV_LOG_ERROR,
"the H.263 codec.\nValid sizes are 128x96, 176x144, "
"352x288, 704x576, and 1408x1152. "
"Try H.263+.\n", s->width, s->height);
- return -1;
+ return AVERROR(EINVAL);
}
s->out_format = FMT_H263;
avctx->delay = 0;
s->low_delay = 1;
break;
default:
- return -1;
+ return AVERROR(EINVAL);
}
-#if FF_API_PRIVATE_OPT
- FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->noise_reduction)
- s->noise_reduction = avctx->noise_reduction;
- FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
avctx->has_b_frames = !s->low_delay;
s->encoding = 1;
/* init */
ff_mpv_idct_init(s);
- if (ff_mpv_common_init(s) < 0)
- return -1;
+ if ((ret = ff_mpv_common_init(s)) < 0)
+ return ret;
ff_fdctdsp_init(&s->fdsp, avctx);
ff_me_cmp_init(&s->mecc, avctx);
ff_qpeldsp_init(&s->qdsp);
if (s->msmpeg4_version) {
- FF_ALLOCZ_OR_GOTO(s->avctx, s->ac_stats,
- 2 * 2 * (MAX_LEVEL + 1) *
- (MAX_RUN + 1) * 2 * sizeof(int), fail);
+ int ac_stats_size = 2 * 2 * (MAX_LEVEL + 1) * (MAX_RUN + 1) * 2 * sizeof(int);
+ if (!(s->ac_stats = av_mallocz(ac_stats_size)))
+ return AVERROR(ENOMEM);
}
- FF_ALLOCZ_OR_GOTO(s->avctx, s->avctx->stats_out, 256, fail);
-
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_intra_matrix, 64 * 32 * sizeof(int), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_chroma_intra_matrix, 64 * 32 * sizeof(int), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_inter_matrix, 64 * 32 * sizeof(int), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_intra_matrix16, 64 * 32 * 2 * sizeof(uint16_t), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_chroma_intra_matrix16, 64 * 32 * 2 * sizeof(uint16_t), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->q_inter_matrix16, 64 * 32 * 2 * sizeof(uint16_t), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->input_picture,
- MAX_PICTURE_COUNT * sizeof(Picture *), fail);
- FF_ALLOCZ_OR_GOTO(s->avctx, s->reordered_input_picture,
- MAX_PICTURE_COUNT * sizeof(Picture *), fail);
+ if (!(avctx->stats_out = av_mallocz(256)) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_intra_matrix, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_chroma_intra_matrix, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_inter_matrix, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_intra_matrix16, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_chroma_intra_matrix16, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->q_inter_matrix16, 32) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->input_picture, MAX_PICTURE_COUNT) ||
+ !FF_ALLOCZ_TYPED_ARRAY(s->reordered_input_picture, MAX_PICTURE_COUNT))
+ return AVERROR(ENOMEM);
if (s->noise_reduction) {
- FF_ALLOCZ_OR_GOTO(s->avctx, s->dct_offset,
- 2 * 64 * sizeof(uint16_t), fail);
+ if (!FF_ALLOCZ_TYPED_ARRAY(s->dct_offset, 2))
+ return AVERROR(ENOMEM);
}
ff_dct_encode_init(s);
s->quant_precision = 5;
-#if FF_API_PRIVATE_OPT
-FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->frame_skip_threshold)
- s->frame_skip_threshold = avctx->frame_skip_threshold;
- if (avctx->frame_skip_factor)
- s->frame_skip_factor = avctx->frame_skip_factor;
- if (avctx->frame_skip_exp)
- s->frame_skip_exp = avctx->frame_skip_exp;
- if (avctx->frame_skip_cmp != FF_CMP_DCTMAX)
- s->frame_skip_cmp = avctx->frame_skip_cmp;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
- ff_set_cmp(&s->mecc, s->mecc.ildct_cmp, s->avctx->ildct_cmp);
+ ff_set_cmp(&s->mecc, s->mecc.ildct_cmp, avctx->ildct_cmp);
ff_set_cmp(&s->mecc, s->mecc.frame_skip_cmp, s->frame_skip_cmp);
if (CONFIG_H261_ENCODER && s->out_format == FMT_H261)
if (CONFIG_H263_ENCODER && s->out_format == FMT_H263)
ff_h263_encode_init(s);
if (CONFIG_MSMPEG4_ENCODER && s->msmpeg4_version)
- if ((ret = ff_msmpeg4_encode_init(s)) < 0)
- return ret;
+ ff_msmpeg4_encode_init(s);
if ((CONFIG_MPEG1VIDEO_ENCODER || CONFIG_MPEG2VIDEO_ENCODER)
&& s->out_format == FMT_MPEG1)
ff_mpeg1_encode_init(s);
} else if (s->out_format == FMT_H263 || s->out_format == FMT_H261) {
s->intra_matrix[j] =
s->inter_matrix[j] = ff_mpeg1_default_non_intra_matrix[i];
+ } else if (CONFIG_SPEEDHQ_ENCODER && s->codec_id == AV_CODEC_ID_SPEEDHQ) {
+ s->intra_matrix[j] =
+ s->inter_matrix[j] = ff_mpeg1_default_intra_matrix[i];
} else {
/* MPEG-1/2 */
s->chroma_intra_matrix[j] =
s->intra_matrix[j] = ff_mpeg1_default_intra_matrix[i];
s->inter_matrix[j] = ff_mpeg1_default_non_intra_matrix[i];
}
- if (s->avctx->intra_matrix)
- s->intra_matrix[j] = s->avctx->intra_matrix[i];
- if (s->avctx->inter_matrix)
- s->inter_matrix[j] = s->avctx->inter_matrix[i];
+ if (avctx->intra_matrix)
+ s->intra_matrix[j] = avctx->intra_matrix[i];
+ if (avctx->inter_matrix)
+ s->inter_matrix[j] = avctx->inter_matrix[i];
}
/* precompute matrix */
31, 0);
}
- if (ff_rate_control_init(s) < 0)
- return -1;
-
-#if FF_API_PRIVATE_OPT
- FF_DISABLE_DEPRECATION_WARNINGS
- if (avctx->brd_scale)
- s->brd_scale = avctx->brd_scale;
-
- if (avctx->prediction_method)
- s->pred = avctx->prediction_method + 1;
- FF_ENABLE_DEPRECATION_WARNINGS
-#endif
+ if ((ret = ff_rate_control_init(s)) < 0)
+ return ret;
if (s->b_frame_strategy == 2) {
for (i = 0; i < s->max_b_frames + 2; i++) {
s->tmp_frames[i]->width = s->width >> s->brd_scale;
s->tmp_frames[i]->height = s->height >> s->brd_scale;
- ret = av_frame_get_buffer(s->tmp_frames[i], 32);
+ ret = av_frame_get_buffer(s->tmp_frames[i], 0);
if (ret < 0)
return ret;
}
cpb_props->buffer_size = avctx->rc_buffer_size;
return 0;
-fail:
- ff_mpv_encode_end(avctx);
- return AVERROR_UNKNOWN;
}
av_cold int ff_mpv_encode_end(AVCodecContext *avctx)
ff_rate_control_uninit(s);
ff_mpv_common_end(s);
- if (CONFIG_MJPEG_ENCODER &&
+ if ((CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER) &&
s->out_format == FMT_MJPEG)
ff_mjpeg_encode_close(s);
av_frame_free(&s->tmp_frames[i]);
ff_free_picture_tables(&s->new_picture);
- ff_mpeg_unref_picture(s->avctx, &s->new_picture);
+ ff_mpeg_unref_picture(avctx, &s->new_picture);
- av_freep(&s->avctx->stats_out);
+ av_freep(&avctx->stats_out);
av_freep(&s->ac_stats);
if(s->q_chroma_intra_matrix != s->q_intra_matrix ) av_freep(&s->q_chroma_intra_matrix);
return 0;
}
-static int encode_frame(AVCodecContext *c, AVFrame *frame)
+static int encode_frame(AVCodecContext *c, AVFrame *frame, AVPacket *pkt)
{
- AVPacket pkt = { 0 };
int ret;
int size = 0;
- av_init_packet(&pkt);
-
ret = avcodec_send_frame(c, frame);
if (ret < 0)
return ret;
do {
- ret = avcodec_receive_packet(c, &pkt);
+ ret = avcodec_receive_packet(c, pkt);
if (ret >= 0) {
- size += pkt.size;
- av_packet_unref(&pkt);
+ size += pkt->size;
+ av_packet_unref(pkt);
} else if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
return ret;
} while (ret >= 0);
static int estimate_best_b_count(MpegEncContext *s)
{
const AVCodec *codec = avcodec_find_encoder(s->avctx->codec_id);
+ AVPacket *pkt;
const int scale = s->brd_scale;
int width = s->width >> scale;
int height = s->height >> scale;
av_assert0(scale >= 0 && scale <= 3);
+ pkt = av_packet_alloc();
+ if (!pkt)
+ return AVERROR(ENOMEM);
+
//emms_c();
//s->next_picture_ptr->quality;
p_lambda = s->last_lambda_for[AV_PICTURE_TYPE_P];
break;
c = avcodec_alloc_context3(NULL);
- if (!c)
- return AVERROR(ENOMEM);
+ if (!c) {
+ ret = AVERROR(ENOMEM);
+ goto fail;
+ }
c->width = width;
c->height = height;
if (ret < 0)
goto fail;
+
s->tmp_frames[0]->pict_type = AV_PICTURE_TYPE_I;
s->tmp_frames[0]->quality = 1 * FF_QP2LAMBDA;
- out_size = encode_frame(c, s->tmp_frames[0]);
+ out_size = encode_frame(c, s->tmp_frames[0], pkt);
if (out_size < 0) {
ret = out_size;
goto fail;
AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_B;
s->tmp_frames[i + 1]->quality = is_p ? p_lambda : b_lambda;
- out_size = encode_frame(c, s->tmp_frames[i + 1]);
+ out_size = encode_frame(c, s->tmp_frames[i + 1], pkt);
if (out_size < 0) {
ret = out_size;
goto fail;
}
/* get the delayed frames */
- out_size = encode_frame(c, NULL);
+ out_size = encode_frame(c, NULL, pkt);
if (out_size < 0) {
ret = out_size;
goto fail;
fail:
avcodec_free_context(&c);
- if (ret < 0)
- return ret;
+ av_packet_unref(pkt);
+ if (ret < 0) {
+ best_b_count = ret;
+ break;
+ }
}
+ av_packet_free(&pkt);
+
return best_b_count;
}
// input is not a shared pix -> reuse buffer for current_pix
s->current_picture_ptr = s->reordered_input_picture[0];
for (i = 0; i < 4; i++) {
- s->new_picture.f->data[i] += INPLACE_OFFSET;
+ if (s->new_picture.f->data[i])
+ s->new_picture.f->data[i] += INPLACE_OFFSET;
}
}
ff_mpeg_unref_picture(s->avctx, &s->current_picture);
if (ret < 0)
return -1;
-#if FF_API_STAT_BITS
-FF_DISABLE_DEPRECATION_WARNINGS
- avctx->header_bits = s->header_bits;
- avctx->mv_bits = s->mv_bits;
- avctx->misc_bits = s->misc_bits;
- avctx->i_tex_bits = s->i_tex_bits;
- avctx->p_tex_bits = s->p_tex_bits;
- avctx->i_count = s->i_count;
- // FIXME f/b_count in avctx
- avctx->p_count = s->mb_num - s->i_count - s->skip_count;
- avctx->skip_count = s->skip_count;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
frame_end(s);
- if (CONFIG_MJPEG_ENCODER && s->out_format == FMT_MJPEG)
+ if ((CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER) && s->out_format == FMT_MJPEG)
ff_mjpeg_encode_picture_trailer(&s->pb, s->header_bits);
if (avctx->rc_buffer_size) {
RateControlContext *rcc = &s->rc_context;
int max_size = FFMAX(rcc->buffer_index * avctx->rc_max_available_vbv_use, rcc->buffer_index - 500);
- int hq = (s->avctx->mb_decision == FF_MB_DECISION_RD || s->avctx->trellis);
+ int hq = (avctx->mb_decision == FF_MB_DECISION_RD || avctx->trellis);
int min_step = hq ? 1 : (1<<(FF_LAMBDA_SHIFT + 7))/139;
if (put_bits_count(&s->pb) > max_size &&
init_put_bits(pb, pb->buf, pb->buf_end - pb->buf);
}
s->vbv_ignore_qmax = 1;
- av_log(s->avctx, AV_LOG_VERBOSE, "reencoding frame due to VBV\n");
+ av_log(avctx, AV_LOG_VERBOSE, "reencoding frame due to VBV\n");
goto vbv_retry;
}
- av_assert0(s->avctx->rc_max_rate);
+ av_assert0(avctx->rc_max_rate);
}
- if (s->avctx->flags & AV_CODEC_FLAG_PASS1)
+ if (avctx->flags & AV_CODEC_FLAG_PASS1)
ff_write_pass1_stats(s);
for (i = 0; i < 4; i++) {
}
ff_side_data_set_encoder_stats(pkt, s->current_picture.f->quality,
s->current_picture_ptr->encoding_error,
- (s->avctx->flags&AV_CODEC_FLAG_PSNR) ? 4 : 0,
+ (avctx->flags&AV_CODEC_FLAG_PSNR) ? 4 : 0,
s->pict_type);
- if (s->avctx->flags & AV_CODEC_FLAG_PASS1)
+ if (avctx->flags & AV_CODEC_FLAG_PASS1)
assert(put_bits_count(&s->pb) == s->header_bits + s->mv_bits +
s->misc_bits + s->i_tex_bits +
s->p_tex_bits);
stuffing_count = ff_vbv_update(s, s->frame_bits);
s->stuffing_bits = 8*stuffing_count;
if (stuffing_count) {
- if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) <
- stuffing_count + 50) {
- av_log(s->avctx, AV_LOG_ERROR, "stuffing too large\n");
+ if (put_bytes_left(&s->pb, 0) < stuffing_count + 50) {
+ av_log(avctx, AV_LOG_ERROR, "stuffing too large\n");
return -1;
}
}
break;
default:
- av_log(s->avctx, AV_LOG_ERROR, "vbv buffer overflow\n");
+ av_log(avctx, AV_LOG_ERROR, "vbv buffer overflow\n");
}
flush_put_bits(&s->pb);
s->frame_bits = put_bits_count(&s->pb);
}
/* update MPEG-1/2 vbv_delay for CBR */
- if (s->avctx->rc_max_rate &&
- s->avctx->rc_min_rate == s->avctx->rc_max_rate &&
+ if (avctx->rc_max_rate &&
+ avctx->rc_min_rate == avctx->rc_max_rate &&
s->out_format == FMT_MPEG1 &&
90000LL * (avctx->rc_buffer_size - 1) <=
- s->avctx->rc_max_rate * 0xFFFFLL) {
+ avctx->rc_max_rate * 0xFFFFLL) {
AVCPBProperties *props;
size_t props_size;
int vbv_delay, min_delay;
- double inbits = s->avctx->rc_max_rate *
- av_q2d(s->avctx->time_base);
+ double inbits = avctx->rc_max_rate *
+ av_q2d(avctx->time_base);
int minbits = s->frame_bits - 8 *
(s->vbv_delay_ptr - s->pb.buf - 1);
double bits = s->rc_context.buffer_index + minbits - inbits;
if (bits < 0)
- av_log(s->avctx, AV_LOG_ERROR,
+ av_log(avctx, AV_LOG_ERROR,
"Internal error, negative bits\n");
av_assert1(s->repeat_first_field == 0);
- vbv_delay = bits * 90000 / s->avctx->rc_max_rate;
- min_delay = (minbits * 90000LL + s->avctx->rc_max_rate - 1) /
- s->avctx->rc_max_rate;
+ vbv_delay = bits * 90000 / avctx->rc_max_rate;
+ min_delay = (minbits * 90000LL + avctx->rc_max_rate - 1) /
+ avctx->rc_max_rate;
vbv_delay = FFMAX(vbv_delay, min_delay);
av_freep(&props);
return ret;
}
-
-#if FF_API_VBV_DELAY
-FF_DISABLE_DEPRECATION_WARNINGS
- avctx->vbv_delay = vbv_delay * 300;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
}
s->total_bits += s->frame_bits;
-#if FF_API_STAT_BITS
-FF_DISABLE_DEPRECATION_WARNINGS
- avctx->frame_bits = s->frame_bits;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
pkt->pts = s->current_picture.f->pts;
if (!s->low_delay && s->pict_type != AV_PICTURE_TYPE_B) {
/* release non-reference frames */
for (i = 0; i < MAX_PICTURE_COUNT; i++) {
if (!s->picture[i].reference)
- ff_mpeg_unref_picture(s->avctx, &s->picture[i]);
+ ff_mpeg_unref_picture(avctx, &s->picture[i]);
}
av_assert1((s->frame_bits & 7) == 0);
if (CONFIG_H263_ENCODER)
ff_h263_encode_mb(s, s->block, motion_x, motion_y);
break;
+#if CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER
case AV_CODEC_ID_MJPEG:
case AV_CODEC_ID_AMV:
- if (CONFIG_MJPEG_ENCODER)
- ff_mjpeg_encode_mb(s, s->block);
+ ff_mjpeg_encode_mb(s, s->block);
+ break;
+#endif
+ case AV_CODEC_ID_SPEEDHQ:
+ if (CONFIG_SPEEDHQ_ENCODER)
+ ff_speedhq_encode_mb(s, s->block);
break;
default:
av_assert1(0);
static int estimate_motion_thread(AVCodecContext *c, void *arg){
MpegEncContext *s= *(void**)arg;
- ff_check_alignment();
-
s->me.dia_size= s->avctx->dia_size;
s->first_slice_line=1;
for(s->mb_y= s->start_mb_y; s->mb_y < s->end_mb_y; s->mb_y++) {
MpegEncContext *s= *(void**)arg;
int mb_x, mb_y;
- ff_check_alignment();
-
for(mb_y=s->start_mb_y; mb_y < s->end_mb_y; mb_y++) {
for(mb_x=0; mb_x < s->mb_width; mb_x++) {
int xx = mb_x * 16;
}
ff_mpeg4_stuffing(&s->pb);
- }else if(CONFIG_MJPEG_ENCODER && s->out_format == FMT_MJPEG){
+ } else if ((CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER) &&
+ s->out_format == FMT_MJPEG) {
ff_mjpeg_encode_stuffing(s);
+ } else if (CONFIG_SPEEDHQ_ENCODER && s->out_format == FMT_SPEEDHQ) {
+ ff_speedhq_end_slice(s);
}
- avpriv_align_put_bits(&s->pb);
flush_put_bits(&s->pb);
if ((s->avctx->flags & AV_CODEC_FLAG_PASS1) && !s->partitioned_frame)
{
if (!s->mb_info)
return;
- if (put_bits_count(&s->pb) - s->prev_mb_info*8 >= s->mb_info*8) {
+ if (put_bytes_count(&s->pb, 0) - s->prev_mb_info >= s->mb_info) {
s->mb_info_size += 12;
s->prev_mb_info = s->last_mb_info;
}
if (startcode) {
- s->prev_mb_info = put_bits_count(&s->pb)/8;
+ s->prev_mb_info = put_bytes_count(&s->pb, 0);
/* This might have incremented mb_info_size above, and we return without
* actually writing any info into that slot yet. But in that case,
* this will be called again at the start of the after writing the
return;
}
- s->last_mb_info = put_bits_count(&s->pb)/8;
+ s->last_mb_info = put_bytes_count(&s->pb, 0);
if (!s->mb_info_size)
s->mb_info_size += 12;
write_mb_info(s);
int ff_mpv_reallocate_putbitbuffer(MpegEncContext *s, size_t threshold, size_t size_increase)
{
- if ( s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < threshold
+ if (put_bytes_left(&s->pb, 0) < threshold
&& s->slice_context_count == 1
&& s->pb.buf == s->avctx->internal->byte_buffer) {
int lastgob_pos = s->ptr_lastgob - s->pb.buf;
s->ptr_lastgob = s->pb.buf + lastgob_pos;
s->vbv_delay_ptr = s->pb.buf + vbv_pos;
}
- if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < threshold)
+ if (put_bytes_left(&s->pb, 0) < threshold)
return AVERROR(EINVAL);
return 0;
}
static int encode_thread(AVCodecContext *c, void *arg){
MpegEncContext *s= *(void**)arg;
- int mb_x, mb_y;
+ int mb_x, mb_y, mb_y_order;
int chr_h= 16>>s->chroma_y_shift;
int i, j;
MpegEncContext best_s = { 0 }, backup_s;
uint8_t bit_buf_tex[2][MAX_MB_BYTES];
PutBitContext pb[2], pb2[2], tex_pb[2];
- ff_check_alignment();
-
for(i=0; i<2; i++){
init_put_bits(&pb [i], bit_buf [i], MAX_MB_BYTES);
init_put_bits(&pb2 [i], bit_buf2 [i], MAX_MB_BYTES);
s->resync_mb_y=0;
s->first_slice_line = 1;
s->ptr_lastgob = s->pb.buf;
- for(mb_y= s->start_mb_y; mb_y < s->end_mb_y; mb_y++) {
+ for (mb_y_order = s->start_mb_y; mb_y_order < s->end_mb_y; mb_y_order++) {
+ if (CONFIG_SPEEDHQ_ENCODER && s->codec_id == AV_CODEC_ID_SPEEDHQ) {
+ int first_in_slice;
+ mb_y = ff_speedhq_mb_y_order_to_mb(mb_y_order, s->mb_height, &first_in_slice);
+ if (first_in_slice && mb_y_order != s->start_mb_y)
+ ff_speedhq_end_slice(s);
+ s->last_dc[0] = s->last_dc[1] = s->last_dc[2] = 1024 << s->intra_dc_precision;
+ } else {
+ mb_y = mb_y_order;
+ }
s->mb_x=0;
s->mb_y= mb_y;
+ s->mb_width*MAX_MB_BYTES;
ff_mpv_reallocate_putbitbuffer(s, MAX_MB_BYTES, size_increase);
- if(s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb)>>3) < MAX_MB_BYTES){
+ if (put_bytes_left(&s->pb, 0) < MAX_MB_BYTES){
av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
return -1;
}
if(s->data_partitioning){
- if( s->pb2 .buf_end - s->pb2 .buf - (put_bits_count(&s-> pb2)>>3) < MAX_MB_BYTES
- || s->tex_pb.buf_end - s->tex_pb.buf - (put_bits_count(&s->tex_pb )>>3) < MAX_MB_BYTES){
+ if (put_bytes_left(&s->pb2, 0) < MAX_MB_BYTES ||
+ put_bytes_left(&s->tex_pb, 0) < MAX_MB_BYTES) {
av_log(s->avctx, AV_LOG_ERROR, "encoded partitioned frame too large\n");
return -1;
}
if(s->rtp_mode){
int current_packet_size, is_gob_start;
- current_packet_size= ((put_bits_count(&s->pb)+7)>>3) - (s->ptr_lastgob - s->pb.buf);
+ current_packet_size = put_bytes_count(&s->pb, 1)
+ - (s->ptr_lastgob - s->pb.buf);
is_gob_start = s->rtp_payload_size &&
current_packet_size >= s->rtp_payload_size &&
current_packet_size= put_bits_ptr(&s->pb) - s->ptr_lastgob;
if (s->error_rate && s->resync_mb_x + s->resync_mb_y > 0) {
- int r= put_bits_count(&s->pb)/8 + s->picture_number + 16 + s->mb_x + s->mb_y;
+ int r = put_bytes_count(&s->pb, 0) + s->picture_number + 16 + s->mb_x + s->mb_y;
int d = 100 / s->error_rate;
if(r % d == 0){
current_packet_size=0;
}
}
-#if FF_API_RTP_CALLBACK
-FF_DISABLE_DEPRECATION_WARNINGS
- if (s->avctx->rtp_callback){
- int number_mb = (mb_y - s->resync_mb_y)*s->mb_width + mb_x - s->resync_mb_x;
- s->avctx->rtp_callback(s->avctx, s->ptr_lastgob, current_packet_size, number_mb);
- }
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
update_mb_info(s, 1);
switch(s->codec_id){
pb_bits_count= put_bits_count(&s->pb);
flush_put_bits(&s->pb);
- avpriv_copy_bits(&backup_s.pb, bit_buf[next_block^1], pb_bits_count);
+ ff_copy_bits(&backup_s.pb, bit_buf[next_block^1], pb_bits_count);
s->pb= backup_s.pb;
if(s->data_partitioning){
pb2_bits_count= put_bits_count(&s->pb2);
flush_put_bits(&s->pb2);
- avpriv_copy_bits(&backup_s.pb2, bit_buf2[next_block^1], pb2_bits_count);
+ ff_copy_bits(&backup_s.pb2, bit_buf2[next_block^1], pb2_bits_count);
s->pb2= backup_s.pb2;
tex_pb_bits_count= put_bits_count(&s->tex_pb);
flush_put_bits(&s->tex_pb);
- avpriv_copy_bits(&backup_s.tex_pb, bit_buf_tex[next_block^1], tex_pb_bits_count);
+ ff_copy_bits(&backup_s.tex_pb, bit_buf_tex[next_block^1], tex_pb_bits_count);
s->tex_pb= backup_s.tex_pb;
}
s->last_bits= put_bits_count(&s->pb);
write_slice_end(s);
-#if FF_API_RTP_CALLBACK
-FF_DISABLE_DEPRECATION_WARNINGS
- /* Send the last GOB if RTP */
- if (s->avctx->rtp_callback) {
- int number_mb = (mb_y - s->resync_mb_y)*s->mb_width - s->resync_mb_x;
- int pdif = put_bits_ptr(&s->pb) - s->ptr_lastgob;
- /* Call the RTP callback to send the last GOB */
- emms_c();
- s->avctx->rtp_callback(s->avctx, s->ptr_lastgob, pdif, number_mb);
- }
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
-
return 0;
}
av_assert1(put_bits_count(&src->pb) % 8 ==0);
av_assert1(put_bits_count(&dst->pb) % 8 ==0);
- avpriv_copy_bits(&dst->pb, src->pb.buf, put_bits_count(&src->pb));
+ ff_copy_bits(&dst->pb, src->pb.buf, put_bits_count(&src->pb));
flush_put_bits(&dst->pb);
}
s->f_code= FFMAX3(s->f_code, a, b);
}
- ff_fix_long_p_mvs(s);
- ff_fix_long_mvs(s, NULL, 0, s->p_mv_table, s->f_code, CANDIDATE_MB_TYPE_INTER, 0);
+ ff_fix_long_p_mvs(s, s->intra_penalty ? CANDIDATE_MB_TYPE_INTER : CANDIDATE_MB_TYPE_INTRA);
+ ff_fix_long_mvs(s, NULL, 0, s->p_mv_table, s->f_code, CANDIDATE_MB_TYPE_INTER, !!s->intra_penalty);
if (s->avctx->flags & AV_CODEC_FLAG_INTERLACED_ME) {
int j;
for(i=0; i<2; i++){
for(j=0; j<2; j++)
ff_fix_long_mvs(s, s->p_field_select_table[i], j,
- s->p_field_mv_table[i][j], s->f_code, CANDIDATE_MB_TYPE_INTER_I, 0);
+ s->p_field_mv_table[i][j], s->f_code, CANDIDATE_MB_TYPE_INTER_I, !!s->intra_penalty);
}
}
}
for(i=1;i<64;i++){
int j= s->idsp.idct_permutation[ff_zigzag_direct[i]];
- s->intra_matrix[j] = sp5x_quant_table[5*2+0][i];
- s->chroma_intra_matrix[j] = sp5x_quant_table[5*2+1][i];
+ s->intra_matrix[j] = sp5x_qscale_five_quant_table[0][i];
+ s->chroma_intra_matrix[j] = sp5x_qscale_five_quant_table[1][i];
}
s->y_dc_scale_table= y;
s->c_dc_scale_table= c;
s->qscale= 8;
}
+ if (s->out_format == FMT_SPEEDHQ) {
+ s->y_dc_scale_table=
+ s->c_dc_scale_table= ff_mpeg2_dc_scale_table[3];
+ }
+
//FIXME var duplication
s->current_picture_ptr->f->key_frame =
s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I; //FIXME pic_ptr
s->mb_x = s->mb_y = 0;
s->last_bits= put_bits_count(&s->pb);
switch(s->out_format) {
+#if CONFIG_MJPEG_ENCODER || CONFIG_AMV_ENCODER
case FMT_MJPEG:
- if (CONFIG_MJPEG_ENCODER && s->huffman != HUFFMAN_TABLE_OPTIMAL)
+ /* s->huffman == HUFFMAN_TABLE_OPTIMAL can only be true for MJPEG. */
+ if (!CONFIG_MJPEG_ENCODER || s->huffman != HUFFMAN_TABLE_OPTIMAL)
ff_mjpeg_encode_picture_header(s->avctx, &s->pb, &s->intra_scantable,
s->pred, s->intra_matrix, s->chroma_intra_matrix);
break;
+#endif
+ case FMT_SPEEDHQ:
+ if (CONFIG_SPEEDHQ_ENCODER)
+ ff_speedhq_encode_picture_header(s);
+ break;
case FMT_H261:
if (CONFIG_H261_ENCODER)
ff_h261_encode_picture_header(s, picture_number);
s->avctx->execute(s->avctx, encode_thread, &s->thread_context[0], NULL, context_count, sizeof(void*));
for(i=1; i<context_count; i++){
if (s->pb.buf_end == s->thread_context[i]->pb.buf)
- set_put_bits_buffer_size(&s->pb, FFMIN(s->thread_context[i]->pb.buf_end - s->pb.buf, INT_MAX/8-32));
+ set_put_bits_buffer_size(&s->pb, FFMIN(s->thread_context[i]->pb.buf_end - s->pb.buf, INT_MAX/8-BUF_BITS));
merge_context_after_encode(s, s->thread_context[i]);
}
emms_c();
return last_non_zero;
}
-//#define REFINE_STATS 1
static int16_t basis[64][64];
static void build_basis(uint8_t *perm){
uint8_t * last_length;
int lambda;
int rle_index, run, q = 1, sum; //q is only used when s->mb_intra is true
-#ifdef REFINE_STATS
-static int count=0;
-static int after_last=0;
-static int to_zero=0;
-static int from_zero=0;
-static int raise=0;
-static int lower=0;
-static int messed_sign=0;
-#endif
if(basis[0][0] == 0)
build_basis(s->idsp.idct_permutation);
}
last_non_zero = s->block_last_index[n];
-#ifdef REFINE_STATS
-{START_TIMER
-#endif
dc += (1<<(RECON_SHIFT-1));
for(i=0; i<64; i++){
rem[i] = dc - (orig[i] << RECON_SHIFT); // FIXME use orig directly instead of copying to rem[]
}
-#ifdef REFINE_STATS
-STOP_TIMER("memset rem[]")}
-#endif
+
sum=0;
for(i=0; i<64; i++){
int one= 36;
sum += w*w;
}
lambda= sum*(uint64_t)s->lambda2 >> (FF_LAMBDA_SHIFT - 6 + 6 + 6 + 6);
-#ifdef REFINE_STATS
-{START_TIMER
-#endif
+
run=0;
rle_index=0;
for(i=start_i; i<=last_non_zero; i++){
run++;
}
}
-#ifdef REFINE_STATS
-if(last_non_zero>0){
-STOP_TIMER("init rem[]")
-}
-}
-{START_TIMER
-#endif
for(;;){
int best_score = s->mpvencdsp.try_8x8basis(rem, weight, basis[0], 0);
int best_coeff=0;
int best_change=0;
int run2, best_unquant_change=0, analyze_gradient;
-#ifdef REFINE_STATS
-{START_TIMER
-#endif
analyze_gradient = last_non_zero > 2 || s->quantizer_noise_shaping >= 3;
if(analyze_gradient){
-#ifdef REFINE_STATS
-{START_TIMER
-#endif
for(i=0; i<64; i++){
int w= weight[i];
d1[i] = (rem[i]*w*w + (1<<(RECON_SHIFT+12-1)))>>(RECON_SHIFT+12);
}
-#ifdef REFINE_STATS
-STOP_TIMER("rem*w*w")}
-{START_TIMER
-#endif
s->fdsp.fdct(d1);
-#ifdef REFINE_STATS
-STOP_TIMER("dct")}
-#endif
}
if(start_i){
run++;
}
}
-#ifdef REFINE_STATS
-STOP_TIMER("iterative step")}
-#endif
if(best_change){
int j= perm_scantable[ best_coeff ];
if(best_coeff > last_non_zero){
last_non_zero= best_coeff;
av_assert2(block[j]);
-#ifdef REFINE_STATS
-after_last++;
-#endif
}else{
-#ifdef REFINE_STATS
-if(block[j]){
- if(block[j] - best_change){
- if(FFABS(block[j]) > FFABS(block[j] - best_change)){
- raise++;
- }else{
- lower++;
- }
- }else{
- from_zero++;
- }
-}else{
- to_zero++;
-}
-#endif
for(; last_non_zero>=start_i; last_non_zero--){
if(block[perm_scantable[last_non_zero]])
break;
}
}
-#ifdef REFINE_STATS
-count++;
-if(256*256*256*64 % count == 0){
- av_log(s->avctx, AV_LOG_DEBUG, "after_last:%d to_zero:%d from_zero:%d raise:%d lower:%d sign:%d xyp:%d/%d/%d\n", after_last, to_zero, from_zero, raise, lower, messed_sign, s->mb_x, s->mb_y, s->picture_number);
-}
-#endif
+
run=0;
rle_index=0;
for(i=start_i; i<=last_non_zero; i++){
break;
}
}
-#ifdef REFINE_STATS
-if(last_non_zero>0){
-STOP_TIMER("iterative search")
-}
-}
-#endif
return last_non_zero;
}
{ "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
{ "mb_info", "emit macroblock info for RFC 2190 packetization, the parameter value is the maximum payload size", OFFSET(mb_info), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE },
FF_MPV_COMMON_OPTS
+#if FF_API_MPEGVIDEO_OPTS
+ FF_MPV_DEPRECATED_MPEG_QUANT_OPT
+ FF_MPV_DEPRECATED_A53_CC_OPT
+ FF_MPV_DEPRECATED_MATRIX_OPT
+ FF_MPV_DEPRECATED_BFRAME_OPTS
+#endif
{ NULL },
};
.init = ff_mpv_encode_init,
.encode2 = ff_mpv_encode_picture,
.close = ff_mpv_encode_end,
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
.pix_fmts= (const enum AVPixelFormat[]){AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE},
.priv_class = &h263_class,
};
{ "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
{ "structured_slices", "Write slice start position at every GOB header instead of just GOB number.", OFFSET(h263_slice_structured), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE},
FF_MPV_COMMON_OPTS
+#if FF_API_MPEGVIDEO_OPTS
+ FF_MPV_DEPRECATED_MPEG_QUANT_OPT
+ FF_MPV_DEPRECATED_A53_CC_OPT
+ FF_MPV_DEPRECATED_MATRIX_OPT
+ FF_MPV_DEPRECATED_BFRAME_OPTS
+#endif
{ NULL },
};
static const AVClass h263p_class = {
.encode2 = ff_mpv_encode_picture,
.close = ff_mpv_encode_end,
.capabilities = AV_CODEC_CAP_SLICE_THREADS,
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.priv_class = &h263p_class,
};
.init = ff_mpv_encode_init,
.encode2 = ff_mpv_encode_picture,
.close = ff_mpv_encode_end,
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.priv_class = &msmpeg4v2_class,
};
.init = ff_mpv_encode_init,
.encode2 = ff_mpv_encode_picture,
.close = ff_mpv_encode_end,
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.priv_class = &msmpeg4v3_class,
};
.init = ff_mpv_encode_init,
.encode2 = ff_mpv_encode_picture,
.close = ff_mpv_encode_end,
+ .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.priv_class = &wmv1_class,
};