#include "libavutil/intmath.h"
#include "libavutil/mathematics.h"
+#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "dsputil.h"
#include "mpegvideo.h"
#include "h263.h"
+#include "mathops.h"
#include "mjpegenc.h"
#include "msmpeg4.h"
#include "faandct.h"
switch (avctx->codec_id) {
case AV_CODEC_ID_MPEG2VIDEO:
- if (avctx->pix_fmt != PIX_FMT_YUV420P &&
- avctx->pix_fmt != PIX_FMT_YUV422P) {
+ if (avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUV422P) {
av_log(avctx, AV_LOG_ERROR,
"only YUV420 and YUV422 are supported\n");
return -1;
}
break;
case AV_CODEC_ID_LJPEG:
- if (avctx->pix_fmt != PIX_FMT_YUVJ420P &&
- avctx->pix_fmt != PIX_FMT_YUVJ422P &&
- avctx->pix_fmt != PIX_FMT_YUVJ444P &&
- avctx->pix_fmt != PIX_FMT_BGRA &&
- ((avctx->pix_fmt != PIX_FMT_YUV420P &&
- avctx->pix_fmt != PIX_FMT_YUV422P &&
- avctx->pix_fmt != PIX_FMT_YUV444P) ||
+ if (avctx->pix_fmt != AV_PIX_FMT_YUVJ420P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUVJ422P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUVJ444P &&
+ avctx->pix_fmt != AV_PIX_FMT_BGRA &&
+ ((avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUV422P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUV444P) ||
avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL)) {
av_log(avctx, AV_LOG_ERROR, "colorspace not supported in LJPEG\n");
return -1;
}
break;
case AV_CODEC_ID_MJPEG:
- if (avctx->pix_fmt != PIX_FMT_YUVJ420P &&
- avctx->pix_fmt != PIX_FMT_YUVJ422P &&
- ((avctx->pix_fmt != PIX_FMT_YUV420P &&
- avctx->pix_fmt != PIX_FMT_YUV422P) ||
+ if (avctx->pix_fmt != AV_PIX_FMT_YUVJ420P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUVJ422P &&
+ ((avctx->pix_fmt != AV_PIX_FMT_YUV420P &&
+ avctx->pix_fmt != AV_PIX_FMT_YUV422P) ||
avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL)) {
av_log(avctx, AV_LOG_ERROR, "colorspace not supported in jpeg\n");
return -1;
}
break;
default:
- if (avctx->pix_fmt != PIX_FMT_YUV420P) {
+ if (avctx->pix_fmt != AV_PIX_FMT_YUV420P) {
av_log(avctx, AV_LOG_ERROR, "only YUV420 is supported\n");
return -1;
}
}
switch (avctx->pix_fmt) {
- case PIX_FMT_YUVJ422P:
- case PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUVJ422P:
+ case AV_PIX_FMT_YUV422P:
s->chroma_format = CHROMA_422;
break;
- case PIX_FMT_YUVJ420P:
- case PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUVJ420P:
+ case AV_PIX_FMT_YUV420P:
default:
s->chroma_format = CHROMA_420;
break;
if (avctx->inter_quant_bias != FF_DEFAULT_QUANT_BIAS)
s->inter_quant_bias = avctx->inter_quant_bias;
- avcodec_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift,
- &chroma_v_shift);
+ av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift,
+ &chroma_v_shift);
if (avctx->codec_id == AV_CODEC_ID_MPEG4 &&
s->avctx->time_base.den > (1 << 16) - 1) {
s->out_format = FMT_MJPEG;
s->intra_only = 1; /* force intra only for jpeg */
if (avctx->codec->id == AV_CODEC_ID_LJPEG &&
- avctx->pix_fmt == PIX_FMT_BGRA) {
+ avctx->pix_fmt == AV_PIX_FMT_BGRA) {
s->mjpeg_vsample[0] = s->mjpeg_hsample[0] =
s->mjpeg_vsample[1] = s->mjpeg_hsample[1] =
s->mjpeg_vsample[2] = s->mjpeg_hsample[2] = 1;
if (ff_MPV_common_init(s) < 0)
return -1;
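+ /* hook in x86 SIMD-optimized encoder routines where available */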
+ if (ARCH_X86)
+ ff_MPV_encode_init_x86(s);
+
if (!s->dct_quantize)
s->dct_quantize = ff_dct_quantize_c;
if (!s->denoise_dct)
if (pic_arg->linesize[2] != s->uvlinesize)
direct = 0;
- //av_log(AV_LOG_DEBUG, "%d %d %d %d\n",pic_arg->linesize[0],
- // pic_arg->linesize[1], s->linesize, s->uvlinesize);
+ av_dlog(s->avctx, "%d %d %d %d\n", pic_arg->linesize[0],
+ pic_arg->linesize[1], s->linesize, s->uvlinesize);
if (direct) {
i = ff_find_unused_picture(s, 1);
// empty
} else {
int h_chroma_shift, v_chroma_shift;
- avcodec_get_chroma_sub_sample(s->avctx->pix_fmt, &h_chroma_shift,
- &v_chroma_shift);
+ av_pix_fmt_get_chroma_sub_sample(s->avctx->pix_fmt,
+ &h_chroma_shift,
+ &v_chroma_shift);
for (i = 0; i < 3; i++) {
int src_stride = pic_arg->linesize[i];
return 0;
}
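+/* Encode one frame with avcodec_encode_video2() (pass NULL to flush delayed
+ * frames) and return the compressed packet size, or a negative error code. */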
+static int encode_frame(AVCodecContext *c, AVFrame *frame)
+{
+ AVPacket pkt = { 0 };
+ int ret, got_output;
+
+ av_init_packet(&pkt);
+ ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
+ if (ret < 0)
+ return ret;
+
+ ret = pkt.size;
+ av_free_packet(&pkt);
+ return ret;
+}
+
static int estimate_best_b_count(MpegEncContext *s)
{
AVCodec *codec = avcodec_find_encoder(s->avctx->codec_id);
AVFrame input[FF_MAX_B_FRAMES + 2];
const int scale = s->avctx->brd_scale;
int i, j, out_size, p_lambda, b_lambda, lambda2;
- int outbuf_size = s->width * s->height; // FIXME
- uint8_t *outbuf = av_malloc(outbuf_size);
int64_t best_rd = INT64_MAX;
int best_b_count = -1;
c->me_cmp = s->avctx->me_cmp;
c->mb_cmp = s->avctx->mb_cmp;
c->me_sub_cmp = s->avctx->me_sub_cmp;
- c->pix_fmt = PIX_FMT_YUV420P;
+ c->pix_fmt = AV_PIX_FMT_YUV420P;
c->time_base = s->avctx->time_base;
c->max_b_frames = s->max_b_frames;
input[0].pict_type = AV_PICTURE_TYPE_I;
input[0].quality = 1 * FF_QP2LAMBDA;
- out_size = avcodec_encode_video(c, outbuf,
- outbuf_size, &input[0]);
+
+ out_size = encode_frame(c, &input[0]);
+
//rd += (out_size * lambda2) >> FF_LAMBDA_SHIFT;
for (i = 0; i < s->max_b_frames + 1; i++) {
input[i + 1].pict_type = is_p ?
AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_B;
input[i + 1].quality = is_p ? p_lambda : b_lambda;
- out_size = avcodec_encode_video(c, outbuf, outbuf_size,
- &input[i + 1]);
+
+ out_size = encode_frame(c, &input[i + 1]);
+
rd += (out_size * lambda2) >> (FF_LAMBDA_SHIFT - 3);
}
/* get the delayed frames */
while (out_size) {
- out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);
+ out_size = encode_frame(c, NULL);
rd += (out_size * lambda2) >> (FF_LAMBDA_SHIFT - 3);
}
}
}
- av_freep(&outbuf);
avcodec_close(c);
av_freep(&c);
if (s->picture_in_gop_number < s->gop_size &&
skip_check(s, s->input_picture[0], s->next_picture_ptr)) {
// FIXME check that the gop check above is +-1 correct
- //av_log(NULL, AV_LOG_DEBUG, "skip %p %"PRId64"\n",
- // s->input_picture[0]->f.data[0],
- // s->input_picture[0]->pts);
-
if (s->input_picture[0]->f.type == FF_BUFFER_TYPE_SHARED) {
for (i = 0; i < 4; i++)
s->input_picture[0]->f.data[i] = NULL;
}
emms_c();
- //static int b_count = 0;
- //b_count += b_frames;
- //av_log(s->avctx, AV_LOG_DEBUG, "b_frames: %d\n", b_count);
for (i = b_frames - 1; i >= 0; i--) {
int type = s->input_picture[i]->f.pict_type;
ff_copy_picture(&s->current_picture, s->current_picture_ptr);
s->picture_number = s->new_picture.f.display_picture_number;
- //printf("dpn:%d\n", s->picture_number);
} else {
memset(&s->new_picture, 0, sizeof(Picture));
}
s->pict_type = s->new_picture.f.pict_type;
//emms_c();
- //printf("qs:%f %f %d\n", s->new_picture.quality,
- // s->current_picture.quality, s->qscale);
ff_MPV_frame_start(s, avctx);
vbv_retry:
if (encode_picture(s, s->picture_number) < 0)
s->time_base = s->last_time_base;
s->last_non_b_time = s->time - s->pp_time;
}
- //av_log(NULL, AV_LOG_ERROR, "R:%d ", s->next_lambda);
for (i = 0; i < context_count; i++) {
PutBitContext *pb = &s->thread_context[i]->pb;
init_put_bits(pb, pb->buf, pb->buf_end - pb->buf);
if (s->mb_info)
av_packet_shrink_side_data(pkt, AV_PKT_DATA_H263_MB_INFO, s->mb_info_size);
} else {
- assert((put_bits_ptr(&s->pb) == s->pb.buf));
s->frame_bits = 0;
}
assert((s->frame_bits & 7) == 0);
if (mb_x * 16 + 16 > s->width || mb_y * 16 + 16 > s->height) {
uint8_t *ebuf = s->edge_emu_buffer + 32;
- s->dsp.emulated_edge_mc(ebuf, ptr_y, wrap_y, 16, 16, mb_x * 16,
- mb_y * 16, s->width, s->height);
+ s->vdsp.emulated_edge_mc(ebuf, ptr_y, wrap_y, 16, 16, mb_x * 16,
+ mb_y * 16, s->width, s->height);
ptr_y = ebuf;
- s->dsp.emulated_edge_mc(ebuf + 18 * wrap_y, ptr_cb, wrap_c, 8,
- mb_block_height, mb_x * 8, mb_y * 8,
- s->width >> 1, s->height >> 1);
+ s->vdsp.emulated_edge_mc(ebuf + 18 * wrap_y, ptr_cb, wrap_c, 8,
+ mb_block_height, mb_x * 8, mb_y * 8,
+ s->width >> 1, s->height >> 1);
ptr_cb = ebuf + 18 * wrap_y;
- s->dsp.emulated_edge_mc(ebuf + 18 * wrap_y + 8, ptr_cr, wrap_c, 8,
- mb_block_height, mb_x * 8, mb_y * 8,
- s->width >> 1, s->height >> 1);
+ s->vdsp.emulated_edge_mc(ebuf + 18 * wrap_y + 8, ptr_cr, wrap_c, 8,
+ mb_block_height, mb_x * 8, mb_y * 8,
+ s->width >> 1, s->height >> 1);
ptr_cr = ebuf + 18 * wrap_y + 8;
}
uint8_t bit_buf2[2][MAX_MB_BYTES];
uint8_t bit_buf_tex[2][MAX_MB_BYTES];
PutBitContext pb[2], pb2[2], tex_pb[2];
-//printf("%d->%d\n", s->resync_mb_y, s->end_mb_y);
ff_check_alignment();
s->first_slice_line = 1;
s->ptr_lastgob = s->pb.buf;
for(mb_y= s->start_mb_y; mb_y < s->end_mb_y; mb_y++) {
-// printf("row %d at %X\n", s->mb_y, (int)s);
s->mb_x=0;
s->mb_y= mb_y;
s->mb_intra= 0;
motion_x= s->mv[0][0][0] = s->b_forw_mv_table[xy][0];
motion_y= s->mv[0][0][1] = s->b_forw_mv_table[xy][1];
-// printf(" %d %d ", motion_x, motion_y);
break;
case CANDIDATE_MB_TYPE_FORWARD_I:
s->mv_dir = MV_DIR_FORWARD;
if(CONFIG_H263_ENCODER && s->out_format == FMT_H263)
ff_h263_loop_filter(s);
}
-//printf("MB %d %d bits\n", s->mb_x+s->mb_y*s->mb_stride, put_bits_count(&s->pb));
+ av_dlog(s->avctx, "MB %d %d bits\n",
+ s->mb_x + s->mb_y * s->mb_stride, put_bits_count(&s->pb));
}
}
//FIXME broken
}else
s->lambda = s->current_picture.f.quality;
-//printf("%d %d\n", s->avctx->global_quality, s->current_picture.quality);
update_qscale(s);
return 0;
}
static int encode_picture(MpegEncContext *s, int picture_number)
{
- int i;
+ int i, ret;
int bits;
int context_count = s->slice_context_count;
s->mb_intra=0; //for the rate distortion & bit compare functions
for(i=1; i<context_count; i++){
- ff_update_duplicate_context(s->thread_context[i], s);
+ ret = ff_update_duplicate_context(s->thread_context[i], s);
+ if (ret < 0)
+ return ret;
}
if(ff_init_me(s)<0)
s->pict_type= AV_PICTURE_TYPE_I;
for(i=0; i<s->mb_stride*s->mb_height; i++)
s->mb_type[i]= CANDIDATE_MB_TYPE_INTRA;
-//printf("Scene change detected, encoding as I Frame %d %d\n", s->current_picture.mb_var_sum, s->current_picture.mc_mb_var_sum);
+ av_dlog(s->avctx, "Scene change detected, encoding as I Frame %d %d\n",
+ s->current_picture.mb_var_sum, s->current_picture.mc_mb_var_sum);
}
if(!s->umvplus){
#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption h263_options[] = {
- { "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE },
- { "structured_slices","Write slice start position at every GOB header instead of just GOB number.", OFFSET(h263_slice_structured), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE},
- { "mb_info", "emit macroblock info for RFC 2190 packetization, the parameter value is the maximum payload size", OFFSET(mb_info), AV_OPT_TYPE_INT, { 0 }, 0, INT_MAX, VE },
+ { "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
+ { "structured_slices","Write slice start position at every GOB header instead of just GOB number.", OFFSET(h263_slice_structured), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE},
+ { "mb_info", "emit macroblock info for RFC 2190 packetization, the parameter value is the maximum payload size", OFFSET(mb_info), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE },
FF_MPV_COMMON_OPTS
{ NULL },
};
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
- .pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
+ .pix_fmts= (const enum AVPixelFormat[]){AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE},
.long_name= NULL_IF_CONFIG_SMALL("H.263 / H.263-1996"),
.priv_class = &h263_class,
};
static const AVOption h263p_options[] = {
- { "umv", "Use unlimited motion vectors.", OFFSET(umvplus), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE },
- { "aiv", "Use alternative inter VLC.", OFFSET(alt_inter_vlc), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE },
- { "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE },
- { "structured_slices", "Write slice start position at every GOB header instead of just GOB number.", OFFSET(h263_slice_structured), AV_OPT_TYPE_INT, { 0 }, 0, 1, VE},
+ { "umv", "Use unlimited motion vectors.", OFFSET(umvplus), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
+ { "aiv", "Use alternative inter VLC.", OFFSET(alt_inter_vlc), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
+ { "obmc", "use overlapped block motion compensation.", OFFSET(obmc), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
+ { "structured_slices", "Write slice start position at every GOB header instead of just GOB number.", OFFSET(h263_slice_structured), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE},
FF_MPV_COMMON_OPTS
{ NULL },
};
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
.capabilities = CODEC_CAP_SLICE_THREADS,
- .pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("H.263+ / H.263-1998 / H.263 version 2"),
.priv_class = &h263p_class,
};
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
- .pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("MPEG-4 part 2 Microsoft variant version 2"),
.priv_class = &msmpeg4v2_class,
};
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
- .pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("MPEG-4 part 2 Microsoft variant version 3"),
.priv_class = &msmpeg4v3_class,
};
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
- .pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 7"),
.priv_class = &wmv1_class,
};