ff_huffyuvencdsp_init(&s->hencdsp);
avctx->extradata = av_mallocz(3*MAX_N + 4);
- if (s->flags&CODEC_FLAG_PASS1) {
+ if (s->flags&AV_CODEC_FLAG_PASS1) {
/* Size of the two-pass stats buffer: 3 planes x MAX_N symbols, each printed
 * as "%llu " (<= 21 chars), plus "\n" terminators and the trailing NUL.
 * Fully parenthesized so the macro stays correct in any expression context
 * (CERT PRE02-C); the numeric value at every existing use site is unchanged. */
#define STATS_OUT_SIZE (21*MAX_N*3 + 4)
avctx->stats_out = av_mallocz(STATS_OUT_SIZE); // 21*256*3(%llu ) + 3(\n) + 1(0) = 16132
if (!avctx->stats_out)
avctx->bits_per_coded_sample = s->bitstream_bpp;
s->decorrelate = s->bitstream_bpp >= 24 && !s->yuv && !(desc->flags & AV_PIX_FMT_FLAG_PLANAR);
s->predictor = avctx->prediction_method;
- s->interlaced = avctx->flags&CODEC_FLAG_INTERLACED_ME ? 1 : 0;
+ s->interlaced = avctx->flags & AV_CODEC_FLAG_INTERLACED_ME ? 1 : 0;
if (avctx->context_model == 1) {
s->context = avctx->context_model;
- if (s->flags & (CODEC_FLAG_PASS1|CODEC_FLAG_PASS2)) {
+ if (s->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2)) {
av_log(avctx, AV_LOG_ERROR,
"context=1 is not compatible with "
"2 pass huffyuv encoding\n");
count /= 2;
- if (s->flags & CODEC_FLAG_PASS1) {
+ if (s->flags & AV_CODEC_FLAG_PASS1) {
for(i = 0; i < count; i++) {
LOAD4;
s->stats[0][y0]++;
s->stats[2][v0]++;
}
}
- if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
+ if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
return 0;
if (s->context) {
for (i = 0; i < count; i++) {
put_bits(&s->pb, 2, y1&3);
if (s->bps <= 8) {
- if (s->flags & CODEC_FLAG_PASS1) {
+ if (s->flags & AV_CODEC_FLAG_PASS1) {
for (i = 0; i < count; i++) {
LOAD2;
STAT2;
STATEND;
}
}
- if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
+ if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
return 0;
if (s->context) {
}
} else if (s->bps <= 14) {
int mask = s->n - 1;
- if (s->flags & CODEC_FLAG_PASS1) {
+ if (s->flags & AV_CODEC_FLAG_PASS1) {
for (i = 0; i < count; i++) {
LOAD2_14;
STAT2;
STATEND;
}
}
- if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
+ if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
return 0;
if (s->context) {
}
}
} else {
- if (s->flags & CODEC_FLAG_PASS1) {
+ if (s->flags & AV_CODEC_FLAG_PASS1) {
for (i = 0; i < count; i++) {
LOAD2_16;
STAT2_16;
STATEND_16;
}
}
- if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
+ if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
return 0;
if (s->context) {
count /= 2;
- if (s->flags & CODEC_FLAG_PASS1) {
+ if (s->flags & AV_CODEC_FLAG_PASS1) {
for (i = 0; i < count; i++) {
LOAD2;
STAT2;
}
}
- if (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)
+ if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
return 0;
if (s->context) {
if (planes == 4) \
put_bits(&s->pb, s->len[2][a], s->bits[2][a]);
- if ((s->flags & CODEC_FLAG_PASS1) &&
- (s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)) {
+ if ((s->flags & AV_CODEC_FLAG_PASS1) &&
+ (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)) {
for (i = 0; i < count; i++) {
LOAD_GBRA;
STAT_BGRA;
}
- } else if (s->context || (s->flags & CODEC_FLAG_PASS1)) {
+ } else if (s->context || (s->flags & AV_CODEC_FLAG_PASS1)) {
for (i = 0; i < count; i++) {
LOAD_GBRA;
STAT_BGRA;
const AVFrame * const p = pict;
int i, j, size = 0, ret;
- if ((ret = ff_alloc_packet2(avctx, pkt, width * height * 3 * 4 + FF_MIN_BUFFER_SIZE)) < 0)
+ if ((ret = ff_alloc_packet2(avctx, pkt, width * height * 3 * 4 + AV_INPUT_BUFFER_MIN_SIZE, 0)) < 0)
return ret;
if (s->context) {
put_bits(&s->pb, 15, 0);
size /= 4;
- if ((s->flags&CODEC_FLAG_PASS1) && (s->picture_number & 31) == 0) {
+ if ((s->flags & AV_CODEC_FLAG_PASS1) && (s->picture_number & 31) == 0) {
int j;
char *p = avctx->stats_out;
char *end = p + STATS_OUT_SIZE;
}
} else if (avctx->stats_out)
avctx->stats_out[0] = '\0';
- if (!(s->avctx->flags2 & CODEC_FLAG2_NO_OUTPUT)) {
+ if (!(s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)) {
flush_put_bits(&s->pb);
s->bdsp.bswap_buf((uint32_t *) pkt->data, (uint32_t *) pkt->data, size);
}
.init = encode_init,
.encode2 = encode_frame,
.close = encode_end,
- .capabilities = CODEC_CAP_FRAME_THREADS | CODEC_CAP_INTRA_ONLY,
+ .capabilities = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
.priv_class = &normal_class,
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24,
.init = encode_init,
.encode2 = encode_frame,
.close = encode_end,
- .capabilities = CODEC_CAP_FRAME_THREADS | CODEC_CAP_INTRA_ONLY,
+ .capabilities = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
.priv_class = &ff_class,
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV411P,