X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fhuffyuvenc.c;h=2882433db518bf832662a324c42c278bea5f9c3e;hb=da6e137cb6e81e2389cf62dca2b1f957862a8498;hp=3662c173ec49bc48ae86b95428d97b6cfdaa4cf9;hpb=bec3b2041dcc20ab4b06a6b31d09130e1a7166c3;p=ffmpeg

diff --git a/libavcodec/huffyuvenc.c b/libavcodec/huffyuvenc.c
index 3662c173ec4..2882433db51 100644
--- a/libavcodec/huffyuvenc.c
+++ b/libavcodec/huffyuvenc.c
@@ -268,7 +268,6 @@ FF_ENABLE_DEPRECATION_WARNINGS
     case AV_PIX_FMT_YUVA420P:
     case AV_PIX_FMT_YUVA422P:
     case AV_PIX_FMT_GBRAP:
-    case AV_PIX_FMT_GRAY8A:
     case AV_PIX_FMT_YUV420P9:
     case AV_PIX_FMT_YUV420P10:
     case AV_PIX_FMT_YUV420P12:
@@ -444,7 +443,7 @@ static int encode_422_bitstream(HYuvContext *s, int offset, int count)
     const uint8_t *u = s->temp[1] + offset / 2;
     const uint8_t *v = s->temp[2] + offset / 2;
 
-    if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 2 * 4 * count) {
+    if (put_bytes_left(&s->pb, 0) < 2 * 4 * count) {
         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
         return -1;
     }
@@ -496,7 +495,7 @@ static int encode_plane_bitstream(HYuvContext *s, int width, int plane)
 {
     int i, count = width/2;
 
-    if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < count * s->bps / 2) {
+    if (put_bytes_left(&s->pb, 0) < count * s->bps / 2) {
         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
         return -1;
     }
@@ -658,7 +657,7 @@ static int encode_gray_bitstream(HYuvContext *s, int count)
 {
     int i;
 
-    if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 4 * count) {
+    if (put_bytes_left(&s->pb, 0) < 4 * count) {
         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
         return -1;
     }
@@ -703,8 +702,7 @@ static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes)
 {
     int i;
 
-    if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) <
-        4 * planes * count) {
+    if (put_bytes_left(&s->pb, 0) < 4 * planes * count) {
         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
         return -1;
     }
@@ -1092,7 +1090,7 @@ AVCodec ff_huffyuv_encoder = {
     .init           = encode_init,
     .encode2        = encode_frame,
     .close          = encode_end,
-    .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
+    .capabilities   = AV_CODEC_CAP_FRAME_THREADS,
     .priv_class     = &normal_class,
     .pix_fmts       = (const enum AVPixelFormat[]){
         AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24,
@@ -1112,7 +1110,7 @@ AVCodec ff_ffvhuff_encoder = {
     .init           = encode_init,
     .encode2        = encode_frame,
     .close          = encode_end,
-    .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
+    .capabilities   = AV_CODEC_CAP_FRAME_THREADS,
     .priv_class     = &ff_class,
     .pix_fmts       = (const enum AVPixelFormat[]){
         AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV411P,
@@ -1122,7 +1120,6 @@ AVCodec ff_ffvhuff_encoder = {
         AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY16,
         AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
         AV_PIX_FMT_GBRAP,
-        AV_PIX_FMT_GRAY8A,
         AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV420P16,
         AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV422P16,
         AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV444P16,
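
Note (not part of the patch): the recurring change above replaces an open-coded computation of the remaining output space with put_bytes_left() from FFmpeg's internal libavcodec/put_bits.h, which the + lines of this diff use alongside the existing put_bits_count(). Below is a minimal sketch of that pattern, assuming it is compiled inside the FFmpeg source tree (put_bits.h is not an installed header); reserve_output_bytes() and the "needed" parameter are hypothetical names used only for illustration.

    /* Illustration only: the buffer-space check adopted by the hunks above. */
    #include "libavcodec/put_bits.h"
    #include "libavutil/log.h"

    static int reserve_output_bytes(PutBitContext *pb, ptrdiff_t needed)
    {
        /* Old form, open-coded against PutBitContext fields:
         *     pb->buf_end - pb->buf - (put_bits_count(pb) >> 3) < needed
         * New form goes through the accessor; the second argument selects
         * whether a partially written byte is rounded up (1) or not (0),
         * and the patch passes 0. */
        if (put_bytes_left(pb, 0) < needed) {
            av_log(NULL, AV_LOG_ERROR, "encoded frame too large\n");
            return -1;
        }
        return 0;
    }

The accessor keeps the encoders from reaching into PutBitContext internals, which is the same check the removed expressions performed by hand.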