X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;ds=sidebyside;f=libavcodec%2Fpthread.c;h=1dbfe3d221c515b3274a9701e2abe42ce620f17c;hb=42b9150b0d4f0a130c1d93dc991fd5412743a8cf;hp=88d8ade57e32696679c244be28803152bc187aee;hpb=b68c4ac2936c0e407da869a4f0919c26e2c16645;p=ffmpeg

diff --git a/libavcodec/pthread.c b/libavcodec/pthread.c
index 88d8ade57e3..1dbfe3d221c 100644
--- a/libavcodec/pthread.c
+++ b/libavcodec/pthread.c
@@ -52,6 +52,8 @@
 #include "avcodec.h"
 #include "internal.h"
 #include "thread.h"
+#include "libavutil/avassert.h"
+#include "libavutil/common.h"
 
 #if HAVE_PTHREADS
 #include <pthread.h>
@@ -79,9 +81,6 @@ typedef struct ThreadContext {
     int done;
 } ThreadContext;
 
-/// Max number of frame buffers that can be allocated when using frame threads.
-#define MAX_BUFFERS (32+1)
-
 /**
  * Context used by codec threads and stored in their AVCodecContext thread_opaque.
  */
@@ -100,7 +99,8 @@ typedef struct PerThreadContext {
     AVCodecContext *avctx;          ///< Context used to decode packets passed to this thread.
 
     AVPacket       avpkt;           ///< Input packet (for decoding) or output (for encoding).
-    int            allocated_buf_size; ///< Size allocated for avpkt.data
+    uint8_t       *buf;             ///< backup storage for packet data when the input packet is not refcounted
+    int            allocated_buf_size; ///< Size allocated for buf
 
     AVFrame frame;                  ///< Output frame (for decoding) or input (for encoding).
     int     got_frame;              ///< The output of got_picture_ptr from the last avcodec_decode_video() call.
@@ -120,16 +120,12 @@ typedef struct PerThreadContext {
      * Array of frames passed to ff_thread_release_buffer().
      * Frames are released after all threads referencing them are finished.
      */
-    AVFrame released_buffers[MAX_BUFFERS];
-    int     num_released_buffers;
-
-    /**
-     * Array of progress values used by ff_thread_get_buffer().
-     */
-    int     progress[MAX_BUFFERS][2];
-    uint8_t progress_used[MAX_BUFFERS];
+    AVFrame *released_buffers;
+    int      num_released_buffers;
+    int      released_buffers_allocated;
 
     AVFrame *requested_frame;       ///< AVFrame the codec passed to get_buffer()
+    int      requested_flags;       ///< flags passed to get_buffer() for requested_frame
 } PerThreadContext;
 
 /**
@@ -356,7 +352,7 @@ static attribute_align_arg void *frame_worker_thread(void *arg)
     PerThreadContext *p = arg;
     FrameThreadContext *fctx = p->parent;
     AVCodecContext *avctx = p->avctx;
-    AVCodec *codec = avctx->codec;
+    const AVCodec *codec = avctx->codec;
 
     while (1) {
         if (p->state == STATE_INPUT_READY && !fctx->die) {
@@ -376,6 +372,10 @@ static attribute_align_arg void *frame_worker_thread(void *arg)
         p->got_frame = 0;
         p->result = codec->decode(avctx, &p->frame, &p->got_frame, &p->avpkt);
 
+        /* many decoders assign whole AVFrames, thus overwriting extended_data;
+         * make sure it's set correctly */
+        p->frame.extended_data = p->frame.data;
+
         if (p->state == STATE_SETTING_UP) ff_thread_finish_setup(avctx);
 
         p->state = STATE_INPUT_READY;
@@ -428,6 +428,9 @@ static int update_context_from_thread(AVCodecContext *dst, AVCodecContext *src,
         dst->colorspace = src->colorspace;
         dst->color_range = src->color_range;
         dst->chroma_sample_location = src->chroma_sample_location;
+
+        dst->hwaccel = src->hwaccel;
+        dst->hwaccel_context = src->hwaccel_context;
     }
 
     if (for_user) {
@@ -453,8 +456,11 @@ static int update_context_from_user(AVCodecContext *dst, AVCodecContext *src)
     dst->flags          = src->flags;
 
     dst->draw_horiz_band= src->draw_horiz_band;
+    dst->get_buffer2    = src->get_buffer2;
+#if FF_API_GET_BUFFER
     dst->get_buffer     = src->get_buffer;
     dst->release_buffer = src->release_buffer;
+#endif
 
     dst->opaque   = src->opaque;
     dst->debug    = src->debug;
@@ -486,14 +492,6 @@ static int update_context_from_user(AVCodecContext *dst, AVCodecContext *src)
 #undef copy_fields
 }
 
-static void free_progress(AVFrame *f)
-{
-    PerThreadContext *p = f->owner->thread_opaque;
-    int *progress = f->thread_opaque;
-
-    p->progress_used[(progress - p->progress[0]) / 2] = 0;
-}
-
 /// Releases the buffers that this decoding thread was the last user of.
 static void release_delayed_buffers(PerThreadContext *p)
 {
@@ -503,11 +501,13 @@ static void release_delayed_buffers(PerThreadContext *p)
         AVFrame *f;
 
         pthread_mutex_lock(&fctx->buffer_mutex);
+
+        // fix extended data in case the caller screwed it up
+        av_assert0(p->avctx->codec_type == AVMEDIA_TYPE_VIDEO);
         f = &p->released_buffers[--p->num_released_buffers];
-        free_progress(f);
-        f->thread_opaque = NULL;
+        f->extended_data = f->data;
+        av_frame_unref(f);
 
-        f->owner->release_buffer(f->owner, f);
         pthread_mutex_unlock(&fctx->buffer_mutex);
     }
 }
@@ -516,8 +516,7 @@ static int submit_packet(PerThreadContext *p, AVPacket *avpkt)
 {
     FrameThreadContext *fctx = p->parent;
     PerThreadContext *prev_thread = fctx->prev_thread;
-    AVCodec *codec = p->avctx->codec;
-    uint8_t *buf = p->avpkt.data;
+    const AVCodec *codec = p->avctx->codec;
 
     if (!avpkt->size && !(codec->capabilities & CODEC_CAP_DELAY))
         return 0;
@@ -541,11 +540,16 @@ static int submit_packet(PerThreadContext *p, AVPacket *avpkt)
         }
     }
 
-    av_fast_malloc(&buf, &p->allocated_buf_size, avpkt->size + FF_INPUT_BUFFER_PADDING_SIZE);
+    av_buffer_unref(&p->avpkt.buf);
     p->avpkt = *avpkt;
-    p->avpkt.data = buf;
-    memcpy(buf, avpkt->data, avpkt->size);
-    memset(buf + avpkt->size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
+    if (avpkt->buf)
+        p->avpkt.buf = av_buffer_ref(avpkt->buf);
+    else {
+        av_fast_malloc(&p->buf, &p->allocated_buf_size, avpkt->size + FF_INPUT_BUFFER_PADDING_SIZE);
+        p->avpkt.data = p->buf;
+        memcpy(p->buf, avpkt->data, avpkt->size);
+        memset(p->buf + avpkt->size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
+    }
 
     p->state = STATE_SETTING_UP;
     pthread_cond_signal(&p->input_cond);
@@ -557,15 +561,18 @@ static int submit_packet(PerThreadContext *p, AVPacket *avpkt)
      * and it calls back to the client here.
      */
 
-    if (!p->avctx->thread_safe_callbacks &&
-         p->avctx->get_buffer != avcodec_default_get_buffer) {
+    if (!p->avctx->thread_safe_callbacks && (
+#if FF_API_GET_BUFFER
+         p->avctx->get_buffer ||
+#endif
+         p->avctx->get_buffer2 != avcodec_default_get_buffer2)) {
         while (p->state != STATE_SETUP_FINISHED && p->state != STATE_INPUT_READY) {
             pthread_mutex_lock(&p->progress_mutex);
             while (p->state == STATE_SETTING_UP)
                 pthread_cond_wait(&p->progress_cond, &p->progress_mutex);
 
             if (p->state == STATE_GET_BUFFER) {
-                p->result = p->avctx->get_buffer(p->avctx, p->requested_frame);
+                p->result = ff_get_buffer(p->avctx, p->requested_frame, p->requested_flags);
                 p->state  = STATE_SETTING_UP;
                 pthread_cond_signal(&p->progress_cond);
             }
@@ -627,7 +634,7 @@ int ff_thread_decode_frame(AVCodecContext *avctx,
         pthread_mutex_unlock(&p->progress_mutex);
     }
 
-    *picture = p->frame;
+    av_frame_move_ref(picture, &p->frame);
     *got_picture_ptr = p->got_frame;
     picture->pkt_dts = p->avpkt.dts;
 
@@ -652,10 +659,10 @@ int ff_thread_decode_frame(AVCodecContext *avctx,
     return (p->result >= 0) ? avpkt->size : p->result;
 }
 
-void ff_thread_report_progress(AVFrame *f, int n, int field)
+void ff_thread_report_progress(ThreadFrame *f, int n, int field)
 {
     PerThreadContext *p;
-    int *progress = f->thread_opaque;
+    int *progress = f->progress ? (int*)f->progress->data : NULL;
 
     if (!progress || progress[field] >= n)
         return;
@@ -670,10 +677,10 @@ void ff_thread_report_progress(AVFrame *f, int n, int field)
     pthread_mutex_unlock(&p->progress_mutex);
 }
 
-void ff_thread_await_progress(AVFrame *f, int n, int field)
+void ff_thread_await_progress(ThreadFrame *f, int n, int field)
 {
     PerThreadContext *p;
-    int *progress = f->thread_opaque;
+    int *progress = f->progress ? (int*)f->progress->data : NULL;
 
     if (!progress || progress[field] >= n)
         return;
@@ -719,7 +726,7 @@ static void park_frame_worker_threads(FrameThreadContext *fctx, int thread_count
 static void frame_thread_free(AVCodecContext *avctx, int thread_count)
 {
     FrameThreadContext *fctx = avctx->thread_opaque;
-    AVCodec *codec = avctx->codec;
+    const AVCodec *codec = avctx->codec;
     int i;
 
     park_frame_worker_threads(fctx, thread_count);
@@ -745,19 +752,20 @@ static void frame_thread_free(AVCodecContext *avctx, int thread_count)
         avctx->codec = NULL;
 
         release_delayed_buffers(p);
+        av_frame_unref(&p->frame);
     }
 
     for (i = 0; i < thread_count; i++) {
         PerThreadContext *p = &fctx->threads[i];
 
-        avcodec_default_free_buffers(p->avctx);
-
         pthread_mutex_destroy(&p->mutex);
         pthread_mutex_destroy(&p->progress_mutex);
         pthread_cond_destroy(&p->input_cond);
        pthread_cond_destroy(&p->progress_cond);
         pthread_cond_destroy(&p->output_cond);
-        av_freep(&p->avpkt.data);
+        av_buffer_unref(&p->avpkt.buf);
+        av_freep(&p->buf);
+        av_freep(&p->released_buffers);
 
         if (i) {
             av_freep(&p->avctx->priv_data);
@@ -776,7 +784,7 @@ static void frame_thread_free(AVCodecContext *avctx, int thread_count)
 static int frame_thread_init(AVCodecContext *avctx)
 {
     int thread_count = avctx->thread_count;
-    AVCodec *codec = avctx->codec;
+    const AVCodec *codec = avctx->codec;
     AVCodecContext *src = avctx;
     FrameThreadContext *fctx;
     int i, err = 0;
@@ -865,6 +873,7 @@ error:
 
 void ff_thread_flush(AVCodecContext *avctx)
 {
+    int i;
     FrameThreadContext *fctx = avctx->thread_opaque;
 
     if (!avctx->thread_opaque) return;
@@ -880,43 +889,25 @@ void ff_thread_flush(AVCodecContext *avctx)
     fctx->next_decoding = fctx->next_finished = 0;
     fctx->delaying = 1;
     fctx->prev_thread = NULL;
-    for (int i = 0; i < avctx->thread_count; i++) {
+    for (i = 0; i < avctx->thread_count; i++) {
         PerThreadContext *p = &fctx->threads[i];
         // Make sure decode flush calls with size=0 won't return old frames
         p->got_frame = 0;
+        av_frame_unref(&p->frame);
 
         release_delayed_buffers(p);
     }
 }
 
-static int *allocate_progress(PerThreadContext *p)
-{
-    int i;
-
-    for (i = 0; i < MAX_BUFFERS; i++)
-        if (!p->progress_used[i]) break;
-
-    if (i == MAX_BUFFERS) {
-        av_log(p->avctx, AV_LOG_ERROR, "allocate_progress() overflow\n");
-        return NULL;
-    }
-
-    p->progress_used[i] = 1;
-
-    return p->progress[i];
-}
-
-int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
+int ff_thread_get_buffer(AVCodecContext *avctx, ThreadFrame *f, int flags)
 {
     PerThreadContext *p = avctx->thread_opaque;
-    int *progress, err;
+    int err;
 
     f->owner = avctx;
 
-    if (!(avctx->active_thread_type&FF_THREAD_FRAME)) {
-        f->thread_opaque = NULL;
-        return avctx->get_buffer(avctx, f);
-    }
+    if (!(avctx->active_thread_type & FF_THREAD_FRAME))
+        return ff_get_buffer(avctx, f->f, flags);
 
     if (p->state != STATE_SETTING_UP &&
         (avctx->codec->update_thread_context || !avctx->thread_safe_callbacks)) {
@@ -924,22 +915,27 @@ int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
         return -1;
     }
 
-    pthread_mutex_lock(&p->parent->buffer_mutex);
-    f->thread_opaque = progress = allocate_progress(p);
+    if (avctx->internal->allocate_progress) {
+        int *progress;
+        f->progress = av_buffer_alloc(2 * sizeof(int));
+        if (!f->progress) {
+            return AVERROR(ENOMEM);
+        }
+        progress = (int*)f->progress->data;
 
-    if (!progress) {
-        pthread_mutex_unlock(&p->parent->buffer_mutex);
-        return -1;
+        progress[0] = progress[1] = -1;
     }
 
-    progress[0] =
-    progress[1] = -1;
-
-    if (avctx->thread_safe_callbacks ||
-        avctx->get_buffer == avcodec_default_get_buffer) {
-        err = avctx->get_buffer(avctx, f);
+    pthread_mutex_lock(&p->parent->buffer_mutex);
+    if (avctx->thread_safe_callbacks || (
+#if FF_API_GET_BUFFER
+        !avctx->get_buffer &&
+#endif
+        avctx->get_buffer2 == avcodec_default_get_buffer2)) {
+        err = ff_get_buffer(avctx, f->f, flags);
     } else {
-        p->requested_frame = f;
+        p->requested_frame = f->f;
+        p->requested_flags = flags;
         p->state = STATE_GET_BUFFER;
         pthread_mutex_lock(&p->progress_mutex);
         pthread_cond_signal(&p->progress_cond);
@@ -951,42 +947,64 @@ int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
 
         pthread_mutex_unlock(&p->progress_mutex);
 
-        if (!avctx->codec->update_thread_context)
-            ff_thread_finish_setup(avctx);
     }
 
+    if (!avctx->thread_safe_callbacks && !avctx->codec->update_thread_context)
+        ff_thread_finish_setup(avctx);
+
+    if (err)
+        av_buffer_unref(&f->progress);
 
-    if (err) {
-        free_progress(f);
-        f->thread_opaque = NULL;
-    }
     pthread_mutex_unlock(&p->parent->buffer_mutex);
 
     return err;
 }
 
-void ff_thread_release_buffer(AVCodecContext *avctx, AVFrame *f)
+void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
 {
     PerThreadContext *p = avctx->thread_opaque;
     FrameThreadContext *fctx;
+    AVFrame *dst, *tmp;
+    int can_direct_free = !(avctx->active_thread_type & FF_THREAD_FRAME) ||
+                          avctx->thread_safe_callbacks                   ||
+                          (
+#if FF_API_GET_BUFFER
+                           !avctx->get_buffer &&
+#endif
+                           avctx->get_buffer2 == avcodec_default_get_buffer2);
 
-    if (!(avctx->active_thread_type&FF_THREAD_FRAME)) {
-        avctx->release_buffer(avctx, f);
+    if (!f->f->data[0])
         return;
-    }
 
-    if (p->num_released_buffers >= MAX_BUFFERS) {
-        av_log(p->avctx, AV_LOG_ERROR, "too many thread_release_buffer calls!\n");
+    if (avctx->debug & FF_DEBUG_BUFFERS)
+        av_log(avctx, AV_LOG_DEBUG, "thread_release_buffer called on pic %p\n", f);
+
+    av_buffer_unref(&f->progress);
+    f->owner    = NULL;
+
+    if (can_direct_free) {
+        av_frame_unref(f->f);
        return;
     }
 
-    if(avctx->debug & FF_DEBUG_BUFFERS)
-        av_log(avctx, AV_LOG_DEBUG, "thread_release_buffer called on pic %p\n", f);
-
     fctx = p->parent;
     pthread_mutex_lock(&fctx->buffer_mutex);
-    p->released_buffers[p->num_released_buffers++] = *f;
+
+    if (p->num_released_buffers + 1 >= INT_MAX / sizeof(*p->released_buffers))
+        goto fail;
+    tmp = av_fast_realloc(p->released_buffers, &p->released_buffers_allocated,
+                          (p->num_released_buffers + 1) *
+                          sizeof(*p->released_buffers));
+    if (!tmp)
+        goto fail;
+    p->released_buffers = tmp;
+
+    dst = &p->released_buffers[p->num_released_buffers];
+    av_frame_move_ref(dst, f->f);
+
+    p->num_released_buffers++;
+
+fail:
     pthread_mutex_unlock(&fctx->buffer_mutex);
-    memset(f->data, 0, sizeof(f->data));
 }
 
 /**
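
Usage sketch (editor's illustration, not part of the commit above): the hunks replace the old AVFrame/thread_opaque progress scheme with a ThreadFrame whose progress counters live in a refcounted AVBufferRef, and route allocation through ff_get_buffer()/get_buffer2 with explicit flags. A frame-threaded decoder written against this API would be expected to follow roughly the pattern below. MyContext, my_decode_frame(), the 16-pixel row granularity and field index 0 are hypothetical, and both ThreadFrame.f pointers are assumed to already point at valid AVFrames owned by the decoder.

#include <limits.h>   /* INT_MAX marks "fully decoded" */
#include "avcodec.h"  /* AV_GET_BUFFER_FLAG_REF */
#include "thread.h"   /* ThreadFrame, ff_thread_get_buffer(), progress calls */

typedef struct MyContext {
    ThreadFrame cur;  /* frame being decoded by this thread */
    ThreadFrame ref;  /* reference frame owned by another frame thread */
} MyContext;

static int my_decode_frame(AVCodecContext *avctx, MyContext *s)
{
    int row, ret;

    /* Allocates s->cur.f via ff_get_buffer()/get_buffer2 and, for codecs that
     * requested it (allocate_progress above), attaches an AVBufferRef holding
     * the two per-field progress counters initialized to -1. */
    ret = ff_thread_get_buffer(avctx, &s->cur, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    for (row = 0; row < avctx->height; row += 16) {
        /* Block until the thread owning the reference has decoded at least
         * up to this row (field 0 for progressive content). */
        ff_thread_await_progress(&s->ref, row, 0);

        /* ... reconstruct this row of macroblocks using s->ref.f ... */

        /* Publish our own progress so threads referencing s->cur can continue. */
        ff_thread_report_progress(&s->cur, row, 0);
    }

    /* Mark the frame as completely decoded. */
    ff_thread_report_progress(&s->cur, INT_MAX, 0);
    return 0;
}

When the decoder no longer needs the frame it hands it back through ff_thread_release_buffer(avctx, &s->cur), which after this change either unrefs it directly or queues it on the dynamically grown released_buffers array shown in the diff.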