X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=nageru%2Fffmpeg_capture.cpp;h=984ce5e8b8add69da4b0a9fd8a68d232c7ab6b41;hb=refs%2Fheads%2Fkaeru-cef-overlay-hack;hp=a667d4f5b033aca8f5393d90f0f73bee31a87e4d;hpb=4148366592acbd18be713b0ed333dabb2196f90d;p=nageru

diff --git a/nageru/ffmpeg_capture.cpp b/nageru/ffmpeg_capture.cpp
index a667d4f..984ce5e 100644
--- a/nageru/ffmpeg_capture.cpp
+++ b/nageru/ffmpeg_capture.cpp
@@ -258,8 +258,9 @@ RGBTriplet get_neutral_color(AVDictionary *metadata)
 }  // namespace
 
 FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height)
-	: filename(filename), width(width), height(height), video_timebase{1, 1}
+	: width(width), height(height), video_timebase{1, 1}
 {
+	filenames.push_back(filename);
 	description = "Video: " + filename;
 
 	last_frame = steady_clock::now();
@@ -267,6 +268,17 @@ FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned he
 	avformat_network_init();  // In case someone wants this.
 }
 
+FFmpegCapture::FFmpegCapture(const std::vector<std::string> &filenames, unsigned width, unsigned height)
+	: filenames(filenames), width(width), height(height), video_timebase{1, 1}
+{
+	description = "Video: " + filenames[0];
+
+	last_frame = steady_clock::now();
+	play_once = true;
+
+	avformat_network_init();  // In case someone wants this.
+}
+
 #ifdef HAVE_SRT
 FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id)
 	: srt_sock(srt_sock),
@@ -357,11 +369,14 @@ void FFmpegCapture::producer_thread_func()
 	snprintf(thread_name, sizeof(thread_name), "FFmpeg_C_%d", card_index);
 	pthread_setname_np(pthread_self(), thread_name);
 
+	printf("CAP\n");
 	while (!producer_thread_should_quit.should_quit()) {
 		string filename_copy;
+		printf("CAP %zu LEFT\n", filenames.size());
 		{
 			lock_guard<mutex> lock(filename_mu);
-			filename_copy = filename;
+			filename_copy = filenames.front();
+			filenames.erase(filenames.begin());
 		}
 
 		string pathname;
@@ -370,6 +385,7 @@
 		} else {
 			pathname = description;
 		}
+		printf("CAP %s\n", pathname.c_str());
 		if (pathname.empty()) {
 			send_disconnected_frame();
 			if (play_once) {
@@ -391,7 +407,7 @@
 			continue;
 		}
 
-		if (play_once) {
+		if (play_once && filenames.empty()) {
 			send_disconnected_frame();
 			break;
 		}
@@ -438,9 +454,11 @@
 			memset(video_frame.data + frame_width * frame_height, 128, frame_width * frame_height);  // Valid for both NV12 and planar.
 		}
 
-		frame_callback(-1, AVRational{1, TIMEBASE}, -1, AVRational{1, TIMEBASE}, timecode++,
-			video_frame, /*video_offset=*/0, video_format,
-			FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
+		if (frame_callback != nullptr) {
+			frame_callback(-1, AVRational{1, TIMEBASE}, -1, AVRational{1, TIMEBASE}, timecode++,
+				video_frame, /*video_offset=*/0, video_format,
+				FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
+		}
 		last_frame_was_connected = false;
 	}
 
@@ -498,7 +516,7 @@ bool FFmpegCapture::play_video(const string &pathname)
 	} else {
 #ifdef HAVE_SRT
 		// SRT socket, already opened.
-		AVInputFormat *mpegts_fmt = av_find_input_format("mpegts");
+		const AVInputFormat *mpegts_fmt = av_find_input_format("mpegts");
 		format_ctx = avformat_open_input_unique(&FFmpegCapture::read_srt_thunk, this,
 			mpegts_fmt, /*options=*/nullptr,
 			AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
@@ -528,7 +546,7 @@ bool FFmpegCapture::play_video(const string &pathname)
 
 	// Open video decoder.
 	const AVCodecParameters *video_codecpar = format_ctx->streams[video_stream_index]->codecpar;
-	AVCodec *video_codec = avcodec_find_decoder(video_codecpar->codec_id);
+	const AVCodec *video_codec = avcodec_find_decoder(video_codecpar->codec_id);
 
 	video_timebase = format_ctx->streams[video_stream_index]->time_base;
 	AVCodecContextWithDeleter video_codec_ctx = avcodec_alloc_context3_unique(nullptr);
@@ -545,7 +563,7 @@ bool FFmpegCapture::play_video(const string &pathname)
 	// “whatever goes”, so we don't get VDPAU or CUDA here without enumerating
 	// through several different types. VA-API will do for now.
 	AVBufferRef *hw_device_ctx = nullptr;
-	if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0) {
+	if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, nullptr, nullptr, 0) < 0 || true) {
 		fprintf(stderr, "Failed to initialize VA-API for FFmpeg acceleration. Decoding video in software.\n");
 	} else {
 		video_codec_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
@@ -572,7 +590,7 @@ bool FFmpegCapture::play_video(const string &pathname)
 			fprintf(stderr, "%s: Cannot fill audio codec parameters\n", pathname.c_str());
 			return false;
 		}
-		AVCodec *audio_codec = avcodec_find_decoder(audio_codecpar->codec_id);
+		const AVCodec *audio_codec = avcodec_find_decoder(audio_codecpar->codec_id);
 		if (audio_codec == nullptr) {
 			fprintf(stderr, "%s: Cannot find audio decoder\n", pathname.c_str());
 			return false;
@@ -614,7 +632,7 @@ bool FFmpegCapture::play_video(const string &pathname)
 			// so don't try).
 			return true;
 		}
-		if (av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
+		if (true || av_seek_frame(format_ctx.get(), /*stream_index=*/-1, /*timestamp=*/0, /*flags=*/0) < 0) {
 			fprintf(stderr, "%s: Rewind failed, not looping.\n", pathname.c_str());
 			return true;
 		}
@@ -734,9 +752,11 @@ bool FFmpegCapture::play_video(const string &pathname)
 				timecode += MAX_FPS * 2 + 1;
 			}
 			last_neutral_color = get_neutral_color(frame->metadata);
-			frame_callback(frame->pts, video_timebase, audio_pts, audio_timebase, timecode++,
-				video_frame.get_and_release(), 0, video_format,
-				audio_frame.get_and_release(), 0, audio_format);
+			if (frame_callback != nullptr) {
+				frame_callback(frame->pts, video_timebase, audio_pts, audio_timebase, timecode++,
+					video_frame.get_and_release(), 0, video_format,
+					audio_frame.get_and_release(), 0, audio_format);
+			}
 			first_frame = false;
 			last_frame = steady_clock::now();
 			last_frame_was_connected = true;
@@ -841,13 +861,16 @@ AVFrameWithDeleter FFmpegCapture::decode_frame(AVFormatContext *format_ctx, AVCo
 			if (pkt.stream_index == audio_stream_index && audio_callback != nullptr) {
 				audio_callback(&pkt, format_ctx->streams[audio_stream_index]->time_base);
 			}
-			if (pkt.stream_index == video_stream_index) {
+			if (pkt.stream_index == video_stream_index && video_callback != nullptr) {
+				video_callback(&pkt, format_ctx->streams[video_stream_index]->time_base);
+			}
+			if (pkt.stream_index == video_stream_index && global_flags.transcode_video) {
 				if (avcodec_send_packet(video_codec_ctx, &pkt) < 0) {
 					fprintf(stderr, "%s: Cannot send packet to video codec.\n", pathname.c_str());
 					*error = true;
 					return AVFrameWithDeleter(nullptr);
 				}
-			} else if (pkt.stream_index == audio_stream_index) {
+			} else if (pkt.stream_index == audio_stream_index && global_flags.transcode_audio) {
 				has_audio = true;
 				if (avcodec_send_packet(audio_codec_ctx, &pkt) < 0) {
 					fprintf(stderr, "%s: Cannot send packet to audio codec.\n", pathname.c_str());
@@ -895,6 +918,8 @@ AVFrameWithDeleter FFmpegCapture::decode_frame(AVFormatContext *format_ctx, AVCo
 				*error = true;
 				return AVFrameWithDeleter(nullptr);
 			}
+			sw_frame->pts = video_avframe->pts;
+			sw_frame->pkt_duration = video_avframe->pkt_duration;
 			video_avframe = move(sw_frame);
 		}
 		frame_finished = true;
@@ -1107,7 +1132,7 @@ unsigned FFmpegCapture::frame_height(const AVFrame *frame) const
 	if (height == 0) {
 		return frame->height;
 	} else {
-		return width;
+		return height;
 	}
 }
 
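
The functional core of this patch is that FFmpegCapture can now be handed a playlist rather than a single file: the added constructor stores a vector of filenames, producer_thread_func() pops the front entry under filename_mu on each iteration, and the final disconnected frame is only sent once play_once is set and the list has run dry. The standalone sketch below illustrates that drain-the-playlist pattern in isolation; it is not Nageru code, and the Playlist/play_next names are invented here purely for illustration.

// Standalone sketch (not Nageru code): play each queued file once, in order,
// then signal "no more input" exactly once when the queue is empty.
#include <cstdio>
#include <mutex>
#include <string>
#include <utility>
#include <vector>

class Playlist {
public:
	explicit Playlist(std::vector<std::string> filenames)
		: filenames(std::move(filenames)) {}

	// Plays (here: just prints) the next queued file.
	// Returns false when everything has been played.
	bool play_next()
	{
		std::string filename;
		{
			std::lock_guard<std::mutex> lock(mu);
			if (filenames.empty()) {
				return false;
			}
			filename = filenames.front();
			filenames.erase(filenames.begin());
		}
		printf("Playing %s\n", filename.c_str());  // Stand-in for play_video().
		return true;
	}

private:
	std::mutex mu;
	std::vector<std::string> filenames;  // Guarded by mu.
};

int main()
{
	Playlist playlist({"a.mp4", "b.mp4", "c.mp4"});
	while (playlist.play_next()) {
		// One file played to completion per iteration (play_once semantics).
	}
	printf("Playlist exhausted; would send a disconnected frame here.\n");
}

In the patch itself the same idea rides on the existing play_once flag: each pass through the capture loop consumes one filename, and send_disconnected_frame() is only reached once play_once is set and filenames is empty.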