From 11b2ae4ae768b87a4af6f0f50ba39bd75133dbb4 Mon Sep 17 00:00:00 2001
From: "Steinar H. Gunderson"
Date: Sun, 4 Apr 2021 11:55:23 +0200
Subject: [PATCH] Add a --no-transcode-video flag to Kaeru.

This is useful primarily if you want Kaeru to rewrap the stream into
Metacube (for cubemap) and do nothing else with it. Only H.264 is
supported for now, since everything else assumes that. Currently, we
only really support --http-mux=mpegts; other muxes seem to have issues.
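
An example invocation could look something like this (hypothetical; the
input URL is only a placeholder, and apart from --no-transcode-video,
the flags shown already exist):

  kaeru --http-mux=mpegts --no-transcode-video http://example.com/input.ts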
---
 nageru/ffmpeg_capture.cpp | 19 +++++++++++++------
 nageru/ffmpeg_capture.h   | 13 +++++++++----
 nageru/flags.cpp          |  7 +++++++
 nageru/flags.h            |  1 +
 nageru/kaeru.cpp          | 16 +++++++++++-----
 5 files changed, 41 insertions(+), 15 deletions(-)

diff --git a/nageru/ffmpeg_capture.cpp b/nageru/ffmpeg_capture.cpp
index a8b6f61..0a328e0 100644
--- a/nageru/ffmpeg_capture.cpp
+++ b/nageru/ffmpeg_capture.cpp
@@ -438,9 +438,11 @@ void FFmpegCapture::send_disconnected_frame()
 			memset(video_frame.data + frame_width * frame_height, 128, frame_width * frame_height);  // Valid for both NV12 and planar.
 		}
 
-		frame_callback(-1, AVRational{1, TIMEBASE}, -1, AVRational{1, TIMEBASE}, timecode++,
-			video_frame, /*video_offset=*/0, video_format,
-			FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
+		if (frame_callback != nullptr) {
+			frame_callback(-1, AVRational{1, TIMEBASE}, -1, AVRational{1, TIMEBASE}, timecode++,
+				video_frame, /*video_offset=*/0, video_format,
+				FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
+		}
 		last_frame_was_connected = false;
 	}
 
@@ -734,9 +736,11 @@ bool FFmpegCapture::play_video(const string &pathname)
 			timecode += MAX_FPS * 2 + 1;
 		}
 		last_neutral_color = get_neutral_color(frame->metadata);
-		frame_callback(frame->pts, video_timebase, audio_pts, audio_timebase, timecode++,
-			video_frame.get_and_release(), 0, video_format,
-			audio_frame.get_and_release(), 0, audio_format);
+		if (frame_callback != nullptr) {
+			frame_callback(frame->pts, video_timebase, audio_pts, audio_timebase, timecode++,
+				video_frame.get_and_release(), 0, video_format,
+				audio_frame.get_and_release(), 0, audio_format);
+		}
 		first_frame = false;
 		last_frame = steady_clock::now();
 		last_frame_was_connected = true;
@@ -841,6 +845,9 @@ AVFrameWithDeleter FFmpegCapture::decode_frame(AVFormatContext *format_ctx, AVCo
 		if (pkt.stream_index == audio_stream_index && audio_callback != nullptr) {
 			audio_callback(&pkt, format_ctx->streams[audio_stream_index]->time_base);
 		}
+		if (pkt.stream_index == video_stream_index && video_callback != nullptr) {
+			video_callback(&pkt, format_ctx->streams[video_stream_index]->time_base);
+		}
 		if (pkt.stream_index == video_stream_index) {
 			if (avcodec_send_packet(video_codec_ctx, &pkt) < 0) {
 				fprintf(stderr, "%s: Cannot send packet to video codec.\n", pathname.c_str());
diff --git a/nageru/ffmpeg_capture.h b/nageru/ffmpeg_capture.h
index 1fc8f71..2ab9481 100644
--- a/nageru/ffmpeg_capture.h
+++ b/nageru/ffmpeg_capture.h
@@ -159,9 +159,13 @@ public:
 			  std::placeholders::_11);
 	}
 
-	// FFmpegCapture-specific callback that gives the raw audio.
-	typedef std::function<void(const AVPacket *pkt, const AVRational timebase)> audio_callback_t;
-	void set_audio_callback(audio_callback_t callback)
+	// FFmpegCapture-specific callback that gives the raw audio/video.
+	typedef std::function<void(const AVPacket *pkt, const AVRational timebase)> packet_callback_t;
+	void set_video_callback(packet_callback_t callback)
+	{
+		video_callback = callback;
+	}
+	void set_audio_callback(packet_callback_t callback)
 	{
 		audio_callback = callback;
 	}
@@ -297,7 +301,8 @@ private:
 	std::unique_ptr<FrameAllocator> owned_video_frame_allocator;
 	std::unique_ptr<FrameAllocator> owned_audio_frame_allocator;
 	frame_callback_t frame_callback = nullptr;
-	audio_callback_t audio_callback = nullptr;
+	packet_callback_t video_callback = nullptr;
+	packet_callback_t audio_callback = nullptr;
 	SwsContextWithDeleter sws_ctx;
 	int sws_last_width = -1, sws_last_height = -1, sws_last_src_format = -1;
diff --git a/nageru/flags.cpp b/nageru/flags.cpp
index 402e33d..769263b 100644
--- a/nageru/flags.cpp
+++ b/nageru/flags.cpp
@@ -40,6 +40,7 @@ enum LongOption {
 	OPTION_HTTP_PORT,
 	OPTION_SRT_PORT,
 	OPTION_NO_SRT,
+	OPTION_NO_TRANSCODE_VIDEO,
 	OPTION_NO_TRANSCODE_AUDIO,
 	OPTION_DISABLE_AUDIO,
 	OPTION_FLAT_AUDIO,
@@ -178,6 +179,8 @@ void usage(Program program)
 	fprintf(stderr, "                                  (default is %d)\n", DEFAULT_SRT_PORT);
 	fprintf(stderr, "      --no-srt                    disable receiving SRT streams\n");
 	if (program == PROGRAM_KAERU) {
+		fprintf(stderr, "      --no-transcode-video        copy encoded video raw from the source stream\n");
+		fprintf(stderr, "                                    (experimental, must be H.264)\n");
 		fprintf(stderr, "      --no-transcode-audio        copy encoded audio raw from the source stream\n");
 		fprintf(stderr, "                                    (requires --http-audio-codec= to be set)\n");
 		fprintf(stderr, "      --disable-audio             do not include any audio in the stream\n");
@@ -266,6 +269,7 @@ void parse_flags(Program program, int argc, char * const argv[])
 		{ "http-port", required_argument, 0, OPTION_HTTP_PORT },
 		{ "srt-port", required_argument, 0, OPTION_SRT_PORT },
 		{ "no-srt", no_argument, 0, OPTION_NO_SRT },
+		{ "no-transcode-video", no_argument, 0, OPTION_NO_TRANSCODE_VIDEO },
 		{ "no-transcode-audio", no_argument, 0, OPTION_NO_TRANSCODE_AUDIO },
 		{ "disable-audio", no_argument, 0, OPTION_DISABLE_AUDIO },
 		{ "flat-audio", no_argument, 0, OPTION_FLAT_AUDIO },
@@ -392,6 +396,9 @@ void parse_flags(Program program, int argc, char * const argv[])
 		case OPTION_NO_SRT:
 			global_flags.srt_port = -1;
 			break;
+		case OPTION_NO_TRANSCODE_VIDEO:
+			global_flags.transcode_video = false;
+			break;
 		case OPTION_NO_TRANSCODE_AUDIO:
 			global_flags.transcode_audio = false;
 			break;
diff --git a/nageru/flags.h b/nageru/flags.h
index bbe84c2..dc9c585 100644
--- a/nageru/flags.h
+++ b/nageru/flags.h
@@ -66,6 +66,7 @@ struct Flags {
 	bool ten_bit_input = false;
 	bool ten_bit_output = false;  // Implies x264_video_to_disk == true and x264_bit_depth == 10.
 	YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS];
+	bool transcode_video = true;  // Kaeru only.
 	bool transcode_audio = true;  // Kaeru only.
 	bool enable_audio = true;  // Kaeru only. If false, then transcode_audio is also false.
 	int x264_bit_depth = 8;  // Not user-settable.
diff --git a/nageru/kaeru.cpp b/nageru/kaeru.cpp
index e4fba7f..9dc3992 100644
--- a/nageru/kaeru.cpp
+++ b/nageru/kaeru.cpp
@@ -138,9 +138,9 @@ void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, Audio
 	}
 }
 
-void audio_frame_callback(Mux *mux, const AVPacket *pkt, AVRational timebase)
+void raw_packet_callback(Mux *mux, int stream_index, const AVPacket *pkt, AVRational timebase)
 {
-	mux->add_packet(*pkt, pkt->pts, pkt->dts == AV_NOPTS_VALUE ? pkt->pts : pkt->dts, timebase, /*stream_index=*/1);
+	mux->add_packet(*pkt, pkt->pts, pkt->dts == AV_NOPTS_VALUE ? pkt->pts : pkt->dts, timebase, stream_index);
 }
 
 void adjust_bitrate(int signal)
@@ -205,14 +205,20 @@ int main(int argc, char *argv[])
 	if (global_flags.transcode_audio) {
 		audio_encoder->add_mux(http_mux.get());
 	}
-	x264_encoder->add_mux(http_mux.get());
+	if (global_flags.transcode_video) {
+		x264_encoder->add_mux(http_mux.get());
+	}
 	global_x264_encoder = x264_encoder.get();
 
 	FFmpegCapture video(argv[optind], global_flags.width, global_flags.height);
 	video.set_pixel_format(FFmpegCapture::PixelFormat_NV12);
-	video.set_frame_callback(bind(video_frame_callback, &video, x264_encoder.get(), audio_encoder.get(), _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11));
+	if (global_flags.transcode_video) {
+		video.set_frame_callback(bind(video_frame_callback, &video, x264_encoder.get(), audio_encoder.get(), _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11));
+	} else {
+		video.set_video_callback(bind(raw_packet_callback, http_mux.get(), /*stream_index=*/0, _1, _2));
+	}
 	if (!global_flags.transcode_audio && global_flags.enable_audio) {
-		video.set_audio_callback(bind(audio_frame_callback, http_mux.get(), _1, _2));
+		video.set_audio_callback(bind(raw_packet_callback, http_mux.get(), /*stream_index=*/1, _1, _2));
 	}
 	video.configure_card();
 	video.start_bm_capture();
-- 
2.39.2