Release Nageru and Futatabi 1.9.1.
diff --git a/nageru/kaeru.cpp b/nageru/kaeru.cpp
index 10f1e9394b2a5c0ddab4e42a226b0ff7af4ccc73..d9b1e0e0a2d2424433c783068f6aa441c7c9717b 100644
--- a/nageru/kaeru.cpp
+++ b/nageru/kaeru.cpp
@@ -6,9 +6,9 @@
 #include "flags.h"
 #include "ffmpeg_capture.h"
 #include "mixer.h"
-#include "mux.h"
+#include "shared/mux.h"
 #include "quittable_sleeper.h"
-#include "timebase.h"
+#include "shared/timebase.h"
 #include "x264_encoder.h"
 
 #include <assert.h>
@@ -31,6 +31,8 @@ BasicStats *global_basic_stats = nullptr;
 QuittableSleeper should_quit;
 MuxMetrics stream_mux_metrics;
 
+namespace {
+
 int write_packet(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
 {
        static bool seen_sync_markers = false;
@@ -47,13 +49,15 @@ int write_packet(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType ty
 
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
-               httpd->set_header(stream_mux_header);
+               httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
        } else {
-               httpd->add_data((char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+               httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
 }
 
+}  // namespace
+
 unique_ptr<Mux> create_mux(HTTPD *httpd, AVOutputFormat *oformat, X264Encoder *x264_encoder, AudioEncoder *audio_encoder)
 {
        AVFormatContext *avctx = avformat_alloc_context();
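
The write_packet() function above matches the signature FFmpeg expects for
AVIOContext::write_data_type, and the new HTTPD::MAIN_STREAM argument tags the muxed
bytes for the main stream now that HTTPD can carry more than one. Below is a minimal
sketch of how such a marker-aware callback is typically attached to the
AVFormatContext; the buffer size, the helper name, and the exact wiring inside
create_mux() are assumptions, since they are not shown in this hunk:

    extern "C" {
    #include <libavformat/avformat.h>
    #include <libavformat/avio.h>
    }

    // Sketch only: hand the muxer a custom AVIO context whose marker-typed write
    // hook is the write_packet() function above (it uses the global httpd, so the
    // opaque pointer stays unused).
    AVFormatContext *make_http_avctx(AVOutputFormat *oformat)
    {
        constexpr int MUX_BUFFER_SIZE = 10 << 20;  // hypothetical 10 MB mux buffer
        AVFormatContext *avctx = avformat_alloc_context();
        avctx->oformat = oformat;

        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
        avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, /*write_flag=*/1,
                                       /*opaque=*/nullptr, nullptr, nullptr, nullptr);
        avctx->pb->write_data_type = &write_packet;  // used instead of a plain write callback when set
        avctx->pb->ignore_boundary_point = 1;        // no separate call for every BOUNDARY_POINT marker
        avctx->flags = AVFMT_FLAG_CUSTOM_IO;
        return avctx;
    }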
@@ -67,8 +71,14 @@ unique_ptr<Mux> create_mux(HTTPD *httpd, AVOutputFormat *oformat, X264Encoder *x
 
        string video_extradata = x264_encoder->get_global_headers();
 
+       // If audio is disabled (i.e., we won't ever see any audio packets),
+       // set nullptr here to also not include the stream in the mux.
+       AVCodecParameters *audio_codecpar =
+               global_flags.enable_audio ? audio_encoder->get_codec_parameters().release() : nullptr;
+
        unique_ptr<Mux> mux;
-       mux.reset(new Mux(avctx, global_flags.width, global_flags.height, Mux::CODEC_H264, video_extradata, audio_encoder->get_codec_parameters().get(), COARSE_TIMEBASE,
+       mux.reset(new Mux(avctx, global_flags.width, global_flags.height, Mux::CODEC_H264, video_extradata, audio_codecpar,
+               get_color_space(global_flags.ycbcr_rec709_coefficients), COARSE_TIMEBASE,
                /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, { &stream_mux_metrics }));
        stream_mux_metrics.init({{ "destination", "http" }});
        return mux;
@@ -86,7 +96,7 @@ void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, Audio
                ts.ts.push_back(steady_clock::now());
 
                video_pts = av_rescale_q(video_pts, video_timebase, AVRational{ 1, TIMEBASE });
-               int64_t frame_duration = TIMEBASE * video_format.frame_rate_den / video_format.frame_rate_nom;
+               int64_t frame_duration = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
                x264_encoder->add_frame(video_pts, frame_duration, video->get_current_frame_ycbcr_format().luma_coefficients, video_frame.data + video_offset, ts);
                global_basic_stats->update(frame_num++, /*dropped_frames=*/0);
        }
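
The new int64_t() cast matters because TIMEBASE and video_format.frame_rate_den are
both plain ints, so the old expression did the multiplication in 32-bit arithmetic and
could overflow before the division. A small self-contained illustration follows; the
TIMEBASE value and the frame-rate terms are made up, only the promotion is the point:

    #include <stdint.h>
    #include <stdio.h>

    int main()
    {
        const int TIMEBASE = 120000;  // assumed magnitude of the real constant
        // 29.97 fps reported with unreduced terms, as some streams do.
        int frame_rate_nom = 30000000, frame_rate_den = 1001000;

        // Old expression: int * int, and the product (about 1.2e11) does not fit
        // in 32 bits. Unsigned arithmetic is used here so the demo itself avoids
        // undefined behavior; the wrapped result is nonsense either way.
        int32_t wrapped = int32_t(uint32_t(TIMEBASE) * uint32_t(frame_rate_den)) / frame_rate_nom;

        // Patched expression: promote to 64 bits before multiplying.
        int64_t correct = int64_t(TIMEBASE) * frame_rate_den / frame_rate_nom;

        printf("wrapped=%d correct=%lld\n", wrapped, (long long)correct);
        // correct is 4004, i.e. one 29.97 fps frame duration at a 120000 timebase.
        return 0;
    }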
@@ -99,17 +109,18 @@ void video_frame_callback(FFmpegCapture *video, X264Encoder *x264_encoder, Audio
                size_t num_samples = audio_frame.len / (audio_format.bits_per_sample / 8);
                vector<float> float_samples;
                float_samples.resize(num_samples);
+
                if (audio_format.bits_per_sample == 16) {
                        const int16_t *src = (const int16_t *)audio_frame.data;
                        float *dst = &float_samples[0];
                        for (size_t i = 0; i < num_samples; ++i) {
-                               *dst++ = le16toh(*src++) * (1.0f / 32768.0f);
+                               *dst++ = int16_t(le16toh(*src++)) * (1.0f / 32768.0f);
                        }
                } else if (audio_format.bits_per_sample == 32) {
                        const int32_t *src = (const int32_t *)audio_frame.data;
                        float *dst = &float_samples[0];
                        for (size_t i = 0; i < num_samples; ++i) {
-                               *dst++ = le32toh(*src++) * (1.0f / 2147483648.0f);
+                               *dst++ = int32_t(le32toh(*src++)) * (1.0f / 2147483648.0f);
                        }
                } else {
                        assert(false);
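
The int16_t()/int32_t() casts above fix a sign bug: le16toh() and le32toh() return
unsigned values, so a negative PCM sample was previously scaled as a large positive
number. A minimal sketch for a single 16-bit sample (glibc's <endian.h>; little- and
big-endian hosts both work, and the 1/32768 scale factor is the one used above):

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    int main()
    {
        const int16_t sample = -32768;                   // most negative 16-bit PCM value
        const uint16_t raw = htole16(uint16_t(sample));  // as it sits in the capture buffer

        // Without the cast, le16toh() yields an unsigned 32768, which scales to +1.0.
        float wrong = le16toh(raw) * (1.0f / 32768.0f);
        // Casting back to int16_t restores the sign, giving the intended -1.0.
        float right = int16_t(le16toh(raw)) * (1.0f / 32768.0f);

        printf("wrong=%f right=%f\n", wrong, right);
        return 0;
    }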
@@ -167,7 +178,7 @@ int main(int argc, char *argv[])
        parse_flags(PROGRAM_KAERU, argc, argv);
        if (optind + 1 != argc) {
                usage(PROGRAM_KAERU);
-               exit(1);
+               abort();
        }
        global_flags.num_cards = 1;  // For latency metrics.
 
@@ -199,12 +210,12 @@ int main(int argc, char *argv[])
        FFmpegCapture video(argv[optind], global_flags.width, global_flags.height);
        video.set_pixel_format(FFmpegCapture::PixelFormat_NV12);
        video.set_frame_callback(bind(video_frame_callback, &video, x264_encoder.get(), audio_encoder.get(), _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11));
-       if (!global_flags.transcode_audio) {
+       if (!global_flags.transcode_audio && global_flags.enable_audio) {
                video.set_audio_callback(bind(audio_frame_callback, http_mux.get(), _1, _2));
        }
        video.configure_card();
        video.start_bm_capture();
-       video.change_rate(2.0);  // Be sure never to really fall behind, but also don't dump huge amounts of stuff onto x264.
+       video.change_rate(10.0);  // Play as fast as possible.
 
        BasicStats basic_stats(/*verbose=*/false, /*use_opengl=*/false);
        global_basic_stats = &basic_stats;
@@ -220,6 +231,7 @@ int main(int argc, char *argv[])
 
        video.stop_dequeue_thread();
        // Stop the x264 encoder before killing the mux it's writing to.
+       global_x264_encoder = nullptr;
        x264_encoder.reset();
        return 0;
 }