Support SRT inputs.
author     Steinar H. Gunderson <sgunderson@bigfoot.com>
           Sun, 10 May 2020 21:57:06 +0000 (23:57 +0200)
committer  Steinar H. Gunderson <sgunderson@bigfoot.com>
           Mon, 11 May 2020 17:02:31 +0000 (19:02 +0200)
FFmpeg can already handle SRT in VideoInput, but this goes beyond that:
the number of SRT inputs can be dynamic (they can fill any input
card slot), and they generally behave much more like regular input
cards than video inputs. SRT input is on by default (port 9710),
but can be disabled at runtime with --no-srt.

Due to licensing issues (e.g. Debian does not currently have a
suitable libsrt, as its libsrt links to OpenSSL), it is possible
to build without it.
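
To try the feature out, any SRT client running in caller mode can push an
MPEG-TS stream at the new listener (e.g. FFmpeg built with libsrt writing to
an srt:// URL). Below is a minimal, hypothetical sender sketch in C++ against
libsrt; the port (9710) comes from this commit, while the stream ID, the
loopback address and the stdin-to-SRT plumbing are made up purely for
illustration.

// Minimal hypothetical SRT sender: reads an MPEG-TS stream from stdin and
// pushes it to a Nageru instance listening on the default SRT port (9710).
// Error handling and retry logic are intentionally minimal.
#include <srt/srt.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include <cstdio>
#include <cstring>

int main()
{
	srt_startup();

	SRTSOCKET sock = srt_socket(AF_INET6, 0, 0);
	const char *stream_id = "camera 1";  // Becomes the card description in Nageru.
	srt_setsockopt(sock, /*ignored=*/0, SRTO_STREAMID, stream_id, strlen(stream_id));

	sockaddr_in6 addr;
	memset(&addr, 0, sizeof(addr));
	addr.sin6_family = AF_INET6;
	addr.sin6_addr = in6addr_loopback;
	addr.sin6_port = htons(9710);  // DEFAULT_SRT_PORT from this commit.

	if (srt_connect(sock, (sockaddr *)&addr, sizeof(addr)) == SRT_ERROR) {
		fprintf(stderr, "srt_connect: %s\n", srt_getlasterror_str());
		return 1;
	}

	// SRT live mode caps payloads at 1316 bytes (seven 188-byte TS packets).
	char buf[1316];
	ssize_t len;
	while ((len = read(STDIN_FILENO, buf, sizeof(buf))) > 0) {
		if (srt_send(sock, buf, len) == SRT_ERROR) {
			fprintf(stderr, "srt_send: %s\n", srt_getlasterror_str());
			break;
		}
	}

	srt_close(sock);
	srt_cleanup();
	return 0;
}

If the stream ID is left empty, the input is described as "SRT stream" in the
UI; otherwise the ID becomes the card description (see the new
FFmpegCapture(int srt_sock, ...) constructor in nageru/ffmpeg_capture.cpp
below).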

15 files changed:
meson.build
nageru/defs.h
nageru/ffmpeg_capture.cpp
nageru/ffmpeg_capture.h
nageru/flags.cpp
nageru/flags.h
nageru/glwidget.cpp
nageru/main.cpp
nageru/mixer.cpp
nageru/mixer.h
nageru/scene.cpp
nageru/theme.cpp
nageru/theme.h
shared/ffmpeg_raii.cpp
shared/ffmpeg_raii.h

diff --git a/meson.build b/meson.build
index 6a3021a89d236b167676d0f41904e56f3edb636f..1bcd96953dc9fcb71a8c14e864c4d8ad576dfaf4 100644
@@ -25,6 +25,7 @@ qcustomplotdep = cxx.find_library('qcustomplot')
 qt5deps = dependency('qt5', modules: ['Core', 'Gui', 'Widgets', 'OpenGLExtensions', 'OpenGL', 'Network'])
 sdl2_imagedep = dependency('SDL2_image', required: false)
 sdl2dep = dependency('sdl2', required: false)
+srtdep = dependency('srt', required: false)
 sqlite3dep = dependency('sqlite3')
 threaddep = dependency('threads')
 vadrmdep = dependency('libva-drm')
@@ -56,6 +57,12 @@ if get_option('cef_dir') != ''
        add_project_arguments('-DHAVE_CEF=1', language: 'cpp')
 endif
 
+if srtdep.found()
+       # NOTE: Seemingly there's no way to figure out if we have OpenSSL libsrt
+       # or gnutls libsrt, so we cannot check license compatibility here.
+       add_project_arguments('-DHAVE_SRT=1', language: 'cpp')
+endif
+
 top_include = include_directories('.')
 
 subdir('shared')
@@ -66,7 +73,7 @@ nageru_srcs = []
 nageru_deps = [shareddep, qt5deps, libjpegdep, movitdep, protobufdep,
        vax11dep, vadrmdep, x11dep, libavformatdep, libswresampledep, libavcodecdep, libavutildep,
        libswscaledep, libusbdep, luajitdep, dldep, x264dep, alsadep, zitaresamplerdep,
-       qcustomplotdep, threaddep, eigendep]
+       qcustomplotdep, threaddep, eigendep, srtdep]
 nageru_include_dirs = [include_directories('nageru')]
 nageru_link_with = []
 nageru_build_rpath = ''
diff --git a/nageru/defs.h b/nageru/defs.h
index 5113fda63a9acddcda45e816aeb287317e90c9b2..eb664d0ab113dc13f11982bb6446ff51ff0f26ec 100644
@@ -20,6 +20,7 @@
 #define LOCAL_DUMP_SUFFIX ".nut"
 #define DEFAULT_STREAM_MUX_NAME "nut"  // Only for HTTP. Local dump guesses from LOCAL_DUMP_SUFFIX.
 #define DEFAULT_HTTPD_PORT 9095
+#define DEFAULT_SRT_PORT 9710
 
 #include "shared/shared_defs.h"
 
diff --git a/nageru/ffmpeg_capture.cpp b/nageru/ffmpeg_capture.cpp
index 2d1b3385b4ac0d64c137bba4c3590819f06f05f6..4bc277af61fc05405c8c264fbecc134de2ce59ef 100644
@@ -39,6 +39,10 @@ extern "C" {
 #include "ref_counted_frame.h"
 #include "shared/timebase.h"
 
+#ifdef HAVE_SRT
+#include <srt/srt.h>
+#endif
+
 #define FRAME_SIZE (8 << 20)  // 8 MB.
 
 using namespace std;
@@ -257,6 +261,25 @@ FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned he
        avformat_network_init();  // In case someone wants this.
 }
 
+#ifdef HAVE_SRT
+FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id)
+       : srt_sock(srt_sock),
+         width(global_flags.width),
+         height(global_flags.height),
+         pixel_format(bmusb::PixelFormat_8BitYCbCrPlanar),
+         video_timebase{1, 1}
+{
+       if (stream_id.empty()) {
+               description = "SRT stream";
+       } else {
+               description = stream_id;
+       }
+       play_as_fast_as_possible = true;
+       play_once = true;
+       last_frame = steady_clock::now();
+}
+#endif
+
 FFmpegCapture::~FFmpegCapture()
 {
        if (has_dequeue_callbacks) {
@@ -330,22 +353,38 @@ void FFmpegCapture::producer_thread_func()
                        filename_copy = filename;
                }
 
-               string pathname = search_for_file(filename_copy);
+               string pathname;
+               if (srt_sock == -1) {
+                       pathname = search_for_file(filename_copy);
+               } else {
+                       pathname = description;
+               }
                if (pathname.empty()) {
-                       fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename_copy.c_str());
                        send_disconnected_frame();
+                       if (play_once) {
+                               break;
+                       }
                        producer_thread_should_quit.sleep_for(seconds(1));
+                       fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename_copy.c_str());
                        continue;
                }
                should_interrupt = false;
                if (!play_video(pathname)) {
                        // Error.
-                       fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
                        send_disconnected_frame();
+                       if (play_once) {
+                               break;
+                       }
+                       fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
                        producer_thread_should_quit.sleep_for(seconds(1));
                        continue;
                }
 
+               if (play_once) {
+                       send_disconnected_frame();
+                       break;
+               }
+
                // Probably just EOF, will exit the loop above on next test.
        }
 
@@ -391,6 +430,13 @@ void FFmpegCapture::send_disconnected_frame()
                        FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
                last_frame_was_connected = false;
        }
+
+       if (play_once) {
+               disconnected = true;
+               if (card_disconnected_callback != nullptr) {
+                       card_disconnected_callback();
+               }
+       }
 }
 
 bool FFmpegCapture::play_video(const string &pathname)
@@ -407,7 +453,23 @@ bool FFmpegCapture::play_video(const string &pathname)
                last_modified = buf.st_mtim;
        }
 
-       auto format_ctx = avformat_open_input_unique(pathname.c_str(), nullptr, nullptr, AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+       AVFormatContextWithCloser format_ctx;
+       if (srt_sock == -1) {
+               // Regular file.
+               format_ctx = avformat_open_input_unique(pathname.c_str(), /*fmt=*/nullptr,
+                       /*options=*/nullptr,
+                       AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+       } else {
+#ifdef HAVE_SRT
+               // SRT socket, already opened.
+               AVInputFormat *mpegts_fmt = av_find_input_format("mpegts");
+               format_ctx = avformat_open_input_unique(&FFmpegCapture::read_srt_thunk, this,
+                       mpegts_fmt, /*options=*/nullptr,
+                       AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+#else
+               assert(false);
+#endif
+       }
        if (format_ctx == nullptr) {
                fprintf(stderr, "%s: Error opening file\n", pathname.c_str());
                return false;
@@ -948,12 +1010,25 @@ UniqueFrame FFmpegCapture::make_video_frame(const AVFrame *frame, const string &
        return video_frame;
 }
 
-int FFmpegCapture::interrupt_cb_thunk(void *unique)
+int FFmpegCapture::interrupt_cb_thunk(void *opaque)
 {
-       return reinterpret_cast<FFmpegCapture *>(unique)->interrupt_cb();
+       return reinterpret_cast<FFmpegCapture *>(opaque)->interrupt_cb();
 }
 
 int FFmpegCapture::interrupt_cb()
 {
        return should_interrupt.load();
 }
+
+#ifdef HAVE_SRT
+int FFmpegCapture::read_srt_thunk(void *opaque, uint8_t *buf, int buf_size)
+{
+       return reinterpret_cast<FFmpegCapture *>(opaque)->read_srt(buf, buf_size);
+}
+
+int FFmpegCapture::read_srt(uint8_t *buf, int buf_size)
+{
+       SRT_MSGCTRL mc = srt_msgctrl_default;
+       return srt_recvmsg2(srt_sock, reinterpret_cast<char *>(buf), buf_size, &mc);
+}
+#endif
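
The SRT path never gives FFmpeg a URL; the already-accepted socket is demuxed
as MPEG-TS through a custom AVIO read callback (backed by the new
avformat_open_input_unique() overload in shared/ffmpeg_raii.cpp further down).
The following is a condensed, standalone sketch of that pattern, assuming
FFmpeg and libsrt headers are available; the RAII wrapper, the interrupt
callback and most error handling are left out.

// Sketch: demux an already-open SRT socket as MPEG-TS via a custom AVIO read
// callback, mirroring FFmpegCapture::play_video()/read_srt() above.
extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/mem.h>
}
#include <srt/srt.h>

static int read_srt(void *opaque, uint8_t *buf, int buf_size)
{
	SRTSOCKET sock = *reinterpret_cast<SRTSOCKET *>(opaque);
	SRT_MSGCTRL mc = srt_msgctrl_default;
	return srt_recvmsg2(sock, reinterpret_cast<char *>(buf), buf_size, &mc);
}

AVFormatContext *open_srt_as_mpegts(SRTSOCKET *sock)
{
	constexpr int buf_size = 4096;
	unsigned char *buf = (unsigned char *)av_malloc(buf_size);
	AVFormatContext *ctx = avformat_alloc_context();
	ctx->pb = avio_alloc_context(buf, buf_size, /*write_flag=*/0, sock,
	                             read_srt, /*write_packet=*/nullptr, /*seek=*/nullptr);
	AVInputFormat *mpegts = av_find_input_format("mpegts");
	if (avformat_open_input(&ctx, "", mpegts, /*options=*/nullptr) != 0) {
		return nullptr;  // avformat_open_input() frees ctx on failure.
	}
	return ctx;
}

In the commit itself the context is additionally wired up with an
AVIOInterruptCB and returned as an AVFormatContextWithCloser, so teardown goes
through the usual RAII path.
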
diff --git a/nageru/ffmpeg_capture.h b/nageru/ffmpeg_capture.h
index 843c83c254d4b9b5ac8cf46532c236cc269298c7..685191a1f28bf9f463773fccb9abe3858c46b86d 100644
@@ -57,6 +57,10 @@ class FFmpegCapture : public bmusb::CaptureInterface
 {
 public:
        FFmpegCapture(const std::string &filename, unsigned width, unsigned height);
+#ifdef HAVE_SRT
+       // Takes ownership of the SRT client socket.
+       FFmpegCapture(int srt_sock, const std::string &stream_id);
+#endif
        ~FFmpegCapture();
 
        void set_card_index(int card_index)
@@ -195,6 +199,11 @@ public:
                has_dequeue_callbacks = true;
        }
 
+       void set_card_disconnected_callback(bmusb::card_disconnected_callback_t callback)
+       {
+               card_disconnected_callback = callback;
+       }
+
        std::string get_description() const override
        {
                return description;
@@ -203,7 +212,7 @@ public:
        void configure_card() override;
        void start_bm_capture() override;
        void stop_dequeue_thread() override;
-       bool get_disconnected() const override { return false; }  // We never unplug.
+       bool get_disconnected() const override { return disconnected; }  // Only if play_once == true.
 
        std::map<uint32_t, bmusb::VideoMode> get_available_video_modes() const override;
        void set_video_mode(uint32_t video_mode_id) override {}  // Ignore.
@@ -249,11 +258,17 @@ private:
        bmusb::VideoFormat construct_video_format(const AVFrame *frame, AVRational video_timebase);
        UniqueFrame make_video_frame(const AVFrame *frame, const std::string &pathname, bool *error);
 
-       static int interrupt_cb_thunk(void *unique);
+       static int interrupt_cb_thunk(void *opaque);
        int interrupt_cb();
 
+#ifdef HAVE_SRT
+       static int read_srt_thunk(void *opaque, uint8_t *buf, int buf_size);
+       int read_srt(uint8_t *buf, int buf_size);
+#endif
+
        mutable std::mutex filename_mu;
        std::string description, filename;
+       int srt_sock = -1;
        uint16_t timecode = 0;
        unsigned width, height;
        bmusb::PixelFormat pixel_format = bmusb::PixelFormat_8BitBGRA;
@@ -269,6 +284,10 @@ private:
        std::function<void()> dequeue_init_callback = nullptr;
        std::function<void()> dequeue_cleanup_callback = nullptr;
 
+       bmusb::card_disconnected_callback_t card_disconnected_callback = nullptr;
+       bool play_once = false;  // End thread after playing. Only for SRT, since the socket is ephemeral.
+       std::atomic<bool> disconnected{false};
+
        bmusb::FrameAllocator *video_frame_allocator = nullptr;
        bmusb::FrameAllocator *audio_frame_allocator = nullptr;
        std::unique_ptr<bmusb::FrameAllocator> owned_video_frame_allocator;
diff --git a/nageru/flags.cpp b/nageru/flags.cpp
index 5e044fd502e88c8e78546f9ad75d45fda008d48f..9c58ad9c0d7bf4aa0d0bfebe46e81a3b8b29b191 100644
@@ -37,6 +37,8 @@ enum LongOption {
        OPTION_HTTP_AUDIO_CODEC,
        OPTION_HTTP_AUDIO_BITRATE,
        OPTION_HTTP_PORT,
+       OPTION_SRT_PORT,
+       OPTION_NO_SRT,
        OPTION_NO_TRANSCODE_AUDIO,
        OPTION_DISABLE_AUDIO,
        OPTION_FLAT_AUDIO,
@@ -170,6 +172,9 @@ void usage(Program program)
                DEFAULT_AUDIO_OUTPUT_BIT_RATE / 1000);
        fprintf(stderr, "      --http-port=PORT            which port to use for the built-in HTTP server\n");
        fprintf(stderr, "                                  (default is %d)\n", DEFAULT_HTTPD_PORT);
+       fprintf(stderr, "      --srt-port=PORT             which port to use for receiving SRT streams\n");
+       fprintf(stderr, "                                  (default is %d)\n", DEFAULT_SRT_PORT);
+       fprintf(stderr, "      --no-srt                    disable receiving SRT streams\n");
        if (program == PROGRAM_KAERU) {
                fprintf(stderr, "      --no-transcode-audio        copy encoded audio raw from the source stream\n");
                fprintf(stderr, "                                    (requires --http-audio-codec= to be set)\n");
@@ -256,6 +261,8 @@ void parse_flags(Program program, int argc, char * const argv[])
                { "http-audio-codec", required_argument, 0, OPTION_HTTP_AUDIO_CODEC },
                { "http-audio-bitrate", required_argument, 0, OPTION_HTTP_AUDIO_BITRATE },
                { "http-port", required_argument, 0, OPTION_HTTP_PORT },
+               { "srt-port", required_argument, 0, OPTION_SRT_PORT },
+               { "no-srt", no_argument, 0, OPTION_NO_SRT },
                { "no-transcode-audio", no_argument, 0, OPTION_NO_TRANSCODE_AUDIO },
                { "disable-audio", no_argument, 0, OPTION_DISABLE_AUDIO },
                { "flat-audio", no_argument, 0, OPTION_FLAT_AUDIO },
@@ -373,6 +380,12 @@ void parse_flags(Program program, int argc, char * const argv[])
                case OPTION_HTTP_PORT:
                        global_flags.http_port = atoi(optarg);
                        break;
+               case OPTION_SRT_PORT:
+                       global_flags.srt_port = atoi(optarg);
+                       break;
+               case OPTION_NO_SRT:
+                       global_flags.srt_port = -1;
+                       break;
                case OPTION_NO_TRANSCODE_AUDIO:
                        global_flags.transcode_audio = false;
                        break;
diff --git a/nageru/flags.h b/nageru/flags.h
index 5758d884c01620765b1d0df770246d18ef1134f7..c1fcdff01f4b60731162c3a7b7c6452f09b6aa4e 100644
@@ -57,6 +57,7 @@ struct Flags {
        double output_slop_frames = 0.5;
        int max_input_queue_frames = 6;
        int http_port = DEFAULT_HTTPD_PORT;
+       int srt_port = DEFAULT_SRT_PORT;  // -1 for none.
        bool display_timecode_in_stream = false;
        bool display_timecode_on_stdout = false;
        bool enable_quick_cut_keys = false;
diff --git a/nageru/glwidget.cpp b/nageru/glwidget.cpp
index 9aae03b05cbb9df60e21134d1e1070dfa9f51d06..62aeebc9cc14a0a7de7ca47a2be03076d1d79537 100644
@@ -231,7 +231,7 @@ void GLWidget::show_preview_context_menu(unsigned signal_num, const QPoint &pos)
        unsigned current_card = global_mixer->map_signal_to_card(signal_num);
        bool is_ffmpeg = global_mixer->card_is_ffmpeg(current_card);
 
-       if (!is_ffmpeg) {  // FFmpeg inputs are not connected to any card; they're locked to a given input and have a given Y'CbCr interpretationn.
+       if (!is_ffmpeg) {  // FFmpeg inputs are not connected to any card; they're locked to a given input and have a given Y'CbCr interpretation.
                for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
                        QString description(QString::fromStdString(global_mixer->get_card_description(card_index)));
                        QAction *action = new QAction(description, &card_group);
@@ -292,10 +292,8 @@ void GLWidget::show_preview_context_menu(unsigned signal_num, const QPoint &pos)
        if (is_ffmpeg) {
                // Add a menu to change the source URL if we're an FFmpeg card.
                // (The theme can still override.)
-               if (global_mixer->card_is_ffmpeg(current_card)) {
-                       change_url_action = new QAction("Change source filename/URL…", &menu);
-                       menu.addAction(change_url_action);
-               }
+               change_url_action = new QAction("Change source filename/URL…", &menu);
+               menu.addAction(change_url_action);
        } else {
                // Add a submenu for selecting video input, with an action for each input.
                std::map<uint32_t, string> video_inputs = global_mixer->get_available_video_inputs(current_card);
diff --git a/nageru/main.cpp b/nageru/main.cpp
index 351662039d2cd8e1790129f5d2d5e0282de76982..212c0691211a58d1e30dd816eed595dc3409c466 100644
@@ -20,6 +20,10 @@ extern "C" {
 #include <cef_version.h>
 #endif
 
+#ifdef HAVE_SRT
+#include <srt/srt.h>
+#endif
+
 #include "basic_stats.h"
 #ifdef HAVE_CEF
 #include "nageru_cef_app.h"
@@ -74,6 +78,12 @@ int main(int argc, char *argv[])
        av_register_all();
 #endif
 
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               srt_startup();
+       }
+#endif
+
        QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);
 
        QSurfaceFormat fmt;
@@ -120,5 +130,10 @@ int main(int argc, char *argv[])
 
        int rc = app.exec();
        delete global_mixer;
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               srt_cleanup();
+       }
+#endif
        return rc;
 }
diff --git a/nageru/mixer.cpp b/nageru/mixer.cpp
index 76ab1bab21c4c47d136160f96e93fbe40b21b3d1..915ae0eb7b9bb988cd5de06fa50ed4f951c5726d 100644
 #include <google/protobuf/util/json_util.h>
 #include "json.pb.h"
 
+#ifdef HAVE_SRT
+// Must come after CEF, since it includes <syslog.h>, which has #defines
+// that conflict with CEF logging constants.
+#include <srt/srt.h>
+#endif
+
 class IDeckLink;
 class QOpenGLContext;
 
@@ -468,6 +474,12 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
        BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
        BMUSBCapture::start_bm_thread();
 
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               start_srt();
+       }
+#endif
+
        for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                cards[card_index].queue_length_policy.reset(card_index);
        }
@@ -537,7 +549,7 @@ Mixer::~Mixer()
        video_encoder.reset(nullptr);
 }
 
-void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output)
+void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool override_card_as_live)
 {
        printf("Configuring card %d...\n", card_index);
 
@@ -589,8 +601,12 @@ void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardT
 
        // NOTE: start_bm_capture() happens in thread_func().
 
+       if (override_card_as_live) {
+               assert(card_type == CardType::FFMPEG_INPUT);
+       }
+
        DeviceSpec device;
-       if (card_type == CardType::FFMPEG_INPUT) {
+       if (card_type == CardType::FFMPEG_INPUT && !override_card_as_live) {
                device = DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
        } else {
                device = DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
@@ -1096,13 +1112,13 @@ void Mixer::thread_func()
                        assert(master_card_index < num_cards + num_video_inputs);
                }
 
+               handle_hotplugged_cards();
+
                vector<int32_t> raw_audio[MAX_VIDEO_CARDS];  // For MJPEG encoding.
                OutputFrameInfo output_frame_info = get_one_frame_from_each_card(master_card_index, master_card_is_output, new_frames, has_new_frame, raw_audio);
                schedule_audio_resampling_tasks(output_frame_info.dropped_frames, output_frame_info.num_samples, output_frame_info.frame_duration, output_frame_info.is_preroll, output_frame_info.frame_timestamp);
                stats_dropped_frames += output_frame_info.dropped_frames;
 
-               handle_hotplugged_cards();
-
                for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                        DeviceSpec device = card_index_to_device(card_index, num_cards);
                        if (card_index == master_card_index || !has_new_frame[card_index]) {
@@ -1374,9 +1390,15 @@ void Mixer::handle_hotplugged_cards()
 
        // Check for cards that have been connected since last frame.
        vector<libusb_device *> hotplugged_cards_copy;
+#ifdef HAVE_SRT
+       vector<int> hotplugged_srt_cards_copy;
+#endif
        {
                lock_guard<mutex> lock(hotplug_mutex);
                swap(hotplugged_cards, hotplugged_cards_copy);
+#ifdef HAVE_SRT
+               swap(hotplugged_srt_cards, hotplugged_srt_cards_copy);
+#endif
        }
        for (libusb_device *new_dev : hotplugged_cards_copy) {
                // Look for a fake capture card where we can stick this in.
@@ -1402,6 +1424,49 @@ void Mixer::handle_hotplugged_cards()
                        capture->start_bm_capture();
                }
        }
+
+#ifdef HAVE_SRT
+       // Same, for SRT inputs.
+       // TODO: On disconnect and reconnect, we might want to use the stream ID
+       // to find the slot it used to go into?
+       for (SRTSOCKET sock : hotplugged_srt_cards_copy) {
+               // Look for a fake capture card where we can stick this in.
+               int free_card_index = -1;
+               for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
+                       if (cards[card_index].is_fake_capture) {
+                               free_card_index = card_index;
+                               break;
+                       }
+               }
+
+               char name[256];
+               int namelen = sizeof(name);
+               srt_getsockopt(sock, /*ignored=*/0, SRTO_STREAMID, name, &namelen);
+               string stream_id(name, namelen);
+
+               if (free_card_index == -1) {
+                       if (stream_id.empty()) {
+                               stream_id = "no name";
+                       }
+                       fprintf(stderr, "New SRT stream connected (%s), but no free slots -- ignoring.\n", stream_id.c_str());
+                       srt_close(sock);
+               } else {
+                       // FFmpegCapture takes ownership.
+                       if (stream_id.empty()) {
+                               fprintf(stderr, "New unnamed SRT stream connected, choosing slot %d.\n", free_card_index);
+                       } else {
+                               fprintf(stderr, "New SRT stream connected (%s), choosing slot %d.\n", stream_id.c_str(), free_card_index);
+                       }
+                       CaptureCard *card = &cards[free_card_index];
+                       FFmpegCapture *capture = new FFmpegCapture(sock, stream_id);
+                       capture->set_card_index(free_card_index);
+                       configure_card(free_card_index, capture, CardType::FFMPEG_INPUT, /*output=*/nullptr, /*override_card_as_live=*/true);
+                       card->queue_length_policy.reset(free_card_index);
+                       capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, free_card_index));
+                       capture->start_bm_capture();
+               }
+       }
+#endif
 }
 
 
@@ -1641,6 +1706,23 @@ void Mixer::quit()
        audio_task_queue_changed.notify_one();
        mixer_thread.join();
        audio_thread.join();
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               // There's seemingly no other reasonable way to wake up the thread
+               // (libsrt's epoll equivalent is busy-waiting).
+               int sock = srt_socket(AF_INET6, 0, 0);
+               if (sock != -1) {
+                       sockaddr_in6 addr;
+                       memset(&addr, 0, sizeof(addr));
+                       addr.sin6_family = AF_INET6;
+                       addr.sin6_addr = IN6ADDR_LOOPBACK_INIT;
+                       addr.sin6_port = htons(global_flags.srt_port);
+                       srt_connect(sock, (sockaddr *)&addr, sizeof(addr));
+                       srt_close(sock);
+               }
+               srt_thread.join();
+       }
+#endif
 }
 
 void Mixer::transition_clicked(int transition_num)
@@ -1828,4 +1910,43 @@ void Mixer::OutputChannel::set_color_updated_callback(Mixer::color_updated_callb
        color_updated_callback = callback;
 }
 
+#ifdef HAVE_SRT
+void Mixer::start_srt()
+{
+       SRTSOCKET sock = srt_socket(AF_INET6, 0, 0);
+       sockaddr_in6 addr;
+       memset(&addr, 0, sizeof(addr));
+       addr.sin6_family = AF_INET6;
+       addr.sin6_port = htons(global_flags.srt_port);
+
+       int err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
+       if (err != 0) {
+               fprintf(stderr, "srt_bind: %s\n", srt_getlasterror_str());
+               abort();
+       }
+       err = srt_listen(sock, MAX_VIDEO_CARDS);
+       if (err != 0) {
+               fprintf(stderr, "srt_listen: %s\n", srt_getlasterror_str());
+               abort();
+       }
+
+       srt_thread = thread([this, sock] {
+               sockaddr_in6 addr;
+               for ( ;; ) {
+                       int sa_len = sizeof(addr);
+                       int clientsock = srt_accept(sock, (sockaddr *)&addr, &sa_len);
+                       if (should_quit) {
+                               if (clientsock != -1) {
+                                       srt_close(clientsock);
+                               }
+                               break;
+                       }
+                       lock_guard<mutex> lock(hotplug_mutex);
+                       hotplugged_srt_cards.push_back(clientsock);
+               }
+               srt_close(sock);
+       });
+}
+#endif
+
 mutex RefCountedGLsync::fence_lock;
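
For completeness, the listener that feeds handle_hotplugged_cards() above is a
plain bind/listen/accept loop. The sketch below condenses start_srt() and the
SRTO_STREAMID lookup into a standalone program, assuming only libsrt; the
hotplug queue, the free-slot search and the self-connect wakeup used in quit()
are omitted.

// Stripped-down sketch of the listener: accept SRT callers on port 9710 and
// print their stream IDs. Nageru instead queues the accepted socket on
// hotplugged_srt_cards and later wraps it in an FFmpegCapture.
#include <srt/srt.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <cstdio>
#include <cstring>
#include <string>

int main()
{
	srt_startup();

	SRTSOCKET sock = srt_socket(AF_INET6, 0, 0);
	sockaddr_in6 addr;
	memset(&addr, 0, sizeof(addr));
	addr.sin6_family = AF_INET6;
	addr.sin6_port = htons(9710);

	if (srt_bind(sock, (sockaddr *)&addr, sizeof(addr)) != 0 ||
	    srt_listen(sock, 16) != 0) {
		fprintf(stderr, "SRT listen failed: %s\n", srt_getlasterror_str());
		return 1;
	}

	for ( ;; ) {
		sockaddr_in6 client_addr;
		int sa_len = sizeof(client_addr);
		SRTSOCKET clientsock = srt_accept(sock, (sockaddr *)&client_addr, &sa_len);
		if (clientsock == SRT_INVALID_SOCK) {
			break;
		}

		char name[256];
		int namelen = sizeof(name);
		srt_getsockopt(clientsock, /*ignored=*/0, SRTO_STREAMID, name, &namelen);
		std::string stream_id(name, namelen);
		printf("New SRT stream: %s\n", stream_id.empty() ? "(unnamed)" : stream_id.c_str());

		srt_close(clientsock);  // Nageru hands the socket to FFmpegCapture here instead.
	}

	srt_close(sock);
	srt_cleanup();
	return 0;
}

The self-connect in quit() exists because, as the comment in the diff notes,
there is no reasonable way to wake a thread blocked in srt_accept() other than
handing it a connection.
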
diff --git a/nageru/mixer.h b/nageru/mixer.h
index 61db4a5c30857b06728d8dee0810a6caf3a06df1..ff8a197bccdd61c6af9de3598f6239e6645553d5 100644
@@ -331,6 +331,12 @@ public:
 
        bool card_is_ffmpeg(unsigned card_index) const {
                assert(card_index < num_cards + num_video_inputs);
+               if (card_index < num_cards) {
+                       // SRT inputs are more like regular inputs than FFmpeg inputs,
+                       // so show them as such. (This allows the user to right-click
+                       // to select a different input.)
+                       return false;
+               }
                return cards[card_index].type == CardType::FFMPEG_INPUT;
        }
 
@@ -439,7 +445,7 @@ private:
                FFMPEG_INPUT,
                CEF_INPUT,
        };
-       void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output);
+       void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool override_ffmpeg_to_live = false);
        void set_output_card_internal(int card_index);  // Should only be called from the mixer thread.
        void bm_frame(unsigned card_index, uint16_t timecode,
                bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
@@ -454,6 +460,9 @@ private:
        void render_one_frame(int64_t duration);
        void audio_thread_func();
        void release_display_frame(DisplayFrame *frame);
+#ifdef HAVE_SRT
+       void start_srt();
+#endif
        double pts() { return double(pts_int) / TIMEBASE; }
        void trim_queue(CaptureCard *card, size_t safe_queue_length);
        std::pair<std::string, std::string> get_channels_json();
@@ -595,6 +604,9 @@ private:
        // Protected by its own mutex.
        std::mutex hotplug_mutex;
        std::vector<libusb_device *> hotplugged_cards;
+#ifdef HAVE_SRT
+       std::vector<int> hotplugged_srt_cards;
+#endif
 
        class OutputChannel {
        public:
@@ -627,6 +639,9 @@ private:
 
        std::thread mixer_thread;
        std::thread audio_thread;
+#ifdef HAVE_SRT
+       std::thread srt_thread;
+#endif
        std::atomic<bool> should_quit{false};
        std::atomic<bool> should_cut{false};
 
diff --git a/nageru/scene.cpp b/nageru/scene.cpp
index a44c9a856316d2cf1f3296365b69862498a4e188..8506c9b20b68a5290ea0a8a258afe2ec23f94118 100644
@@ -209,6 +209,11 @@ int Scene::add_input(lua_State* L)
                if (lua_isnumber(L, 2)) {
                        block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR));
                        block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR_WITH_DEINTERLACE));
+#ifdef HAVE_SRT
+                       if (global_flags.srt_port >= 0) {
+                               block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR_PLANAR));
+                       }
+#endif
 #ifdef HAVE_CEF
                } else if (luaL_testudata(L, 2, "HTMLInput")) {
                        block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_BGRA));
@@ -521,14 +526,22 @@ Scene::get_chain(Theme *theme, lua_State *L, unsigned num, const InputState &inp
        InputStateInfo info(input_state);
        for (Block *block : blocks) {
                if (block->is_input && block->signal_type_to_connect == Block::CONNECT_SIGNAL) {
-                       EffectType chosen_type = current_type(block);
-                       assert(chosen_type == LIVE_INPUT_YCBCR || chosen_type == LIVE_INPUT_YCBCR_WITH_DEINTERLACE);
                        int card_index = theme->map_signal_to_card(block->signal_to_connect);
                        if (info.last_interlaced[card_index]) {
+                               assert(info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitYCbCr ||
+                                      info.last_pixel_format[card_index] == bmusb::PixelFormat_10BitYCbCr);
                                block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR_WITH_DEINTERLACE);
+                       } else if (info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitYCbCrPlanar) {
+                               block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR_PLANAR);
+                       } else if (info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitBGRA) {
+                               block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_BGRA);
                        } else {
                                block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR);
                        }
+                       if (block->currently_chosen_alternative == -1) {
+                               fprintf(stderr, "ERROR: Input connected to a video card pixel format that it was not ready for.\n");
+                               abort();
+                       }
                }
        }
 
diff --git a/nageru/theme.cpp b/nageru/theme.cpp
index d04a580f2cd7bbaee8668792443931dd7f4766d4..6c75e114f97ac94899eef4e0a2f9b1c7e323e015 100644
@@ -102,6 +102,7 @@ InputStateInfo::InputStateInfo(const InputState &input_state)
                last_is_connected[signal_num] = userdata->last_is_connected;
                last_frame_rate_nom[signal_num] = userdata->last_frame_rate_nom;
                last_frame_rate_den[signal_num] = userdata->last_frame_rate_den;
+               last_pixel_format[signal_num] = userdata->pixel_format;
                has_last_subtitle[signal_num] = userdata->has_last_subtitle;
                last_subtitle[signal_num] = userdata->last_subtitle;
        }
diff --git a/nageru/theme.h b/nageru/theme.h
index fe713ef1f6da48df468d4923c03a5451e23d1f27..507dc01828cafb3fe2524f8187b9f56a9f279e5e 100644
@@ -78,6 +78,7 @@ struct InputStateInfo {
        unsigned last_width[MAX_VIDEO_CARDS], last_height[MAX_VIDEO_CARDS];
        bool last_interlaced[MAX_VIDEO_CARDS], last_has_signal[MAX_VIDEO_CARDS], last_is_connected[MAX_VIDEO_CARDS];
        unsigned last_frame_rate_nom[MAX_VIDEO_CARDS], last_frame_rate_den[MAX_VIDEO_CARDS];
+       bmusb::PixelFormat last_pixel_format[MAX_VIDEO_CARDS];
        bool has_last_subtitle[MAX_VIDEO_CARDS];
        std::string last_subtitle[MAX_VIDEO_CARDS];
 };
diff --git a/shared/ffmpeg_raii.cpp b/shared/ffmpeg_raii.cpp
index 746e03d19122da0cd11ad70735670b9b9d318af5..a1028f844b9cbe3dc8299a7fdbc3d518590d6309 100644
@@ -37,6 +37,23 @@ AVFormatContextWithCloser avformat_open_input_unique(
        return AVFormatContextWithCloser(format_ctx);
 }
 
+AVFormatContextWithCloser avformat_open_input_unique(
+       int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
+       void *opaque, AVInputFormat *fmt, AVDictionary **options,
+       const AVIOInterruptCB &interrupt_cb)
+{
+       AVFormatContext *format_ctx = avformat_alloc_context();
+       format_ctx->interrupt_callback = interrupt_cb;
+       constexpr size_t buf_size = 4096;
+       unsigned char *buf = (unsigned char *)av_malloc(buf_size);
+       format_ctx->pb = avio_alloc_context(buf, buf_size, /*write_flag=*/false, opaque,
+               read_packet, /*write_packet=*/nullptr, /*seek=*/nullptr);
+       if (avformat_open_input(&format_ctx, "", fmt, options) != 0) {
+               format_ctx = nullptr;
+       }
+       return AVFormatContextWithCloser(format_ctx);
+}
+
 // AVCodecContext
 
 void avcodec_free_context_unique::operator() (AVCodecContext *codec_ctx) const
diff --git a/shared/ffmpeg_raii.h b/shared/ffmpeg_raii.h
index 33d233480528dd8414e92b3fd32b04c1281d4ffa..75675edfbebc470a262e42017ad8161c18dfcaed 100644
@@ -38,6 +38,11 @@ AVFormatContextWithCloser avformat_open_input_unique(
        AVDictionary **options,
        const AVIOInterruptCB &interrupt_cb);
 
+AVFormatContextWithCloser avformat_open_input_unique(
+       int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
+       void *opaque, AVInputFormat *fmt, AVDictionary **options,
+       const AVIOInterruptCB &interrupt_cb);
+
 
 // AVCodecContext
 struct avcodec_free_context_unique {