Fix an integer overflow with very long master frames.

diff --git a/nageru/mixer.cpp b/nageru/mixer.cpp
index 464b92008b946729c4bf4f3756e1c54ac26662f6..212041bd1b0f5c69ecab7e6116e5e1844015ad8e 100644
--- a/nageru/mixer.cpp
+++ b/nageru/mixer.cpp
 #include <google/protobuf/util/json_util.h>
 #include "json.pb.h"
 
+#ifdef HAVE_SRT
+// Must come after CEF, since it includes <syslog.h>, which has #defines
+// that conflict with CEF logging constants.
+#include <srt/srt.h>
+#endif
+
 class IDeckLink;
 class QOpenGLContext;
 
@@ -110,11 +116,17 @@ void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned f
                assert(false);
        }
 
-       if (first ||
-           width != userdata->last_width[field] ||
-           height != userdata->last_height[field] ||
-           cbcr_width != userdata->last_cbcr_width[field] ||
-           cbcr_height != userdata->last_cbcr_height[field]) {
+       const bool recreate_main_texture =
+               first ||
+               width != userdata->last_width[field] ||
+               height != userdata->last_height[field] ||
+               cbcr_width != userdata->last_cbcr_width[field] ||
+               cbcr_height != userdata->last_cbcr_height[field];
+       const bool recreate_v210_texture =
+               global_flags.ten_bit_input &&
+               (first || v210_width != userdata->last_v210_width[field] || height != userdata->last_height[field]);
+
+       if (recreate_main_texture) {
                // We changed resolution since last use of this texture, so we need to create
                // a new object. Note that since each card has its own PBOFrameAllocator,
                // we don't need to worry about these flip-flopping between resolutions.
@@ -166,14 +178,14 @@ void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned f
                userdata->last_cbcr_width[field] = cbcr_width;
                userdata->last_cbcr_height[field] = cbcr_height;
        }
-       if (global_flags.ten_bit_input &&
-           (first || v210_width != userdata->last_v210_width[field])) {
+       if (recreate_v210_texture) {
                // Same as above; we need to recreate the texture.
                glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
                check_error();
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
                check_error();
                userdata->last_v210_width[field] = v210_width;
+               userdata->last_height[field] = height;
        }
 }
 
@@ -462,6 +474,12 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
        BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
        BMUSBCapture::start_bm_thread();
 
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               start_srt();
+       }
+#endif
+
        for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                cards[card_index].queue_length_policy.reset(card_index);
        }
@@ -531,7 +549,7 @@ Mixer::~Mixer()
        video_encoder.reset(nullptr);
 }
 
-void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output)
+void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool override_card_as_live)
 {
        printf("Configuring card %d...\n", card_index);
 
@@ -562,6 +580,15 @@ void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardT
        card->capture->set_frame_callback(bind(&Mixer::bm_frame, this, card_index, _1, _2, _3, _4, _5, _6, _7));
        if (card->frame_allocator == nullptr) {
                card->frame_allocator.reset(new PBOFrameAllocator(pixel_format, 8 << 20, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get()));  // 8 MB.
+       } else {
+               // The format could have changed, but we cannot reset the allocator
+               // and create a new one from scratch, since there may be allocated
+               // frames from it that expect to call release_frame() on it.
+               // Instead, ask the allocator to create new frames for us and discard
+               // any old ones as they come back. This takes the mutex while
+               // allocating, but nothing should really be sending frames in there
+               // right now anyway (start_bm_capture() has not been called yet).
+               card->frame_allocator->reconfigure(pixel_format, 8 << 20, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get());
        }
        card->capture->set_video_frame_allocator(card->frame_allocator.get());
        if (card->surface == nullptr) {
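
The reconfigure() path added above works because the allocator tolerates frames that were handed out under the old configuration. As a rough illustration of that idea (a generic sketch, not PBOFrameAllocator's actual implementation; every name in it is made up), an allocator can tag each frame with a generation number and simply drop stale frames when they come back:

#include <cstddef>
#include <cstdint>
#include <mutex>
#include <queue>
#include <utility>
#include <vector>

struct Frame {
        std::vector<uint8_t> data;
        int generation = 0;  // which configuration this frame was allocated under
};

class ReconfigurableAllocator {
public:
        // Called while nothing is requesting frames (cf. the comment above:
        // start_bm_capture() has not happened yet). Outstanding frames stay
        // valid; they are just discarded instead of recycled when released.
        void reconfigure(size_t frame_size, size_t num_frames)
        {
                std::lock_guard<std::mutex> lock(mu);
                ++generation;
                free_frames = std::queue<Frame>();
                for (size_t i = 0; i < num_frames; ++i) {
                        Frame f;
                        f.data.resize(frame_size);
                        f.generation = generation;
                        free_frames.push(std::move(f));
                }
        }

        bool alloc_frame(Frame *out)
        {
                std::lock_guard<std::mutex> lock(mu);
                if (free_frames.empty()) return false;
                *out = std::move(free_frames.front());
                free_frames.pop();
                return true;
        }

        void release_frame(Frame frame)
        {
                std::lock_guard<std::mutex> lock(mu);
                if (frame.generation == generation) {
                        free_frames.push(std::move(frame));  // still current; recycle
                }
                // else: allocated before the last reconfigure(); let it die here
        }

private:
        std::mutex mu;
        std::queue<Frame> free_frames;
        int generation = 0;
};
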
@@ -574,8 +601,12 @@ void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardT
 
        // NOTE: start_bm_capture() happens in thread_func().
 
+       if (override_card_as_live) {
+               assert(card_type == CardType::FFMPEG_INPUT);
+       }
+
        DeviceSpec device;
-       if (card_type == CardType::FFMPEG_INPUT) {
+       if (card_type == CardType::FFMPEG_INPUT && !override_card_as_live) {
                device = DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
        } else {
                device = DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
@@ -1081,13 +1112,13 @@ void Mixer::thread_func()
                        assert(master_card_index < num_cards + num_video_inputs);
                }
 
+               handle_hotplugged_cards();
+
                vector<int32_t> raw_audio[MAX_VIDEO_CARDS];  // For MJPEG encoding.
                OutputFrameInfo output_frame_info = get_one_frame_from_each_card(master_card_index, master_card_is_output, new_frames, has_new_frame, raw_audio);
                schedule_audio_resampling_tasks(output_frame_info.dropped_frames, output_frame_info.num_samples, output_frame_info.frame_duration, output_frame_info.is_preroll, output_frame_info.frame_timestamp);
                stats_dropped_frames += output_frame_info.dropped_frames;
 
-               handle_hotplugged_cards();
-
                for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
                        DeviceSpec device = card_index_to_device(card_index, num_cards);
                        if (card_index == master_card_index || !has_new_frame[card_index]) {
@@ -1134,12 +1165,12 @@ void Mixer::thread_func()
                        if (fabs(new_frame->neutral_color.r - last_received_neutral_color[card_index].r) > 1e-3 ||
                            fabs(new_frame->neutral_color.g - last_received_neutral_color[card_index].g) > 1e-3 ||
                            fabs(new_frame->neutral_color.b - last_received_neutral_color[card_index].b) > 1e-3) {
-                               theme->set_wb_for_signal(card_index, new_frame->neutral_color.r, new_frame->neutral_color.g, new_frame->neutral_color.b);
+                               theme->set_wb_for_card(card_index, new_frame->neutral_color.r, new_frame->neutral_color.g, new_frame->neutral_color.b);
                                last_received_neutral_color[card_index] = new_frame->neutral_color;
                        }
 
                        if (new_frame->frame->data_copy != nullptr && mjpeg_encoder->should_encode_mjpeg_for_card(card_index)) {
-                               RGBTriplet neutral_color = theme->get_white_balance_for_signal(card_index);
+                               RGBTriplet neutral_color = theme->get_white_balance_for_card(card_index);
                                mjpeg_encoder->upload_frame(pts_int, card_index, new_frame->frame, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset, move(raw_audio[card_index]), neutral_color);
                        }
 
@@ -1335,7 +1366,7 @@ start:
 
        // This might get off by a fractional sample when changing master card
        // between ones with different frame rates, but that's fine.
-       int num_samples_times_timebase = OUTPUT_FREQUENCY * output_frame_info.frame_duration + fractional_samples;
+       int64_t num_samples_times_timebase = int64_t(OUTPUT_FREQUENCY) * output_frame_info.frame_duration + fractional_samples;
        output_frame_info.num_samples = num_samples_times_timebase / TIMEBASE;
        fractional_samples = num_samples_times_timebase % TIMEBASE;
        assert(output_frame_info.num_samples >= 0);
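
The widening to int64_t above is the overflow fix named in the commit subject. A self-contained illustration (the constant values are stand-ins for the real OUTPUT_FREQUENCY and TIMEBASE, which are defined elsewhere in the tree): the product of the sample rate and a long frame duration in timebase ticks easily exceeds what a 32-bit int can hold, which made num_samples go negative and trip the assert above.

#include <climits>
#include <cstdint>
#include <cstdio>

int main()
{
        const int64_t sample_rate = 48000;             // stand-in for OUTPUT_FREQUENCY
        const int64_t timebase = 1200000;              // stand-in for TIMEBASE (ticks per second)
        const int64_t frame_duration = 5 * timebase;   // a hypothetical 5-second master frame

        const int64_t product = sample_rate * frame_duration;  // 288,000,000,000
        printf("product = %lld, INT_MAX = %d\n", (long long)product, INT_MAX);

        // Storing this product in a plain int, as the old code did, wraps it to a
        // meaningless (typically negative) value. Keeping the intermediate in
        // int64_t and dividing by the timebase only at the end stays in range.
        const int num_samples = int(product / timebase);       // 240,000 samples
        printf("num_samples = %d\n", num_samples);
        return 0;
}
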
@@ -1359,9 +1390,15 @@ void Mixer::handle_hotplugged_cards()
 
        // Check for cards that have been connected since last frame.
        vector<libusb_device *> hotplugged_cards_copy;
+#ifdef HAVE_SRT
+       vector<int> hotplugged_srt_cards_copy;
+#endif
        {
                lock_guard<mutex> lock(hotplug_mutex);
                swap(hotplugged_cards, hotplugged_cards_copy);
+#ifdef HAVE_SRT
+               swap(hotplugged_srt_cards, hotplugged_srt_cards_copy);
+#endif
        }
        for (libusb_device *new_dev : hotplugged_cards_copy) {
                // Look for a fake capture card where we can stick this in.
@@ -1387,6 +1424,49 @@ void Mixer::handle_hotplugged_cards()
                        capture->start_bm_capture();
                }
        }
+
+#ifdef HAVE_SRT
+       // Same, for SRT inputs.
+       // TODO: On disconnect and reconnect, we might want to use the stream ID
+       // to find the slot it used to go into?
+       for (SRTSOCKET sock : hotplugged_srt_cards_copy) {
+               // Look for a fake capture card where we can stick this in.
+               int free_card_index = -1;
+               for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
+                       if (cards[card_index].is_fake_capture) {
+                               free_card_index = card_index;
+                               break;
+                       }
+               }
+
+               char name[256];
+               int namelen = sizeof(name);
+               srt_getsockopt(sock, /*ignored=*/0, SRTO_STREAMID, name, &namelen);
+               string stream_id(name, namelen);
+
+               if (free_card_index == -1) {
+                       if (stream_id.empty()) {
+                               stream_id = "no name";
+                       }
+                       fprintf(stderr, "New SRT stream connected (%s), but no free slots -- ignoring.\n", stream_id.c_str());
+                       srt_close(sock);
+               } else {
+                       // FFmpegCapture takes ownership.
+                       if (stream_id.empty()) {
+                               fprintf(stderr, "New unnamed SRT stream connected, choosing slot %d.\n", free_card_index);
+                       } else {
+                               fprintf(stderr, "New SRT stream connected (%s), choosing slot %d.\n", stream_id.c_str(), free_card_index);
+                       }
+                       CaptureCard *card = &cards[free_card_index];
+                       FFmpegCapture *capture = new FFmpegCapture(sock, stream_id);
+                       capture->set_card_index(free_card_index);
+                       configure_card(free_card_index, capture, CardType::FFMPEG_INPUT, /*output=*/nullptr, /*override_card_as_live=*/true);
+                       card->queue_length_policy.reset(free_card_index);
+                       capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, free_card_index));
+                       capture->start_bm_capture();
+               }
+       }
+#endif
 }
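
The TODO above about mapping a reconnecting stream back to its old slot presumes that senders set a stream ID in the first place. A rough sketch of how a sender could do that with the libsrt C API (the name, address, and port below are purely illustrative; the port has to match the mixer's global_flags.srt_port). Tools such as FFmpeg can typically set the same option through an srt:// URL with a streamid parameter.

#include <arpa/inet.h>
#include <netinet/in.h>
#include <cstdio>
#include <cstring>

#include <srt/srt.h>

int main()
{
        srt_startup();
        SRTSOCKET sock = srt_socket(AF_INET, 0, 0);

        // The receiver above reads this back via SRTO_STREAMID.
        const char *stream_id = "camera1";  // hypothetical name
        srt_setsockopt(sock, /*ignored=*/0, SRTO_STREAMID, stream_id, (int)strlen(stream_id));

        sockaddr_in addr;
        memset(&addr, 0, sizeof(addr));
        addr.sin_family = AF_INET;
        addr.sin_port = htons(9710);  // example only; must match global_flags.srt_port
        inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);

        if (srt_connect(sock, (sockaddr *)&addr, sizeof(addr)) == SRT_ERROR) {
                fprintf(stderr, "srt_connect: %s\n", srt_getlasterror_str());
        }
        srt_close(sock);
        srt_cleanup();
        return 0;
}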
 
 
@@ -1626,6 +1706,23 @@ void Mixer::quit()
        audio_task_queue_changed.notify_one();
        mixer_thread.join();
        audio_thread.join();
+#ifdef HAVE_SRT
+       if (global_flags.srt_port >= 0) {
+               // There's seemingly no other reasonable way to wake up the thread
+               // (libsrt's epoll equivalent is busy-waiting).
+               int sock = srt_socket(AF_INET6, 0, 0);
+               if (sock != -1) {
+                       sockaddr_in6 addr;
+                       memset(&addr, 0, sizeof(addr));
+                       addr.sin6_family = AF_INET6;
+                       addr.sin6_addr = IN6ADDR_LOOPBACK_INIT;
+                       addr.sin6_port = htons(global_flags.srt_port);
+                       srt_connect(sock, (sockaddr *)&addr, sizeof(addr));
+                       srt_close(sock);
+               }
+               srt_thread.join();
+       }
+#endif
 }
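
The busy-waiting remark above is why quit() resorts to connecting to its own listening socket just to make the blocked srt_accept() return. The same unblock-by-connecting trick, written against plain BSD sockets so it can be compiled standalone (everything here is illustrative), looks like this:

#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

#include <atomic>
#include <cstdio>
#include <thread>

int main()
{
        int listener = socket(AF_INET, SOCK_STREAM, 0);
        sockaddr_in addr{};
        addr.sin_family = AF_INET;
        addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
        addr.sin_port = 0;  // let the kernel pick a free port
        bind(listener, (sockaddr *)&addr, sizeof(addr));
        listen(listener, 1);

        socklen_t len = sizeof(addr);
        getsockname(listener, (sockaddr *)&addr, &len);  // learn the chosen port

        std::atomic<bool> should_quit{false};
        std::thread accept_thread([&] {
                for ( ;; ) {
                        int client = accept(listener, nullptr, nullptr);
                        if (should_quit) {
                                if (client != -1) close(client);
                                break;
                        }
                        close(client);  // a real server would hand the socket off here
                }
        });

        // Shutdown: set the flag first, then connect once so accept() wakes up.
        should_quit = true;
        int wakeup = socket(AF_INET, SOCK_STREAM, 0);
        connect(wakeup, (sockaddr *)&addr, sizeof(addr));
        close(wakeup);

        accept_thread.join();
        close(listener);
        printf("accept thread exited cleanly\n");
        return 0;
}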
 
 void Mixer::transition_clicked(int transition_num)
@@ -1813,4 +1910,43 @@ void Mixer::OutputChannel::set_color_updated_callback(Mixer::color_updated_callb
        color_updated_callback = callback;
 }
 
+#ifdef HAVE_SRT
+void Mixer::start_srt()
+{
+       SRTSOCKET sock = srt_socket(AF_INET6, 0, 0);
+       sockaddr_in6 addr;
+       memset(&addr, 0, sizeof(addr));
+       addr.sin6_family = AF_INET6;
+       addr.sin6_port = htons(global_flags.srt_port);
+
+       int err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
+       if (err != 0) {
+               fprintf(stderr, "srt_bind: %s\n", srt_getlasterror_str());
+               abort();
+       }
+       err = srt_listen(sock, MAX_VIDEO_CARDS);
+       if (err != 0) {
+               fprintf(stderr, "srt_listen: %s\n", srt_getlasterror_str());
+               abort();
+       }
+
+       srt_thread = thread([this, sock] {
+               sockaddr_in6 addr;
+               for ( ;; ) {
+                       int sa_len = sizeof(addr);
+                       int clientsock = srt_accept(sock, (sockaddr *)&addr, &sa_len);
+                       if (should_quit) {
+                               if (clientsock != -1) {
+                                       srt_close(clientsock);
+                               }
+                               break;
+                       }
+                       lock_guard<mutex> lock(hotplug_mutex);
+                       hotplugged_srt_cards.push_back(clientsock);
+               }
+               srt_close(sock);
+       });
+}
+#endif
+
 mutex RefCountedGLsync::fence_lock;