#include <movit/effect.h>
#include <movit/effect_chain.h>
#include <movit/effect_util.h>
#include <movit/flat_input.h>
#include <movit/image_format.h>
#include <movit/init.h>
#include <movit/resource_pool.h>

#include <condition_variable>

#include "DeckLinkAPI.h"

#include "alsa_output.h"
#include "basic_stats.h"
#include "bmusb/bmusb.h"
#include "bmusb/fake_capture.h"
#include "cef_capture.h"
#include "chroma_subsampler.h"
#include "shared/context.h"
#include "decklink_capture.h"
#include "decklink_output.h"
#include "decklink_util.h"
#include "shared/disk_space_estimator.h"
#include "ffmpeg_capture.h"
#include "image_input.h"
#include "input_mapping.h"
#include "shared/metrics.h"
#include "shared/va_display.h"
#include "mjpeg_encoder.h"
#include "pbo_frame_allocator.h"
#include "shared/ref_counted_gl_sync.h"
#include "resampling_queue.h"
#include "shared/timebase.h"
#include "timecode_renderer.h"
#include "v210_converter.h"
#include "video_encoder.h"

#include <google/protobuf/util/json_util.h>

// Must come after CEF, since it includes <syslog.h>, which has #defines
// that conflict with CEF logging constants.
using namespace movit;
using namespace std;
using namespace std::chrono;
using namespace std::placeholders;
using namespace bmusb;

Mixer *global_mixer = nullptr;

namespace {

void insert_new_frame(RefCountedFrame frame, unsigned field_num, bool interlaced, unsigned card_index, InputState *input_state)
{
	if (interlaced) {
		for (unsigned frame_num = FRAME_HISTORY_LENGTH; frame_num --> 1; ) {  // :-)
			input_state->buffered_frames[card_index][frame_num] =
				input_state->buffered_frames[card_index][frame_num - 1];
		}
		input_state->buffered_frames[card_index][0] = { frame, field_num };
	} else {
		for (unsigned frame_num = 0; frame_num < FRAME_HISTORY_LENGTH; ++frame_num) {
			input_state->buffered_frames[card_index][frame_num] = { frame, field_num };
		}
	}
}
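// A note on the two branches above: for interlaced sources each field becomes
// its own history entry, so the history is shifted one step back and the new
// field goes in front; for progressive sources every history slot is simply
// overwritten with the same frame, so that effects which look at past frames
// (e.g., a deinterlacer) always see something sane.
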
void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned field, unsigned width, unsigned height, unsigned cbcr_width, unsigned cbcr_height, unsigned v210_width)
{
	bool first;
	switch (userdata->pixel_format) {
	case PixelFormat_10BitYCbCr:
		first = userdata->tex_v210[field] == 0 || userdata->tex_444[field] == 0;
		break;
	case PixelFormat_8BitYCbCr:
		first = userdata->tex_y[field] == 0 || userdata->tex_cbcr[field] == 0;
		break;
	case PixelFormat_8BitBGRA:
		first = userdata->tex_rgba[field] == 0;
		break;
	case PixelFormat_8BitYCbCrPlanar:
		first = userdata->tex_y[field] == 0 || userdata->tex_cb[field] == 0 || userdata->tex_cr[field] == 0;
		break;
	default:
		assert(false);
	}

	const bool recreate_main_texture =
		first ||
		width != userdata->last_width[field] ||
		height != userdata->last_height[field] ||
		cbcr_width != userdata->last_cbcr_width[field] ||
		cbcr_height != userdata->last_cbcr_height[field];
	const bool recreate_v210_texture =
		global_flags.bit_depth > 8 &&
		(first || v210_width != userdata->last_v210_width[field] || height != userdata->last_height[field]);

	if (recreate_main_texture) {
		// We changed resolution since the last use of this texture, so we need to create
		// a new object. Note that since each card has its own PBOFrameAllocator,
		// we don't need to worry about these flip-flopping between resolutions.
		switch (userdata->pixel_format) {
		case PixelFormat_10BitYCbCr:
			glBindTexture(GL_TEXTURE_2D, userdata->tex_444[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
			break;
		case PixelFormat_8BitYCbCr: {
			glBindTexture(GL_TEXTURE_2D, userdata->tex_cbcr[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RG8, cbcr_width, height, 0, GL_RG, GL_UNSIGNED_BYTE, nullptr);
			glBindTexture(GL_TEXTURE_2D, userdata->tex_y[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, nullptr);
			break;
		}
		case PixelFormat_8BitYCbCrPlanar: {
			glBindTexture(GL_TEXTURE_2D, userdata->tex_y[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, nullptr);
			glBindTexture(GL_TEXTURE_2D, userdata->tex_cb[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, cbcr_width, cbcr_height, 0, GL_RED, GL_UNSIGNED_BYTE, nullptr);
			glBindTexture(GL_TEXTURE_2D, userdata->tex_cr[field]);
			glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, cbcr_width, cbcr_height, 0, GL_RED, GL_UNSIGNED_BYTE, nullptr);
			break;
		}
		case PixelFormat_8BitBGRA:
			glBindTexture(GL_TEXTURE_2D, userdata->tex_rgba[field]);
			// NOTE: sRGB may be disabled by sRGBSwitchingFlatInput.
			glTexImage2D(GL_TEXTURE_2D, 0, GL_SRGB8_ALPHA8, width, height, 0, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, nullptr);
			break;
		default:
			assert(false);
		}
		userdata->last_width[field] = width;
		userdata->last_height[field] = height;
		userdata->last_cbcr_width[field] = cbcr_width;
		userdata->last_cbcr_height[field] = cbcr_height;
	}
	if (recreate_v210_texture) {
		// Same as above; we need to recreate the texture.
		glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
		userdata->last_v210_width[field] = v210_width;
		userdata->last_height[field] = height;
	}
}

void upload_texture(GLuint tex, GLuint width, GLuint height, GLuint stride, bool interlaced_stride, GLenum format, GLenum type, GLintptr offset)
{
	if (interlaced_stride) {
		stride *= 2;
	}
	if (global_flags.flush_pbos) {
		glFlushMappedBufferRange(GL_PIXEL_UNPACK_BUFFER, offset, stride * height);
	}

	glBindTexture(GL_TEXTURE_2D, tex);
	if (interlaced_stride) {
		glPixelStorei(GL_UNPACK_ROW_LENGTH, width * 2);
	} else {
		glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
	}

	glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, format, type, BUFFER_OFFSET(offset));
	glBindTexture(GL_TEXTURE_2D, 0);
	glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
}

}  // namespace

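// How the interlaced path in upload_texture() works: the two fields are
// interleaved line by line in the source buffer, so a single field occupies
// every other line. Setting GL_UNPACK_ROW_LENGTH to twice the texture width
// makes GL skip one source line per destination row, so the field is
// deinterleaved for free during the upload. Illustrative layout (assuming
// the field being uploaded starts at line 0):
//
//   source line 0 -> texture row 0
//   source line 1 -> (skipped; belongs to the other field)
//   source line 2 -> texture row 1
//   ...
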
void JitterHistory::register_metrics(const vector<pair<string, string>> &labels)
{
	global_metrics.add("input_underestimated_jitter_frames", labels, &metric_input_underestimated_jitter_frames);
	global_metrics.add("input_estimated_max_jitter_seconds", labels, &metric_input_estimated_max_jitter_seconds, Metrics::TYPE_GAUGE);
}

void JitterHistory::unregister_metrics(const vector<pair<string, string>> &labels)
{
	global_metrics.remove("input_underestimated_jitter_frames", labels);
	global_metrics.remove("input_estimated_max_jitter_seconds", labels);
}

void JitterHistory::frame_arrived(steady_clock::time_point now, int64_t frame_duration, size_t dropped_frames)
{
	if (frame_duration != last_duration) {
		// If the frame rate changed, the input clock is also going to change,
		// so our historical data doesn't make much sense anymore.
		// Also, format changes typically introduce blips that are not representative
		// of the typical frame stream. (We assume that format changes don't happen
		// all the time in regular use; if they did, we would probably rather keep
		// the history, so that any jitter they introduce would be taken into account.)
		clear();
		last_duration = frame_duration;
	}

	if (expected_timestamp > steady_clock::time_point::min()) {
		expected_timestamp += dropped_frames * nanoseconds(frame_duration * 1000000000 / TIMEBASE);
		double jitter_seconds = fabs(duration<double>(expected_timestamp - now).count());
		history.push_back(orders.insert(jitter_seconds));
		if (jitter_seconds > estimate_max_jitter()) {
			++metric_input_underestimated_jitter_frames;
		}
		metric_input_estimated_max_jitter_seconds = estimate_max_jitter();

		if (history.size() > history_length) {
			orders.erase(history.front());
			history.pop_front();
		}
		assert(history.size() <= history_length);
	}
	expected_timestamp = now + nanoseconds(frame_duration * 1000000000 / TIMEBASE);
}

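// About the tick conversion in frame_arrived() above: frame_duration is given
// in TIMEBASE units, so frame_duration * 1000000000 / TIMEBASE is the frame
// length in nanoseconds. E.g. (illustrative numbers only), for a 60 fps input,
// frame_duration is TIMEBASE / 60, which converts to 16666666 ns, i.e., about
// 16.7 ms per frame.
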
double JitterHistory::estimate_max_jitter() const
{
	if (orders.empty()) {
		return 0.0;
	}
	size_t elem_idx = lrint((orders.size() - 1) * percentile);
	if (percentile <= 0.5) {
		// The element is in the lower half, so walk from the start of the multiset.
		return *next(orders.begin(), elem_idx) * multiplier;
	} else {
		// The element is in the upper half, so walking from the end is cheaper.
		return *prev(orders.end(), orders.size() - elem_idx) * multiplier;
	}
}

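// For instance (illustrative numbers, not a recommendation): with 1000 entries
// in <orders> and percentile = 0.99, elem_idx becomes lrint(999 * 0.99) = 989,
// so the else branch walks just 11 steps back from orders.end() instead of
// 989 steps forward from orders.begin().
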
void QueueLengthPolicy::register_metrics(const vector<pair<string, string>> &labels)
{
	global_metrics.add("input_queue_safe_length_frames", labels, &metric_input_queue_safe_length_frames, Metrics::TYPE_GAUGE);
}

void QueueLengthPolicy::unregister_metrics(const vector<pair<string, string>> &labels)
{
	global_metrics.remove("input_queue_safe_length_frames", labels);
}

void QueueLengthPolicy::update_policy(steady_clock::time_point now,
                                      steady_clock::time_point expected_next_input_frame,
                                      int64_t input_frame_duration,
                                      int64_t master_frame_duration,
                                      double max_input_card_jitter_seconds,
                                      double max_master_card_jitter_seconds)
{
	double input_frame_duration_seconds = input_frame_duration / double(TIMEBASE);
	double master_frame_duration_seconds = master_frame_duration / double(TIMEBASE);

	// Figure out when we can expect the next frame for this card, assuming
	// worst-case jitter (ie., the frame is maximally late).
	double seconds_until_next_frame = max(duration<double>(expected_next_input_frame - now).count() + max_input_card_jitter_seconds, 0.0);

	// How many times is the master card expected to tick in that time?
	// We assume the master clock has worst-case jitter but no rate
	// discrepancy, ie., it ticks as early as possible every time, but not
	// more often than once per master frame duration.
	double frames_needed = (seconds_until_next_frame + max_master_card_jitter_seconds) / master_frame_duration_seconds;

	// As a special case, if the master card ticks faster than the input card,
	// we expect the queue to drain by itself even without dropping. But if
	// the difference is small (e.g., 60 Hz master and 59.94 Hz input), it would
	// drain slowly enough that the effect wouldn't really be appreciable.
	// We account for this by looking at the situation five frames ahead,
	// assuming everything else is the same.
	double frames_allowed;
	if (master_frame_duration < input_frame_duration) {
		frames_allowed = frames_needed + 5 * (input_frame_duration_seconds - master_frame_duration_seconds) / master_frame_duration_seconds;
	} else {
		frames_allowed = frames_needed;
	}

	safe_queue_length = max<int>(floor(frames_allowed), 0);
	metric_input_queue_safe_length_frames = safe_queue_length;
}

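// A worked example of the policy above (all numbers purely illustrative):
// say the next input frame is expected in 10 ms, input jitter is 5 ms, master
// jitter is 2 ms, and the master card runs at 60 Hz, so
// master_frame_duration_seconds is about 0.0167. Then
//
//   seconds_until_next_frame = 0.010 + 0.005 = 0.015
//   frames_needed = (0.015 + 0.002) / 0.0167, roughly 1.02
//
// and with a 59.94 Hz input, the five-frames-ahead correction only adds
// 5 * (1/59.94 - 1/60) / (1/60), roughly 0.005 frames, so safe_queue_length
// ends up as floor(1.025) = 1.
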
Mixer::Mixer(const QSurfaceFormat &format)
	: httpd(),
	  mixer_surface(create_surface(format)),
	  h264_encoder_surface(create_surface(format)),
	  decklink_output_surface(create_surface(format)),
	  image_update_surface(create_surface(format))
{
	memcpy(ycbcr_interpretation, global_flags.ycbcr_interpretation, sizeof(ycbcr_interpretation));
	CHECK(init_movit(MOVIT_SHADER_DIR, MOVIT_DEBUG_OFF));

	if (!epoxy_has_gl_extension("GL_EXT_texture_sRGB_decode") ||
	    !epoxy_has_gl_extension("GL_ARB_sampler_objects")) {
		fprintf(stderr, "Nageru requires GL_EXT_texture_sRGB_decode and GL_ARB_sampler_objects to run.\n");
		exit(1);
	}

	// Since we allow non-bouncing 4:2:2 YCbCrInputs, effective subpixel precision
	// will be halved when sampling them, and we need to compensate here.
	movit_texel_subpixel_precision /= 2.0;

	resource_pool.reset(new ResourcePool);
	for (unsigned i = 0; i < NUM_OUTPUTS; ++i) {
		output_channel[i].parent = this;
		output_channel[i].channel = i;
	}

	ImageFormat inout_format;
	inout_format.color_space = COLORSPACE_sRGB;
	inout_format.gamma_curve = GAMMA_sRGB;

	// Matches the 4:2:0 format created by the main chain.
	YCbCrFormat ycbcr_format;
	ycbcr_format.chroma_subsampling_x = 2;
	ycbcr_format.chroma_subsampling_y = 2;
	if (global_flags.ycbcr_rec709_coefficients) {
		ycbcr_format.luma_coefficients = YCBCR_REC_709;
	} else {
		ycbcr_format.luma_coefficients = YCBCR_REC_601;
	}
	ycbcr_format.full_range = false;
	ycbcr_format.num_levels = 1 << global_flags.bit_depth;
	// Chroma siting: x = 0.0 means co-sited horizontally with the leftmost luma
	// sample; y = 0.5 means centered vertically between the two luma rows,
	// i.e., standard MPEG-style 4:2:0 siting.
	ycbcr_format.cb_x_position = 0.0f;
	ycbcr_format.cr_x_position = 0.0f;
	ycbcr_format.cb_y_position = 0.5f;
	ycbcr_format.cr_y_position = 0.5f;

	// Initialize the neutral colors to sane values.
	for (unsigned i = 0; i < MAX_VIDEO_CARDS; ++i) {
		last_received_neutral_color[i] = RGBTriplet(1.0f, 1.0f, 1.0f);
	}

	// Display chain; shows the live output produced by the main chain (or rather, a copy of it).
	display_chain.reset(new EffectChain(global_flags.width, global_flags.height, resource_pool.get()));
	check_error();
	GLenum type = global_flags.bit_depth > 8 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_BYTE;
	display_input = new YCbCrInput(inout_format, ycbcr_format, global_flags.width, global_flags.height, YCBCR_INPUT_SPLIT_Y_AND_CBCR, type);
	display_chain->add_input(display_input);
	display_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
	display_chain->set_dither_bits(0);  // Don't bother.
	display_chain->finalize();

	video_encoder.reset(new VideoEncoder(resource_pool.get(), h264_encoder_surface, global_flags.va_display, global_flags.width, global_flags.height, &httpd, global_disk_space_estimator));
	if (!global_flags.card_to_mjpeg_stream_export.empty()) {
		mjpeg_encoder.reset(new MJPEGEncoder(&httpd, global_flags.va_display));
	}

	// Must be instantiated after VideoEncoder has initialized global_flags.use_zerocopy.
	theme.reset(new Theme(global_flags.theme_filename, global_flags.theme_dirs, resource_pool.get()));

	// Must be instantiated after the theme, as the theme decides the number of FFmpeg inputs.
	std::vector<FFmpegCapture *> video_inputs = theme->get_video_inputs();
	audio_mixer.reset(new AudioMixer);

	httpd.add_endpoint("/channels", bind(&Mixer::get_channels_json, this), HTTPD::ALLOW_ALL_ORIGINS);
	for (int channel_idx = 0; channel_idx < theme->get_num_channels(); ++channel_idx) {
		char url[256];
		snprintf(url, sizeof(url), "/channels/%d/color", channel_idx + 2);  // Channels 0 and 1 are live and preview.
		httpd.add_endpoint(url, bind(&Mixer::get_channel_color_http, this, unsigned(channel_idx + 2)), HTTPD::ALLOW_ALL_ORIGINS);
	}

	// Start listening for clients only once VideoEncoder has written its header, if any.
	httpd.start(global_flags.http_port);

	// First try initializing the PCI devices, then USB, then
	// fill up with fake cards until we have the desired number of cards.
	unsigned num_pci_devices = 0;
	unsigned card_index = 0;

	{
		IDeckLinkIterator *decklink_iterator = CreateDeckLinkIteratorInstance();
		if (decklink_iterator != nullptr) {
			for ( ; card_index < unsigned(global_flags.max_num_cards); ++card_index) {
				IDeckLink *decklink;
				if (decklink_iterator->Next(&decklink) != S_OK) {
					break;
				}

				if (!decklink_card_is_active(decklink, card_index)) {
					fprintf(stderr, "DeckLink card %u is inactive in current profile, skipping (try changing it in Desktop Video Setup)\n", card_index);
					decklink->Release();
					continue;
				}

				DeckLinkCapture *capture = new DeckLinkCapture(decklink, card_index);
				DeckLinkOutput *output = new DeckLinkOutput(resource_pool.get(), decklink_output_surface, global_flags.width, global_flags.height, card_index);
				if (!output->set_device(decklink, capture->get_input())) {
					delete output;
					output = nullptr;
				}
				configure_card(card_index, capture, CardType::LIVE_CARD, output, /*is_srt_card=*/false);
				++num_pci_devices;
			}
			decklink_iterator->Release();
			fprintf(stderr, "Found %u DeckLink PCI card(s).\n", num_pci_devices);
		} else {
			fprintf(stderr, "DeckLink drivers not found. Probing for USB cards only.\n");
		}
	}

	unsigned num_usb_devices = BMUSBCapture::num_cards();
	for (unsigned usb_card_index = 0; usb_card_index < num_usb_devices && card_index < unsigned(global_flags.max_num_cards); ++usb_card_index, ++card_index) {
		BMUSBCapture *capture = new BMUSBCapture(usb_card_index);
		capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, card_index));
		configure_card(card_index, capture, CardType::LIVE_CARD, /*output=*/nullptr, /*is_srt_card=*/false);
	}
	fprintf(stderr, "Found %u USB card(s).\n", num_usb_devices);

	// Fill up with fake cards for as long as we can, so that the FFmpeg
	// and HTML cards always come last.
	unsigned num_fake_cards = 0;
#ifdef HAVE_CEF
	size_t num_html_inputs = theme->get_html_inputs().size();
#else
	size_t num_html_inputs = 0;
#endif
	for ( ; card_index < MAX_VIDEO_CARDS - video_inputs.size() - num_html_inputs; ++card_index) {
		// Only bother to activate fake capture cards to satisfy the minimum.
		bool is_active = card_index < unsigned(global_flags.min_num_cards) || cards[card_index].force_active;
		if (is_active) {
			FakeCapture *capture = new FakeCapture(global_flags.width, global_flags.height, FAKE_FPS, OUTPUT_FREQUENCY, card_index, global_flags.fake_cards_audio);
			configure_card(card_index, capture, CardType::FAKE_CAPTURE, /*output=*/nullptr, /*is_srt_card=*/false);
			++num_fake_cards;
		} else {
			configure_card(card_index, nullptr, CardType::FAKE_CAPTURE, /*output=*/nullptr, /*is_srt_card=*/false);
		}
	}

	if (num_fake_cards > 0) {
		fprintf(stderr, "Initialized %u fake cards.\n", num_fake_cards);
	}

	// Initialize all video inputs the theme asked for.
	for (unsigned video_card_index = 0; video_card_index < video_inputs.size(); ++card_index, ++video_card_index) {
		if (card_index >= MAX_VIDEO_CARDS) {
			fprintf(stderr, "ERROR: Not enough card slots available for the videos the theme requested.\n");
			exit(1);
		}
		configure_card(card_index, video_inputs[video_card_index], CardType::FFMPEG_INPUT, /*output=*/nullptr, /*is_srt_card=*/false);
		video_inputs[video_card_index]->set_card_index(card_index);
	}
	num_video_inputs = video_inputs.size();

#ifdef HAVE_CEF
	// Same, for HTML inputs.
	std::vector<CEFCapture *> html_inputs = theme->get_html_inputs();
	for (unsigned html_card_index = 0; html_card_index < html_inputs.size(); ++card_index, ++html_card_index) {
		if (card_index >= MAX_VIDEO_CARDS) {
			fprintf(stderr, "ERROR: Not enough card slots available for the HTML inputs the theme requested.\n");
			exit(1);
		}
		configure_card(card_index, html_inputs[html_card_index], CardType::CEF_INPUT, /*output=*/nullptr, /*is_srt_card=*/false);
		html_inputs[html_card_index]->set_card_index(card_index);
	}
	num_html_inputs = html_inputs.size();
#endif

	BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
	BMUSBCapture::start_bm_thread();

#ifdef HAVE_SRT
	if (global_flags.srt_port >= 0) {
		start_srt();
	}
#endif

	chroma_subsampler.reset(new ChromaSubsampler(resource_pool.get()));

	if (global_flags.bit_depth > 8) {
		if (!v210Converter::has_hardware_support()) {
			fprintf(stderr, "ERROR: --ten-bit-input requires support for OpenGL compute shaders\n");
			fprintf(stderr, "       (OpenGL 4.3, or GL_ARB_compute_shader + GL_ARB_shader_image_load_store).\n");
			exit(1);
		}
		v210_converter.reset(new v210Converter());

		// These are all the widths listed in the Blackmagic SDK documentation
		// (section 2.7.3, “Display Modes”).
		v210_converter->precompile_shader(720);
		v210_converter->precompile_shader(1280);
		v210_converter->precompile_shader(1920);
		v210_converter->precompile_shader(2048);
		v210_converter->precompile_shader(3840);
		v210_converter->precompile_shader(4096);
	}
	if (global_flags.bit_depth > 8) {
		if (!v210Converter::has_hardware_support()) {
			fprintf(stderr, "ERROR: --ten-bit-output requires support for OpenGL compute shaders\n");
			fprintf(stderr, "       (OpenGL 4.3, or GL_ARB_compute_shader + GL_ARB_shader_image_load_store).\n");
			exit(1);
		}
	}

	timecode_renderer.reset(new TimecodeRenderer(resource_pool.get(), global_flags.width, global_flags.height));
	display_timecode_in_stream = global_flags.display_timecode_in_stream;
	display_timecode_on_stdout = global_flags.display_timecode_on_stdout;

	if (global_flags.enable_alsa_output) {
		alsa.reset(new ALSAOutput(OUTPUT_FREQUENCY, /*num_channels=*/2));
	}
	output_card_is_master = global_flags.output_card_is_master;
	if (global_flags.output_card != -1) {
		desired_output_card_index = global_flags.output_card;
		set_output_card_internal(global_flags.output_card);
	}

	output_jitter_history.register_metrics({{ "card", "output" }});

	ImageInput::start_update_thread(image_update_surface);
}

Mixer::~Mixer()
{
	ImageInput::end_update_thread();

	if (mjpeg_encoder != nullptr) {
		mjpeg_encoder->stop();
	}

	BMUSBCapture::stop_bm_thread();

	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		if (cards[card_index].capture != nullptr) {  // Active.
			cards[card_index].capture->stop_dequeue_thread();
		}
		if (cards[card_index].output) {
			cards[card_index].output->end_output();
			cards[card_index].output.reset();
		}
	}

	video_encoder.reset(nullptr);
}

void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool is_srt_card)
{
	bool is_active = capture != nullptr;
	if (is_active) {
		printf("Configuring card %d...\n", card_index);
	} else {
		assert(card_type == CardType::FAKE_CAPTURE);
	}

	CaptureCard *card = &cards[card_index];
	if (card->capture != nullptr) {
		card_mutex.unlock();  // The dequeue thread could be waiting for bm_frame().
		card->capture->stop_dequeue_thread();
		card_mutex.lock();
	}
	card->capture.reset(capture);
	card->is_fake_capture = (card_type == CardType::FAKE_CAPTURE);
	if (card->is_fake_capture) {
		card->fake_capture_counter = fake_capture_counter++;
	}
	card->is_cef_capture = (card_type == CardType::CEF_INPUT);
	card->may_have_dropped_last_frame = false;
	card->type = card_type;
	if (card->output.get() != output) {
		card->output.reset(output);
	}

	PixelFormat pixel_format;
	if (card_type == CardType::FFMPEG_INPUT) {
		pixel_format = capture->get_current_pixel_format();
	} else if (card_type == CardType::CEF_INPUT) {
		pixel_format = PixelFormat_8BitBGRA;
	} else if (global_flags.bit_depth > 8) {
		pixel_format = PixelFormat_10BitYCbCr;
	} else {
		pixel_format = PixelFormat_8BitYCbCr;
	}

	if (is_active) {
		card->capture->set_frame_callback(bind(&Mixer::bm_frame, this, card_index, _1, _2, _3, _4, _5, _6, _7));
		if (card->frame_allocator == nullptr) {
			card->frame_allocator.reset(new PBOFrameAllocator(pixel_format, FRAME_SIZE, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get()));
		} else {
			// The format could have changed, but we cannot reset the allocator
			// and create a new one from scratch, since there may be allocated
			// frames from it that expect to call release_frame() on it.
			// Instead, ask the allocator to create new frames for us and discard
			// any old ones as they come back. This takes the mutex while
			// allocating, but nothing should really be sending frames in there
			// right now anyway (start_bm_capture() has not been called yet).
			card->frame_allocator->reconfigure(pixel_format, FRAME_SIZE, global_flags.width, global_flags.height, card_index, mjpeg_encoder.get());
		}
		card->capture->set_video_frame_allocator(card->frame_allocator.get());
		if (card->surface == nullptr) {
			card->surface = create_surface_with_same_format(mixer_surface);
		}
		while (!card->new_frames.empty()) card->new_frames.pop_front();
		card->last_timecode = -1;
		card->capture->set_pixel_format(pixel_format);
		card->capture->configure_card();

		// NOTE: start_bm_capture() happens in thread_func().
	}

	if (is_srt_card) {
		assert(card_type == CardType::FFMPEG_INPUT);
	}

	DeviceSpec device{InputSourceType::CAPTURE_CARD, card_index};
	unsigned num_channels = card_type == CardType::LIVE_CARD ? 8 : 2;
	if (is_active) {
		audio_mixer->set_device_parameters(device, card->capture->get_description(), card_type, num_channels, /*active=*/true);
	} else {
		// Note: Keeps the previous name, if any.
		char name[32];
		snprintf(name, sizeof(name), "Fake card %u", card_index + 1);
		audio_mixer->set_device_parameters(device, name, card_type, num_channels, /*active=*/false);
	}
	audio_mixer->reset_resampler(device);
	audio_mixer->trigger_state_changed_callback();

	// Unregister old metrics, if any.
	if (!card->labels.empty()) {
		const vector<pair<string, string>> &labels = card->labels;
		card->jitter_history.unregister_metrics(labels);
		card->queue_length_policy.unregister_metrics(labels);
		global_metrics.remove_if_exists("input_received_frames", labels);
		global_metrics.remove_if_exists("input_dropped_frames_jitter", labels);
		global_metrics.remove_if_exists("input_dropped_frames_error", labels);
		global_metrics.remove_if_exists("input_dropped_frames_resets", labels);
		global_metrics.remove_if_exists("input_queue_length_frames", labels);
		global_metrics.remove_if_exists("input_queue_duped_frames", labels);

		global_metrics.remove_if_exists("input_has_signal_bool", labels);
		global_metrics.remove_if_exists("input_is_connected_bool", labels);
		global_metrics.remove_if_exists("input_interlaced_bool", labels);
		global_metrics.remove_if_exists("input_width_pixels", labels);
		global_metrics.remove_if_exists("input_height_pixels", labels);
		global_metrics.remove_if_exists("input_frame_rate_nom", labels);
		global_metrics.remove_if_exists("input_frame_rate_den", labels);
		global_metrics.remove_if_exists("input_sample_rate_hz", labels);

		// SRT metrics (may or may not have been registered, depending on the card).

		// Global measurements (counters).
		global_metrics.remove_if_exists("srt_uptime_seconds", labels);
		global_metrics.remove_if_exists("srt_send_duration_seconds", labels);
		global_metrics.remove_if_exists("srt_sent_bytes", labels);
		global_metrics.remove_if_exists("srt_received_bytes", labels);

		vector<pair<string, string>> packet_labels = card->labels;
		packet_labels.emplace_back("type", "normal");
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);

		packet_labels.back().second = "lost";
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);

		packet_labels.back().second = "retransmitted";
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_sent_bytes", packet_labels);

		packet_labels.back().second = "ack";
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);

		packet_labels.back().second = "nak";
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);

		packet_labels.back().second = "dropped";
		global_metrics.remove_if_exists("srt_sent_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);
		global_metrics.remove_if_exists("srt_sent_bytes", packet_labels);
		global_metrics.remove_if_exists("srt_received_bytes", packet_labels);

		packet_labels.back().second = "undecryptable";
		global_metrics.remove_if_exists("srt_received_packets", packet_labels);
		global_metrics.remove_if_exists("srt_received_bytes", packet_labels);

		global_metrics.remove_if_exists("srt_filter_sent_extra_packets", labels);
		global_metrics.remove_if_exists("srt_filter_received_extra_packets", labels);
		global_metrics.remove_if_exists("srt_filter_received_rebuilt_packets", labels);
		global_metrics.remove_if_exists("srt_filter_received_lost_packets", labels);

		// Instant measurements (gauges).
		global_metrics.remove_if_exists("srt_packet_sending_period_seconds", labels);
		global_metrics.remove_if_exists("srt_flow_window_packets", labels);
		global_metrics.remove_if_exists("srt_congestion_window_packets", labels);
		global_metrics.remove_if_exists("srt_flight_size_packets", labels);
		global_metrics.remove_if_exists("srt_rtt_seconds", labels);
		global_metrics.remove_if_exists("srt_estimated_bandwidth_bits_per_second", labels);
		global_metrics.remove_if_exists("srt_bandwidth_ceiling_bits_per_second", labels);
		global_metrics.remove_if_exists("srt_send_buffer_available_bytes", labels);
		global_metrics.remove_if_exists("srt_receive_buffer_available_bytes", labels);
		global_metrics.remove_if_exists("srt_mss_bytes", labels);

		global_metrics.remove_if_exists("srt_sender_unacked_packets", labels);
		global_metrics.remove_if_exists("srt_sender_unacked_bytes", labels);
		global_metrics.remove_if_exists("srt_sender_unacked_timespan_seconds", labels);
		global_metrics.remove_if_exists("srt_sender_delivery_delay_seconds", labels);

		global_metrics.remove_if_exists("srt_receiver_unacked_packets", labels);
		global_metrics.remove_if_exists("srt_receiver_unacked_bytes", labels);
		global_metrics.remove_if_exists("srt_receiver_unacked_timespan_seconds", labels);
		global_metrics.remove_if_exists("srt_receiver_delivery_delay_seconds", labels);
	}

	if (is_active) {
		vector<pair<string, string>> labels;
		char card_name[16];
		snprintf(card_name, sizeof(card_name), "%d", card_index);
		labels.emplace_back("card", card_name);

		switch (card_type) {
		case CardType::LIVE_CARD:
			labels.emplace_back("cardtype", "live");
			break;
		case CardType::FAKE_CAPTURE:
			labels.emplace_back("cardtype", "fake");
			break;
		case CardType::FFMPEG_INPUT:
			if (is_srt_card) {
				labels.emplace_back("cardtype", "srt");
			} else {
				labels.emplace_back("cardtype", "ffmpeg");
			}
			break;
		case CardType::CEF_INPUT:
			labels.emplace_back("cardtype", "cef");
			break;
		default:
			assert(false);
		}

		card->jitter_history.register_metrics(labels);
		card->queue_length_policy.register_metrics(labels);
		global_metrics.add("input_received_frames", labels, &card->metric_input_received_frames);
		global_metrics.add("input_dropped_frames_jitter", labels, &card->metric_input_dropped_frames_jitter);
		global_metrics.add("input_dropped_frames_error", labels, &card->metric_input_dropped_frames_error);
		global_metrics.add("input_dropped_frames_resets", labels, &card->metric_input_resets);
		global_metrics.add("input_queue_length_frames", labels, &card->metric_input_queue_length_frames, Metrics::TYPE_GAUGE);
		global_metrics.add("input_queue_duped_frames", labels, &card->metric_input_duped_frames);

		global_metrics.add("input_has_signal_bool", labels, &card->metric_input_has_signal_bool, Metrics::TYPE_GAUGE);
		global_metrics.add("input_is_connected_bool", labels, &card->metric_input_is_connected_bool, Metrics::TYPE_GAUGE);
		global_metrics.add("input_interlaced_bool", labels, &card->metric_input_interlaced_bool, Metrics::TYPE_GAUGE);
		global_metrics.add("input_width_pixels", labels, &card->metric_input_width_pixels, Metrics::TYPE_GAUGE);
		global_metrics.add("input_height_pixels", labels, &card->metric_input_height_pixels, Metrics::TYPE_GAUGE);
		global_metrics.add("input_frame_rate_nom", labels, &card->metric_input_frame_rate_nom, Metrics::TYPE_GAUGE);
		global_metrics.add("input_frame_rate_den", labels, &card->metric_input_frame_rate_den, Metrics::TYPE_GAUGE);
		global_metrics.add("input_sample_rate_hz", labels, &card->metric_input_sample_rate_hz, Metrics::TYPE_GAUGE);

		if (is_srt_card) {
			// Global measurements (counters).
			global_metrics.add("srt_uptime_seconds", labels, &card->metric_srt_uptime_seconds);
			global_metrics.add("srt_send_duration_seconds", labels, &card->metric_srt_send_duration_seconds);
			global_metrics.add("srt_sent_bytes", labels, &card->metric_srt_sent_bytes);
			global_metrics.add("srt_received_bytes", labels, &card->metric_srt_received_bytes);

			vector<pair<string, string>> packet_labels = labels;
			packet_labels.emplace_back("type", "normal");
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_normal);
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_normal);

			packet_labels.back().second = "lost";
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_lost);
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_lost);

			packet_labels.back().second = "retransmitted";
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_retransmitted);
			global_metrics.add("srt_sent_bytes", packet_labels, &card->metric_srt_sent_bytes_retransmitted);

			packet_labels.back().second = "ack";
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_ack);
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_ack);

			packet_labels.back().second = "nak";
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_nak);
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_nak);

			packet_labels.back().second = "dropped";
			global_metrics.add("srt_sent_packets", packet_labels, &card->metric_srt_sent_packets_dropped);
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_dropped);
			global_metrics.add("srt_sent_bytes", packet_labels, &card->metric_srt_sent_bytes_dropped);
			global_metrics.add("srt_received_bytes", packet_labels, &card->metric_srt_received_bytes_dropped);

			packet_labels.back().second = "undecryptable";
			global_metrics.add("srt_received_packets", packet_labels, &card->metric_srt_received_packets_undecryptable);
			global_metrics.add("srt_received_bytes", packet_labels, &card->metric_srt_received_bytes_undecryptable);

			global_metrics.add("srt_filter_sent_extra_packets", labels, &card->metric_srt_filter_sent_packets);
			global_metrics.add("srt_filter_received_extra_packets", labels, &card->metric_srt_filter_received_extra_packets);
			global_metrics.add("srt_filter_received_rebuilt_packets", labels, &card->metric_srt_filter_received_rebuilt_packets);
			global_metrics.add("srt_filter_received_lost_packets", labels, &card->metric_srt_filter_received_lost_packets);

			// Instant measurements (gauges).
			global_metrics.add("srt_packet_sending_period_seconds", labels, &card->metric_srt_packet_sending_period_seconds, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_flow_window_packets", labels, &card->metric_srt_flow_window_packets, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_congestion_window_packets", labels, &card->metric_srt_congestion_window_packets, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_flight_size_packets", labels, &card->metric_srt_flight_size_packets, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_rtt_seconds", labels, &card->metric_srt_rtt_seconds, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_estimated_bandwidth_bits_per_second", labels, &card->metric_srt_estimated_bandwidth_bits_per_second, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_bandwidth_ceiling_bits_per_second", labels, &card->metric_srt_bandwidth_ceiling_bits_per_second, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_send_buffer_available_bytes", labels, &card->metric_srt_send_buffer_available_bytes, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_receive_buffer_available_bytes", labels, &card->metric_srt_receive_buffer_available_bytes, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_mss_bytes", labels, &card->metric_srt_mss_bytes, Metrics::TYPE_GAUGE);

			global_metrics.add("srt_sender_unacked_packets", labels, &card->metric_srt_sender_unacked_packets, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_sender_unacked_bytes", labels, &card->metric_srt_sender_unacked_bytes, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_sender_unacked_timespan_seconds", labels, &card->metric_srt_sender_unacked_timespan_seconds, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_sender_delivery_delay_seconds", labels, &card->metric_srt_sender_delivery_delay_seconds, Metrics::TYPE_GAUGE);

			global_metrics.add("srt_receiver_unacked_packets", labels, &card->metric_srt_receiver_unacked_packets, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_receiver_unacked_bytes", labels, &card->metric_srt_receiver_unacked_bytes, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_receiver_unacked_timespan_seconds", labels, &card->metric_srt_receiver_unacked_timespan_seconds, Metrics::TYPE_GAUGE);
			global_metrics.add("srt_receiver_delivery_delay_seconds", labels, &card->metric_srt_receiver_delivery_delay_seconds, Metrics::TYPE_GAUGE);
		}

		card->labels = labels;
	} else {
		card->labels.clear();
	}
}

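// A note on the metrics scheme above: every card exports the same metric
// names; they are distinguished only by the Prometheus-style label sets built
// in configure_card() (card number, plus a cardtype of live/fake/ffmpeg/srt/
// cef). This is also why reconfiguring a card must unregister the old label
// set first; otherwise the same (name, labels) pair would be registered twice.
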
void Mixer::set_output_card_internal(int card_index)
{
	// We don't really need to take card_mutex, since we're in the mixer
	// thread and don't mess with any queues (which is the only thing that happens
	// from other threads), but it's probably the safest in the long run.
	unique_lock<mutex> lock(card_mutex);
	if (output_card_index != -1) {
		// Switch the old card from output to input.
		CaptureCard *old_card = &cards[output_card_index];
		old_card->output->end_output();

		// Stop the fake card that we put into place.
		// This needs to _not_ happen under the mutex, to avoid deadlock
		// (delivering the last frame needs to take the mutex).
		CaptureInterface *fake_capture = old_card->capture.get();
		lock.unlock();
		fake_capture->stop_dequeue_thread();
		lock.lock();
		old_card->capture = move(old_card->parked_capture);  // TODO: reset the metrics.
		old_card->is_fake_capture = false;
		old_card->capture->start_bm_capture();
	}
	if (card_index != -1) {
		CaptureCard *card = &cards[card_index];
		CaptureInterface *capture = card->capture.get();
		// TODO: DeckLinkCapture::stop_dequeue_thread can actually take
		// several seconds to complete (blocking on DisableVideoInput);
		// see if we can maybe do it asynchronously.
		lock.unlock();
		capture->stop_dequeue_thread();
		lock.lock();
		card->parked_capture = move(card->capture);
		CaptureInterface *fake_capture = new FakeCapture(global_flags.width, global_flags.height, FAKE_FPS, OUTPUT_FREQUENCY, card_index, global_flags.fake_cards_audio);
		configure_card(card_index, fake_capture, CardType::FAKE_CAPTURE, card->output.release(), /*is_srt_card=*/false);
		card->jitter_history.clear();
		card->capture->start_bm_capture();
		desired_output_video_mode = output_video_mode = card->output->pick_video_mode(desired_output_video_mode);
		card->output->start_output(desired_output_video_mode, pts_int, /*is_master_card=*/output_card_is_master);
	}
	output_card_index = card_index;
	output_jitter_history.clear();
}

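// A note on the dance above: the capture that used to feed the new output
// card is not destroyed but "parked" (card->parked_capture), and a FakeCapture
// takes over its slot, inheriting the DeckLink output object, so that the rest
// of the mixer still sees a full set of cards. When the output later moves
// elsewhere, the parked capture is swapped back in.
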
int unwrap_timecode(uint16_t current_wrapped, int last)
{
	uint16_t last_wrapped = last & 0xffff;
	if (current_wrapped > last_wrapped) {
		return (last & ~0xffff) | current_wrapped;
	} else {
		return 0x10000 + ((last & ~0xffff) | current_wrapped);
	}
}

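// Example of the unwrapping (illustrative): if last == 0x1fffe, then
// last_wrapped == 0xfffe. If the card now reports current_wrapped == 0x0003,
// we take the wraparound branch and return 0x10000 + (0x10000 | 0x0003) ==
// 0x20003. The caller then sees a jump of five frames, i.e., four dropped.
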
void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
                     FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
                     FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format)
{
	DeviceSpec device{InputSourceType::CAPTURE_CARD, card_index};
	CaptureCard *card = &cards[card_index];

	++card->metric_input_received_frames;
	card->metric_input_has_signal_bool = video_format.has_signal;
	card->metric_input_is_connected_bool = video_format.is_connected;
	card->metric_input_interlaced_bool = video_format.interlaced;
	card->metric_input_width_pixels = video_format.width;
	card->metric_input_height_pixels = video_format.height;
	card->metric_input_frame_rate_nom = video_format.frame_rate_nom;
	card->metric_input_frame_rate_den = video_format.frame_rate_den;
	card->metric_input_sample_rate_hz = audio_format.sample_rate;

	if (is_mode_scanning[card_index]) {
		if (video_format.has_signal) {
			// Found a stable signal, so stop scanning.
			is_mode_scanning[card_index] = false;
		} else {
			static constexpr double switch_time_s = 0.1;  // Should be enough time for the signal to stabilize.
			steady_clock::time_point now = steady_clock::now();
			double sec_since_last_switch = duration<double>(steady_clock::now() - last_mode_scan_change[card_index]).count();
			if (sec_since_last_switch > switch_time_s) {
				// It isn't this mode; try the next one.
				mode_scanlist_index[card_index]++;
				mode_scanlist_index[card_index] %= mode_scanlist[card_index].size();
				cards[card_index].capture->set_video_mode(mode_scanlist[card_index][mode_scanlist_index[card_index]]);
				last_mode_scan_change[card_index] = now;
			}
		}
	}

	int64_t frame_length = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
	assert(frame_length > 0);

	size_t num_samples = (audio_frame.len > audio_offset) ? (audio_frame.len - audio_offset) / audio_format.num_channels / (audio_format.bits_per_sample / 8) : 0;
	if (num_samples > OUTPUT_FREQUENCY / 10 && card->type != CardType::FFMPEG_INPUT) {
		printf("%s: Dropping frame with implausible audio length (len=%d, offset=%d) [timecode=0x%04x video_len=%d video_offset=%d video_format=%x]\n",
		       description_for_card(card_index).c_str(), int(audio_frame.len), int(audio_offset),
		       timecode, int(video_frame.len), int(video_offset), video_format.id);
		if (video_frame.owner) {
			video_frame.owner->release_frame(video_frame);
		}
		if (audio_frame.owner) {
			audio_frame.owner->release_frame(audio_frame);
		}
		return;
	}

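	// (About the sanity check above: OUTPUT_FREQUENCY / 10 samples is 100 ms
	// of audio, far more than a single frame should carry at any plausible
	// capture rate, so such a frame is assumed to be a corrupted transfer.
	// FFmpeg inputs are exempt, since they can legitimately deliver audio in
	// larger chunks.)
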
	int dropped_frames = 0;
	if (card->last_timecode != -1) {
		dropped_frames = unwrap_timecode(timecode, card->last_timecode) - card->last_timecode - 1;
	}

	// Number of samples per frame if we need to insert silence.
	// (Could be nonintegral, but resampling will save us then.)
	const int silence_samples = OUTPUT_FREQUENCY * video_format.frame_rate_den / video_format.frame_rate_nom;

	if (dropped_frames > TYPICAL_FPS * 2) {
		fprintf(stderr, "%s lost more than two seconds (or the time code jumped around; from 0x%04x to 0x%04x), resetting resampler.\n",
			description_for_card(card_index).c_str(), card->last_timecode, timecode);
		audio_mixer->reset_resampler(device);
		dropped_frames = 0;
		++card->metric_input_resets;
	} else if (dropped_frames > 0) {
		// Insert silence as needed.
		fprintf(stderr, "%s dropped %d frame(s) (before timecode 0x%04x), inserting silence.\n",
			description_for_card(card_index).c_str(), dropped_frames, timecode);
		card->metric_input_dropped_frames_error += dropped_frames;

		bool success;
		do {
			success = audio_mixer->add_silence(device, silence_samples, dropped_frames);
		} while (!success);
	}

	if (num_samples > 0) {
		audio_mixer->add_audio(device, audio_frame.data + audio_offset, num_samples, audio_format, audio_frame.received_timestamp);

		// Audio for the MJPEG stream. We don't resample; audio that's not in 48 kHz
		// just gets dropped for now.
		//
		// Only bother doing MJPEG encoding if there are any connected clients
		// that want the stream.
		if (httpd.get_num_connected_multicam_clients() > 0 ||
		    httpd.get_num_connected_siphon_clients(card_index) > 0) {
			vector<int32_t> converted_samples = convert_audio_to_fixed32(audio_frame.data + audio_offset, num_samples, audio_format, 2);
			lock_guard<mutex> lock(card_mutex);
			if (card->new_raw_audio.empty()) {
				card->new_raw_audio = move(converted_samples);
			} else {
				// For raw audio, we don't really synchronize audio and video;
				// we just put the audio in frame by frame, and if a video frame is
				// dropped, we still keep the audio, which means it will be added
				// to the beginning of the next frame. It would probably be better
				// to move the audio pts earlier to show this, but most players can
				// live with some jitter, and in a lot of ways, it's much nicer for
				// Futatabi to have all audio locked to a video frame.
				card->new_raw_audio.insert(card->new_raw_audio.end(), converted_samples.begin(), converted_samples.end());

				// Truncate to one second (the buffer is interleaved stereo, so
				// OUTPUT_FREQUENCY * 2 entries), just to be sure we don't have
				// infinite buildup in case of weirdness.
				if (card->new_raw_audio.size() > OUTPUT_FREQUENCY * 2) {
					size_t excess_samples = card->new_raw_audio.size() - OUTPUT_FREQUENCY * 2;
					card->new_raw_audio.erase(card->new_raw_audio.begin(), card->new_raw_audio.begin() + excess_samples);
				}
			}
		}
	}

	// Done with the audio, so release it.
	if (audio_frame.owner) {
		audio_frame.owner->release_frame(audio_frame);
	}

	card->last_timecode = timecode;

	PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)video_frame.userdata;
	if (card->type == CardType::FFMPEG_INPUT && userdata != nullptr) {
		FFmpegCapture *ffmpeg_capture = static_cast<FFmpegCapture *>(card->capture.get());
		userdata->has_last_subtitle = ffmpeg_capture->get_has_last_subtitle();
		userdata->last_subtitle = ffmpeg_capture->get_last_subtitle();
	}
#ifdef HAVE_SRT
	if (card->type == CardType::FFMPEG_INPUT) {
		int srt_sock = static_cast<FFmpegCapture *>(card->capture.get())->get_srt_sock();
		if (srt_sock != -1) {
			update_srt_stats(srt_sock, card);
		}
	}
#endif

	size_t y_offset, cbcr_offset;
	size_t expected_length = video_format.stride * (video_format.height + video_format.extra_lines_top + video_format.extra_lines_bottom);
	if (userdata != nullptr && userdata->pixel_format == PixelFormat_8BitYCbCrPlanar) {
		// The calculation above is wrong for planar Y'CbCr, so just override it.
		assert(card->type == CardType::FFMPEG_INPUT);
		assert(video_offset == 0);
		expected_length = video_frame.len;

		userdata->ycbcr_format = (static_cast<FFmpegCapture *>(card->capture.get()))->get_current_frame_ycbcr_format();
		y_offset = 0;
		cbcr_offset = video_format.width * video_format.height;
	} else {
		// All the other Y'CbCr formats are 4:2:2.
		y_offset = video_frame.size / 2 + video_offset / 2;
		cbcr_offset = video_offset / 2;
	}

	if (video_frame.len - video_offset == 0 ||
	    video_frame.len - video_offset != expected_length) {
		if (video_frame.len != 0) {
			printf("%s: Dropping video frame with wrong length (%zu; expected %zu)\n",
			       description_for_card(card_index).c_str(), video_frame.len - video_offset, expected_length);
		}
		if (video_frame.owner) {
			video_frame.owner->release_frame(video_frame);
		}

		// Still send on the information that we _had_ a frame, even though it's corrupted,
		// so that pts can go up accordingly.
		{
			lock_guard<mutex> lock(card_mutex);
			CaptureCard::NewFrame new_frame;
			new_frame.frame = RefCountedFrame(FrameAllocator::Frame());
			new_frame.length = frame_length;
			new_frame.interlaced = false;
			new_frame.dropped_frames = dropped_frames;
			new_frame.received_timestamp = video_frame.received_timestamp;
			card->new_frames.push_back(move(new_frame));
			card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
		}
		card->new_frames_changed.notify_all();
		return;
	}

	unsigned num_fields = video_format.interlaced ? 2 : 1;
	steady_clock::time_point frame_upload_start;
	if (video_format.interlaced) {
		// Send the two fields along as separate frames; the other side will need to add
		// a deinterlacer to actually get this right.
		assert(video_format.height % 2 == 0);
		video_format.height /= 2;
		assert(frame_length % 2 == 0);
		frame_length /= 2;
		frame_upload_start = steady_clock::now();
	}
	assert(userdata != nullptr);
	userdata->last_interlaced = video_format.interlaced;
	userdata->last_has_signal = video_format.has_signal;
	userdata->last_is_connected = video_format.is_connected;
	userdata->last_frame_rate_nom = video_format.frame_rate_nom;
	userdata->last_frame_rate_den = video_format.frame_rate_den;
	RefCountedFrame frame(video_frame);

	// Send the frames on to the main thread, which will upload and process them.
	// It is entirely possible to upload them in the same thread (and it might even be
	// faster, depending on the GPU and driver), but it appears to trigger driver bugs
	// very easily.
	//
	// Note that this means we must hold on to the actual frame data in <userdata>
	// until the upload is done, but we hold on to <frame> much longer than that
	// (in fact, all the way until we no longer use the texture in rendering).
	for (unsigned field = 0; field < num_fields; ++field) {
		if (field == 1) {
			// Don't upload the second field as fast as we can; wait until
			// the field time has approximately passed. (Otherwise, we could
			// get timing jitter against the other sources, and possibly also
			// against the video display, although the latter is not as critical.)
			// This requires our system clock to be reasonably close to the
			// video clock, but that's not an unreasonable assumption.
			steady_clock::time_point second_field_start = frame_upload_start +
				nanoseconds(frame_length * 1000000000 / TIMEBASE);
			this_thread::sleep_until(second_field_start);
		}

		{
			lock_guard<mutex> lock(card_mutex);
			CaptureCard::NewFrame new_frame;
			new_frame.frame = frame;
			new_frame.length = frame_length;
			new_frame.field = field;
			new_frame.interlaced = video_format.interlaced;
			new_frame.dropped_frames = dropped_frames;
			new_frame.received_timestamp = video_frame.received_timestamp;  // Ignore the audio timestamp.
			new_frame.video_format = video_format;
			new_frame.video_offset = video_offset;
			new_frame.y_offset = y_offset;
			new_frame.cbcr_offset = cbcr_offset;
			new_frame.texture_uploaded = false;
			if (card->type == CardType::FFMPEG_INPUT) {
				FFmpegCapture *ffmpeg_capture = static_cast<FFmpegCapture *>(card->capture.get());
				new_frame.neutral_color = ffmpeg_capture->get_last_neutral_color();
			}
			card->new_frames.push_back(move(new_frame));
			card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
			card->may_have_dropped_last_frame = false;
		}
		card->new_frames_changed.notify_all();
	}
}

void Mixer::upload_texture_for_frame(
	int field, bmusb::VideoFormat video_format,
	size_t y_offset, size_t cbcr_offset, size_t video_offset, PBOFrameAllocator::Userdata *userdata)
{
	size_t cbcr_width, cbcr_height;
	if (userdata != nullptr && userdata->pixel_format == PixelFormat_8BitYCbCrPlanar) {
		cbcr_width = video_format.width / userdata->ycbcr_format.chroma_subsampling_x;
		cbcr_height = video_format.height / userdata->ycbcr_format.chroma_subsampling_y;
	} else {
		// All the other Y'CbCr formats are 4:2:2.
		cbcr_width = video_format.width / 2;
		cbcr_height = video_format.height;
	}

	bool interlaced_stride = video_format.interlaced && (video_format.second_field_start == 1);
	if (video_format.interlaced) {
		cbcr_height /= 2;
	}

	unsigned field_start_line;
	if (field == 1) {
		field_start_line = video_format.second_field_start;
	} else {
		field_start_line = video_format.extra_lines_top;
	}

	// For anything not FRAME_FORMAT_YCBCR_10BIT, v210_width will be nonsensical but not used.
	size_t v210_width = video_format.stride / sizeof(uint32_t);
	ensure_texture_resolution(userdata, field, video_format.width, video_format.height, cbcr_width, cbcr_height, v210_width);

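	// (Background on v210: it packs six 10-bit 4:2:2 pixels into four 32-bit
	// words, i.e., 16 bytes, so the number of 32-bit texels per row is exactly
	// stride / 4 -- which is why v210_width is derived from the stride rather
	// than from the visible width.)
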
	glBindBuffer(GL_PIXEL_UNPACK_BUFFER, userdata->pbo);
	check_error();

	switch (userdata->pixel_format) {
	case PixelFormat_10BitYCbCr: {
		size_t field_start = video_offset + video_format.stride * field_start_line;
		upload_texture(userdata->tex_v210[field], v210_width, video_format.height, video_format.stride, interlaced_stride, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, field_start);
		v210_converter->convert(userdata->tex_v210[field], userdata->tex_444[field], video_format.width, video_format.height);
		break;
	}
	case PixelFormat_8BitYCbCr: {
		size_t field_y_start = y_offset + video_format.width * field_start_line;
		size_t field_cbcr_start = cbcr_offset + cbcr_width * field_start_line * sizeof(uint16_t);

		// Make up our own strides, since we are interleaving.
		upload_texture(userdata->tex_y[field], video_format.width, video_format.height, video_format.width, interlaced_stride, GL_RED, GL_UNSIGNED_BYTE, field_y_start);
		upload_texture(userdata->tex_cbcr[field], cbcr_width, cbcr_height, cbcr_width * sizeof(uint16_t), interlaced_stride, GL_RG, GL_UNSIGNED_BYTE, field_cbcr_start);
		break;
	}
	case PixelFormat_8BitYCbCrPlanar: {
		assert(field_start_line == 0);  // We don't really support interlaced here.
		size_t field_y_start = y_offset;
		size_t field_cb_start = cbcr_offset;
		size_t field_cr_start = cbcr_offset + cbcr_width * cbcr_height;

		// Make up our own strides, since we are interleaving.
		upload_texture(userdata->tex_y[field], video_format.width, video_format.height, video_format.width, interlaced_stride, GL_RED, GL_UNSIGNED_BYTE, field_y_start);
		upload_texture(userdata->tex_cb[field], cbcr_width, cbcr_height, cbcr_width, interlaced_stride, GL_RED, GL_UNSIGNED_BYTE, field_cb_start);
		upload_texture(userdata->tex_cr[field], cbcr_width, cbcr_height, cbcr_width, interlaced_stride, GL_RED, GL_UNSIGNED_BYTE, field_cr_start);
		break;
	}
	case PixelFormat_8BitBGRA: {
		size_t field_start = video_offset + video_format.stride * field_start_line;
		upload_texture(userdata->tex_rgba[field], video_format.width, video_format.height, video_format.stride, interlaced_stride, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, field_start);
		// These could be asked to deliver mipmaps at any time.
		glBindTexture(GL_TEXTURE_2D, userdata->tex_rgba[field]);
		check_error();
		glGenerateMipmap(GL_TEXTURE_2D);
		check_error();
		glBindTexture(GL_TEXTURE_2D, 0);
		check_error();
		break;
	}
	default:
		assert(false);
	}

	glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
	check_error();
}

void Mixer::bm_hotplug_add(libusb_device *dev)
{
	lock_guard<mutex> lock(hotplug_mutex);
	hotplugged_cards.push_back(dev);
}

void Mixer::bm_hotplug_remove(unsigned card_index)
{
	cards[card_index].new_frames_changed.notify_all();
}

1259 void Mixer::thread_func()
1261 pthread_setname_np(pthread_self(), "Mixer_OpenGL");
1263 eglBindAPI(EGL_OPENGL_API);
1264 QOpenGLContext *context = create_context(mixer_surface);
1265 if (!make_current(context, mixer_surface)) {
1270 // Start the actual capture. (We don't want to do it before we're actually ready
1271 // to process output frames.)
1272 for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
1273 if (int(card_index) != output_card_index && cards[card_index].capture != nullptr) {
1274 cards[card_index].capture->start_bm_capture();
1278 BasicStats basic_stats(/*verbose=*/true, /*use_opengl=*/true);
1279 int stats_dropped_frames = 0;
1281 while (!should_quit) {
1282 if (desired_output_card_index != output_card_index) {
1283 set_output_card_internal(desired_output_card_index);
1285 if (output_card_index != -1 &&
1286 desired_output_video_mode != output_video_mode) {
1287 DeckLinkOutput *output = cards[output_card_index].output.get();
1288 output->end_output();
1289 desired_output_video_mode = output_video_mode = output->pick_video_mode(desired_output_video_mode);
1290 output->start_output(desired_output_video_mode, pts_int, /*is_master_card=*/output_card_is_master);
1294 lock_guard<mutex> lock(card_mutex);
1295 handle_hotplugged_cards();
1298 CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS];
1299 bool has_new_frame[MAX_VIDEO_CARDS] = { false };
1301 bool master_card_is_output;
1302 unsigned master_card_index;
1303 if (output_card_index != -1 && output_card_is_master) {
1304 master_card_is_output = true;
1305 master_card_index = output_card_index;
1307 master_card_is_output = false;
1308 master_card_index = theme->map_signal_to_card(master_clock_channel);
1309 assert(master_card_index < MAX_VIDEO_CARDS);
1312 vector<int32_t> raw_audio[MAX_VIDEO_CARDS]; // For MJPEG encoding.
1313 OutputFrameInfo output_frame_info = get_one_frame_from_each_card(master_card_index, master_card_is_output, new_frames, has_new_frame, raw_audio);
1314 schedule_audio_resampling_tasks(output_frame_info.dropped_frames, output_frame_info.num_samples, output_frame_info.frame_duration, output_frame_info.is_preroll, output_frame_info.frame_timestamp);
1315 stats_dropped_frames += output_frame_info.dropped_frames;
1317 for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
1318 if (card_index == master_card_index || !has_new_frame[card_index]) {
1321 if (new_frames[card_index].frame->len == 0) {
1322 ++new_frames[card_index].dropped_frames;
1324 if (new_frames[card_index].dropped_frames > 0) {
1325 printf("%s dropped %d frames before this\n",
1326 description_for_card(card_index).c_str(), int(new_frames[card_index].dropped_frames));
1330 // If the first card is reporting a corrupted or otherwise dropped frame,
1331 // just increase the pts (skipping over this frame) and don't try to compute anything new.
1332 if (!master_card_is_output &&
1333 new_frames[master_card_index].frame != nullptr && // Timeout.
1334 new_frames[master_card_index].frame->len == 0) {
1335 ++stats_dropped_frames;
1336 pts_int += new_frames[master_card_index].length;
		for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
			if (!has_new_frame[card_index] || new_frames[card_index].frame->len == 0)
				continue;

			CaptureCard::NewFrame *new_frame = &new_frames[card_index];
			assert(new_frame->frame != nullptr);
			insert_new_frame(new_frame->frame, new_frame->field, new_frame->interlaced, card_index, &input_state);

			// The new texture might need uploading before use.
			if (!new_frame->texture_uploaded) {
				upload_texture_for_frame(new_frame->field, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset,
					new_frame->video_offset, (PBOFrameAllocator::Userdata *)new_frame->frame->userdata);
				new_frame->texture_uploaded = true;
			}
			// Only set the white balance if it actually changed. This means that the user
			// is free to override the white balance in a video with no white balance information
			// actually set (ie. r=g=b=1 all the time), or one where the white point is wrong,
			// but frame-to-frame decisions will be heeded. We do this pretty much as late
			// as possible (ie., after picking out the frame from the buffer), so that we are sure
			// that the change takes effect on exactly the right frame.
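			// Worked example (illustrative values, not from any specific capture):
			// if the previous frame carried neutral_color = (1.0, 1.0, 1.0) and this
			// one reports (1.0, 0.98, 0.95), the green and blue deltas exceed the
			// 1e-3 threshold below, so the theme gets a new white balance for this
			// card. A stream that keeps sending exactly (1.0, 1.0, 1.0) never
			// triggers this, leaving any user override intact.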
			if (fabs(new_frame->neutral_color.r - last_received_neutral_color[card_index].r) > 1e-3 ||
			    fabs(new_frame->neutral_color.g - last_received_neutral_color[card_index].g) > 1e-3 ||
			    fabs(new_frame->neutral_color.b - last_received_neutral_color[card_index].b) > 1e-3) {
				theme->set_wb_for_card(card_index, new_frame->neutral_color.r, new_frame->neutral_color.g, new_frame->neutral_color.b);
				last_received_neutral_color[card_index] = new_frame->neutral_color;
			}

			if (new_frame->frame->data_copy != nullptr && mjpeg_encoder->should_encode_mjpeg_for_card(card_index)) {
				RGBTriplet neutral_color = theme->get_white_balance_for_card(card_index);
				mjpeg_encoder->upload_frame(pts_int, card_index, new_frame->frame, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset, move(raw_audio[card_index]), neutral_color);
			}
		}
		int64_t frame_duration = output_frame_info.frame_duration;
		render_one_frame(frame_duration);
		{
			lock_guard<mutex> lock(frame_num_mutex);
			++frame_num;
		}
		frame_num_updated.notify_all();
		pts_int += frame_duration;

		basic_stats.update(frame_num, stats_dropped_frames);
		// if (frame_num % 100 == 0) chain->print_phase_timing();

		if (should_cut.exchange(false)) {  // Test and clear.
			video_encoder->do_cut(frame_num);
		}

#if 0
		// Reset every 10,000 frames, so that local variations in frame times
		// (especially for the first few frames, when the shaders are
		// compiled etc.) don't make it hard to measure for the entire
		// remaining duration of the program.
		if (frame == 10000) {
			frame = 0;
			start = now;
		}
#endif
	}

	resource_pool->clean_context();
}
bool Mixer::input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const
{
	if (output_card_index != -1 && output_card_is_master) {
		// The output card (ie., cards[output_card_index].output) is the master clock,
		// so no input card (ie., cards[card_index].capture) is.
		return false;
	}
	return (card_index == master_card_index);
}
void Mixer::trim_queue(CaptureCard *card, size_t safe_queue_length)
{
	// Count the number of frames in the queue, including any frames
	// we dropped. It's hard to know exactly how we should deal with
	// dropped (corrupted) input frames; they don't help our goal of
	// avoiding starvation, but they still add to the problem of latency.
	// Since dropped frames are going to mean a bump in the signal anyway,
	// we err on the side of having more stable latency instead.
	unsigned queue_length = 0;
	for (const CaptureCard::NewFrame &frame : card->new_frames) {
		queue_length += frame.dropped_frames + 1;
	}

	// If needed, drop frames until the queue is below the safe limit.
	// We prefer to drop from the head, because all else being equal,
	// we'd like more recent frames (less latency).
	unsigned dropped_frames = 0;
	while (queue_length > safe_queue_length) {
		assert(!card->new_frames.empty());
		assert(queue_length > card->new_frames.front().dropped_frames);
		queue_length -= card->new_frames.front().dropped_frames;

		if (queue_length <= safe_queue_length) {
			// No need to drop anything.
			break;
		}

		card->new_frames.pop_front();
		card->new_frames_changed.notify_all();
		--queue_length;
		++dropped_frames;

		if (queue_length == 0 && card->is_cef_capture) {
			card->may_have_dropped_last_frame = true;
		}
	}

	card->metric_input_dropped_frames_jitter += dropped_frames;
	card->metric_input_queue_length_frames = queue_length;

#if 0
	// Debug printout, kept disabled; note that card_index is not in scope
	// here, so it would need to be passed in (or derived) to compile.
	if (dropped_frames > 0) {
		fprintf(stderr, "Card %u dropped %u frame(s) to keep latency down.\n",
			card_index, dropped_frames);
	}
#endif
}
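// Worked example for the accounting above (illustrative numbers): a queue
// holding three frames whose dropped_frames fields are {2, 0, 1} counts as
// (2+1) + (0+1) + (1+1) = 6 toward queue_length. With safe_queue_length = 4,
// the head frame's two dropped predecessors are subtracted first (6 -> 4),
// which already meets the limit, so no real frame gets popped.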
pair<string, string> Mixer::get_channels_json()
{
	Channels ret;
	for (int channel_idx = 0; channel_idx < theme->get_num_channels(); ++channel_idx) {
		Channel *channel = ret.add_channel();
		channel->set_index(channel_idx + 2);
		channel->set_name(theme->get_channel_name(channel_idx + 2));
		channel->set_color(theme->get_channel_color(channel_idx + 2));
	}
	string contents;
	google::protobuf::util::MessageToJsonString(ret, &contents);  // Ignore any errors.
	return make_pair(contents, "text/json");
}
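// A sketch of the kind of JSON the proto above serializes to (channel names
// are illustrative; the actual output depends on the loaded theme):
//
//   { "channel": [
//       { "index": 2, "name": "VT", "color": "#ff0000" },
//       { "index": 3, "name": "Camera 1", "color": "#00ff00" }
//   ] }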
pair<string, string> Mixer::get_channel_color_http(unsigned channel_idx)
{
	return make_pair(theme->get_channel_color(channel_idx), "text/plain");
}
Mixer::OutputFrameInfo Mixer::get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS], vector<int32_t> raw_audio[MAX_VIDEO_CARDS])
{
	OutputFrameInfo output_frame_info;
	constexpr steady_clock::duration master_card_timeout = milliseconds(200);

	unique_lock<mutex> lock(card_mutex, defer_lock);
	bool timed_out = false;
	if (master_card_is_output) {
		// Clocked to the output, so wait for it to be ready for the next frame.
		cards[master_card_index].output->wait_for_frame(pts_int, &output_frame_info.dropped_frames, &output_frame_info.frame_duration, &output_frame_info.is_preroll, &output_frame_info.frame_timestamp);
		lock.lock();
	} else {
		// Wait for the master card to have a new frame.
		output_frame_info.is_preroll = false;
		lock.lock();
		timed_out = !cards[master_card_index].new_frames_changed.wait_for(lock,
			master_card_timeout,
			[this, master_card_index] {
				return !cards[master_card_index].new_frames.empty() ||
					cards[master_card_index].capture == nullptr ||
					cards[master_card_index].capture->get_disconnected();
			});
		if (timed_out) {
			fprintf(stderr, "WARNING: Master card (%s) did not deliver a frame for %u ms, creating a fake one.\n",
				description_for_card(master_card_index).c_str(),
				unsigned(duration_cast<milliseconds>(master_card_timeout).count()));
		}
	}

	if (timed_out) {
		// The master card stalled for 200 ms (possible when it's e.g.
		// an SRT card). Send a frame no matter what; this also makes sure
		// any other cards get to empty their queues, and in general,
		// that we make _some_ sort of forward progress.
		handle_hotplugged_cards();
	} else if (master_card_is_output) {
		handle_hotplugged_cards();
	} else if (cards[master_card_index].new_frames.empty()) {
		// We were woken up, but not due to a new frame. Deal with it
		// and then restart.
		assert(cards[master_card_index].capture == nullptr ||
		       cards[master_card_index].capture->get_disconnected());
		handle_hotplugged_cards();
		lock.unlock();
		return get_one_frame_from_each_card(master_card_index, master_card_is_output, new_frames, has_new_frame, raw_audio);
	}
	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		CaptureCard *card = &cards[card_index];
		if (card->new_frames.empty()) {  // Starvation.
			++card->metric_input_duped_frames;
			if (card->is_cef_capture && card->may_have_dropped_last_frame) {
				// Unlike other sources, CEF is not guaranteed to send us a steady
				// stream of frames, so we'll have to ask it to repaint the frame
				// we dropped. (may_have_dropped_last_frame is set whenever we
				// trim the queue completely away, and cleared when we actually
				// get a new frame.)
				((CEFCapture *)card->capture.get())->request_new_frame(/*ignore_if_locked=*/true);
			}
		} else {
			new_frames[card_index] = move(card->new_frames.front());
			has_new_frame[card_index] = true;
			card->new_frames.pop_front();
			card->new_frames_changed.notify_all();
		}

		raw_audio[card_index] = move(card->new_raw_audio);
	}
	if (timed_out) {
		// Pretend the frame happened a while ago and was only processed now,
		// so that we get the duration sort-of right. This isn't ideal.
		output_frame_info.dropped_frames = 0;  // Hard to define, really.
		output_frame_info.frame_duration = lrint(TIMEBASE * duration<double>(master_card_timeout).count());
		output_frame_info.frame_timestamp = steady_clock::now() - master_card_timeout;
	} else if (!master_card_is_output) {
		output_frame_info.frame_timestamp = new_frames[master_card_index].received_timestamp;
		output_frame_info.dropped_frames = new_frames[master_card_index].dropped_frames;
		output_frame_info.frame_duration = new_frames[master_card_index].length;
	}

	if (!output_frame_info.is_preroll) {
		output_jitter_history.frame_arrived(output_frame_info.frame_timestamp, output_frame_info.frame_duration, output_frame_info.dropped_frames);
	}
	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		CaptureCard *card = &cards[card_index];
		if (has_new_frame[card_index] &&
		    !input_card_is_master_clock(card_index, master_card_index) &&
		    !output_frame_info.is_preroll) {
			card->queue_length_policy.update_policy(
				output_frame_info.frame_timestamp,
				card->jitter_history.get_expected_next_frame(),
				new_frames[master_card_index].length,
				output_frame_info.frame_duration,
				card->jitter_history.estimate_max_jitter(),
				output_jitter_history.estimate_max_jitter());
			trim_queue(card, min<int>(global_flags.max_input_queue_frames,
				card->queue_length_policy.get_safe_queue_length()));
		}
	}
	// This might get off by a fractional sample when changing master card
	// between ones with different frame rates, but that's fine.
	int64_t num_samples_times_timebase = int64_t(OUTPUT_FREQUENCY) * output_frame_info.frame_duration + fractional_samples;
	output_frame_info.num_samples = num_samples_times_timebase / TIMEBASE;
	fractional_samples = num_samples_times_timebase % TIMEBASE;
	assert(output_frame_info.num_samples >= 0);
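	// Worked example (assuming OUTPUT_FREQUENCY = 48000 and TIMEBASE = 120000,
	// as in shared/timebase.h): a 59.94 fps master frame has frame_duration =
	// 2002 ticks, so num_samples_times_timebase = 48000 * 2002 = 96,096,000,
	// giving 800 samples with a remainder of 96,000 carried over in
	// fractional_samples. Over each run of five frames, one frame then gets
	// 800 samples and four get 801 (average 800.8 = 48000 * 1001 / 60000),
	// keeping audio and video in lockstep over time.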
	if (timed_out) {
		DeviceSpec device{InputSourceType::CAPTURE_CARD, master_card_index};
		bool success;
		do {
			success = audio_mixer->add_silence(device, output_frame_info.num_samples, /*dropped_frames=*/0);
		} while (!success);
	}

	return output_frame_info;
}
void Mixer::handle_hotplugged_cards()
{
	// Check for cards that have been disconnected since last frame.
	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		CaptureCard *card = &cards[card_index];
		if (card->capture != nullptr && card->capture->get_disconnected()) {
			bool is_active = card_index < unsigned(global_flags.min_num_cards) || cards[card_index].force_active;
			if (is_active) {
				fprintf(stderr, "Card %u went away, replacing with a fake card.\n", card_index);
				FakeCapture *capture = new FakeCapture(global_flags.width, global_flags.height, FAKE_FPS, OUTPUT_FREQUENCY, card_index, global_flags.fake_cards_audio);
				configure_card(card_index, capture, CardType::FAKE_CAPTURE, /*output=*/nullptr, /*is_srt_card=*/false);
				card->jitter_history.clear();
				card->capture->start_bm_capture();
			} else {
				// NOTE: The theme might end up forcing the card back at some later point
				// (ie., force_active is false now, but might immediately be true again on
				// e.g. the next frame). That should be rare, though, so we don't bother
				// adjusting the message.
				fprintf(stderr, "Card %u went away, removing. (To keep a fake card, increase --num-cards.)\n", card_index);
				theme->remove_card(card_index);
				configure_card(card_index, /*capture=*/nullptr, CardType::FAKE_CAPTURE, /*output=*/nullptr, /*is_srt_card=*/false);
				card->jitter_history.clear();
			}
		}
	}
	// Count how many active cards we already have. Used below to check that we
	// don't go past the max_cards limit set by the user. Note that (non-SRT) video
	// and HTML “cards” don't count towards this limit.
	int num_video_cards = 0;
	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		CaptureCard *card = &cards[card_index];
		if (card->type == CardType::LIVE_CARD || is_srt_card(card)) {
			++num_video_cards;
		}
	}
	// Check for cards that have been connected since last frame.
	vector<libusb_device *> hotplugged_cards_copy;
	vector<int> hotplugged_srt_cards_copy;
	{
		lock_guard<mutex> lock(hotplug_mutex);
		swap(hotplugged_cards, hotplugged_cards_copy);
		swap(hotplugged_srt_cards, hotplugged_srt_cards_copy);
	}
	for (libusb_device *new_dev : hotplugged_cards_copy) {
		// Look for a fake capture card where we can stick this in.
		int free_card_index = -1;
		for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
			if (cards[card_index].is_fake_capture) {
				free_card_index = card_index;
				break;
			}
		}

		if (free_card_index == -1 || num_video_cards >= global_flags.max_num_cards) {
			fprintf(stderr, "New card plugged in, but no free slots -- ignoring.\n");
			libusb_unref_device(new_dev);
		} else {
			// BMUSBCapture takes ownership.
			fprintf(stderr, "New card plugged in, choosing slot %d.\n", free_card_index);
			CaptureCard *card = &cards[free_card_index];
			BMUSBCapture *capture = new BMUSBCapture(free_card_index, new_dev);
			configure_card(free_card_index, capture, CardType::LIVE_CARD, /*output=*/nullptr, /*is_srt_card=*/false);
			card->jitter_history.clear();
			capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, free_card_index));
			capture->start_bm_capture();
			++num_video_cards;
		}
	}
	// Same, for SRT inputs.
	for (SRTSOCKET sock : hotplugged_srt_cards_copy) {
		char name[512];  // SRT stream IDs are at most 512 bytes.
		int namelen = sizeof(name);
		srt_getsockopt(sock, /*ignored=*/0, SRTO_STREAMID, name, &namelen);
		string stream_id(name, namelen);

		// Look for a fake capture card where we can stick this in.
		// Prioritize ones that previously held SRT streams with the
		// same stream ID, if any exist -- and if multiple exist,
		// take the one that disconnected the most recently.
		int first_free_card_index = -1, last_matching_free_card_index = -1;
		for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
			CaptureCard *card = &cards[card_index];
			if (!card->is_fake_capture) {
				continue;
			}
			if (first_free_card_index == -1) {
				first_free_card_index = card_index;
			}
			if (card->last_srt_stream_id == stream_id &&
			    (last_matching_free_card_index == -1 ||
			     card->fake_capture_counter >
				cards[last_matching_free_card_index].fake_capture_counter)) {
				last_matching_free_card_index = card_index;
			}
		}

		const int free_card_index = (last_matching_free_card_index != -1)
			? last_matching_free_card_index : first_free_card_index;
		if (free_card_index == -1 || num_video_cards >= global_flags.max_num_cards) {
			if (stream_id.empty()) {
				stream_id = "no name";
			}
			fprintf(stderr, "New SRT stream connected (%s), but no free slots -- ignoring.\n", stream_id.c_str());
			srt_close(sock);
		} else {
			// FFmpegCapture takes ownership.
			if (stream_id.empty()) {
				fprintf(stderr, "New unnamed SRT stream connected, choosing slot %d.\n", free_card_index);
			} else {
				fprintf(stderr, "New SRT stream connected (%s), choosing slot %d.\n", stream_id.c_str(), free_card_index);
			}
			CaptureCard *card = &cards[free_card_index];
			FFmpegCapture *capture = new FFmpegCapture(sock, stream_id);
			capture->set_card_index(free_card_index);
			configure_card(free_card_index, capture, CardType::FFMPEG_INPUT, /*output=*/nullptr, /*is_srt_card=*/true);
			update_srt_stats(sock, card);  // Initial zero stats.
			card->last_srt_stream_id = stream_id;
			card->jitter_history.clear();
			capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, free_card_index));
			capture->start_bm_capture();
			++num_video_cards;
		}
	}
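	// Illustrative scenario for the slot matching above: suppose slots 1 and 3
	// are both fake captures that previously carried an SRT stream with
	// stream_id "cam1", and slot 3 was reconfigured as a fake capture more
	// recently (so it has the higher fake_capture_counter). A reconnecting
	// "cam1" then lands in slot 3 again, keeping the card index (and thus the
	// signal mapping) it had before, rather than whichever slot happened to be
	// free first.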
	// Finally, newly forced-to-active fake capture cards.
	for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
		CaptureCard *card = &cards[card_index];
		if (card->capture == nullptr && card->force_active) {
			FakeCapture *capture = new FakeCapture(global_flags.width, global_flags.height, FAKE_FPS, OUTPUT_FREQUENCY, card_index, global_flags.fake_cards_audio);
			configure_card(card_index, capture, CardType::FAKE_CAPTURE, /*output=*/nullptr, /*is_srt_card=*/false);
			card->jitter_history.clear();
			card->capture->start_bm_capture();
		}
	}
}
void Mixer::schedule_audio_resampling_tasks(unsigned dropped_frames, int num_samples_per_frame, int length_per_frame, bool is_preroll, steady_clock::time_point frame_timestamp)
{
	// Resample the audio as needed, including from previously dropped frames.
	for (unsigned frame_num = 0; frame_num < dropped_frames + 1; ++frame_num) {
		const bool dropped_frame = (frame_num != dropped_frames);
		{
			// Signal to the audio thread to process this frame.
			// Note that if the frame is a dropped frame, we signal that
			// we don't want to use this frame as base for adjusting
			// the resampler rate. The reason for this is that the timing
			// of these frames is often way too late; they typically don't
			// “arrive” before we synthesize them. Thus, we could end up
			// in a situation where we have inserted e.g. five audio frames
			// into the queue before we then start pulling five of them
			// back out. This makes ResamplingQueue overestimate the delay,
			// causing undue resampler changes. (We _do_ use the last,
			// non-dropped frame; perhaps we should just discard that as well,
			// since dropped frames are expected to be rare, and it might be
			// better to just wait until we have a slightly more normal situation.)
			lock_guard<mutex> lock(audio_mutex);
			bool adjust_rate = !dropped_frame && !is_preroll;
			audio_task_queue.push(AudioTask{pts_int, num_samples_per_frame, adjust_rate, frame_timestamp});
			audio_task_queue_changed.notify_one();
		}
		if (dropped_frame) {
			// For dropped frames, increase the pts. Note that if the format changed
			// in the meantime, we have no way of detecting that; we just have to
			// assume the frame length is always the same.
			pts_int += length_per_frame;
		}
	}
}
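// Example of the loop above: with dropped_frames = 2, three AudioTasks are
// queued. The first two stand in for the dropped frames (adjust_rate = false,
// each advancing pts_int by length_per_frame); only the third, the frame that
// actually arrived, may adjust the resampler rate (and only outside preroll).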
void Mixer::render_one_frame(int64_t duration)
{
	// Determine the time code for this frame before we start rendering.
	string timecode_text = timecode_renderer->get_timecode_text(double(pts_int) / TIMEBASE, frame_num);
	if (display_timecode_on_stdout) {
		printf("Timecode: '%s'\n", timecode_text.c_str());
	}

	// Update Y'CbCr settings for all cards.
	{
		lock_guard<mutex> lock(card_mutex);
		for (unsigned card_index = 0; card_index < MAX_VIDEO_CARDS; ++card_index) {
			YCbCrInterpretation *interpretation = &ycbcr_interpretation[card_index];
			input_state.ycbcr_coefficients_auto[card_index] = interpretation->ycbcr_coefficients_auto;
			input_state.ycbcr_coefficients[card_index] = interpretation->ycbcr_coefficients;
			input_state.full_range[card_index] = interpretation->full_range;
		}
	}

	// Get the main chain from the theme, and set its state immediately.
	Theme::Chain theme_main_chain = theme->get_chain(0, pts(), global_flags.width, global_flags.height, input_state);
	EffectChain *chain = theme_main_chain.chain;
	theme_main_chain.setup_chain();
	//theme_main_chain.chain->enable_phase_timing(true);

	// If HDMI/SDI output is active and the user has requested auto mode,
	// its mode overrides the existing Y'CbCr setting for the chain.
	YCbCrLumaCoefficients ycbcr_output_coefficients;
	if (global_flags.ycbcr_auto_coefficients && output_card_index != -1) {
		ycbcr_output_coefficients = cards[output_card_index].output->preferred_ycbcr_coefficients();
	} else {
		ycbcr_output_coefficients = global_flags.ycbcr_rec709_coefficients ? YCBCR_REC_709 : YCBCR_REC_601;
	}

	// TODO: Reduce the duplication against theme.cpp.
	YCbCrFormat output_ycbcr_format;
	output_ycbcr_format.chroma_subsampling_x = 1;
	output_ycbcr_format.chroma_subsampling_y = 1;
	output_ycbcr_format.luma_coefficients = ycbcr_output_coefficients;
	output_ycbcr_format.full_range = false;
	output_ycbcr_format.num_levels = 1 << global_flags.bit_depth;
	chain->change_ycbcr_output_format(output_ycbcr_format);
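	// For instance, with an 8-bit pipeline num_levels is 1 << 8 = 256, and in
	// 10-bit mode it becomes 1 << 10 = 1024, matching the quantization the
	// rest of the chain expects.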
	// Render main chain. If we're using zerocopy Quick Sync encoding
	// (the default case), we take an extra copy of the created outputs,
	// so that we can display it back to the screen later (it's less memory
	// bandwidth than writing and reading back an RGBA texture, even at 16-bit).
	// Ideally, we'd like to avoid taking copies and just use the main textures
	// for display as well, but they're just views into VA-API memory and must be
	// unmapped during encoding, so we can't use them for display, unfortunately.
	GLuint y_tex, cbcr_full_tex, cbcr_tex;
	GLuint y_copy_tex, cbcr_copy_tex = 0;
	GLuint y_display_tex, cbcr_display_tex;
	GLenum y_type = (global_flags.bit_depth > 8) ? GL_R16 : GL_R8;
	GLenum cbcr_type = (global_flags.bit_depth > 8) ? GL_RG16 : GL_RG8;
	const bool is_zerocopy = video_encoder->is_zerocopy();
	if (is_zerocopy) {
		cbcr_full_tex = resource_pool->create_2d_texture(cbcr_type, global_flags.width, global_flags.height);
		y_copy_tex = resource_pool->create_2d_texture(y_type, global_flags.width, global_flags.height);
		cbcr_copy_tex = resource_pool->create_2d_texture(cbcr_type, global_flags.width / 2, global_flags.height / 2);

		y_display_tex = y_copy_tex;
		cbcr_display_tex = cbcr_copy_tex;

		// y_tex and cbcr_tex will be given by VideoEncoder.
	} else {
		cbcr_full_tex = resource_pool->create_2d_texture(cbcr_type, global_flags.width, global_flags.height);
		y_tex = resource_pool->create_2d_texture(y_type, global_flags.width, global_flags.height);
		cbcr_tex = resource_pool->create_2d_texture(cbcr_type, global_flags.width / 2, global_flags.height / 2);

		y_display_tex = y_tex;
		cbcr_display_tex = cbcr_tex;
	}

	const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE);  // Corresponds to the delay in ResamplingQueue.
	bool got_frame = video_encoder->begin_frame(pts_int + av_delay, duration, ycbcr_output_coefficients, theme_main_chain.input_frames, &y_tex, &cbcr_tex);
	assert(got_frame);

	GLuint fbo;
	if (is_zerocopy) {
		fbo = resource_pool->create_fbo(y_tex, cbcr_full_tex, y_copy_tex);
	} else {
		fbo = resource_pool->create_fbo(y_tex, cbcr_full_tex);
	}
	chain->render_to_fbo(fbo, global_flags.width, global_flags.height);

	if (display_timecode_in_stream) {
		// Render the timecode on top.
		timecode_renderer->render_timecode(fbo, timecode_text);
	}

	resource_pool->release_fbo(fbo);

	if (is_zerocopy) {
		chroma_subsampler->subsample_chroma(cbcr_full_tex, global_flags.width, global_flags.height, cbcr_tex, cbcr_copy_tex);
	} else {
		chroma_subsampler->subsample_chroma(cbcr_full_tex, global_flags.width, global_flags.height, cbcr_tex);
	}
	if (output_card_index != -1) {
		cards[output_card_index].output->send_frame(y_tex, cbcr_full_tex, ycbcr_output_coefficients, theme_main_chain.input_frames, pts_int, duration);
	}
	resource_pool->release_2d_texture(cbcr_full_tex);

	// Set the right state for the Y' and CbCr textures we use for display.
	glBindFramebuffer(GL_FRAMEBUFFER, 0);
	glBindTexture(GL_TEXTURE_2D, y_display_tex);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

	glBindTexture(GL_TEXTURE_2D, cbcr_display_tex);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

	RefCountedGLsync fence = video_encoder->end_frame();
	// The live frame's chain pieces the Y'CbCr texture copies back together into
	// RGB and displays them. It owns y_display_tex and cbcr_display_tex now
	// (whichever textures they are).
	DisplayFrame live_frame;
	live_frame.chain = display_chain.get();
	live_frame.setup_chain = [this, y_display_tex, cbcr_display_tex]{
		display_input->set_texture_num(0, y_display_tex);
		display_input->set_texture_num(1, cbcr_display_tex);
	};
	live_frame.ready_fence = fence;
	live_frame.input_frames = {};
	live_frame.temp_textures = { y_display_tex, cbcr_display_tex };
	output_channel[OUTPUT_LIVE].output_frame(move(live_frame));

	// Set up preview and any additional channels.
	for (int i = 1; i < theme->get_num_channels() + 2; ++i) {
		DisplayFrame display_frame;
		Theme::Chain chain = theme->get_chain(i, pts(), global_flags.width, global_flags.height, input_state);  // FIXME: dimensions
		display_frame.chain = move(chain.chain);
		display_frame.setup_chain = move(chain.setup_chain);
		display_frame.ready_fence = fence;
		display_frame.input_frames = move(chain.input_frames);
		display_frame.temp_textures = {};
		output_channel[i].output_frame(move(display_frame));
	}
}
void Mixer::audio_thread_func()
{
	pthread_setname_np(pthread_self(), "Mixer_Audio");

	while (!should_quit) {
		AudioTask task;

		{
			unique_lock<mutex> lock(audio_mutex);
			audio_task_queue_changed.wait(lock, [this]{ return should_quit || !audio_task_queue.empty(); });
			if (should_quit) {
				return;
			}
			task = audio_task_queue.front();
			audio_task_queue.pop();
		}

		ResamplingQueue::RateAdjustmentPolicy rate_adjustment_policy =
			task.adjust_rate ? ResamplingQueue::ADJUST_RATE : ResamplingQueue::DO_NOT_ADJUST_RATE;
		vector<float> samples_out = audio_mixer->get_output(
			task.frame_timestamp,
			task.num_samples,
			rate_adjustment_policy);

		// Send the samples to the sound card, then add them to the output.
		if (alsa) {
			alsa->write(samples_out);
		}
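		// Worked example for av_delay below (default-ish values, assumed
		// rather than guaranteed by this file): with an audio queue length of
		// 100 ms and TIMEBASE = 120000, av_delay = lrint(100 * 0.001 * 120000)
		// = 12000 ticks, i.e. the HDMI/SDI audio is scheduled 100 ms after the
		// video pts, compensating for the delay ResamplingQueue introduces.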
		if (output_card_index != -1) {
			const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE);  // Corresponds to the delay in ResamplingQueue.
			cards[output_card_index].output->send_audio(task.pts_int + av_delay, samples_out);
		}
		video_encoder->add_audio(task.pts_int, move(samples_out));
	}
}
void Mixer::release_display_frame(DisplayFrame *frame)
{
	for (GLuint texnum : frame->temp_textures) {
		resource_pool->release_2d_texture(texnum);
	}
	frame->temp_textures.clear();
	frame->ready_fence.reset();
	frame->input_frames.clear();
}
void Mixer::start()
{
	mixer_thread = thread(&Mixer::thread_func, this);
	audio_thread = thread(&Mixer::audio_thread_func, this);
}

void Mixer::quit()
{
	should_quit = true;
	audio_task_queue_changed.notify_one();
	mixer_thread.join();
	audio_thread.join();

	if (global_flags.srt_port >= 0) {
		// There's seemingly no other reasonable way to wake up the thread
		// (libsrt's epoll equivalent is busy-waiting).
		int sock = srt_create_socket();
		sockaddr_in6 addr;
		memset(&addr, 0, sizeof(addr));
		addr.sin6_family = AF_INET6;
		addr.sin6_addr = IN6ADDR_LOOPBACK_INIT;
		addr.sin6_port = htons(global_flags.srt_port);
		srt_connect(sock, (sockaddr *)&addr, sizeof(addr));
		srt_thread.join();
	}
}
void Mixer::transition_clicked(int transition_num)
{
	theme->transition_clicked(transition_num, pts());
}

void Mixer::channel_clicked(int preview_num)
{
	theme->channel_clicked(preview_num);
}
YCbCrInterpretation Mixer::get_input_ycbcr_interpretation(unsigned card_index) const
{
	lock_guard<mutex> lock(card_mutex);
	return ycbcr_interpretation[card_index];
}

void Mixer::set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation)
{
	lock_guard<mutex> lock(card_mutex);
	ycbcr_interpretation[card_index] = interpretation;
}
void Mixer::start_mode_scanning(unsigned card_index)
{
	assert(card_index < MAX_VIDEO_CARDS);
	if (cards[card_index].capture == nullptr) {
		// Inactive card. Should never happen.
		return;
	}
	if (is_mode_scanning[card_index]) {
		return;
	}
	is_mode_scanning[card_index] = true;
	mode_scanlist[card_index].clear();
	for (const auto &mode : cards[card_index].capture->get_available_video_modes()) {
		mode_scanlist[card_index].push_back(mode.first);
	}
	assert(!mode_scanlist[card_index].empty());
	mode_scanlist_index[card_index] = 0;
	cards[card_index].capture->set_video_mode(mode_scanlist[card_index][0]);
	last_mode_scan_change[card_index] = steady_clock::now();
}
map<uint32_t, VideoMode> Mixer::get_available_output_video_modes() const
{
	assert(desired_output_card_index != -1);
	lock_guard<mutex> lock(card_mutex);
	return cards[desired_output_card_index].output->get_available_video_modes();
}
string Mixer::get_ffmpeg_filename(unsigned card_index) const
{
	assert(card_index < MAX_VIDEO_CARDS);
	assert(cards[card_index].type == CardType::FFMPEG_INPUT);
	return ((FFmpegCapture *)(cards[card_index].capture.get()))->get_filename();
}

void Mixer::set_ffmpeg_filename(unsigned card_index, const string &filename)
{
	assert(card_index < MAX_VIDEO_CARDS);
	assert(cards[card_index].type == CardType::FFMPEG_INPUT);
	((FFmpegCapture *)(cards[card_index].capture.get()))->change_filename(filename);
}
void Mixer::wait_for_next_frame()
{
	unique_lock<mutex> lock(frame_num_mutex);
	unsigned old_frame_num = frame_num;
	frame_num_updated.wait_for(lock, seconds(1),  // Timeout is just in case.
		[old_frame_num, this]{ return this->frame_num > old_frame_num; });
}
Mixer::OutputChannel::~OutputChannel()
{
	if (has_current_frame) {
		parent->release_display_frame(&current_frame);
	}
	if (has_ready_frame) {
		parent->release_display_frame(&ready_frame);
	}
}
void Mixer::OutputChannel::output_frame(DisplayFrame &&frame)
{
	// Store this frame for display. Remove the ready frame if any
	// (it was seemingly never used).
	{
		lock_guard<mutex> lock(frame_mutex);
		if (has_ready_frame) {
			parent->release_display_frame(&ready_frame);
		}
		ready_frame = move(frame);
		has_ready_frame = true;

		// Call the callbacks under the mutex (they should be short),
		// so that we don't race against a callback removal.
		for (const auto &key_and_callback : new_frame_ready_callbacks) {
			key_and_callback.second();
		}
	}
	// Reduce the number of callbacks by filtering duplicates. The reason
	// why we bother doing this is that Qt seemingly can get into a state
	// where it builds up an essentially unbounded queue of signals,
	// consuming more and more memory, and there's no good way of collapsing
	// user-defined signals or limiting the length of the queue.
	if (transition_names_updated_callback) {
		vector<string> transition_names = global_mixer->get_transition_names();
		bool changed = false;
		if (transition_names.size() != last_transition_names.size()) {
			changed = true;
		} else {
			for (unsigned i = 0; i < transition_names.size(); ++i) {
				if (transition_names[i] != last_transition_names[i]) {
					changed = true;
					break;
				}
			}
		}
		if (changed) {
			transition_names_updated_callback(transition_names);
			last_transition_names = transition_names;
		}
	}
	if (name_updated_callback) {
		string name = global_mixer->get_channel_name(channel);
		if (name != last_name) {
			name_updated_callback(name);
			last_name = name;
		}
	}
	if (color_updated_callback) {
		string color = global_mixer->get_channel_color(channel);
		if (color != last_color) {
			color_updated_callback(color);
			last_color = color;
		}
	}
}
bool Mixer::OutputChannel::get_display_frame(DisplayFrame *frame)
{
	lock_guard<mutex> lock(frame_mutex);
	if (!has_current_frame && !has_ready_frame) {
		return false;
	}

	if (has_current_frame && has_ready_frame) {
		// We have a new ready frame. Toss the current one.
		parent->release_display_frame(&current_frame);
		has_current_frame = false;
	}
	if (has_ready_frame) {
		assert(!has_current_frame);
		current_frame = move(ready_frame);
		ready_frame.ready_fence.reset();  // Drop the refcount.
		ready_frame.input_frames.clear();  // Drop the refcounts.
		has_current_frame = true;
		has_ready_frame = false;
	}

	*frame = current_frame;
	return true;
}
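// A minimal sketch of how a consumer might drive get_display_frame()
// (hypothetical caller, glossing over the exact accessors the UI layer uses):
//
//   DisplayFrame frame;
//   if (channel->get_display_frame(&frame)) {
//       frame.setup_chain();   // Bind the right input textures.
//       // Wait on frame.ready_fence before sampling, then render with
//       // frame.chain; the channel keeps ownership of current_frame, so the
//       // caller only gets a copy of the DisplayFrame struct and must not
//       // release it.
//   }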
void Mixer::OutputChannel::add_frame_ready_callback(void *key, Mixer::new_frame_ready_callback_t callback)
{
	lock_guard<mutex> lock(frame_mutex);
	new_frame_ready_callbacks[key] = callback;
}

void Mixer::OutputChannel::remove_frame_ready_callback(void *key)
{
	lock_guard<mutex> lock(frame_mutex);
	new_frame_ready_callbacks.erase(key);
}

void Mixer::OutputChannel::set_transition_names_updated_callback(Mixer::transition_names_updated_callback_t callback)
{
	transition_names_updated_callback = callback;
}

void Mixer::OutputChannel::set_name_updated_callback(Mixer::name_updated_callback_t callback)
{
	name_updated_callback = callback;
}

void Mixer::OutputChannel::set_color_updated_callback(Mixer::color_updated_callback_t callback)
{
	color_updated_callback = callback;
}
void Mixer::start_srt()
{
	SRTSOCKET sock = srt_create_socket();
	sockaddr_in6 addr;
	memset(&addr, 0, sizeof(addr));
	addr.sin6_family = AF_INET6;
	addr.sin6_port = htons(global_flags.srt_port);

	int err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
	if (err != 0) {
		fprintf(stderr, "srt_bind: %s\n", srt_getlasterror_str());
		abort();
	}
	err = srt_listen(sock, MAX_VIDEO_CARDS);
	if (err != 0) {
		fprintf(stderr, "srt_listen: %s\n", srt_getlasterror_str());
		abort();
	}

	srt_thread = thread([this, sock] {
		sockaddr_in6 addr;
		for ( ;; ) {
			int sa_len = sizeof(addr);
			int clientsock = srt_accept(sock, (sockaddr *)&addr, &sa_len);
			if (should_quit) {
				if (clientsock != -1) {
					srt_close(clientsock);
				}
				break;
			}
			if (!global_flags.enable_srt) {  // Runtime UI toggle.
				// Perhaps not as good as never listening in the first place,
				// but much simpler to turn on and off.
				srt_close(clientsock);
				continue;
			}
			lock_guard<mutex> lock(hotplug_mutex);
			hotplugged_srt_cards.push_back(clientsock);
		}
		srt_close(sock);
	});
}
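// To feed this listener, any SRT-capable sender works; for example (an
// illustrative command line, not something this file mandates), streaming a
// file from ffmpeg with a stream ID the slot matching in
// handle_hotplugged_cards() can latch onto, assuming the default port 9710:
//
//   ffmpeg -re -i input.mp4 -c:v libx264 -c:a aac -f mpegts \
//       "srt://127.0.0.1:9710?streamid=cam1"
//
// The stream ID is read back on the receiving side via SRTO_STREAMID.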
void Mixer::update_srt_stats(int srt_sock, Mixer::CaptureCard *card)
{
	SRT_TRACEBSTATS stats;
	srt_bistats(srt_sock, &stats, /*clear=*/0, /*instantaneous=*/1);

	// Cumulative counters.
	card->metric_srt_uptime_seconds = stats.msTimeStamp * 1e-3;
	card->metric_srt_send_duration_seconds = stats.usSndDurationTotal * 1e-6;
	card->metric_srt_sent_bytes = stats.byteSentTotal;
	card->metric_srt_received_bytes = stats.byteRecvTotal;
	card->metric_srt_sent_packets_normal = stats.pktSentTotal;
	card->metric_srt_received_packets_normal = stats.pktRecvTotal;
	card->metric_srt_sent_packets_lost = stats.pktSndLossTotal;
	card->metric_srt_received_packets_lost = stats.pktRcvLossTotal;
	card->metric_srt_sent_packets_retransmitted = stats.pktRetransTotal;
	card->metric_srt_sent_bytes_retransmitted = stats.byteRetransTotal;
	card->metric_srt_sent_packets_ack = stats.pktSentACKTotal;
	card->metric_srt_received_packets_ack = stats.pktRecvACKTotal;
	card->metric_srt_sent_packets_nak = stats.pktSentNAKTotal;
	card->metric_srt_received_packets_nak = stats.pktRecvNAKTotal;
	card->metric_srt_sent_packets_dropped = stats.pktSndDropTotal;
	card->metric_srt_received_packets_dropped = stats.pktRcvDropTotal;
	card->metric_srt_sent_bytes_dropped = stats.byteSndDropTotal;
	card->metric_srt_received_bytes_dropped = stats.byteRcvDropTotal;
	card->metric_srt_received_packets_undecryptable = stats.pktRcvUndecryptTotal;
	card->metric_srt_received_bytes_undecryptable = stats.byteRcvUndecryptTotal;
	card->metric_srt_filter_sent_packets = stats.pktSndFilterExtraTotal;
	card->metric_srt_filter_received_extra_packets = stats.pktRcvFilterExtraTotal;
	card->metric_srt_filter_received_rebuilt_packets = stats.pktRcvFilterSupplyTotal;
	card->metric_srt_filter_received_lost_packets = stats.pktRcvFilterLossTotal;

	// Instantaneous gauges.
	card->metric_srt_packet_sending_period_seconds = stats.usPktSndPeriod * 1e-6;
	card->metric_srt_flow_window_packets = stats.pktFlowWindow;
	card->metric_srt_congestion_window_packets = stats.pktCongestionWindow;
	card->metric_srt_flight_size_packets = stats.pktFlightSize;
	card->metric_srt_rtt_seconds = stats.msRTT * 1e-3;
	card->metric_srt_estimated_bandwidth_bits_per_second = stats.mbpsBandwidth * 1e6;
	card->metric_srt_bandwidth_ceiling_bits_per_second = stats.mbpsMaxBW * 1e6;
	card->metric_srt_send_buffer_available_bytes = stats.byteAvailSndBuf;
	card->metric_srt_receive_buffer_available_bytes = stats.byteAvailRcvBuf;
	card->metric_srt_mss_bytes = stats.byteMSS;
	card->metric_srt_sender_unacked_packets = stats.pktSndBuf;
	card->metric_srt_sender_unacked_bytes = stats.byteSndBuf;
	card->metric_srt_sender_unacked_timespan_seconds = stats.msSndBuf * 1e-3;
	card->metric_srt_sender_delivery_delay_seconds = stats.msSndTsbPdDelay * 1e-3;
	card->metric_srt_receiver_unacked_packets = stats.pktRcvBuf;
	card->metric_srt_receiver_unacked_bytes = stats.byteRcvBuf;
	card->metric_srt_receiver_unacked_timespan_seconds = stats.msRcvBuf * 1e-3;
	card->metric_srt_receiver_delivery_delay_seconds = stats.msRcvTsbPdDelay * 1e-3;
}
string Mixer::description_for_card(unsigned card_index)
{
	CaptureCard *card = &cards[card_index];
	if (card->capture == nullptr) {
		// Should never be called for inactive cards, but OK.
		char buf[256];
		snprintf(buf, sizeof(buf), "Inactive capture card %u", card_index);
		return buf;
	}
	if (card->type != CardType::FFMPEG_INPUT) {
		char buf[256];
		snprintf(buf, sizeof(buf), "Capture card %u (%s)", card_index, card->capture->get_description().c_str());
		return buf;
	}

	// Number (non-SRT) FFmpeg inputs from zero, separately from the capture cards,
	// since it's not too obvious for the user that they are “cards”.
	unsigned ffmpeg_index = 0;
	for (unsigned i = 0; i < card_index; ++i) {
		CaptureCard *other_card = &cards[i];
		if (other_card->type == CardType::FFMPEG_INPUT && !is_srt_card(other_card)) {
			++ffmpeg_index;
		}
	}
	char buf[256];
	snprintf(buf, sizeof(buf), "Video input %u (%s)", ffmpeg_index, card->capture->get_description().c_str());
	return buf;
}
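// Sample outputs (device and file names are illustrative): a DeckLink in slot 0
// comes out as "Capture card 0 (DeckLink Mini Recorder)", while the second
// non-SRT FFmpeg input would be "Video input 1 (/path/to/file.mp4)", regardless
// of which card slot it actually occupies.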
bool Mixer::is_srt_card(const Mixer::CaptureCard *card)
{
	if (card->type == CardType::FFMPEG_INPUT) {
		int srt_sock = static_cast<FFmpegCapture *>(card->capture.get())->get_srt_sock();
		return srt_sock != -1;
	}
	return false;
}

mutex RefCountedGLsync::fence_lock;