4 // The actual video mixer, running in its own separate background thread.
15 #include <condition_variable>
26 #include <movit/effect.h>
27 #include <movit/image_format.h>
29 #include "audio_mixer.h"
30 #include "bmusb/bmusb.h"
32 #include "shared/httpd.h"
33 #include "input_state.h"
35 #include "pbo_frame_allocator.h"
36 #include "ref_counted_frame.h"
37 #include "shared/ref_counted_gl_sync.h"
39 #include "shared/timebase.h"
40 #include "video_encoder.h"
41 #include "ycbcr_interpretation.h"
44 class ChromaSubsampler;
49 class TimecodeRenderer;
59 // A class to estimate the future jitter. Used in QueueLengthPolicy (see below).
61 // There are many ways to estimate jitter; I've tested a few ones (and also
62 // some algorithms that don't explicitly model jitter) with different
63 // parameters on some real-life data in experiments/queue_drop_policy.cpp.
64 // This is one based on simple order statistics where I've added some margin in
65 // the number of starvation events; I believe that about one every hour would
66 // probably be acceptable, but this one typically goes lower than that, at the
67 // cost of 2–3 ms extra latency. (If the queue is hard-limited to one frame, it's
68 // possible to get ~10 ms further down, but this would mean framedrops every
69 // second or so.) The general strategy is: Take the 99.9-percentile jitter over
70 // last 5000 frames, multiply by two, and that's our worst-case jitter
71 // estimate. The fact that we're not using the max value means that we could
72 // actually even throw away very late frames immediately, which means we only
73 // get one user-visible event instead of seeing something both when the frame
74 // arrives late (duplicate frame) and then again when we drop.
77 static constexpr size_t history_length = 5000;
78 static constexpr double percentile = 0.999;
79 static constexpr double multiplier = 2.0;
82 void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
83 void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
89 void frame_arrived(std::chrono::steady_clock::time_point now, int64_t frame_duration, size_t dropped_frames);
90 std::chrono::steady_clock::time_point get_expected_next_frame() const { return expected_timestamp; }
91 double estimate_max_jitter() const;
94 // A simple O(k) based algorithm for getting the k-th largest or
95 // smallest element from our window; we simply keep the multiset
96 // ordered (insertions and deletions are O(n) as always) and then
97 // iterate from one of the sides. If we had larger values of k,
98 // we could go for a more complicated setup with two sets or heaps
99 // (one increasing and one decreasing) that we keep balanced around
100 // the point, or it is possible to reimplement std::set with
101 // counts in each node. However, since k=5, we don't need this.
102 std::multiset<double> orders;
103 std::deque<std::multiset<double>::iterator> history;
105 std::chrono::steady_clock::time_point expected_timestamp = std::chrono::steady_clock::time_point::min();
107 // Metrics. There are no direct summaries for jitter, since we already have latency summaries.
108 std::atomic<int64_t> metric_input_underestimated_jitter_frames{0};
109 std::atomic<double> metric_input_estimated_max_jitter_seconds{0.0 / 0.0};
112 // For any card that's not the master (where we pick out the frames as they
113 // come, as fast as we can process), there's going to be a queue. The question
114 // is when we should drop frames from that queue (apart from the obvious
115 // dropping if the 16-frame queue should become full), especially given that
116 // the frame rate could be lower or higher than the master (either subtly or
117 // dramatically). We have two (conflicting) demands:
119 // 1. We want to avoid starving the queue.
120 // 2. We don't want to add more delay than is needed.
122 // Our general strategy is to drop as many frames as we can (helping for #2)
123 // that we think is safe for #1 given jitter. To this end, we measure the
124 // deviation from the expected arrival time for all cards, and use that for
125 // continuous jitter estimation.
127 // We then drop everything from the queue that we're sure we won't need to
128 // serve the output in the time before the next frame arrives. Typically,
129 // this means the queue will contain 0 or 1 frames, although more is also
130 // possible if the jitter is very high.
131 class QueueLengthPolicy {
133 QueueLengthPolicy() {}
134 void reset(unsigned card_index) {
135 this->card_index = card_index;
138 void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
139 void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
141 // Call after picking out a frame, so 0 means starvation.
142 void update_policy(std::chrono::steady_clock::time_point now,
143 std::chrono::steady_clock::time_point expected_next_frame,
144 int64_t input_frame_duration,
145 int64_t master_frame_duration,
146 double max_input_card_jitter_seconds,
147 double max_master_card_jitter_seconds);
148 unsigned get_safe_queue_length() const { return safe_queue_length; }
151 unsigned card_index; // For debugging and metrics only.
152 unsigned safe_queue_length = 0; // Can never go below zero.
155 std::atomic<int64_t> metric_input_queue_safe_length_frames{1};
160 // The surface format is used for offscreen destinations for OpenGL contexts we need.
161 Mixer(const QSurfaceFormat &format, unsigned num_cards);
166 void transition_clicked(int transition_num);
167 void channel_clicked(int preview_num);
172 OUTPUT_INPUT0, // 1, 2, 3, up to 15 follow numerically.
176 struct DisplayFrame {
177 // The chain for rendering this frame. To render a display frame,
178 // first wait for <ready_fence>, then call <setup_chain>
179 // to wire up all the inputs, and then finally call
180 // chain->render_to_screen() or similar.
181 movit::EffectChain *chain;
182 std::function<void()> setup_chain;
184 // Asserted when all the inputs are ready; you cannot render the chain
186 RefCountedGLsync ready_fence;
188 // Holds on to all the input frames needed for this display frame,
189 // so they are not released while still rendering.
190 std::vector<RefCountedFrame> input_frames;
192 // Textures that should be released back to the resource pool
193 // when this frame disappears, if any.
194 // TODO: Refcount these as well?
195 std::vector<GLuint> temp_textures;
197 // Implicitly frees the previous one if there's a new frame available.
198 bool get_display_frame(Output output, DisplayFrame *frame) {
199 return output_channel[output].get_display_frame(frame);
202 // NOTE: Callbacks will be called with a mutex held, so you should probably
203 // not do real work in them.
204 typedef std::function<void()> new_frame_ready_callback_t;
205 void add_frame_ready_callback(Output output, void *key, new_frame_ready_callback_t callback)
207 output_channel[output].add_frame_ready_callback(key, callback);
210 void remove_frame_ready_callback(Output output, void *key)
212 output_channel[output].remove_frame_ready_callback(key);
215 // TODO: Should this really be per-channel? Shouldn't it just be called for e.g. the live output?
216 typedef std::function<void(const std::vector<std::string> &)> transition_names_updated_callback_t;
217 void set_transition_names_updated_callback(Output output, transition_names_updated_callback_t callback)
219 output_channel[output].set_transition_names_updated_callback(callback);
222 typedef std::function<void(const std::string &)> name_updated_callback_t;
223 void set_name_updated_callback(Output output, name_updated_callback_t callback)
225 output_channel[output].set_name_updated_callback(callback);
228 typedef std::function<void(const std::string &)> color_updated_callback_t;
229 void set_color_updated_callback(Output output, color_updated_callback_t callback)
231 output_channel[output].set_color_updated_callback(callback);
234 std::vector<std::string> get_transition_names()
236 return theme->get_transition_names(pts());
239 unsigned get_num_channels() const
241 return theme->get_num_channels();
244 std::string get_channel_name(unsigned channel) const
246 return theme->get_channel_name(channel);
249 std::string get_channel_color(unsigned channel) const
251 return theme->get_channel_color(channel);
254 int map_channel_to_signal(unsigned channel) const
256 return theme->map_channel_to_signal(channel);
259 int map_signal_to_card(int signal)
261 return theme->map_signal_to_card(signal);
264 unsigned get_master_clock() const
266 return master_clock_channel;
269 void set_master_clock(unsigned channel)
271 master_clock_channel = channel;
274 void set_signal_mapping(int signal, int card)
276 return theme->set_signal_mapping(signal, card);
279 YCbCrInterpretation get_input_ycbcr_interpretation(unsigned card_index) const;
280 void set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation);
282 bool get_supports_set_wb(unsigned channel) const
284 return theme->get_supports_set_wb(channel);
287 void set_wb(unsigned channel, double r, double g, double b) const
289 theme->set_wb(channel, r, g, b);
292 std::string format_status_line(const std::string &disk_space_left_text, double file_length_seconds)
294 return theme->format_status_line(disk_space_left_text, file_length_seconds);
297 // Note: You can also get this through the global variable global_audio_mixer.
298 AudioMixer *get_audio_mixer() { return audio_mixer.get(); }
299 const AudioMixer *get_audio_mixer() const { return audio_mixer.get(); }
306 unsigned get_num_cards() const { return num_cards; }
308 std::string get_card_description(unsigned card_index) const {
309 assert(card_index < num_cards);
310 return cards[card_index].capture->get_description();
313 // The difference between this and the previous function is that if a card
314 // is used as the current output, get_card_description() will return the
315 // fake card that's replacing it for input, whereas this function will return
316 // the card's actual name.
317 std::string get_output_card_description(unsigned card_index) const {
318 assert(card_can_be_used_as_output(card_index));
319 assert(card_index < num_cards);
320 if (cards[card_index].parked_capture) {
321 return cards[card_index].parked_capture->get_description();
323 return cards[card_index].capture->get_description();
327 bool card_can_be_used_as_output(unsigned card_index) const {
328 assert(card_index < num_cards);
329 return cards[card_index].output != nullptr;
332 bool card_is_ffmpeg(unsigned card_index) const {
333 assert(card_index < num_cards + num_video_inputs);
334 if (card_index < num_cards) {
335 // SRT inputs are more like regular inputs than FFmpeg inputs,
336 // so show them as such. (This allows the user to right-click
337 // to select a different input.)
340 return cards[card_index].type == CardType::FFMPEG_INPUT;
343 std::map<uint32_t, bmusb::VideoMode> get_available_video_modes(unsigned card_index) const {
344 assert(card_index < num_cards);
345 return cards[card_index].capture->get_available_video_modes();
348 uint32_t get_current_video_mode(unsigned card_index) const {
349 assert(card_index < num_cards);
350 return cards[card_index].capture->get_current_video_mode();
353 void set_video_mode(unsigned card_index, uint32_t mode) {
354 assert(card_index < num_cards);
355 cards[card_index].capture->set_video_mode(mode);
358 void start_mode_scanning(unsigned card_index);
360 std::map<uint32_t, std::string> get_available_video_inputs(unsigned card_index) const {
361 assert(card_index < num_cards);
362 return cards[card_index].capture->get_available_video_inputs();
365 uint32_t get_current_video_input(unsigned card_index) const {
366 assert(card_index < num_cards);
367 return cards[card_index].capture->get_current_video_input();
370 void set_video_input(unsigned card_index, uint32_t input) {
371 assert(card_index < num_cards);
372 cards[card_index].capture->set_video_input(input);
375 std::map<uint32_t, std::string> get_available_audio_inputs(unsigned card_index) const {
376 assert(card_index < num_cards);
377 return cards[card_index].capture->get_available_audio_inputs();
380 uint32_t get_current_audio_input(unsigned card_index) const {
381 assert(card_index < num_cards);
382 return cards[card_index].capture->get_current_audio_input();
385 void set_audio_input(unsigned card_index, uint32_t input) {
386 assert(card_index < num_cards);
387 cards[card_index].capture->set_audio_input(input);
390 std::string get_ffmpeg_filename(unsigned card_index) const;
392 void set_ffmpeg_filename(unsigned card_index, const std::string &filename);
394 void change_x264_bitrate(unsigned rate_kbit) {
395 video_encoder->change_x264_bitrate(rate_kbit);
398 int get_output_card_index() const { // -1 = no output, just stream.
399 return desired_output_card_index;
402 void set_output_card(int card_index) { // -1 = no output, just stream.
403 desired_output_card_index = card_index;
406 std::map<uint32_t, bmusb::VideoMode> get_available_output_video_modes() const;
408 uint32_t get_output_video_mode() const {
409 return desired_output_video_mode;
412 void set_output_video_mode(uint32_t mode) {
413 desired_output_video_mode = mode;
416 void set_display_timecode_in_stream(bool enable) {
417 display_timecode_in_stream = enable;
420 void set_display_timecode_on_stdout(bool enable) {
421 display_timecode_on_stdout = enable;
424 int64_t get_num_connected_clients() const {
425 return httpd.get_num_connected_clients();
428 Theme::MenuEntry *get_theme_menu() { return theme->get_theme_menu(); }
430 void theme_menu_entry_clicked(int lua_ref) { return theme->theme_menu_entry_clicked(lua_ref); }
432 void set_theme_menu_callback(std::function<void()> callback)
434 theme->set_theme_menu_callback(callback);
437 void wait_for_next_frame();
442 enum class CardType {
448 void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool is_srt_card = false);
449 void set_output_card_internal(int card_index); // Should only be called from the mixer thread.
450 void bm_frame(unsigned card_index, uint16_t timecode,
451 bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
452 bmusb::FrameAllocator::Frame audio_frame, size_t audio_offset, bmusb::AudioFormat audio_format);
453 void bm_hotplug_add(libusb_device *dev);
454 void bm_hotplug_remove(unsigned card_index);
455 void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
457 void handle_hotplugged_cards();
458 void schedule_audio_resampling_tasks(unsigned dropped_frames, int num_samples_per_frame, int length_per_frame, bool is_preroll, std::chrono::steady_clock::time_point frame_timestamp);
459 std::string get_timecode_text() const;
460 void render_one_frame(int64_t duration);
461 void audio_thread_func();
462 void release_display_frame(DisplayFrame *frame);
466 double pts() { return double(pts_int) / TIMEBASE; }
467 void trim_queue(CaptureCard *card, size_t safe_queue_length);
468 std::pair<std::string, std::string> get_channels_json();
469 std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);
472 unsigned num_cards, num_video_inputs, num_html_inputs = 0;
474 QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface, *image_update_surface;
475 std::unique_ptr<movit::ResourcePool> resource_pool;
476 std::unique_ptr<Theme> theme;
477 std::atomic<unsigned> audio_source_channel{0};
478 std::atomic<int> master_clock_channel{0}; // Gets overridden by <output_card_index> if set.
479 int output_card_index = -1; // -1 for none.
480 uint32_t output_video_mode = -1;
482 // The mechanics of changing the output card and modes are intricately connected
483 // with the work the mixer thread is doing. Thus, we don't change it directly,
484 // we just set this variable instead, which signals to the mixer thread that
485 // it should do the change before the next frame. This simplifies locking
486 // considerations immensely.
487 std::atomic<int> desired_output_card_index{-1};
488 std::atomic<uint32_t> desired_output_video_mode{0};
490 std::unique_ptr<movit::EffectChain> display_chain;
491 std::unique_ptr<ChromaSubsampler> chroma_subsampler;
492 std::unique_ptr<v210Converter> v210_converter;
493 std::unique_ptr<VideoEncoder> video_encoder;
494 std::unique_ptr<MJPEGEncoder> mjpeg_encoder;
496 std::unique_ptr<TimecodeRenderer> timecode_renderer;
497 std::atomic<bool> display_timecode_in_stream{false};
498 std::atomic<bool> display_timecode_on_stdout{false};
500 // Effects part of <display_chain>. Owned by <display_chain>.
501 movit::YCbCrInput *display_input;
503 int64_t pts_int = 0; // In TIMEBASE units.
505 mutable std::mutex frame_num_mutex;
506 std::condition_variable frame_num_updated;
507 unsigned frame_num = 0; // Under <frame_num_mutex>.
509 // Accumulated errors in number of 1/TIMEBASE audio samples. If OUTPUT_FREQUENCY divided by
510 // frame rate is integer, will always stay zero.
511 unsigned fractional_samples = 0;
513 // Monotonic counter that lets us know which slot was last turned into
514 // a fake capture. Used for SRT re-plugging.
515 unsigned fake_capture_counter = 0;
517 mutable std::mutex card_mutex;
518 bool has_bmusb_thread = false;
520 std::unique_ptr<bmusb::CaptureInterface> capture;
521 bool is_fake_capture;
522 // If is_fake_capture is true, contains a monotonic timer value for when
523 // it was last changed. Otherwise undefined. Used for SRT re-plugging.
524 int fake_capture_counter;
525 std::string last_srt_stream_id = "<default, matches nothing>"; // Used for SRT re-plugging.
527 std::unique_ptr<DeckLinkOutput> output;
529 // CEF only delivers frames when it actually has a change.
530 // If we trim the queue for latency reasons, we could thus
531 // end up in a situation trimming a frame that was meant to
532 // be displayed for a long time, which is really suboptimal.
533 // Thus, if we drop the last frame we have, may_have_dropped_last_frame
534 // is set to true, and the next starvation event will trigger
535 // us requesting a CEF repaint.
536 bool is_cef_capture, may_have_dropped_last_frame = false;
538 // If this card is used for output (ie., output_card_index points to it),
539 // it cannot simultaneously be used for capture, so <capture> gets replaced
540 // by a FakeCapture. However, since reconstructing the real capture object
541 // with all its state can be annoying, it is not being deleted, just stopped
543 std::unique_ptr<bmusb::CaptureInterface> parked_capture;
545 std::unique_ptr<PBOFrameAllocator> frame_allocator;
547 // Stuff for the OpenGL context (for texture uploading).
548 QSurface *surface = nullptr;
551 RefCountedFrame frame;
552 int64_t length; // In TIMEBASE units.
554 unsigned field; // Which field (0 or 1) of the frame to use. Always 0 for progressive.
555 std::function<void()> upload_func; // Needs to be called to actually upload the texture to OpenGL.
556 unsigned dropped_frames = 0; // Number of dropped frames before this one.
557 std::chrono::steady_clock::time_point received_timestamp = std::chrono::steady_clock::time_point::min();
558 movit::RGBTriplet neutral_color{1.0f, 1.0f, 1.0f};
560 // Used for MJPEG encoding. (upload_func packs everything it needs
561 // into the functor, but would otherwise also use these.)
562 // width=0 or height=0 means a broken frame, ie., do not upload.
563 bmusb::VideoFormat video_format;
564 size_t y_offset, cbcr_offset;
566 std::deque<NewFrame> new_frames;
567 std::condition_variable new_frames_changed; // Set whenever new_frames is changed.
568 QueueLengthPolicy queue_length_policy; // Refers to the "new_frames" queue.
570 std::vector<int32_t> new_raw_audio;
572 int last_timecode = -1; // Unwrapped.
574 JitterHistory jitter_history;
577 std::vector<std::pair<std::string, std::string>> labels;
578 std::atomic<int64_t> metric_input_received_frames{0};
579 std::atomic<int64_t> metric_input_duped_frames{0};
580 std::atomic<int64_t> metric_input_dropped_frames_jitter{0};
581 std::atomic<int64_t> metric_input_dropped_frames_error{0};
582 std::atomic<int64_t> metric_input_resets{0};
583 std::atomic<int64_t> metric_input_queue_length_frames{0};
585 std::atomic<int64_t> metric_input_has_signal_bool{-1};
586 std::atomic<int64_t> metric_input_is_connected_bool{-1};
587 std::atomic<int64_t> metric_input_interlaced_bool{-1};
588 std::atomic<int64_t> metric_input_width_pixels{-1};
589 std::atomic<int64_t> metric_input_height_pixels{-1};
590 std::atomic<int64_t> metric_input_frame_rate_nom{-1};
591 std::atomic<int64_t> metric_input_frame_rate_den{-1};
592 std::atomic<int64_t> metric_input_sample_rate_hz{-1};
595 std::atomic<double> metric_srt_uptime_seconds{0.0 / 0.0};
596 std::atomic<double> metric_srt_send_duration_seconds{0.0 / 0.0};
597 std::atomic<int64_t> metric_srt_sent_bytes{-1};
598 std::atomic<int64_t> metric_srt_received_bytes{-1};
599 std::atomic<int64_t> metric_srt_sent_packets_normal{-1};
600 std::atomic<int64_t> metric_srt_received_packets_normal{-1};
601 std::atomic<int64_t> metric_srt_sent_packets_lost{-1};
602 std::atomic<int64_t> metric_srt_received_packets_lost{-1};
603 std::atomic<int64_t> metric_srt_sent_packets_retransmitted{-1};
604 std::atomic<int64_t> metric_srt_sent_bytes_retransmitted{-1};
605 std::atomic<int64_t> metric_srt_sent_packets_ack{-1};
606 std::atomic<int64_t> metric_srt_received_packets_ack{-1};
607 std::atomic<int64_t> metric_srt_sent_packets_nak{-1};
608 std::atomic<int64_t> metric_srt_received_packets_nak{-1};
609 std::atomic<int64_t> metric_srt_sent_packets_dropped{-1};
610 std::atomic<int64_t> metric_srt_received_packets_dropped{-1};
611 std::atomic<int64_t> metric_srt_sent_bytes_dropped{-1};
612 std::atomic<int64_t> metric_srt_received_bytes_dropped{-1};
613 std::atomic<int64_t> metric_srt_received_packets_undecryptable{-1};
614 std::atomic<int64_t> metric_srt_received_bytes_undecryptable{-1};
616 std::atomic<int64_t> metric_srt_filter_received_extra_packets{-1};
617 std::atomic<int64_t> metric_srt_filter_received_rebuilt_packets{-1};
618 std::atomic<int64_t> metric_srt_filter_received_lost_packets{-1};
620 std::atomic<double> metric_srt_packet_sending_period_seconds{0.0 / 0.0};
621 std::atomic<int64_t> metric_srt_flow_window_packets{-1};
622 std::atomic<int64_t> metric_srt_congestion_window_packets{-1};
623 std::atomic<int64_t> metric_srt_flight_size_packets{-1};
624 std::atomic<double> metric_srt_rtt_seconds{0.0 / 0.0};
625 std::atomic<double> metric_srt_estimated_bandwidth_bits_per_second{0.0 / 0.0};
626 std::atomic<double> metric_srt_bandwidth_ceiling_bits_per_second{0.0 / 0.0};
627 std::atomic<int64_t> metric_srt_send_buffer_available_bytes{-1};
628 std::atomic<int64_t> metric_srt_receive_buffer_available_bytes{-1};
629 std::atomic<int64_t> metric_srt_mss_bytes{-1};
630 std::atomic<int64_t> metric_srt_sender_unacked_packets{-1};
631 std::atomic<int64_t> metric_srt_sender_unacked_bytes{-1};
632 std::atomic<double> metric_srt_sender_unacked_timespan_seconds{0.0 / 0.0};
633 std::atomic<double> metric_srt_sender_delivery_delay_seconds{0.0 / 0.0};
634 std::atomic<int64_t> metric_srt_receiver_unacked_packets{-1};
635 std::atomic<int64_t> metric_srt_receiver_unacked_bytes{-1};
636 std::atomic<double> metric_srt_receiver_unacked_timespan_seconds{0.0 / 0.0};
637 std::atomic<double> metric_srt_receiver_delivery_delay_seconds{0.0 / 0.0};
638 std::atomic<int64_t> metric_srt_filter_sent_packets{-1};
641 JitterHistory output_jitter_history;
642 CaptureCard cards[MAX_VIDEO_CARDS]; // Protected by <card_mutex>.
643 YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS]; // Protected by <card_mutex>.
644 movit::RGBTriplet last_received_neutral_color[MAX_VIDEO_CARDS]; // Used by the mixer thread only. Constructor-initialized.
645 std::unique_ptr<AudioMixer> audio_mixer; // Same as global_audio_mixer (see audio_mixer.h).
646 bool input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const;
647 struct OutputFrameInfo {
648 int dropped_frames; // Since last frame.
649 int num_samples; // Audio samples needed for this output frame.
650 int64_t frame_duration; // In TIMEBASE units.
652 std::chrono::steady_clock::time_point frame_timestamp;
654 OutputFrameInfo get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS], std::vector<int32_t> raw_audio[MAX_VIDEO_CARDS]);
657 void update_srt_stats(int srt_sock, Mixer::CaptureCard *card);
660 InputState input_state;
662 // Cards we have been notified about being hotplugged, but haven't tried adding yet.
663 // Protected by its own mutex.
664 std::mutex hotplug_mutex;
665 std::vector<libusb_device *> hotplugged_cards;
667 std::vector<int> hotplugged_srt_cards;
670 class OutputChannel {
673 void output_frame(DisplayFrame &&frame);
674 bool get_display_frame(DisplayFrame *frame);
675 void add_frame_ready_callback(void *key, new_frame_ready_callback_t callback);
676 void remove_frame_ready_callback(void *key);
677 void set_transition_names_updated_callback(transition_names_updated_callback_t callback);
678 void set_name_updated_callback(name_updated_callback_t callback);
679 void set_color_updated_callback(color_updated_callback_t callback);
685 Mixer *parent = nullptr; // Not owned.
686 std::mutex frame_mutex;
687 DisplayFrame current_frame, ready_frame; // protected by <frame_mutex>
688 bool has_current_frame = false, has_ready_frame = false; // protected by <frame_mutex>
689 std::map<void *, new_frame_ready_callback_t> new_frame_ready_callbacks; // protected by <frame_mutex>
690 transition_names_updated_callback_t transition_names_updated_callback;
691 name_updated_callback_t name_updated_callback;
692 color_updated_callback_t color_updated_callback;
694 std::vector<std::string> last_transition_names;
695 std::string last_name, last_color;
697 OutputChannel output_channel[NUM_OUTPUTS];
699 std::thread mixer_thread;
700 std::thread audio_thread;
702 std::thread srt_thread;
704 std::atomic<bool> should_quit{false};
705 std::atomic<bool> should_cut{false};
707 std::unique_ptr<ALSAOutput> alsa;
713 std::chrono::steady_clock::time_point frame_timestamp;
715 std::mutex audio_mutex;
716 std::condition_variable audio_task_queue_changed;
717 std::queue<AudioTask> audio_task_queue; // Under audio_mutex.
719 // For mode scanning.
720 bool is_mode_scanning[MAX_VIDEO_CARDS]{ false };
721 std::vector<uint32_t> mode_scanlist[MAX_VIDEO_CARDS];
722 unsigned mode_scanlist_index[MAX_VIDEO_CARDS]{ 0 };
723 std::chrono::steady_clock::time_point last_mode_scan_change[MAX_VIDEO_CARDS];
726 extern Mixer *global_mixer;
728 #endif // !defined(_MIXER_H)