#ifndef _MIXER_H
#define _MIXER_H 1

// The actual video mixer, running in its own separate background thread.

#include <assert.h>
#include <epoxy/gl.h>

#undef Success

#include <stdbool.h>
#include <stdint.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <deque>
#include <functional>
#include <map>
#include <memory>
#include <mutex>
#include <queue>
#include <set>
#include <string>
#include <thread>
#include <vector>

#include <movit/effect.h>
#include <movit/image_format.h>

#include "audio_mixer.h"
#include "bmusb/bmusb.h"
#include "defs.h"
#include "ffmpeg_capture.h"
#include "shared/httpd.h"
#include "input_state.h"
#include "libusb.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_frame.h"
#include "shared/ref_counted_gl_sync.h"
#include "theme.h"
#include "shared/timebase.h"
#include "video_encoder.h"
#include "ycbcr_interpretation.h"

class ALSAOutput;
class ChromaSubsampler;
class DeckLinkOutput;
class MJPEGEncoder;
class QSurface;
class QSurfaceFormat;
class TimecodeRenderer;
class v210Converter;

namespace movit {
class Effect;
class EffectChain;
class ResourcePool;
class YCbCrInput;
}  // namespace movit

// A class to estimate the future jitter. Used in QueueLengthPolicy (see below).
//
// There are many ways to estimate jitter; I've tested a few ones (and also
// some algorithms that don't explicitly model jitter) with different
// parameters on some real-life data in experiments/queue_drop_policy.cpp.
// This is one based on simple order statistics where I've added some margin in
// the number of starvation events; I believe that about one every hour would
// probably be acceptable, but this one typically goes lower than that, at the
// cost of 2–3 ms extra latency. (If the queue is hard-limited to one frame, it's
// possible to get ~10 ms further down, but this would mean framedrops every
// second or so.) The general strategy is: Take the 99.9-percentile jitter over
// the last 5000 frames, multiply by two, and that's our worst-case jitter
// estimate. The fact that we're not using the max value means that we could
// actually even throw away very late frames immediately, which means we only
// get one user-visible event instead of seeing something both when the frame
// arrives late (duplicate frame) and then again when we drop.
class JitterHistory {
private:
        static constexpr size_t history_length = 5000;
        static constexpr double percentile = 0.999;
        static constexpr double multiplier = 2.0;

public:
        void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
        void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);

        void clear() {
                history.clear();
                orders.clear();
        }
        void frame_arrived(std::chrono::steady_clock::time_point now, int64_t frame_duration, size_t dropped_frames);
        std::chrono::steady_clock::time_point get_expected_next_frame() const { return expected_timestamp; }
        double estimate_max_jitter() const;

private:
        // A simple O(k) algorithm for getting the k-th largest or
        // smallest element from our window; we simply keep the multiset
        // ordered (insertions and deletions are O(log n) as always) and then
        // iterate from one of the sides. If we had larger values of k,
        // we could go for a more complicated setup with two sets or heaps
        // (one increasing and one decreasing) that we keep balanced around
        // the point, or it is possible to reimplement std::set with
        // counts in each node. However, since k=5, we don't need this.
        std::multiset<double> orders;
        std::deque<std::multiset<double>::iterator> history;

        std::chrono::steady_clock::time_point expected_timestamp = std::chrono::steady_clock::time_point::min();

        // Metrics. There are no direct summaries for jitter, since we already have latency summaries.
        std::atomic<int64_t> metric_input_underestimated_jitter_frames{0};
        std::atomic<double> metric_input_estimated_max_jitter_seconds{0.0 / 0.0};
};
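
// Illustrative sketch (not part of the real implementation) of how the
// order-statistics estimate above falls out of <orders>: with
// history_length = 5000 and percentile = 0.999, k = 5000 * (1 - 0.999) = 5,
// so the estimate only needs to walk a handful of elements from the top of
// the multiset. Roughly:
//
//   double estimate(const std::multiset<double> &orders)
//   {
//           if (orders.empty()) return 0.0;
//           size_t k = lrint(orders.size() * (1.0 - percentile));  // ~5 for a full window.
//           auto it = orders.rbegin();
//           std::advance(it, k);      // k-th largest jitter observation.
//           return multiplier * *it;  // Double it for the worst-case margin.
//   }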

// For any card that's not the master (where we pick out the frames as they
// come, as fast as we can process), there's going to be a queue. The question
// is when we should drop frames from that queue (apart from the obvious
// dropping if the 16-frame queue should become full), especially given that
// the frame rate could be lower or higher than the master (either subtly or
// dramatically). We have two (conflicting) demands:
//
//   1. We want to avoid starving the queue.
//   2. We don't want to add more delay than is needed.
//
// Our general strategy is to drop as many frames as we can (helping for #2)
// as long as we think it is safe for #1, given the jitter. To this end, we
// measure the deviation from the expected arrival time for all cards, and
// use that for continuous jitter estimation.
//
// We then drop everything from the queue that we're sure we won't need to
// serve the output in the time before the next frame arrives. Typically,
// this means the queue will contain 0 or 1 frames, although more is also
// possible if the jitter is very high.
class QueueLengthPolicy {
public:
        QueueLengthPolicy() {}
        void reset(unsigned card_index) {
                this->card_index = card_index;
        }

        void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
        void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);

        // Call after picking out a frame, so 0 means starvation.
        void update_policy(std::chrono::steady_clock::time_point now,
                           std::chrono::steady_clock::time_point expected_next_frame,
                           int64_t input_frame_duration,
                           int64_t master_frame_duration,
                           double max_input_card_jitter_seconds,
                           double max_master_card_jitter_seconds);
        unsigned get_safe_queue_length() const { return safe_queue_length; }

private:
        unsigned card_index;  // For debugging and metrics only.
        unsigned safe_queue_length = 0;  // Can never go below zero.

        // Metrics.
        std::atomic<int64_t> metric_input_queue_safe_length_frames{1};
};
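
// A rough sketch (placeholder names, not the actual Mixer code) of how a
// caller is expected to combine QueueLengthPolicy with JitterHistory each
// time an output frame is produced for a non-master card:
//
//   card->queue_length_policy.update_policy(
//           now, card->jitter_history.get_expected_next_frame(),
//           input_frame_duration, master_frame_duration,
//           card->jitter_history.estimate_max_jitter(),
//           output_jitter_history.estimate_max_jitter());
//   while (card->new_frames.size() > card->queue_length_policy.get_safe_queue_length()) {
//           card->new_frames.pop_front();  // Drop frames we won't need in time anyway.
//   }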

class Mixer {
public:
        // The surface format is used for offscreen destinations for OpenGL contexts we need.
        Mixer(const QSurfaceFormat &format);
        ~Mixer();
        void start();
        void quit();

        void transition_clicked(int transition_num);
        void channel_clicked(int preview_num);

        enum Output {
                OUTPUT_LIVE = 0,
                OUTPUT_PREVIEW,
                OUTPUT_INPUT0,  // 1, 2, 3, up to 15 follow numerically.
                NUM_OUTPUTS = 18
        };

        struct DisplayFrame {
                // The chain for rendering this frame. To render a display frame,
                // first wait for <ready_fence>, then call <setup_chain>
                // to wire up all the inputs, and then finally call
                // chain->render_to_screen() or similar.
                movit::EffectChain *chain;
                std::function<void()> setup_chain;

                // Asserted when all the inputs are ready; you cannot render the chain
                // before this.
                RefCountedGLsync ready_fence;

                // Holds on to all the input frames needed for this display frame,
                // so they are not released while still rendering.
                std::vector<RefCountedFrame> input_frames;

                // Textures that should be released back to the resource pool
                // when this frame disappears, if any.
                // TODO: Refcount these as well?
                std::vector<GLuint> temp_textures;
        };
        // Implicitly frees the previous one if there's a new frame available.
        bool get_display_frame(Output output, DisplayFrame *frame) {
                return output_channel[output].get_display_frame(frame);
        }
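
        // An illustrative consumer sketch (hypothetical GL widget code, not part of
        // this header, and assuming RefCountedGLsync exposes the underlying GLsync
        // via get()) following the recipe in the DisplayFrame comment above:
        //
        //   Mixer::DisplayFrame frame;
        //   if (global_mixer->get_display_frame(Mixer::OUTPUT_LIVE, &frame)) {
        //           // Make sure all inputs are uploaded before we sample from them.
        //           glWaitSync(frame.ready_fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
        //           frame.setup_chain();  // Wire up the inputs.
        //           frame.chain->render_to_screen();
        //   }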

        // NOTE: Callbacks will be called with a mutex held, so you should probably
        // not do real work in them.
        typedef std::function<void()> new_frame_ready_callback_t;
        void add_frame_ready_callback(Output output, void *key, new_frame_ready_callback_t callback)
        {
                output_channel[output].add_frame_ready_callback(key, callback);
        }

        void remove_frame_ready_callback(Output output, void *key)
        {
                output_channel[output].remove_frame_ready_callback(key);
        }
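
        // Hypothetical usage sketch (not part of this header): since the callback
        // runs with a mutex held, keep it to a cheap notification and do the real
        // work elsewhere, e.g. on the GUI thread:
        //
        //   global_mixer->add_frame_ready_callback(Mixer::OUTPUT_LIVE, this, [this]{
        //           QMetaObject::invokeMethod(this, "update", Qt::QueuedConnection);
        //   });
        //   ...
        //   global_mixer->remove_frame_ready_callback(Mixer::OUTPUT_LIVE, this);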

        // TODO: Should this really be per-channel? Shouldn't it just be called for e.g. the live output?
        typedef std::function<void(const std::vector<std::string> &)> transition_names_updated_callback_t;
        void set_transition_names_updated_callback(Output output, transition_names_updated_callback_t callback)
        {
                output_channel[output].set_transition_names_updated_callback(callback);
        }

        typedef std::function<void(const std::string &)> name_updated_callback_t;
        void set_name_updated_callback(Output output, name_updated_callback_t callback)
        {
                output_channel[output].set_name_updated_callback(callback);
        }

        typedef std::function<void(const std::string &)> color_updated_callback_t;
        void set_color_updated_callback(Output output, color_updated_callback_t callback)
        {
                output_channel[output].set_color_updated_callback(callback);
        }

        std::vector<std::string> get_transition_names()
        {
                return theme->get_transition_names(pts());
        }

        unsigned get_num_channels() const
        {
                return theme->get_num_channels();
        }

        std::string get_channel_name(unsigned channel) const
        {
                return theme->get_channel_name(channel);
        }

        std::string get_channel_color(unsigned channel) const
        {
                return theme->get_channel_color(channel);
        }

        int map_channel_to_signal(unsigned channel) const
        {
                return theme->map_channel_to_signal(channel);
        }

        int map_signal_to_card(int signal)
        {
                return theme->map_signal_to_card(signal);
        }

        unsigned get_master_clock() const
        {
                return master_clock_channel;
        }

        void set_master_clock(unsigned channel)
        {
                master_clock_channel = channel;
        }

        void set_signal_mapping(int signal, int card)
        {
                return theme->set_signal_mapping(signal, card);
        }

        YCbCrInterpretation get_input_ycbcr_interpretation(unsigned card_index) const;
        void set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation);

        bool get_supports_set_wb(unsigned channel) const
        {
                return theme->get_supports_set_wb(channel);
        }

        void set_wb(unsigned channel, double r, double g, double b) const
        {
                theme->set_wb(channel, r, g, b);
        }

        std::string format_status_line(const std::string &disk_space_left_text, double file_length_seconds)
        {
                return theme->format_status_line(disk_space_left_text, file_length_seconds);
        }

        // Note: You can also get this through the global variable global_audio_mixer.
        AudioMixer *get_audio_mixer() { return audio_mixer.get(); }
        const AudioMixer *get_audio_mixer() const { return audio_mixer.get(); }

        void schedule_cut()
        {
                should_cut = true;
        }

        std::string get_card_description(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_description();
        }

        // The difference between this and the previous function is that if a card
        // is used as the current output, get_card_description() will return the
        // description of the fake card that's replacing it for input, whereas this
        // function will return the card's actual name.
        std::string get_output_card_description(unsigned card_index) const {
                assert(card_can_be_used_as_output(card_index));
                assert(card_index < MAX_VIDEO_CARDS);
                if (cards[card_index].parked_capture) {
                        return cards[card_index].parked_capture->get_description();
                } else {
                        return cards[card_index].capture->get_description();
                }
        }

        bool card_can_be_used_as_output(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].output != nullptr && cards[card_index].capture != nullptr;
        }

        bool card_is_cef(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].type == CardType::CEF_INPUT;
        }

        bool card_is_ffmpeg(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                if (cards[card_index].type != CardType::FFMPEG_INPUT) {
                        return false;
                }
#ifdef HAVE_SRT
                // SRT inputs are more like regular inputs than FFmpeg inputs,
                // so show them as such. (This allows the user to right-click
                // to select a different input.)
                return static_cast<FFmpegCapture *>(cards[card_index].capture.get())->get_srt_sock() == -1;
#else
                return true;
#endif
        }

        bool card_is_active(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                std::lock_guard<std::mutex> lock(card_mutex);
                return cards[card_index].capture != nullptr;
        }

        void force_card_active(unsigned card_index)
        {
                // handle_hotplugged_cards() will pick this up.
                std::lock_guard<std::mutex> lock(card_mutex);
                cards[card_index].force_active = true;
        }

        std::map<uint32_t, bmusb::VideoMode> get_available_video_modes(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_available_video_modes();
        }

        uint32_t get_current_video_mode(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_current_video_mode();
        }

        void set_video_mode(unsigned card_index, uint32_t mode) {
                assert(card_index < MAX_VIDEO_CARDS);
                cards[card_index].capture->set_video_mode(mode);
        }

        void start_mode_scanning(unsigned card_index);

        std::map<uint32_t, std::string> get_available_video_inputs(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_available_video_inputs();
        }

        uint32_t get_current_video_input(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_current_video_input();
        }

        void set_video_input(unsigned card_index, uint32_t input) {
                assert(card_index < MAX_VIDEO_CARDS);
                cards[card_index].capture->set_video_input(input);
        }

        std::map<uint32_t, std::string> get_available_audio_inputs(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_available_audio_inputs();
        }

        uint32_t get_current_audio_input(unsigned card_index) const {
                assert(card_index < MAX_VIDEO_CARDS);
                return cards[card_index].capture->get_current_audio_input();
        }

        void set_audio_input(unsigned card_index, uint32_t input) {
                assert(card_index < MAX_VIDEO_CARDS);
                cards[card_index].capture->set_audio_input(input);
        }

        std::string get_ffmpeg_filename(unsigned card_index) const;

        void set_ffmpeg_filename(unsigned card_index, const std::string &filename);

        void change_x264_bitrate(unsigned rate_kbit) {
                video_encoder->change_x264_bitrate(rate_kbit);
        }

        int get_output_card_index() const {  // -1 = no output, just stream.
                return desired_output_card_index;
        }

        void set_output_card(int card_index) { // -1 = no output, just stream.
                desired_output_card_index = card_index;
        }

        std::map<uint32_t, bmusb::VideoMode> get_available_output_video_modes() const;

        uint32_t get_output_video_mode() const {
                return desired_output_video_mode;
        }

        void set_output_video_mode(uint32_t mode) {
                desired_output_video_mode = mode;
        }

        void set_display_timecode_in_stream(bool enable) {
                display_timecode_in_stream = enable;
        }

        void set_display_timecode_on_stdout(bool enable) {
                display_timecode_on_stdout = enable;
        }

        int64_t get_num_connected_clients() const {
                return httpd.get_num_connected_clients();
        }

        Theme::MenuEntry *get_theme_menu() { return theme->get_theme_menu(); }

        void theme_menu_entry_clicked(int lua_ref) { return theme->theme_menu_entry_clicked(lua_ref); }

        void set_theme_menu_callback(std::function<void()> callback)
        {
                theme->set_theme_menu_callback(callback);
        }

        void wait_for_next_frame();

private:
        struct CaptureCard;

        void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool is_srt_card);
        void set_output_card_internal(int card_index);  // Should only be called from the mixer thread.
        void bm_frame(unsigned card_index, uint16_t timecode,
                bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
                bmusb::FrameAllocator::Frame audio_frame, size_t audio_offset, bmusb::AudioFormat audio_format);
        void upload_texture_for_frame(
                int field, bmusb::VideoFormat video_format,
                size_t y_offset, size_t cbcr_offset, size_t video_offset,
                PBOFrameAllocator::Userdata *userdata);
        void bm_hotplug_add(libusb_device *dev);
        void bm_hotplug_remove(unsigned card_index);
        void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
        void thread_func();
        void handle_hotplugged_cards();
        void schedule_audio_resampling_tasks(unsigned dropped_frames, int num_samples_per_frame, int length_per_frame, bool is_preroll, std::chrono::steady_clock::time_point frame_timestamp);
        std::string get_timecode_text() const;
        void render_one_frame(int64_t duration);
        void audio_thread_func();
        void release_display_frame(DisplayFrame *frame);
#ifdef HAVE_SRT
        void start_srt();
#endif
        double pts() { return double(pts_int) / TIMEBASE; }
        void trim_queue(CaptureCard *card, size_t safe_queue_length);
        std::pair<std::string, std::string> get_channels_json();
        std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);

        HTTPD httpd;
        unsigned num_video_inputs, num_html_inputs = 0;

        QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface, *image_update_surface;
        std::unique_ptr<movit::ResourcePool> resource_pool;
        std::unique_ptr<Theme> theme;
        std::atomic<unsigned> audio_source_channel{0};
        std::atomic<int> master_clock_channel{0};  // Gets overridden by <output_card_index> if set.
        int output_card_index = -1;  // -1 for none.
        uint32_t output_video_mode = -1;

        // The mechanics of changing the output card and modes are intricately connected
        // with the work the mixer thread is doing. Thus, we don't change them directly;
        // we just set these variables instead, which signals to the mixer thread that
        // it should do the change before the next frame. This simplifies locking
        // considerations immensely.
        std::atomic<int> desired_output_card_index{-1};
        std::atomic<uint32_t> desired_output_video_mode{0};
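
        // Illustrative sketch (hypothetical and simplified, not the actual mixer
        // thread code) of the handoff pattern described above: the UI thread only
        // stores into the atomics, and the mixer thread applies the change at a
        // frame boundary.
        //
        //   // UI thread:
        //   global_mixer->set_output_card(1);  // Just stores desired_output_card_index.
        //
        //   // Mixer thread, once per frame:
        //   if (desired_output_card_index != output_card_index) {
        //           set_output_card_internal(desired_output_card_index);
        //   }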

        std::unique_ptr<movit::EffectChain> display_chain;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
        std::unique_ptr<v210Converter> v210_converter;
        std::unique_ptr<VideoEncoder> video_encoder;
        std::unique_ptr<MJPEGEncoder> mjpeg_encoder;

        std::unique_ptr<TimecodeRenderer> timecode_renderer;
        std::atomic<bool> display_timecode_in_stream{false};
        std::atomic<bool> display_timecode_on_stdout{false};

        // Effects part of <display_chain>. Owned by <display_chain>.
        movit::YCbCrInput *display_input;

        int64_t pts_int = 0;  // In TIMEBASE units.

        mutable std::mutex frame_num_mutex;
        std::condition_variable frame_num_updated;
        unsigned frame_num = 0;  // Under <frame_num_mutex>.

        // Accumulated errors in number of 1/TIMEBASE audio samples. If OUTPUT_FREQUENCY
        // divided by the frame rate is an integer, this will always stay zero.
        unsigned fractional_samples = 0;
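
        // Worked example (assuming OUTPUT_FREQUENCY is 48000 Hz): at a 50 fps master,
        // each frame needs exactly 48000 / 50 = 960 samples, so the error stays zero.
        // At 60000/1001 fps, a frame needs 48000 * 1001 / 60000 = 800.8 samples, so
        // 0.8 samples' worth of error accumulates per frame; over every five frames,
        // four carry 801 samples and one carries 800, keeping audio and video in sync.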

        // Monotonic counter that lets us know which slot was last turned into
        // a fake capture. Used for SRT re-plugging.
        unsigned fake_capture_counter = 0;

        mutable std::mutex card_mutex;
        bool has_bmusb_thread = false;
        struct CaptureCard {
                // If nullptr, the card is inactive, and will be hidden in the UI.
                // Only fake capture cards can be inactive.
                std::unique_ptr<bmusb::CaptureInterface> capture;
                // If true, card must always be active (typically because it's one of the
                // first cards, or because the theme has explicitly asked for it).
                bool force_active = false;
                bool is_fake_capture;
                // If is_fake_capture is true, contains a monotonic timer value for when
                // it was last changed. Otherwise undefined. Used for SRT re-plugging.
                int fake_capture_counter;
                std::string last_srt_stream_id = "<default, matches nothing>";  // Used for SRT re-plugging.
                CardType type;
                std::unique_ptr<DeckLinkOutput> output;

                // CEF only delivers frames when it actually has a change.
                // If we trim the queue for latency reasons, we could thus
                // end up trimming a frame that was meant to be displayed
                // for a long time, which is really suboptimal. Thus, if we
                // drop the last frame we have, may_have_dropped_last_frame
                // is set to true, and the next starvation event will trigger
                // us requesting a CEF repaint.
                bool is_cef_capture, may_have_dropped_last_frame = false;

                // If this card is used for output (i.e., output_card_index points to it),
                // it cannot simultaneously be used for capture, so <capture> gets replaced
                // by a FakeCapture. However, since reconstructing the real capture object
                // with all its state can be annoying, it is not deleted, just stopped
                // and moved here.
                std::unique_ptr<bmusb::CaptureInterface> parked_capture;

                std::unique_ptr<PBOFrameAllocator> frame_allocator;

                // Stuff for the OpenGL context (for texture uploading).
                QSurface *surface = nullptr;

                struct NewFrame {
                        RefCountedFrame frame;
                        int64_t length;  // In TIMEBASE units.
                        bool interlaced;
                        unsigned field;  // Which field (0 or 1) of the frame to use. Always 0 for progressive.
                        bool texture_uploaded = false;
                        unsigned dropped_frames = 0;  // Number of dropped frames before this one.
                        std::chrono::steady_clock::time_point received_timestamp = std::chrono::steady_clock::time_point::min();
                        movit::RGBTriplet neutral_color{1.0f, 1.0f, 1.0f};

                        // Used for MJPEG encoding and texture upload.
                        // width=0 or height=0 means a broken frame, i.e., do not upload.
                        bmusb::VideoFormat video_format;
                        size_t video_offset, y_offset, cbcr_offset;
                };
                std::deque<NewFrame> new_frames;
                std::condition_variable new_frames_changed;  // Set whenever new_frames is changed.
                QueueLengthPolicy queue_length_policy;  // Refers to the "new_frames" queue.

                std::vector<int32_t> new_raw_audio;

                int last_timecode = -1;  // Unwrapped.

                JitterHistory jitter_history;

                // Metrics.
                std::vector<std::pair<std::string, std::string>> labels;
                std::atomic<int64_t> metric_input_received_frames{0};
                std::atomic<int64_t> metric_input_duped_frames{0};
                std::atomic<int64_t> metric_input_dropped_frames_jitter{0};
                std::atomic<int64_t> metric_input_dropped_frames_error{0};
                std::atomic<int64_t> metric_input_resets{0};
                std::atomic<int64_t> metric_input_queue_length_frames{0};

                std::atomic<int64_t> metric_input_has_signal_bool{-1};
                std::atomic<int64_t> metric_input_is_connected_bool{-1};
                std::atomic<int64_t> metric_input_interlaced_bool{-1};
                std::atomic<int64_t> metric_input_width_pixels{-1};
                std::atomic<int64_t> metric_input_height_pixels{-1};
                std::atomic<int64_t> metric_input_frame_rate_nom{-1};
                std::atomic<int64_t> metric_input_frame_rate_den{-1};
                std::atomic<int64_t> metric_input_sample_rate_hz{-1};

                // SRT metrics.
                std::atomic<double> metric_srt_uptime_seconds{0.0 / 0.0};
                std::atomic<double> metric_srt_send_duration_seconds{0.0 / 0.0};
                std::atomic<int64_t> metric_srt_sent_bytes{-1};
                std::atomic<int64_t> metric_srt_received_bytes{-1};
                std::atomic<int64_t> metric_srt_sent_packets_normal{-1};
                std::atomic<int64_t> metric_srt_received_packets_normal{-1};
                std::atomic<int64_t> metric_srt_sent_packets_lost{-1};
                std::atomic<int64_t> metric_srt_received_packets_lost{-1};
                std::atomic<int64_t> metric_srt_sent_packets_retransmitted{-1};
                std::atomic<int64_t> metric_srt_sent_bytes_retransmitted{-1};
                std::atomic<int64_t> metric_srt_sent_packets_ack{-1};
                std::atomic<int64_t> metric_srt_received_packets_ack{-1};
                std::atomic<int64_t> metric_srt_sent_packets_nak{-1};
                std::atomic<int64_t> metric_srt_received_packets_nak{-1};
                std::atomic<int64_t> metric_srt_sent_packets_dropped{-1};
                std::atomic<int64_t> metric_srt_received_packets_dropped{-1};
                std::atomic<int64_t> metric_srt_sent_bytes_dropped{-1};
                std::atomic<int64_t> metric_srt_received_bytes_dropped{-1};
                std::atomic<int64_t> metric_srt_received_packets_undecryptable{-1};
                std::atomic<int64_t> metric_srt_received_bytes_undecryptable{-1};

                std::atomic<int64_t> metric_srt_filter_received_extra_packets{-1};
                std::atomic<int64_t> metric_srt_filter_received_rebuilt_packets{-1};
                std::atomic<int64_t> metric_srt_filter_received_lost_packets{-1};

                std::atomic<double> metric_srt_packet_sending_period_seconds{0.0 / 0.0};
                std::atomic<int64_t> metric_srt_flow_window_packets{-1};
                std::atomic<int64_t> metric_srt_congestion_window_packets{-1};
                std::atomic<int64_t> metric_srt_flight_size_packets{-1};
                std::atomic<double> metric_srt_rtt_seconds{0.0 / 0.0};
                std::atomic<double> metric_srt_estimated_bandwidth_bits_per_second{0.0 / 0.0};
                std::atomic<double> metric_srt_bandwidth_ceiling_bits_per_second{0.0 / 0.0};
                std::atomic<int64_t> metric_srt_send_buffer_available_bytes{-1};
                std::atomic<int64_t> metric_srt_receive_buffer_available_bytes{-1};
                std::atomic<int64_t> metric_srt_mss_bytes{-1};
                std::atomic<int64_t> metric_srt_sender_unacked_packets{-1};
                std::atomic<int64_t> metric_srt_sender_unacked_bytes{-1};
                std::atomic<double> metric_srt_sender_unacked_timespan_seconds{0.0 / 0.0};
                std::atomic<double> metric_srt_sender_delivery_delay_seconds{0.0 / 0.0};
                std::atomic<int64_t> metric_srt_receiver_unacked_packets{-1};
                std::atomic<int64_t> metric_srt_receiver_unacked_bytes{-1};
                std::atomic<double> metric_srt_receiver_unacked_timespan_seconds{0.0 / 0.0};
                std::atomic<double> metric_srt_receiver_delivery_delay_seconds{0.0 / 0.0};
                std::atomic<int64_t> metric_srt_filter_sent_packets{-1};

        };
        JitterHistory output_jitter_history;
        CaptureCard cards[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
        YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
        movit::RGBTriplet last_received_neutral_color[MAX_VIDEO_CARDS];  // Used by the mixer thread only. Constructor-initialized.
        std::unique_ptr<AudioMixer> audio_mixer;  // Same as global_audio_mixer (see audio_mixer.h).
        bool input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const;
        struct OutputFrameInfo {
                int dropped_frames;  // Since last frame.
                int num_samples;  // Audio samples needed for this output frame.
                int64_t frame_duration;  // In TIMEBASE units.
                bool is_preroll;
                std::chrono::steady_clock::time_point frame_timestamp;
        };
        OutputFrameInfo get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS], std::vector<int32_t> raw_audio[MAX_VIDEO_CARDS]);

#ifdef HAVE_SRT
        void update_srt_stats(int srt_sock, Mixer::CaptureCard *card);
#endif

        std::string description_for_card(unsigned card_index);
        static bool is_srt_card(const CaptureCard *card);

        InputState input_state;

        // Cards we have been notified about being hotplugged, but haven't tried adding yet.
        // Protected by its own mutex.
        std::mutex hotplug_mutex;
        std::vector<libusb_device *> hotplugged_cards;
#ifdef HAVE_SRT
        std::vector<int> hotplugged_srt_cards;
#endif

        class OutputChannel {
        public:
                ~OutputChannel();
                void output_frame(DisplayFrame &&frame);
                bool get_display_frame(DisplayFrame *frame);
                void add_frame_ready_callback(void *key, new_frame_ready_callback_t callback);
                void remove_frame_ready_callback(void *key);
                void set_transition_names_updated_callback(transition_names_updated_callback_t callback);
                void set_name_updated_callback(name_updated_callback_t callback);
                void set_color_updated_callback(color_updated_callback_t callback);

        private:
                friend class Mixer;

                unsigned channel;
                Mixer *parent = nullptr;  // Not owned.
                std::mutex frame_mutex;
                DisplayFrame current_frame, ready_frame;  // protected by <frame_mutex>
                bool has_current_frame = false, has_ready_frame = false;  // protected by <frame_mutex>
                std::map<void *, new_frame_ready_callback_t> new_frame_ready_callbacks;  // protected by <frame_mutex>
                transition_names_updated_callback_t transition_names_updated_callback;
                name_updated_callback_t name_updated_callback;
                color_updated_callback_t color_updated_callback;

                std::vector<std::string> last_transition_names;
                std::string last_name, last_color;
        };
        OutputChannel output_channel[NUM_OUTPUTS];

        std::thread mixer_thread;
        std::thread audio_thread;
#ifdef HAVE_SRT
        std::thread srt_thread;
#endif
        std::atomic<bool> should_quit{false};
        std::atomic<bool> should_cut{false};

        std::unique_ptr<ALSAOutput> alsa;

        struct AudioTask {
                int64_t pts_int;
                int num_samples;
                bool adjust_rate;
                std::chrono::steady_clock::time_point frame_timestamp;
        };
        std::mutex audio_mutex;
        std::condition_variable audio_task_queue_changed;
        std::queue<AudioTask> audio_task_queue;  // Under audio_mutex.

        // For mode scanning.
        bool is_mode_scanning[MAX_VIDEO_CARDS]{ false };
        std::vector<uint32_t> mode_scanlist[MAX_VIDEO_CARDS];
        unsigned mode_scanlist_index[MAX_VIDEO_CARDS]{ 0 };
        std::chrono::steady_clock::time_point last_mode_scan_change[MAX_VIDEO_CARDS];
};

extern Mixer *global_mixer;

#endif  // !defined(_MIXER_H)