#ifndef _MIXER_H
#define _MIXER_H 1

// The actual video mixer, running in its own separate background thread.

#include <assert.h>
#include <epoxy/gl.h>

// X11 headers can define Success as a macro, which collides with identifiers
// in code included below, so make sure it is gone.
#undef Success

#include <stdbool.h>
#include <stdint.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <deque>
#include <functional>
#include <map>
#include <memory>
#include <mutex>
#include <queue>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include <movit/effect.h>
#include <movit/image_format.h>

#include "audio_mixer.h"
#include "bmusb/bmusb.h"
#include "defs.h"
#include "ffmpeg_capture.h"
#include "shared/httpd.h"
#include "input_state.h"
#include "libusb.h"
#include "pbo_frame_allocator.h"
#include "queue_length_policy.h"
#include "ref_counted_frame.h"
#include "shared/ref_counted_gl_sync.h"
#include "theme.h"
#include "shared/timebase.h"
#include "video_encoder.h"
#include "ycbcr_interpretation.h"

class ALSAOutput;
class ChromaSubsampler;
class DeckLinkOutput;
class MJPEGEncoder;
class QSurface;
class QSurfaceFormat;
class TimecodeRenderer;
class v210Converter;

namespace movit {
class Effect;
class EffectChain;
class ResourcePool;
class YCbCrInput;
}  // namespace movit

class Mixer {
public:
	// The surface format is used for offscreen destinations for OpenGL contexts we need.
	Mixer(const QSurfaceFormat &format);
	~Mixer();
	void start();
	void quit();

	void transition_clicked(int transition_num);
	void channel_clicked(int preview_num);

	enum Output {
		OUTPUT_LIVE = 0,
		OUTPUT_PREVIEW,
		OUTPUT_INPUT0,  // 1, 2, 3, up to 15 follow numerically.
		NUM_OUTPUTS = 18
	};
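	// (Since the inputs follow numerically, input i can be addressed as
	// Output(OUTPUT_INPUT0 + i).)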

	struct DisplayFrame {
		// The chain for rendering this frame. To render a display frame,
		// first wait for <ready_fence>, then call <setup_chain>
		// to wire up all the inputs, and then finally call
		// chain->render_to_screen() or similar.
		movit::EffectChain *chain;
		std::function<void()> setup_chain;

		// Asserted when all the inputs are ready; you cannot render the chain
		// before this.
		RefCountedGLsync ready_fence;

		// Holds on to all the input frames needed for this display frame,
		// so they are not released while still rendering.
		std::vector<RefCountedFrame> input_frames;

		// Textures that should be released back to the resource pool
		// when this frame disappears, if any.
		// TODO: Refcount these as well?
		std::vector<GLuint> temp_textures;
	};
	// Implicitly frees the previous one if there's a new frame available.
	bool get_display_frame(Output output, DisplayFrame *frame) {
		return output_channel[output].get_display_frame(frame);
	}
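	// A minimal rendering sketch (illustrative only; the exact GL/Qt plumbing is
	// up to the caller, and ready_fence.get() exposing the raw GLsync is an
	// assumption about RefCountedGLsync):
	//
	//   Mixer::DisplayFrame frame;
	//   if (global_mixer->get_display_frame(Mixer::OUTPUT_LIVE, &frame)) {
	//           glWaitSync(frame.ready_fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
	//           frame.setup_chain();
	//           frame.chain->render_to_screen();
	//   }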

	// NOTE: Callbacks will be called with a mutex held, so you should probably
	// not do real work in them.
	typedef std::function<void()> new_frame_ready_callback_t;
	void add_frame_ready_callback(Output output, void *key, new_frame_ready_callback_t callback)
	{
		output_channel[output].add_frame_ready_callback(key, callback);
	}

	void remove_frame_ready_callback(Output output, void *key)
	{
		output_channel[output].remove_frame_ready_callback(key);
	}
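	// Usage sketch (the GUI-side helper is hypothetical; the point is only that
	// the callback should signal someone else, not render on the spot):
	//
	//   mixer->add_frame_ready_callback(Mixer::OUTPUT_LIVE, /*key=*/this, [this]{
	//           // Called with an internal mutex held, so do no real work here.
	//           post_to_ui_thread([this]{ redraw(); });  // hypothetical helper
	//   });
	//   ...
	//   mixer->remove_frame_ready_callback(Mixer::OUTPUT_LIVE, /*key=*/this);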

	// TODO: Should this really be per-channel? Shouldn't it just be called for e.g. the live output?
	typedef std::function<void(const std::vector<std::string> &)> transition_names_updated_callback_t;
	void set_transition_names_updated_callback(Output output, transition_names_updated_callback_t callback)
	{
		output_channel[output].set_transition_names_updated_callback(callback);
	}

	typedef std::function<void(const std::string &)> name_updated_callback_t;
	void set_name_updated_callback(Output output, name_updated_callback_t callback)
	{
		output_channel[output].set_name_updated_callback(callback);
	}

	typedef std::function<void(const std::string &)> color_updated_callback_t;
	void set_color_updated_callback(Output output, color_updated_callback_t callback)
	{
		output_channel[output].set_color_updated_callback(callback);
	}

	std::vector<std::string> get_transition_names()
	{
		return theme->get_transition_names(pts());
	}

	unsigned get_num_channels() const
	{
		return theme->get_num_channels();
	}

	std::string get_channel_name(unsigned channel) const
	{
		return theme->get_channel_name(channel);
	}

	std::string get_channel_color(unsigned channel) const
	{
		return theme->get_channel_color(channel);
	}

	int map_channel_to_signal(unsigned channel) const
	{
		return theme->map_channel_to_signal(channel);
	}

	int map_signal_to_card(int signal)
	{
		return theme->map_signal_to_card(signal);
	}

	unsigned get_master_clock() const
	{
		return master_clock_channel;
	}

	void set_master_clock(unsigned channel)
	{
		master_clock_channel = channel;
	}

	void set_signal_mapping(int signal, int card)
	{
		return theme->set_signal_mapping(signal, card);
	}

	YCbCrInterpretation get_input_ycbcr_interpretation(unsigned card_index) const;
	void set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation);

	bool get_supports_set_wb(unsigned channel) const
	{
		return theme->get_supports_set_wb(channel);
	}

	void set_wb(unsigned channel, double r, double g, double b) const
	{
		theme->set_wb(channel, r, g, b);
	}

	std::string format_status_line(const std::string &disk_space_left_text, double file_length_seconds)
	{
		return theme->format_status_line(disk_space_left_text, file_length_seconds);
	}

	// Note: You can also get this through the global variable global_audio_mixer.
	AudioMixer *get_audio_mixer() { return audio_mixer.get(); }
	const AudioMixer *get_audio_mixer() const { return audio_mixer.get(); }

	void schedule_cut()
	{
		should_cut = true;
	}

	std::string get_card_description(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_description();
	}

	// The difference between this and the previous function is that if a card
	// is used as the current output, get_card_description() will return the
	// description of the fake card that's replacing it for input, whereas this
	// function will return the real card's description.
	std::string get_output_card_description(unsigned card_index) const {
		assert(card_can_be_used_as_output(card_index));
		assert(card_index < MAX_VIDEO_CARDS);
		if (cards[card_index].parked_capture) {
			return cards[card_index].parked_capture->get_description();
		} else {
			return cards[card_index].capture->get_description();
		}
	}

	bool card_can_be_used_as_output(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].output != nullptr && cards[card_index].capture != nullptr;
	}

	bool card_is_cef(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].type == CardType::CEF_INPUT;
	}

	bool card_is_ffmpeg(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		if (cards[card_index].type != CardType::FFMPEG_INPUT) {
			return false;
		}
#ifdef HAVE_SRT
		// SRT inputs are more like regular inputs than FFmpeg inputs,
		// so show them as such. (This allows the user to right-click
		// to select a different input.)
		return static_cast<FFmpegCapture *>(cards[card_index].capture.get())->get_srt_sock() == -1;
#else
		return true;
#endif
	}

	bool card_is_active(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		std::lock_guard<std::mutex> lock(card_mutex);
		return cards[card_index].capture != nullptr;
	}

	void force_card_active(unsigned card_index)
	{
		// handle_hotplugged_cards() will pick this up.
		std::lock_guard<std::mutex> lock(card_mutex);
		cards[card_index].force_active = true;
	}

	std::map<uint32_t, bmusb::VideoMode> get_available_video_modes(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_available_video_modes();
	}

	uint32_t get_current_video_mode(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_current_video_mode();
	}

	void set_video_mode(unsigned card_index, uint32_t mode) {
		assert(card_index < MAX_VIDEO_CARDS);
		cards[card_index].capture->set_video_mode(mode);
	}

	void start_mode_scanning(unsigned card_index);

	std::map<uint32_t, std::string> get_available_video_inputs(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_available_video_inputs();
	}

	uint32_t get_current_video_input(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_current_video_input();
	}

	void set_video_input(unsigned card_index, uint32_t input) {
		assert(card_index < MAX_VIDEO_CARDS);
		cards[card_index].capture->set_video_input(input);
	}

	std::map<uint32_t, std::string> get_available_audio_inputs(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_available_audio_inputs();
	}

	uint32_t get_current_audio_input(unsigned card_index) const {
		assert(card_index < MAX_VIDEO_CARDS);
		return cards[card_index].capture->get_current_audio_input();
	}

	void set_audio_input(unsigned card_index, uint32_t input) {
		assert(card_index < MAX_VIDEO_CARDS);
		cards[card_index].capture->set_audio_input(input);
	}

	std::string get_ffmpeg_filename(unsigned card_index) const;

	void set_ffmpeg_filename(unsigned card_index, const std::string &filename);

	void change_x264_bitrate(unsigned rate_kbit) {
		video_encoder->change_x264_bitrate(rate_kbit);
	}

	int get_output_card_index() const {  // -1 = no output, just stream.
		return desired_output_card_index;
	}

	void set_output_card(int card_index) {  // -1 = no output, just stream.
		desired_output_card_index = card_index;
	}

	bool get_output_card_is_master() const {
		return output_card_is_master;
	}

	std::map<uint32_t, bmusb::VideoMode> get_available_output_video_modes() const;

	uint32_t get_output_video_mode() const {
		return desired_output_video_mode;
	}

	void set_output_video_mode(uint32_t mode) {
		desired_output_video_mode = mode;
	}

	void set_display_timecode_in_stream(bool enable) {
		display_timecode_in_stream = enable;
	}

	void set_display_timecode_on_stdout(bool enable) {
		display_timecode_on_stdout = enable;
	}

	int64_t get_num_connected_clients() const {
		return httpd.get_num_connected_clients();
	}

	Theme::MenuEntry *get_theme_menu() { return theme->get_theme_menu(); }

	void theme_menu_entry_clicked(int lua_ref) { return theme->theme_menu_entry_clicked(lua_ref); }

	void set_theme_menu_callback(std::function<void()> callback)
	{
		theme->set_theme_menu_callback(callback);
	}

	void wait_for_next_frame();

private:
	struct CaptureCard;

	void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool is_srt_card);
	void set_output_card_internal(int card_index);  // Should only be called from the mixer thread.
	void bm_frame(unsigned card_index, uint16_t timecode,
		bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
		bmusb::FrameAllocator::Frame audio_frame, size_t audio_offset, bmusb::AudioFormat audio_format);
	void upload_texture_for_frame(
		int field, bmusb::VideoFormat video_format,
		size_t y_offset, size_t cbcr_offset, size_t video_offset,
		PBOFrameAllocator::Userdata *userdata);
	void bm_hotplug_add(libusb_device *dev);
	void bm_hotplug_remove(unsigned card_index);
	void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
	void thread_func();
	void handle_hotplugged_cards();
	void schedule_audio_resampling_tasks(unsigned dropped_frames, int num_samples_per_frame, int length_per_frame, bool is_preroll, std::chrono::steady_clock::time_point frame_timestamp);
	std::string get_timecode_text() const;
	void render_one_frame(int64_t duration);
	void audio_thread_func();
	void release_display_frame(DisplayFrame *frame);
#ifdef HAVE_SRT
	void start_srt();
#endif
	double pts() { return double(pts_int) / TIMEBASE; }
	void trim_queue(CaptureCard *card, size_t safe_queue_length);
	std::pair<std::string, std::string> get_channels_json();
	std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);

	HTTPD httpd;
	unsigned num_video_inputs, num_html_inputs = 0;

	QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface, *image_update_surface;
	std::unique_ptr<movit::ResourcePool> resource_pool;
	std::unique_ptr<Theme> theme;
	std::atomic<unsigned> audio_source_channel{0};
	std::atomic<int> master_clock_channel{0};  // Gets overridden by <output_card_index> if output_card_is_master == true.
	int output_card_index = -1;  // -1 for none.
	uint32_t output_video_mode = -1;
	bool output_card_is_master = false;  // Only relevant if output_card_index != -1.

	// The mechanics of changing the output card and mode are intricately connected
	// with the work the mixer thread is doing, so we don't change them directly.
	// Instead, we just set these variables, which signals to the mixer thread that
	// it should make the change before the next frame. This simplifies locking
	// considerations immensely.
	std::atomic<int> desired_output_card_index{-1};
	std::atomic<uint32_t> desired_output_video_mode{0};
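	//
	// A sketch of the handoff (the per-frame check shown is an assumption about
	// what the mixer thread does; set_output_card_internal() is declared above):
	//
	//   // UI thread:
	//   mixer->set_output_card(1);  // Just stores desired_output_card_index.
	//
	//   // Mixer thread, before rendering the next frame:
	//   if (desired_output_card_index != output_card_index) {
	//           set_output_card_internal(desired_output_card_index);
	//   }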

	std::unique_ptr<movit::EffectChain> display_chain;
	std::unique_ptr<ChromaSubsampler> chroma_subsampler;
	std::unique_ptr<v210Converter> v210_converter;
	std::unique_ptr<VideoEncoder> video_encoder;
	std::unique_ptr<MJPEGEncoder> mjpeg_encoder;

	std::unique_ptr<TimecodeRenderer> timecode_renderer;
	std::atomic<bool> display_timecode_in_stream{false};
	std::atomic<bool> display_timecode_on_stdout{false};

	// Effects part of <display_chain>. Owned by <display_chain>.
	movit::YCbCrInput *display_input;

	int64_t pts_int = 0;  // In TIMEBASE units.

	mutable std::mutex frame_num_mutex;
	std::condition_variable frame_num_updated;
	unsigned frame_num = 0;  // Under <frame_num_mutex>.

	// Accumulated error, in units of 1/TIMEBASE audio samples. If OUTPUT_FREQUENCY
	// divided by the frame rate is an integer, this will always stay zero.
	unsigned fractional_samples = 0;
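	// For example, 48 kHz output at 50 fps gives exactly 960 samples per frame,
	// so no error accumulates; at 60000/1001 fps the division is not exact
	// (800.8 samples), so the remainder accumulates here instead.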

	// Monotonic counter that lets us know which slot was last turned into
	// a fake capture. Used for SRT re-plugging.
	unsigned fake_capture_counter = 0;

	mutable std::mutex card_mutex;
	bool has_bmusb_thread = false;
	struct CaptureCard {
		// If nullptr, the card is inactive, and will be hidden in the UI.
		// Only fake capture cards can be inactive.
		std::unique_ptr<bmusb::CaptureInterface> capture;
		// If true, card must always be active (typically because it's one of the
		// first cards, or because the theme has explicitly asked for it).
		bool force_active = false;
		bool is_fake_capture;
		// If is_fake_capture is true, contains a monotonic timer value for when
		// it was last changed. Otherwise undefined. Used for SRT re-plugging.
		int fake_capture_counter;
		std::string last_srt_stream_id = "<default, matches nothing>";  // Used for SRT re-plugging.
		CardType type;
		std::unique_ptr<DeckLinkOutput> output;

		// CEF only delivers frames when it actually has a change.
		// If we trim the queue for latency reasons, we could thus
		// end up trimming a frame that was meant to be displayed
		// for a long time, which is really suboptimal. Thus, if we
		// drop the last frame we have, may_have_dropped_last_frame
		// is set to true, and the next starvation event will trigger
		// us requesting a CEF repaint.
		bool is_cef_capture, may_have_dropped_last_frame = false;

		// If this card is used for output (i.e., output_card_index points to it),
		// it cannot simultaneously be used for capture, so <capture> gets replaced
		// by a FakeCapture. However, since reconstructing the real capture object
		// with all its state can be annoying, it is not deleted, just stopped
		// and moved here.
		std::unique_ptr<bmusb::CaptureInterface> parked_capture;

		std::unique_ptr<PBOFrameAllocator> frame_allocator;

		// Stuff for the OpenGL context (for texture uploading).
		QSurface *surface = nullptr;

		struct NewFrame {
			RefCountedFrame frame;
			int64_t length;  // In TIMEBASE units.
			bool interlaced;
			unsigned field;  // Which field (0 or 1) of the frame to use. Always 0 for progressive.
			bool texture_uploaded = false;
			unsigned dropped_frames = 0;  // Number of dropped frames before this one.
			std::chrono::steady_clock::time_point received_timestamp = std::chrono::steady_clock::time_point::min();
			movit::RGBTriplet neutral_color{1.0f, 1.0f, 1.0f};

			// Used for MJPEG encoding, and texture upload.
			// width=0 or height=0 means a broken frame, i.e., do not upload.
			bmusb::VideoFormat video_format;
			size_t video_offset, y_offset, cbcr_offset;
		};
		std::deque<NewFrame> new_frames;
		std::condition_variable new_frames_changed;  // Signaled whenever new_frames is changed.
		QueueLengthPolicy queue_length_policy;  // Refers to the "new_frames" queue.

		std::vector<int32_t> new_raw_audio;

		int last_timecode = -1;  // Unwrapped.

		JitterHistory jitter_history;

		// Metrics.
		std::vector<std::pair<std::string, std::string>> labels;
		std::atomic<int64_t> metric_input_received_frames{0};
		std::atomic<int64_t> metric_input_duped_frames{0};
		std::atomic<int64_t> metric_input_dropped_frames_jitter{0};
		std::atomic<int64_t> metric_input_dropped_frames_error{0};
		std::atomic<int64_t> metric_input_resets{0};
		std::atomic<int64_t> metric_input_queue_length_frames{0};

		std::atomic<int64_t> metric_input_has_signal_bool{-1};
		std::atomic<int64_t> metric_input_is_connected_bool{-1};
		std::atomic<int64_t> metric_input_interlaced_bool{-1};
		std::atomic<int64_t> metric_input_width_pixels{-1};
		std::atomic<int64_t> metric_input_height_pixels{-1};
		std::atomic<int64_t> metric_input_frame_rate_nom{-1};
		std::atomic<int64_t> metric_input_frame_rate_den{-1};
		std::atomic<int64_t> metric_input_sample_rate_hz{-1};

		// SRT metrics.
		std::atomic<double> metric_srt_uptime_seconds{0.0 / 0.0};
		std::atomic<double> metric_srt_send_duration_seconds{0.0 / 0.0};
		std::atomic<int64_t> metric_srt_sent_bytes{-1};
		std::atomic<int64_t> metric_srt_received_bytes{-1};
		std::atomic<int64_t> metric_srt_sent_packets_normal{-1};
		std::atomic<int64_t> metric_srt_received_packets_normal{-1};
		std::atomic<int64_t> metric_srt_sent_packets_lost{-1};
		std::atomic<int64_t> metric_srt_received_packets_lost{-1};
		std::atomic<int64_t> metric_srt_sent_packets_retransmitted{-1};
		std::atomic<int64_t> metric_srt_sent_bytes_retransmitted{-1};
		std::atomic<int64_t> metric_srt_sent_packets_ack{-1};
		std::atomic<int64_t> metric_srt_received_packets_ack{-1};
		std::atomic<int64_t> metric_srt_sent_packets_nak{-1};
		std::atomic<int64_t> metric_srt_received_packets_nak{-1};
		std::atomic<int64_t> metric_srt_sent_packets_dropped{-1};
		std::atomic<int64_t> metric_srt_received_packets_dropped{-1};
		std::atomic<int64_t> metric_srt_sent_bytes_dropped{-1};
		std::atomic<int64_t> metric_srt_received_bytes_dropped{-1};
		std::atomic<int64_t> metric_srt_received_packets_undecryptable{-1};
		std::atomic<int64_t> metric_srt_received_bytes_undecryptable{-1};

		std::atomic<int64_t> metric_srt_filter_received_extra_packets{-1};
		std::atomic<int64_t> metric_srt_filter_received_rebuilt_packets{-1};
		std::atomic<int64_t> metric_srt_filter_received_lost_packets{-1};

		std::atomic<double> metric_srt_packet_sending_period_seconds{0.0 / 0.0};
		std::atomic<int64_t> metric_srt_flow_window_packets{-1};
		std::atomic<int64_t> metric_srt_congestion_window_packets{-1};
		std::atomic<int64_t> metric_srt_flight_size_packets{-1};
		std::atomic<double> metric_srt_rtt_seconds{0.0 / 0.0};
		std::atomic<double> metric_srt_estimated_bandwidth_bits_per_second{0.0 / 0.0};
		std::atomic<double> metric_srt_bandwidth_ceiling_bits_per_second{0.0 / 0.0};
		std::atomic<int64_t> metric_srt_send_buffer_available_bytes{-1};
		std::atomic<int64_t> metric_srt_receive_buffer_available_bytes{-1};
		std::atomic<int64_t> metric_srt_mss_bytes{-1};
		std::atomic<int64_t> metric_srt_sender_unacked_packets{-1};
		std::atomic<int64_t> metric_srt_sender_unacked_bytes{-1};
		std::atomic<double> metric_srt_sender_unacked_timespan_seconds{0.0 / 0.0};
		std::atomic<double> metric_srt_sender_delivery_delay_seconds{0.0 / 0.0};
		std::atomic<int64_t> metric_srt_receiver_unacked_packets{-1};
		std::atomic<int64_t> metric_srt_receiver_unacked_bytes{-1};
		std::atomic<double> metric_srt_receiver_unacked_timespan_seconds{0.0 / 0.0};
		std::atomic<double> metric_srt_receiver_delivery_delay_seconds{0.0 / 0.0};
		std::atomic<int64_t> metric_srt_filter_sent_packets{-1};

	};
	JitterHistory output_jitter_history;
	CaptureCard cards[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
	YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
	movit::RGBTriplet last_received_neutral_color[MAX_VIDEO_CARDS];  // Used by the mixer thread only. Constructor-initialized.
	std::unique_ptr<AudioMixer> audio_mixer;  // Same as global_audio_mixer (see audio_mixer.h).
	bool input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const;
	struct OutputFrameInfo {
		int dropped_frames;  // Since last frame.
		int num_samples;  // Audio samples needed for this output frame.
		int64_t frame_duration;  // In TIMEBASE units.
		bool is_preroll;
		std::chrono::steady_clock::time_point frame_timestamp;
	};
	OutputFrameInfo get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS], std::vector<int32_t> raw_audio[MAX_VIDEO_CARDS]);

#ifdef HAVE_SRT
	void update_srt_stats(int srt_sock, Mixer::CaptureCard *card);
#endif

	std::string description_for_card(unsigned card_index);
	static bool is_srt_card(const CaptureCard *card);

	InputState input_state;

	// Cards we have been notified about being hotplugged, but haven't tried adding yet.
	// Protected by its own mutex.
	std::mutex hotplug_mutex;
	std::vector<libusb_device *> hotplugged_cards;
#ifdef HAVE_SRT
	std::vector<int> hotplugged_srt_cards;
#endif

	class OutputChannel {
	public:
		~OutputChannel();
		void output_frame(DisplayFrame &&frame);
		bool get_display_frame(DisplayFrame *frame);
		void add_frame_ready_callback(void *key, new_frame_ready_callback_t callback);
		void remove_frame_ready_callback(void *key);
		void set_transition_names_updated_callback(transition_names_updated_callback_t callback);
		void set_name_updated_callback(name_updated_callback_t callback);
		void set_color_updated_callback(color_updated_callback_t callback);

	private:
		friend class Mixer;

		unsigned channel;
		Mixer *parent = nullptr;  // Not owned.
		std::mutex frame_mutex;
		DisplayFrame current_frame, ready_frame;  // Protected by <frame_mutex>.
		bool has_current_frame = false, has_ready_frame = false;  // Protected by <frame_mutex>.
		std::map<void *, new_frame_ready_callback_t> new_frame_ready_callbacks;  // Protected by <frame_mutex>.
		transition_names_updated_callback_t transition_names_updated_callback;
		name_updated_callback_t name_updated_callback;
		color_updated_callback_t color_updated_callback;

		std::vector<std::string> last_transition_names;
		std::string last_name, last_color;
	};
	OutputChannel output_channel[NUM_OUTPUTS];

	std::thread mixer_thread;
	std::thread audio_thread;
#ifdef HAVE_SRT
	std::thread srt_thread;
#endif
	std::atomic<bool> should_quit{false};
	std::atomic<bool> should_cut{false};

	std::unique_ptr<ALSAOutput> alsa;

	struct AudioTask {
		int64_t pts_int;
		int num_samples;
		bool adjust_rate;
		std::chrono::steady_clock::time_point frame_timestamp;
	};
	std::mutex audio_mutex;
	std::condition_variable audio_task_queue_changed;
	std::queue<AudioTask> audio_task_queue;  // Under audio_mutex.

	// For mode scanning.
	bool is_mode_scanning[MAX_VIDEO_CARDS]{ false };
	std::vector<uint32_t> mode_scanlist[MAX_VIDEO_CARDS];
	unsigned mode_scanlist_index[MAX_VIDEO_CARDS]{ 0 };
	std::chrono::steady_clock::time_point last_mode_scan_change[MAX_VIDEO_CARDS];
};

extern Mixer *global_mixer;

#endif  // !defined(_MIXER_H)