#ifndef _MIXER_H
#define _MIXER_H 1

// The actual video mixer, running in its own separate background thread.

#include <assert.h>
#include <epoxy/gl.h>

#undef Success

#include <stdbool.h>
#include <stdint.h>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <deque>
#include <functional>
#include <map>
#include <memory>
#include <mutex>
#include <queue>
#include <set>
#include <string>
#include <thread>
#include <vector>

#include <movit/effect.h>
#include <movit/image_format.h>

#include "audio_mixer.h"
#include "bmusb/bmusb.h"
#include "defs.h"
#include "shared/httpd.h"
#include "input_state.h"
#include "libusb.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_frame.h"
#include "shared/ref_counted_gl_sync.h"
#include "theme.h"
#include "shared/timebase.h"
#include "video_encoder.h"
#include "ycbcr_interpretation.h"

class ALSAOutput;
class ChromaSubsampler;
class DeckLinkOutput;
class MJPEGEncoder;
class QSurface;
class QSurfaceFormat;
class TimecodeRenderer;
class v210Converter;

namespace movit {
class Effect;
class EffectChain;
class ResourcePool;
class YCbCrInput;
}  // namespace movit

// A class to estimate the future jitter. Used in QueueLengthPolicy (see below).
//
// There are many ways to estimate jitter; I've tested a few (and also
// some algorithms that don't explicitly model jitter) with different
// parameters on some real-life data in experiments/queue_drop_policy.cpp.
// This is one based on simple order statistics where I've added some margin in
// the number of starvation events; I believe that about one every hour would
// probably be acceptable, but this one typically goes lower than that, at the
// cost of 2–3 ms extra latency. (If the queue is hard-limited to one frame, it's
// possible to get ~10 ms further down, but this would mean framedrops every
// second or so.) The general strategy is: Take the 99.9-percentile jitter over
// the last 5000 frames, multiply by two, and that's our worst-case jitter
// estimate. The fact that we're not using the max value means that we could
// actually even throw away very late frames immediately, which means we only
// get one user-visible event instead of seeing something both when the frame
// arrives late (duplicate frame) and then again when we drop.
class JitterHistory {
private:
        static constexpr size_t history_length = 5000;
        static constexpr double percentile = 0.999;
        static constexpr double multiplier = 2.0;

public:
        void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
        void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);

        void clear() {
                history.clear();
                orders.clear();
        }
        void frame_arrived(std::chrono::steady_clock::time_point now, int64_t frame_duration, size_t dropped_frames);
        std::chrono::steady_clock::time_point get_expected_next_frame() const { return expected_timestamp; }
        double estimate_max_jitter() const;

private:
        // A simple O(k) based algorithm for getting the k-th largest or
        // smallest element from our window; we simply keep the multiset
        // ordered (insertions and deletions are O(log n) as always) and then
        // iterate from one of the sides. If we had larger values of k,
        // we could go for a more complicated setup with two sets or heaps
        // (one increasing and one decreasing) that we keep balanced around
        // the point, or it is possible to reimplement std::set with
        // counts in each node. However, since k=5, we don't need this.
        std::multiset<double> orders;
        std::deque<std::multiset<double>::iterator> history;

        std::chrono::steady_clock::time_point expected_timestamp = std::chrono::steady_clock::time_point::min();

        // Metrics. There are no direct summaries for jitter, since we already have latency summaries.
        std::atomic<int64_t> metric_input_underestimated_jitter_frames{0};
        std::atomic<double> metric_input_estimated_max_jitter_seconds{0.0 / 0.0};
};
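
// For illustration only: a minimal sketch of what estimate_max_jitter() might
// look like, based on the comments above (the real implementation lives in
// mixer.cpp and may differ in details). Take the k-th largest deviation in the
// window, where k = (1 - percentile) * history_length = 5, and scale it by
// <multiplier>:
//
//   double estimate_max_jitter_sketch(const std::multiset<double> &orders)
//   {
//           if (orders.empty()) {
//                   return 0.0 / 0.0;  // NaN until we have any data.
//           }
//           size_t k = std::min<size_t>(orders.size() - 1, 5);  // k-th largest.
//           auto it = orders.rbegin();
//           std::advance(it, k);
//           return 2.0 * *it;  // <multiplier> times the order statistic.
//   }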

// For any card that's not the master (where we pick out the frames as they
// come, as fast as we can process), there's going to be a queue. The question
// is when we should drop frames from that queue (apart from the obvious
// dropping if the 16-frame queue should become full), especially given that
// the frame rate could be lower or higher than the master (either subtly or
// dramatically). We have two (conflicting) demands:
//
//   1. We want to avoid starving the queue.
//   2. We don't want to add more delay than is needed.
//
// Our general strategy is to drop as many frames as we safely can (helping #2)
// without risking #1, given the jitter. To this end, we measure the deviation
// from the expected arrival time for all cards, and use that for continuous
// jitter estimation.
//
// We then drop everything from the queue that we're sure we won't need to
// serve the output in the time before the next frame arrives. Typically,
// this means the queue will contain 0 or 1 frames, although more is also
// possible if the jitter is very high.
class QueueLengthPolicy {
public:
        QueueLengthPolicy() {}
        void reset(unsigned card_index) {
                this->card_index = card_index;
        }

        void register_metrics(const std::vector<std::pair<std::string, std::string>> &labels);
        void unregister_metrics(const std::vector<std::pair<std::string, std::string>> &labels);

        // Call after picking out a frame, so 0 means starvation.
        void update_policy(std::chrono::steady_clock::time_point now,
                           std::chrono::steady_clock::time_point expected_next_frame,
                           int64_t input_frame_duration,
                           int64_t master_frame_duration,
                           double max_input_card_jitter_seconds,
                           double max_master_card_jitter_seconds);
        unsigned get_safe_queue_length() const { return safe_queue_length; }

private:
        unsigned card_index;  // For debugging and metrics only.
        unsigned safe_queue_length = 0;  // Can never go below zero.

        // Metrics.
        std::atomic<int64_t> metric_input_queue_safe_length_frames{1};
};
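
// Illustrative only, not the real update_policy() logic (which lives in
// mixer.cpp): conceptually, the safe queue length is "how many input frames
// might we still have to serve before we can be sure a new one has arrived",
// e.g. roughly
//
//   double seconds_until_certain_arrival =
//           std::chrono::duration<double>(expected_next_frame - now).count()
//           + max_input_card_jitter_seconds + max_master_card_jitter_seconds;
//   safe_queue_length = std::max<int>(0,
//           ceil(seconds_until_certain_arrival * TIMEBASE / input_frame_duration));
//
// Everything beyond that many frames can then be trimmed from the queue.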

class Mixer {
public:
        // The surface format is used for offscreen destinations for OpenGL contexts we need.
        Mixer(const QSurfaceFormat &format, unsigned num_cards);
        ~Mixer();
        void start();
        void quit();

        void transition_clicked(int transition_num);
        void channel_clicked(int preview_num);

        enum Output {
                OUTPUT_LIVE = 0,
                OUTPUT_PREVIEW,
                OUTPUT_INPUT0,  // 1, 2, 3, up to 15 follow numerically.
                NUM_OUTPUTS = 18
        };

        struct DisplayFrame {
                // The chain for rendering this frame. To render a display frame,
                // first wait for <ready_fence>, then call <setup_chain>
                // to wire up all the inputs, and then finally call
                // chain->render_to_screen() or similar.
                movit::EffectChain *chain;
                std::function<void()> setup_chain;

                // Asserted when all the inputs are ready; you cannot render the chain
                // before this.
                RefCountedGLsync ready_fence;

                // Holds on to all the input frames needed for this display frame,
                // so they are not released while still rendering.
                std::vector<RefCountedFrame> input_frames;

                // Textures that should be released back to the resource pool
                // when this frame disappears, if any.
                // TODO: Refcount these as well?
                std::vector<GLuint> temp_textures;
        };
        // Implicitly frees the previous one if there's a new frame available.
        bool get_display_frame(Output output, DisplayFrame *frame) {
                return output_channel[output].get_display_frame(frame);
        }
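
        // An illustrative usage sketch (not taken from the real code; the exact
        // fence wait depends on the caller's GL setup and RefCountedGLsync's API):
        //
        //   Mixer::DisplayFrame frame;
        //   if (global_mixer->get_display_frame(Mixer::OUTPUT_LIVE, &frame)) {
        //           // Wait for <ready_fence> (e.g. via glWaitSync()) before rendering.
        //           frame.setup_chain();
        //           frame.chain->render_to_screen();
        //   }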

        // NOTE: Callbacks will be called with a mutex held, so you should probably
        // not do real work in them.
        typedef std::function<void()> new_frame_ready_callback_t;
        void add_frame_ready_callback(Output output, void *key, new_frame_ready_callback_t callback)
        {
                output_channel[output].add_frame_ready_callback(key, callback);
        }

        void remove_frame_ready_callback(Output output, void *key)
        {
                output_channel[output].remove_frame_ready_callback(key);
        }
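
        // A hedged example of the intended pattern (the callback body and helper
        // are a hypothetical caller's; any unique pointer works as <key>, e.g. `this`):
        //
        //   global_mixer->add_frame_ready_callback(Mixer::OUTPUT_PREVIEW, this, [this]{
        //           // Called with a mutex held, so just schedule a repaint here.
        //           schedule_repaint();  // Hypothetical helper on the caller's side.
        //   });
        //   ...and later, e.g. in the caller's destructor:
        //   global_mixer->remove_frame_ready_callback(Mixer::OUTPUT_PREVIEW, this);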

        // TODO: Should this really be per-channel? Shouldn't it just be called for e.g. the live output?
        typedef std::function<void(const std::vector<std::string> &)> transition_names_updated_callback_t;
        void set_transition_names_updated_callback(Output output, transition_names_updated_callback_t callback)
        {
                output_channel[output].set_transition_names_updated_callback(callback);
        }

        typedef std::function<void(const std::string &)> name_updated_callback_t;
        void set_name_updated_callback(Output output, name_updated_callback_t callback)
        {
                output_channel[output].set_name_updated_callback(callback);
        }

        typedef std::function<void(const std::string &)> color_updated_callback_t;
        void set_color_updated_callback(Output output, color_updated_callback_t callback)
        {
                output_channel[output].set_color_updated_callback(callback);
        }

        std::vector<std::string> get_transition_names()
        {
                return theme->get_transition_names(pts());
        }

        unsigned get_num_channels() const
        {
                return theme->get_num_channels();
        }

        std::string get_channel_name(unsigned channel) const
        {
                return theme->get_channel_name(channel);
        }

        std::string get_channel_color(unsigned channel) const
        {
                return theme->get_channel_color(channel);
        }

        int get_channel_signal(unsigned channel) const
        {
                return theme->get_channel_signal(channel);
        }

        int map_signal(unsigned channel)
        {
                return theme->map_signal(channel);
        }

        unsigned get_master_clock() const
        {
                return master_clock_channel;
        }

        void set_master_clock(unsigned channel)
        {
                master_clock_channel = channel;
        }

        void set_signal_mapping(int signal, int card)
        {
                return theme->set_signal_mapping(signal, card);
        }

        YCbCrInterpretation get_input_ycbcr_interpretation(unsigned card_index) const;
        void set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation);

        bool get_supports_set_wb(unsigned channel) const
        {
                return theme->get_supports_set_wb(channel);
        }

        void set_wb(unsigned channel, double r, double g, double b) const
        {
                theme->set_wb(channel, r, g, b);
        }

        std::string format_status_line(const std::string &disk_space_left_text, double file_length_seconds)
        {
                return theme->format_status_line(disk_space_left_text, file_length_seconds);
        }

        // Note: You can also get this through the global variable global_audio_mixer.
        AudioMixer *get_audio_mixer() { return audio_mixer.get(); }
        const AudioMixer *get_audio_mixer() const { return audio_mixer.get(); }

        void schedule_cut()
        {
                should_cut = true;
        }

        unsigned get_num_cards() const { return num_cards; }

        std::string get_card_description(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_description();
        }

        // The difference between this and the previous function is that if a card
        // is used as the current output, get_card_description() will return the
        // description of the fake card that's replacing it for input, whereas this
        // function will return the real card's description.
        std::string get_output_card_description(unsigned card_index) const {
                assert(card_can_be_used_as_output(card_index));
                assert(card_index < num_cards);
                if (cards[card_index].parked_capture) {
                        return cards[card_index].parked_capture->get_description();
                } else {
                        return cards[card_index].capture->get_description();
                }
        }

        bool card_can_be_used_as_output(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].output != nullptr;
        }

        bool card_is_ffmpeg(unsigned card_index) const {
                assert(card_index < num_cards + num_video_inputs);
                return cards[card_index].type == CardType::FFMPEG_INPUT;
        }

        std::map<uint32_t, bmusb::VideoMode> get_available_video_modes(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_available_video_modes();
        }

        uint32_t get_current_video_mode(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_current_video_mode();
        }

        void set_video_mode(unsigned card_index, uint32_t mode) {
                assert(card_index < num_cards);
                cards[card_index].capture->set_video_mode(mode);
        }

        void start_mode_scanning(unsigned card_index);

        std::map<uint32_t, std::string> get_available_video_inputs(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_available_video_inputs();
        }

        uint32_t get_current_video_input(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_current_video_input();
        }

        void set_video_input(unsigned card_index, uint32_t input) {
                assert(card_index < num_cards);
                cards[card_index].capture->set_video_input(input);
        }

        std::map<uint32_t, std::string> get_available_audio_inputs(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_available_audio_inputs();
        }

        uint32_t get_current_audio_input(unsigned card_index) const {
                assert(card_index < num_cards);
                return cards[card_index].capture->get_current_audio_input();
        }

        void set_audio_input(unsigned card_index, uint32_t input) {
                assert(card_index < num_cards);
                cards[card_index].capture->set_audio_input(input);
        }

        std::string get_ffmpeg_filename(unsigned card_index) const;

        void set_ffmpeg_filename(unsigned card_index, const std::string &filename);

        void change_x264_bitrate(unsigned rate_kbit) {
                video_encoder->change_x264_bitrate(rate_kbit);
        }

        int get_output_card_index() const {  // -1 = no output, just stream.
                return desired_output_card_index;
        }

        void set_output_card(int card_index) { // -1 = no output, just stream.
                desired_output_card_index = card_index;
        }

        std::map<uint32_t, bmusb::VideoMode> get_available_output_video_modes() const;

        uint32_t get_output_video_mode() const {
                return desired_output_video_mode;
        }

        void set_output_video_mode(uint32_t mode) {
                desired_output_video_mode = mode;
        }

        void set_display_timecode_in_stream(bool enable) {
                display_timecode_in_stream = enable;
        }

        void set_display_timecode_on_stdout(bool enable) {
                display_timecode_on_stdout = enable;
        }

        int64_t get_num_connected_clients() const {
                return httpd.get_num_connected_clients();
        }

        Theme::MenuEntry *get_theme_menu() { return theme->get_theme_menu(); }

        void theme_menu_entry_clicked(int lua_ref) { return theme->theme_menu_entry_clicked(lua_ref); }

        void set_theme_menu_callback(std::function<void()> callback)
        {
                theme->set_theme_menu_callback(callback);
        }

        void wait_for_next_frame();

private:
        struct CaptureCard;

        enum class CardType {
                LIVE_CARD,
                FAKE_CAPTURE,
                FFMPEG_INPUT,
                CEF_INPUT,
        };
        void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output);
        void set_output_card_internal(int card_index);  // Should only be called from the mixer thread.
        void bm_frame(unsigned card_index, uint16_t timecode,
                bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
                bmusb::FrameAllocator::Frame audio_frame, size_t audio_offset, bmusb::AudioFormat audio_format);
        void bm_hotplug_add(libusb_device *dev);
        void bm_hotplug_remove(unsigned card_index);
        void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
        void thread_func();
        void handle_hotplugged_cards();
        void schedule_audio_resampling_tasks(unsigned dropped_frames, int num_samples_per_frame, int length_per_frame, bool is_preroll, std::chrono::steady_clock::time_point frame_timestamp);
        std::string get_timecode_text() const;
        void render_one_frame(int64_t duration);
        void audio_thread_func();
        void release_display_frame(DisplayFrame *frame);
        double pts() { return double(pts_int) / TIMEBASE; }
        void trim_queue(CaptureCard *card, size_t safe_queue_length);
        std::pair<std::string, std::string> get_channels_json();
        std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);

        HTTPD httpd;
        unsigned num_cards, num_video_inputs, num_html_inputs = 0;

        QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface, *image_update_surface;
        std::unique_ptr<movit::ResourcePool> resource_pool;
        std::unique_ptr<Theme> theme;
        std::atomic<unsigned> audio_source_channel{0};
        std::atomic<int> master_clock_channel{0};  // Gets overridden by <output_card_index> if set.
        int output_card_index = -1;  // -1 for none.
        uint32_t output_video_mode = -1;

        // The mechanics of changing the output card and mode are intricately connected
        // with the work the mixer thread is doing, so we don't change them directly;
        // we just set these variables instead, which signals to the mixer thread that
        // it should do the change before the next frame. This simplifies locking
        // considerations immensely.
        std::atomic<int> desired_output_card_index{-1};
        std::atomic<uint32_t> desired_output_video_mode{0};
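
        // (Illustrative assumption, not the actual code: the mixer thread would
        // pick these up once per frame in thread_func(), along the lines of
        //
        //   if (desired_output_card_index != output_card_index) {
        //           set_output_card_internal(desired_output_card_index);
        //   }
        //
        // so the setters above need no locking of their own.)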

        std::unique_ptr<movit::EffectChain> display_chain;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
        std::unique_ptr<v210Converter> v210_converter;
        std::unique_ptr<VideoEncoder> video_encoder;
        std::unique_ptr<MJPEGEncoder> mjpeg_encoder;

        std::unique_ptr<TimecodeRenderer> timecode_renderer;
        std::atomic<bool> display_timecode_in_stream{false};
        std::atomic<bool> display_timecode_on_stdout{false};

        // Effects part of <display_chain>. Owned by <display_chain>.
        movit::YCbCrInput *display_input;

        int64_t pts_int = 0;  // In TIMEBASE units.

        mutable std::mutex frame_num_mutex;
        std::condition_variable frame_num_updated;
        unsigned frame_num = 0;  // Under <frame_num_mutex>.

        // Accumulated error, in number of 1/TIMEBASE audio samples. If OUTPUT_FREQUENCY
        // divided by the frame rate is an integer, this will always stay zero.
        unsigned fractional_samples = 0;

        mutable std::mutex card_mutex;
        bool has_bmusb_thread = false;
        struct CaptureCard {
                std::unique_ptr<bmusb::CaptureInterface> capture;
                bool is_fake_capture;
                CardType type;
                std::unique_ptr<DeckLinkOutput> output;

                // CEF only delivers frames when it actually has a change.
                // If we trim the queue for latency reasons, we could thus end up
                // trimming a frame that was meant to be displayed for a long time,
                // which is really suboptimal. Thus, if we drop the last frame we have,
                // may_have_dropped_last_frame is set to true, and the next starvation
                // event will trigger us requesting a CEF repaint.
                bool is_cef_capture, may_have_dropped_last_frame = false;

                // If this card is used for output (i.e., output_card_index points to it),
                // it cannot simultaneously be used for capture, so <capture> gets replaced
                // by a FakeCapture. However, since reconstructing the real capture object
                // with all its state can be annoying, it is not deleted, just stopped
                // and moved here.
                std::unique_ptr<bmusb::CaptureInterface> parked_capture;

                std::unique_ptr<PBOFrameAllocator> frame_allocator;

                // Stuff for the OpenGL context (for texture uploading).
                QSurface *surface = nullptr;

                struct NewFrame {
                        RefCountedFrame frame;
                        int64_t length;  // In TIMEBASE units.
                        bool interlaced;
                        unsigned field;  // Which field (0 or 1) of the frame to use. Always 0 for progressive.
                        std::function<void()> upload_func;  // Needs to be called to actually upload the texture to OpenGL.
                        unsigned dropped_frames = 0;  // Number of dropped frames before this one.
                        std::chrono::steady_clock::time_point received_timestamp = std::chrono::steady_clock::time_point::min();
                        movit::RGBTriplet neutral_color{1.0f, 1.0f, 1.0f};

                        // Used for MJPEG encoding. (upload_func packs everything it needs
                        // into the functor, but would otherwise also use these.)
                        // width=0 or height=0 means a broken frame, i.e., do not upload.
                        bmusb::VideoFormat video_format;
                        size_t y_offset, cbcr_offset;
                };
                std::deque<NewFrame> new_frames;
                std::condition_variable new_frames_changed;  // Set whenever new_frames is changed.
                QueueLengthPolicy queue_length_policy;  // Refers to the "new_frames" queue.

                std::vector<int32_t> new_raw_audio;

                int last_timecode = -1;  // Unwrapped.

                JitterHistory jitter_history;

                // Metrics.
                std::vector<std::pair<std::string, std::string>> labels;
                std::atomic<int64_t> metric_input_received_frames{0};
                std::atomic<int64_t> metric_input_duped_frames{0};
                std::atomic<int64_t> metric_input_dropped_frames_jitter{0};
                std::atomic<int64_t> metric_input_dropped_frames_error{0};
                std::atomic<int64_t> metric_input_resets{0};
                std::atomic<int64_t> metric_input_queue_length_frames{0};

                std::atomic<int64_t> metric_input_has_signal_bool{-1};
                std::atomic<int64_t> metric_input_is_connected_bool{-1};
                std::atomic<int64_t> metric_input_interlaced_bool{-1};
                std::atomic<int64_t> metric_input_width_pixels{-1};
                std::atomic<int64_t> metric_input_height_pixels{-1};
                std::atomic<int64_t> metric_input_frame_rate_nom{-1};
                std::atomic<int64_t> metric_input_frame_rate_den{-1};
                std::atomic<int64_t> metric_input_sample_rate_hz{-1};
        };
        JitterHistory output_jitter_history;
        CaptureCard cards[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
        YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS];  // Protected by <card_mutex>.
        movit::RGBTriplet last_received_neutral_color[MAX_VIDEO_CARDS];  // Used by the mixer thread only. Constructor-initialized.
        std::unique_ptr<AudioMixer> audio_mixer;  // Same as global_audio_mixer (see audio_mixer.h).
        bool input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const;
        struct OutputFrameInfo {
                int dropped_frames;  // Since last frame.
                int num_samples;  // Audio samples needed for this output frame.
                int64_t frame_duration;  // In TIMEBASE units.
                bool is_preroll;
                std::chrono::steady_clock::time_point frame_timestamp;
        };
        OutputFrameInfo get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS], std::vector<int32_t> raw_audio[MAX_VIDEO_CARDS]);

        InputState input_state;

        // Cards we have been notified about being hotplugged, but haven't tried adding yet.
        // Protected by its own mutex.
        std::mutex hotplug_mutex;
        std::vector<libusb_device *> hotplugged_cards;

        class OutputChannel {
        public:
                ~OutputChannel();
                void output_frame(DisplayFrame &&frame);
                bool get_display_frame(DisplayFrame *frame);
                void add_frame_ready_callback(void *key, new_frame_ready_callback_t callback);
                void remove_frame_ready_callback(void *key);
                void set_transition_names_updated_callback(transition_names_updated_callback_t callback);
                void set_name_updated_callback(name_updated_callback_t callback);
                void set_color_updated_callback(color_updated_callback_t callback);

        private:
                friend class Mixer;

                unsigned channel;
                Mixer *parent = nullptr;  // Not owned.
                std::mutex frame_mutex;
                DisplayFrame current_frame, ready_frame;  // protected by <frame_mutex>
                bool has_current_frame = false, has_ready_frame = false;  // protected by <frame_mutex>
                std::map<void *, new_frame_ready_callback_t> new_frame_ready_callbacks;  // protected by <frame_mutex>
                transition_names_updated_callback_t transition_names_updated_callback;
                name_updated_callback_t name_updated_callback;
                color_updated_callback_t color_updated_callback;

                std::vector<std::string> last_transition_names;
                std::string last_name, last_color;
        };
        OutputChannel output_channel[NUM_OUTPUTS];

        std::thread mixer_thread;
        std::thread audio_thread;
        std::atomic<bool> should_quit{false};
        std::atomic<bool> should_cut{false};

        std::unique_ptr<ALSAOutput> alsa;

        struct AudioTask {
                int64_t pts_int;
                int num_samples;
                bool adjust_rate;
                std::chrono::steady_clock::time_point frame_timestamp;
        };
        std::mutex audio_mutex;
        std::condition_variable audio_task_queue_changed;
        std::queue<AudioTask> audio_task_queue;  // Under audio_mutex.

        // For mode scanning.
        bool is_mode_scanning[MAX_VIDEO_CARDS]{ false };
        std::vector<uint32_t> mode_scanlist[MAX_VIDEO_CARDS];
        unsigned mode_scanlist_index[MAX_VIDEO_CARDS]{ 0 };
        std::chrono::steady_clock::time_point last_mode_scan_change[MAX_VIDEO_CARDS];
};

extern Mixer *global_mixer;
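
// Typical lifecycle as seen from the main program (an illustrative sketch;
// <format> and <num_cards> stand in for whatever the caller has set up):
//
//   global_mixer = new Mixer(format, num_cards);
//   global_mixer->start();   // Spawns the mixer and audio threads.
//   ...                      // Run the UI / main loop.
//   global_mixer->quit();    // Asks the background threads to finish.
//   delete global_mixer;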

#endif  // !defined(_MIXER_H)