#ifndef _MIXER_H
#define _MIXER_H 1

// The actual video mixer, running in its own separate background thread.

#include <epoxy/gl.h>
#undef Success
#include <stdbool.h>
#include <stdint.h>

#include <movit/effect_chain.h>
#include <movit/flat_input.h>
#include <zita-resampler/resampler.h>
#include <atomic>
#include <condition_variable>
#include <cstddef>
#include <functional>
#include <memory>
#include <mutex>
#include <queue>
#include <string>
#include <thread>
#include <vector>

#include "bmusb/bmusb.h"
#include "alsa_output.h"
#include "ebu_r128_proc.h"
#include "h264encode.h"
#include "httpd.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_frame.h"
#include "ref_counted_gl_sync.h"
#include "resampling_queue.h"
#include "theme.h"
#include "timebase.h"
#include "stereocompressor.h"
#include "filter.h"

class H264Encoder;
class QSurface;
namespace movit {
class Effect;
class EffectChain;
class FlatInput;
class ResourcePool;
class YCbCrInput;
}  // namespace movit
class QOpenGLContext;
class QSurfaceFormat;

class Mixer {
public:
        // The surface format is used for offscreen destinations for OpenGL contexts we need.
        Mixer(const QSurfaceFormat &format, unsigned num_cards);
        ~Mixer();
        void start();
        void quit();

        void transition_clicked(int transition_num);
        void channel_clicked(int preview_num);

        enum Output {
                OUTPUT_LIVE = 0,
                OUTPUT_PREVIEW,
                OUTPUT_INPUT0,  // 1, 2, 3, up to 15 follow numerically.
                NUM_OUTPUTS = 18
        };
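        // For instance (illustrative only, not an API of this class), the channel
        // showing capture card <i> is addressed as static_cast<Output>(OUTPUT_INPUT0 + i).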

        struct DisplayFrame {
                // The chain for rendering this frame. To render a display frame,
                // first wait for <ready_fence>, then call <setup_chain>
                // to wire up all the inputs, and then finally call
                // chain->render_to_screen() or similar.
                movit::EffectChain *chain;
                std::function<void()> setup_chain;

                // Asserted when all the inputs are ready; you cannot render the chain
                // before this.
                RefCountedGLsync ready_fence;

                // Holds on to all the input frames needed for this display frame,
                // so they are not released while still rendering.
                std::vector<RefCountedFrame> input_frames;

                // Textures that should be released back to the resource pool
                // when this frame disappears, if any.
                // TODO: Refcount these as well?
                std::vector<GLuint> temp_textures;
        };
        // Implicitly frees the previous one if there's a new frame available.
        bool get_display_frame(Output output, DisplayFrame *frame) {
                return output_channel[output].get_display_frame(frame);
        }

        typedef std::function<void()> new_frame_ready_callback_t;
        void set_frame_ready_callback(Output output, new_frame_ready_callback_t callback)
        {
                output_channel[output].set_frame_ready_callback(callback);
        }
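        // Minimal consumer sketch (assumptions: the callback merely schedules a repaint,
        // an OpenGL context is current when the rendering code below runs, and
        // RefCountedGLsync exposes the underlying GLsync via get()):
        //
        //   global_mixer->set_frame_ready_callback(Mixer::OUTPUT_LIVE, []{
        //           // Ask the UI to schedule a repaint of the live view.
        //   });
        //
        //   // Later, on the rendering thread:
        //   Mixer::DisplayFrame frame;
        //   if (global_mixer->get_display_frame(Mixer::OUTPUT_LIVE, &frame)) {
        //           glWaitSync(frame.ready_fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
        //           frame.setup_chain();
        //           frame.chain->render_to_screen();
        //   }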

        typedef std::function<void(float level_lufs, float peak_db,
                                   float global_level_lufs, float range_low_lufs, float range_high_lufs,
                                   float auto_gain_staging_db)> audio_level_callback_t;
        void set_audio_level_callback(audio_level_callback_t callback)
        {
                audio_level_callback = callback;
        }
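        // Example wiring (a sketch; the lambda body is purely illustrative):
        //
        //   global_mixer->set_audio_level_callback(
        //           [](float level_lufs, float peak_db,
        //              float global_level_lufs, float range_low_lufs, float range_high_lufs,
        //              float auto_gain_staging_db) {
        //                   printf("momentary %.1f LUFS, peak %.1f dB\n", level_lufs, peak_db);
        //           });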

        std::vector<std::string> get_transition_names()
        {
                return theme->get_transition_names(pts());
        }

        unsigned get_num_channels() const
        {
                return theme->get_num_channels();
        }

        std::string get_channel_name(unsigned channel) const
        {
                return theme->get_channel_name(channel);
        }

        bool get_supports_set_wb(unsigned channel) const
        {
                return theme->get_supports_set_wb(channel);
        }

        void set_wb(unsigned channel, double r, double g, double b) const
        {
                theme->set_wb(channel, r, g, b);
        }

        void set_locut_cutoff(float cutoff_hz)
        {
                locut_cutoff_hz = cutoff_hz;
        }

        float get_limiter_threshold_dbfs()
        {
                return limiter_threshold_dbfs;
        }

        float get_compressor_threshold_dbfs()
        {
                return compressor_threshold_dbfs;
        }

        void set_limiter_threshold_dbfs(float threshold_dbfs)
        {
                limiter_threshold_dbfs = threshold_dbfs;
        }

        void set_compressor_threshold_dbfs(float threshold_dbfs)
        {
                compressor_threshold_dbfs = threshold_dbfs;
        }

        void set_limiter_enabled(bool enabled)
        {
                limiter_enabled = enabled;
        }

        void set_compressor_enabled(bool enabled)
        {
                compressor_enabled = enabled;
        }

        void reset_meters();

        struct BufferedFrame {
                RefCountedFrame frame;
                unsigned field_number;
        };

        BufferedFrame get_buffered_frame(int card, int history_pos)
        {
                return buffered_frames[card][history_pos];
        }
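        // history_pos 0 is the most recent frame/field for <card>, 1 the one before
        // that, and so on (see <buffered_frames> below for how the history is kept).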

private:
        void bm_frame(unsigned card_index, uint16_t timecode,
                FrameAllocator::Frame video_frame, size_t video_offset, uint16_t video_format,
                FrameAllocator::Frame audio_frame, size_t audio_offset, uint16_t audio_format);
        void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
        void thread_func();
        void audio_thread_func();
        void process_audio_one_frame(int64_t frame_pts_int, int num_samples);
        void subsample_chroma(GLuint src_tex, GLuint dst_tex);
        void release_display_frame(DisplayFrame *frame);
        double pts() { return double(pts_int) / TIMEBASE; }

        HTTPD httpd;
        unsigned num_cards;

        QSurface *mixer_surface, *h264_encoder_surface;
        std::unique_ptr<movit::ResourcePool> resource_pool;
        std::unique_ptr<Theme> theme;
        std::unique_ptr<movit::EffectChain> display_chain;
        GLuint cbcr_program_num;  // Owned by <resource_pool>.
        std::unique_ptr<H264Encoder> h264_encoder;

        // Effects that are part of <display_chain>. Owned by <display_chain>.
        movit::FlatInput *display_input;

        int64_t pts_int = 0;  // In TIMEBASE units.

        std::mutex bmusb_mutex;
        struct CaptureCard {
                BMUSBCapture *usb;
                std::unique_ptr<PBOFrameAllocator> frame_allocator;

                // Stuff for the OpenGL context (for texture uploading).
                QSurface *surface;
                QOpenGLContext *context;

                bool new_data_ready = false;  // Whether new_frame contains anything.
                bool should_quit = false;
                RefCountedFrame new_frame;
                int64_t new_frame_length;  // In TIMEBASE units.
                bool new_frame_interlaced;
                unsigned new_frame_field;  // Which field (0 or 1) of the frame to use. Always 0 for progressive.
                GLsync new_data_ready_fence;  // Signaled when new_frame is ready for rendering.
                std::condition_variable new_data_ready_changed;  // Set whenever new_data_ready is changed.
                unsigned dropped_frames = 0;  // Before new_frame.

                // Accumulated error, in number of 1/TIMEBASE samples. If OUTPUT_FREQUENCY
                // divided by the frame rate is an integer, this will always stay zero.
                unsigned fractional_samples = 0;
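                // Worked example of the bookkeeping above (assuming OUTPUT_FREQUENCY is
                // 48000 Hz): a 60000/1001 fps frame carries 48000 * 1001 / 60000 = 800.8
                // samples, so 0.8 samples' worth of error (0.8 * TIMEBASE in these units)
                // accrues per frame until it adds up to a whole extra sample.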

                std::mutex audio_mutex;
                std::unique_ptr<ResamplingQueue> resampling_queue;  // Under audio_mutex.
                int last_timecode = -1;  // Unwrapped.
                int64_t next_local_pts = 0;  // Beginning of next frame, in TIMEBASE units.
        };
        CaptureCard cards[MAX_CARDS];  // protected by <bmusb_mutex>

        // For each card, the last FRAME_HISTORY_LENGTH frames (or fields), with 0 being
        // the most recent one. Note that we only need the actual history if the input
        // is interlaced (for deinterlacing); if we detect progressive input, we
        // immediately clear out all history, and all entries will point to the same
        // frame.
        BufferedFrame buffered_frames[MAX_CARDS][FRAME_HISTORY_LENGTH];

        class OutputChannel {
        public:
                ~OutputChannel();
                void output_frame(DisplayFrame frame);
                bool get_display_frame(DisplayFrame *frame);
                void set_frame_ready_callback(new_frame_ready_callback_t callback);

        private:
                friend class Mixer;

                Mixer *parent = nullptr;  // Not owned.
                std::mutex frame_mutex;
                DisplayFrame current_frame, ready_frame;  // protected by <frame_mutex>
                bool has_current_frame = false, has_ready_frame = false;  // protected by <frame_mutex>
                new_frame_ready_callback_t new_frame_ready_callback;
                bool has_new_frame_ready_callback = false;
        };
        OutputChannel output_channel[NUM_OUTPUTS];

        std::thread mixer_thread;
        std::thread audio_thread;
        std::atomic<bool> should_quit{false};

        audio_level_callback_t audio_level_callback = nullptr;
        std::mutex r128_mutex;
        Ebu_r128_proc r128;  // Under r128_mutex.

        Resampler peak_resampler;
        std::atomic<float> peak{0.0f};

        StereoFilter locut;  // Default cutoff 150 Hz, 24 dB/oct.
        std::atomic<float> locut_cutoff_hz;

        // First compressor; takes us up to about -12 dBFS.
        StereoCompressor level_compressor;
        float last_gain_staging_db = 0.0f;

        static constexpr float ref_level_dbfs = -14.0f;
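        // With ref_level_dbfs at -14.0 dBFS, the default thresholds below work out to
        // -10 dBFS for the limiter and -26 dBFS for the compressor.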

        StereoCompressor limiter;
        std::atomic<float> limiter_threshold_dbfs{ref_level_dbfs + 4.0f};   // 4 dB.
        std::atomic<bool> limiter_enabled{true};
        StereoCompressor compressor;
        std::atomic<float> compressor_threshold_dbfs{ref_level_dbfs - 12.0f};  // -12 dB.
        std::atomic<bool> compressor_enabled{true};

        std::unique_ptr<ALSAOutput> alsa;

        struct AudioTask {
                int64_t pts_int;
                int num_samples;
        };
        std::mutex audio_mutex;
        std::condition_variable audio_task_queue_changed;
        std::queue<AudioTask> audio_task_queue;  // Under audio_mutex.
};

extern Mixer *global_mixer;

#endif  // !defined(_MIXER_H)