#ifndef _MIXER_H
#define _MIXER_H 1

// The actual video mixer, running in its own separate background thread.

#include <epoxy/gl.h>
#undef Success  // The X11 headers #define Success, which clashes with other code.
#include <stdbool.h>
#include <stdint.h>

#include <movit/effect_chain.h>
#include <movit/flat_input.h>
#include <condition_variable>
#include <cstddef>
#include <functional>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

#include "bmusb/bmusb.h"
#include "ebu_r128_proc.h"
#include "h264encode.h"
#include "httpd.h"
#include "pbo_frame_allocator.h"
#include "ref_counted_frame.h"
#include "ref_counted_gl_sync.h"
#include "resampler.h"
#include "stereocompressor.h"
#include "theme.h"
#include "timebase.h"

// Maximum number of capture cards; used to size <cards> below.
// NOTE: 16 is an assumed value; if MAX_CARDS is already provided by another
// header, this definition should be dropped.
#define MAX_CARDS 16

class H264Encoder;
class QSurface;
namespace movit {
class Effect;
class EffectChain;
class FlatInput;
class ResourcePool;
class YCbCrInput;
}  // namespace movit
class QOpenGLContext;
class QSurfaceFormat;

class Mixer {
public:
        // The surface format is used for offscreen destinations for OpenGL contexts we need.
        Mixer(const QSurfaceFormat &format, unsigned num_cards);
        ~Mixer();
        void start();
        void quit();
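
        // A minimal lifecycle sketch (hypothetical call site; the real one is
        // outside this header, and the format/num_cards values are assumptions):
        //
        //   QSurfaceFormat format;  // set up for the OpenGL version movit needs
        //   global_mixer = new Mixer(format, /*num_cards=*/2);
        //   global_mixer->start();
        //   ...                     // run the UI, pull frames, etc.
        //   global_mixer->quit();
        //   delete global_mixer;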

        void transition_clicked(int transition_num);
        void channel_clicked(int preview_num);

        enum Output {
                OUTPUT_LIVE = 0,
                OUTPUT_PREVIEW,
                OUTPUT_INPUT0,  // 1, 2, 3, up to 15 follow numerically.
                NUM_OUTPUTS = 18
        };
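
        // For example, Output(OUTPUT_INPUT0 + 3) addresses the display chain
        // for the fourth input channel.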

        struct DisplayFrame {
                // The chain for rendering this frame. To render a display frame,
                // first wait for <ready_fence>, then call <setup_chain>
                // to wire up all the inputs, and then finally call
                // chain->render_to_screen() or similar.
                movit::EffectChain *chain;
                std::function<void()> setup_chain;

                // Asserted when all the inputs are ready; you cannot render the chain
                // before this.
                RefCountedGLsync ready_fence;

                // Holds on to all the input frames needed for this display frame,
                // so they are not released while still rendering.
                std::vector<RefCountedFrame> input_frames;

                // Textures that should be released back to the resource pool
                // when this frame disappears, if any.
                // TODO: Refcount these as well?
                std::vector<GLuint> temp_textures;
        };
        // Implicitly frees the previous one if there's a new frame available.
        bool get_display_frame(Output output, DisplayFrame *frame) {
                return output_channel[output].get_display_frame(frame);
        }
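
        // A sketch of the consumer side of the protocol described in
        // <DisplayFrame> above. This assumes RefCountedGLsync exposes the raw
        // GLsync through get(); the final rendering call is up to the caller:
        //
        //   Mixer::DisplayFrame frame;
        //   if (global_mixer->get_display_frame(Mixer::OUTPUT_LIVE, &frame)) {
        //           glWaitSync(frame.ready_fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
        //           frame.setup_chain();
        //           frame.chain->render_to_screen();
        //   }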

        typedef std::function<void()> new_frame_ready_callback_t;
        void set_frame_ready_callback(Output output, new_frame_ready_callback_t callback)
        {
                output_channel[output].set_frame_ready_callback(callback);
        }
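
        // Hypothetical registration sketch: the callback is presumably invoked
        // from the mixer thread, so a UI would typically just schedule a repaint
        // here and call get_display_frame() later from its own GL context:
        //
        //   global_mixer->set_frame_ready_callback(Mixer::OUTPUT_PREVIEW, []{
        //           schedule_preview_repaint();  // hypothetical UI helper
        //   });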

        typedef std::function<void(float, float, float, float, float)> audio_level_callback_t;
        void set_audio_level_callback(audio_level_callback_t callback)
        {
                audio_level_callback = callback;
        }
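
        // Hypothetical registration sketch. The five floats carry audio level
        // measurements (loudness/peak data derived from <r128> and <peak> below);
        // their exact order and units are defined by the mixer's audio code, so
        // the parameter names here are placeholders:
        //
        //   global_mixer->set_audio_level_callback(
        //           [](float a, float b, float c, float d, float e) {
        //                   update_vu_meters(a, b, c, d, e);  // hypothetical UI hook
        //           });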

        std::vector<std::string> get_transition_names()
        {
                return theme->get_transition_names(pts());
        }

        unsigned get_num_channels() const
        {
                return theme->get_num_channels();
        }

        std::string get_channel_name(unsigned channel) const
        {
                return theme->get_channel_name(channel);
        }

        bool get_supports_set_wb(unsigned channel) const
        {
                return theme->get_supports_set_wb(channel);
        }

        void set_wb(unsigned channel, double r, double g, double b) const
        {
                theme->set_wb(channel, r, g, b);
        }

private:
        void bm_frame(unsigned card_index, uint16_t timecode,
                FrameAllocator::Frame video_frame, size_t video_offset, uint16_t video_format,
                FrameAllocator::Frame audio_frame, size_t audio_offset, uint16_t audio_format);
        void place_rectangle(movit::Effect *resample_effect, movit::Effect *padding_effect, float x0, float y0, float x1, float y1);
        void thread_func();
        void process_audio_one_frame();
        void subsample_chroma(GLuint src_tex, GLuint dst_tex);
        void release_display_frame(DisplayFrame *frame);
        double pts() { return double(pts_int) / TIMEBASE; }

        HTTPD httpd;
        unsigned num_cards;

        QSurface *mixer_surface, *h264_encoder_surface;
        std::unique_ptr<movit::ResourcePool> resource_pool;
        std::unique_ptr<Theme> theme;
        std::unique_ptr<movit::EffectChain> display_chain;
        GLuint cbcr_program_num;  // Owned by <resource_pool>.
        std::unique_ptr<H264Encoder> h264_encoder;

        // Effects part of <display_chain>. Owned by <display_chain>.
        movit::FlatInput *display_input;

        int64_t pts_int = 0;  // In TIMEBASE units.

        std::mutex bmusb_mutex;
        struct CaptureCard {
                BMUSBCapture *usb;
                std::unique_ptr<PBOFrameAllocator> frame_allocator;

                // Stuff for the OpenGL context (for texture uploading).
                QSurface *surface;
                QOpenGLContext *context;

                bool new_data_ready = false;  // Whether new_frame contains anything.
                bool should_quit = false;
                RefCountedFrame new_frame;
                GLsync new_data_ready_fence;  // Signaled when new_frame is ready for rendering.
                std::condition_variable new_data_ready_changed;  // Set whenever new_data_ready is changed.
                unsigned dropped_frames = 0;  // Before new_frame.

                std::mutex audio_mutex;
                std::unique_ptr<Resampler> resampler;  // Under audio_mutex.
                int last_timecode = -1;  // Unwrapped.
        };
        CaptureCard cards[MAX_CARDS];  // Protected by <bmusb_mutex>.

        RefCountedFrame bmusb_current_rendering_frame[MAX_CARDS];

        class OutputChannel {
        public:
                ~OutputChannel();
                void output_frame(DisplayFrame frame);
                bool get_display_frame(DisplayFrame *frame);
                void set_frame_ready_callback(new_frame_ready_callback_t callback);

        private:
                friend class Mixer;

                Mixer *parent = nullptr;  // Not owned.
                std::mutex frame_mutex;
                DisplayFrame current_frame, ready_frame;  // Protected by <frame_mutex>.
                bool has_current_frame = false, has_ready_frame = false;  // Protected by <frame_mutex>.
                new_frame_ready_callback_t new_frame_ready_callback;
                bool has_new_frame_ready_callback = false;
        };
        OutputChannel output_channel[NUM_OUTPUTS];

        std::thread mixer_thread;
        bool should_quit = false;

        audio_level_callback_t audio_level_callback = nullptr;
        Ebu_r128_proc r128;

        // TODO: Implement oversampled peak detection.
        float peak = 0.0f;

        // First compressor; takes us up to about -12 dBFS.
        StereoCompressor level_compressor;
};

extern Mixer *global_mixer;

#endif  // !defined(_MIXER_H)