}
// Note: You can also get this through the global variable global_audio_mixer.
- AudioMixer *get_audio_mixer() { return &audio_mixer; }
- const AudioMixer *get_audio_mixer() const { return &audio_mixer; }
+ AudioMixer *get_audio_mixer() { return audio_mixer.get(); }
+ const AudioMixer *get_audio_mixer() const { return audio_mixer.get(); }
void schedule_cut()
{
return cards[card_index].output != nullptr;
}
+ // Whether the card at <card_index> is an FFmpeg (video file) input,
+ // as opposed to a live capture or fake capture card.
+ // FFmpeg inputs live in the index range [num_cards, num_cards + num_video_inputs).
+ bool card_is_ffmpeg(unsigned card_index) const {
+ assert(card_index < num_cards + num_video_inputs);
+ return cards[card_index].type == CardType::FFMPEG_INPUT;
+ }
+
std::map<uint32_t, bmusb::VideoMode> get_available_video_modes(unsigned card_index) const {
assert(card_index < num_cards);
return cards[card_index].capture->get_available_video_modes();
cards[card_index].capture->set_audio_input(input);
}
+ std::string get_ffmpeg_filename(unsigned card_index) const;
+
+ void set_ffmpeg_filename(unsigned card_index, const std::string &filename);
+
void change_x264_bitrate(unsigned rate_kbit) {
video_encoder->change_x264_bitrate(rate_kbit);
}
display_timecode_on_stdout = enable;
}
+ // Number of clients currently connected to the HTTP server
+ // (straight pass-through to HTTPD; useful for UI status display).
+ int64_t get_num_connected_clients() const {
+ return httpd.get_num_connected_clients();
+ }
+
+ // Returns the menu entries the theme wants exposed in the UI (pass-through to Theme).
+ std::vector<Theme::MenuEntry> get_theme_menu() { return theme->get_theme_menu(); }
+
+ // Forwards a click on a theme menu entry to the theme; <lua_ref> identifies
+ // the entry (presumably a Lua registry reference — see Theme).
+ void theme_menu_entry_clicked(int lua_ref) { return theme->theme_menu_entry_clicked(lua_ref); }
+
+ // Registers a callback with the theme; invoked when the theme menu
+ // needs attention (pass-through to Theme::set_theme_menu_callback).
+ void set_theme_menu_callback(std::function<void()> callback)
+ {
+ theme->set_theme_menu_callback(callback);
+ }
+
+ void wait_for_next_frame();
+
private:
struct CaptureCard;
enum class CardType {
LIVE_CARD,
FAKE_CAPTURE,
- FFMPEG_INPUT
+ FFMPEG_INPUT,  // Input played back from a video file via FFmpeg.
+ CEF_INPUT,  // HTML input rendered by an embedded CEF browser.
};
void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output);
void set_output_card_internal(int card_index); // Should only be called from the mixer thread.
void release_display_frame(DisplayFrame *frame);
double pts() { return double(pts_int) / TIMEBASE; }
void trim_queue(CaptureCard *card, size_t safe_queue_length);
+ std::pair<std::string, std::string> get_channels_json();
+ std::pair<std::string, std::string> get_channel_color_http(unsigned channel_idx);
HTTPD httpd;
- unsigned num_cards, num_video_inputs;
+ unsigned num_cards, num_video_inputs, num_html_inputs = 0;
QSurface *mixer_surface, *h264_encoder_surface, *decklink_output_surface;
std::unique_ptr<movit::ResourcePool> resource_pool;
movit::YCbCrInput *display_input;
int64_t pts_int = 0; // In TIMEBASE units.
- unsigned frame_num = 0;
+
+ mutable std::mutex frame_num_mutex;
+ std::condition_variable frame_num_updated;
+ unsigned frame_num = 0; // Under <frame_num_mutex>.
// Accumulated errors in number of 1/TIMEBASE audio samples. If OUTPUT_FREQUENCY divided by
// frame rate is integer, will always stay zero.
CardType type;
std::unique_ptr<DeckLinkOutput> output;
+ // CEF only delivers frames when it actually has a change.
+ // If we trim the queue for latency reasons, we could thus
+ // end up in a situation trimming a frame that was meant to
+ // be displayed for a long time, which is really suboptimal.
+ // Thus, if we drop the last frame we have, may_have_dropped_last_frame
+ // is set to true, and the next starvation event will trigger
+ // us requesting a CEF repaint.
+ bool is_cef_capture, may_have_dropped_last_frame = false;
+
// If this card is used for output (ie., output_card_index points to it),
// it cannot simultaneously be uesd for capture, so <capture> gets replaced
// by a FakeCapture. However, since reconstructing the real capture object
std::chrono::steady_clock::time_point received_timestamp = std::chrono::steady_clock::time_point::min();
};
std::deque<NewFrame> new_frames;
- bool should_quit = false;
- std::condition_variable new_frames_changed; // Set whenever new_frames (or should_quit) is changed.
+ std::condition_variable new_frames_changed; // Set whenever new_frames is changed.
QueueLengthPolicy queue_length_policy; // Refers to the "new_frames" queue.
JitterHistory output_jitter_history;
CaptureCard cards[MAX_VIDEO_CARDS]; // Protected by <card_mutex>.
YCbCrInterpretation ycbcr_interpretation[MAX_VIDEO_CARDS]; // Protected by <card_mutex>.
- AudioMixer audio_mixer; // Same as global_audio_mixer (see audio_mixer.h).
+ std::unique_ptr<AudioMixer> audio_mixer; // Same as global_audio_mixer (see audio_mixer.h).
bool input_card_is_master_clock(unsigned card_index, unsigned master_card_index) const;
struct OutputFrameInfo {
int dropped_frames; // Since last frame.
class OutputChannel {
public:
~OutputChannel();
- void output_frame(DisplayFrame frame);
+ void output_frame(DisplayFrame &&frame);
bool get_display_frame(DisplayFrame *frame);
void add_frame_ready_callback(void *key, new_frame_ready_callback_t callback);
void remove_frame_ready_callback(void *key);