#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
-#include <sys/resource.h>
#include <algorithm>
#include <chrono>
#include <condition_variable>
#include "DeckLinkAPI.h"
#include "LinuxCOM.h"
#include "alsa_output.h"
+#include "basic_stats.h"
#include "bmusb/bmusb.h"
#include "bmusb/fake_capture.h"
+#ifdef HAVE_CEF
+#include "cef_capture.h"
+#endif
#include "chroma_subsampler.h"
#include "context.h"
#include "decklink_capture.h"
#include "v210_converter.h"
#include "video_encoder.h"
+#undef Status
+#include <google/protobuf/util/json_util.h>
+#include "json.pb.h"
+
class IDeckLink;
class QOpenGLContext;
using namespace bmusb;
Mixer *global_mixer = nullptr;
-bool uses_mlock = false;
namespace {
if (percentile <= 0.5) {
return *next(orders.begin(), elem_idx) * multiplier;
} else {
- return *prev(orders.end(), elem_idx + 1) * multiplier;
+ return *prev(orders.end(), orders.size() - elem_idx) * multiplier;
}
}
// Must be instantiated after VideoEncoder has initialized global_flags.use_zerocopy.
theme.reset(new Theme(global_flags.theme_filename, global_flags.theme_dirs, resource_pool.get(), num_cards));
+ httpd.add_endpoint("/channels", bind(&Mixer::get_channels_json, this), HTTPD::ALLOW_ALL_ORIGINS);
+ for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+ char url[256];
+ snprintf(url, sizeof(url), "/channels/%d/color", channel_idx);
+ httpd.add_endpoint(url, bind(&Mixer::get_channel_color_http, this, unsigned(channel_idx)), HTTPD::ALLOW_ALL_ORIGINS);
+ }
+
// Start listening for clients only once VideoEncoder has written its header, if any.
- httpd.start(9095);
+ httpd.start(global_flags.http_port);
// First try initializing the PCI devices, then USB, then
// fill up with fake cards until we have the desired number of cards.
DeckLinkCapture *capture = new DeckLinkCapture(decklink, card_index);
DeckLinkOutput *output = new DeckLinkOutput(resource_pool.get(), decklink_output_surface, global_flags.width, global_flags.height, card_index);
- output->set_device(decklink);
+ if (!output->set_device(decklink)) {
+ delete output;
+ output = nullptr;
+ }
configure_card(card_index, capture, CardType::LIVE_CARD, output);
++num_pci_devices;
}
}
num_video_inputs = video_inputs.size();
+#ifdef HAVE_CEF
+ // Same, for HTML inputs.
+ std::vector<CEFCapture *> html_inputs = theme->get_html_inputs();
+ for (unsigned html_card_index = 0; html_card_index < html_inputs.size(); ++card_index, ++html_card_index) {
+ if (card_index >= MAX_VIDEO_CARDS) {
+ fprintf(stderr, "ERROR: Not enough card slots available for the HTML inputs the theme requested.\n");
+ exit(1);
+ }
+ configure_card(card_index, html_inputs[html_card_index], CardType::CEF_INPUT, /*output=*/nullptr);
+ html_inputs[html_card_index]->set_card_index(card_index);
+ }
+ num_html_inputs = html_inputs.size();
+#endif
+
BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
BMUSBCapture::start_bm_thread();
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
cards[card_index].queue_length_policy.reset(card_index);
}
set_output_card_internal(global_flags.output_card);
}
- metric_start_time_seconds = get_timestamp_for_metrics();
-
output_jitter_history.register_metrics({{ "card", "output" }});
- global_metrics.add("frames_output_total", &metric_frames_output_total);
- global_metrics.add("frames_output_dropped", &metric_frames_output_dropped);
- global_metrics.add("start_time_seconds", &metric_start_time_seconds, Metrics::TYPE_GAUGE);
- global_metrics.add("memory_used_bytes", &metrics_memory_used_bytes);
- global_metrics.add("memory_locked_limit_bytes", &metrics_memory_locked_limit_bytes);
}
Mixer::~Mixer()
{
BMUSBCapture::stop_bm_thread();
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
{
unique_lock<mutex> lock(card_mutex);
cards[card_index].should_quit = true; // Unblock thread.
PixelFormat pixel_format;
if (card_type == CardType::FFMPEG_INPUT) {
pixel_format = capture->get_current_pixel_format();
+ } else if (card_type == CardType::CEF_INPUT) {
+ pixel_format = PixelFormat_8BitBGRA;
} else if (global_flags.ten_bit_input) {
pixel_format = PixelFormat_10BitYCbCr;
} else {
case CardType::FFMPEG_INPUT:
labels.emplace_back("cardtype", "ffmpeg");
break;
+ case CardType::CEF_INPUT:
+ labels.emplace_back("cardtype", "cef");
+ break;
default:
assert(false);
}
// Start the actual capture. (We don't want to do it before we're actually ready
// to process output frames.)
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
if (int(card_index) != output_card_index) {
cards[card_index].capture->start_bm_capture();
}
}
- steady_clock::time_point start, now;
- start = steady_clock::now();
-
+ BasicStats basic_stats(/*verbose=*/true);
int stats_dropped_frames = 0;
while (!should_quit) {
handle_hotplugged_cards();
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
if (card_index == master_card_index || !has_new_frame[card_index]) {
continue;
}
continue;
}
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
if (!has_new_frame[card_index] || new_frames[card_index].frame->len == 0)
continue;
++frame_num;
pts_int += frame_duration;
- now = steady_clock::now();
- double elapsed = duration<double>(now - start).count();
-
- metric_frames_output_total = frame_num;
- metric_frames_output_dropped = stats_dropped_frames;
-
- if (frame_num % 100 == 0) {
- printf("%d frames (%d dropped) in %.3f seconds = %.1f fps (%.1f ms/frame)",
- frame_num, stats_dropped_frames, elapsed, frame_num / elapsed,
- 1e3 * elapsed / frame_num);
- // chain->print_phase_timing();
-
- // Check our memory usage, to see if we are close to our mlockall()
- // limit (if at all set).
- rusage used;
- if (getrusage(RUSAGE_SELF, &used) == -1) {
- perror("getrusage(RUSAGE_SELF)");
- assert(false);
- }
-
- if (uses_mlock) {
- rlimit limit;
- if (getrlimit(RLIMIT_MEMLOCK, &limit) == -1) {
- perror("getrlimit(RLIMIT_MEMLOCK)");
- assert(false);
- }
-
- if (limit.rlim_cur == 0) {
- printf(", using %ld MB memory (locked)",
- long(used.ru_maxrss / 1024));
- } else {
- printf(", using %ld / %ld MB lockable memory (%.1f%%)",
- long(used.ru_maxrss / 1024),
- long(limit.rlim_cur / 1048576),
- float(100.0 * (used.ru_maxrss * 1024.0) / limit.rlim_cur));
- }
- metrics_memory_locked_limit_bytes = limit.rlim_cur;
- } else {
- printf(", using %ld MB memory (not locked)",
- long(used.ru_maxrss / 1024));
- metrics_memory_locked_limit_bytes = 0.0 / 0.0;
- }
-
- printf("\n");
-
- metrics_memory_used_bytes = used.ru_maxrss * 1024;
- }
-
+ basic_stats.update(frame_num, stats_dropped_frames);
+ // if (frame_num % 100 == 0) chain->print_phase_timing();
if (should_cut.exchange(false)) { // Test and clear.
video_encoder->do_cut(frame_num);
#endif
}
+pair<string, string> Mixer::get_channels_json()
+{
+ Channels ret;
+ for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+ Channel *channel = ret.add_channel();
+ channel->set_index(channel_idx);
+ channel->set_name(theme->get_channel_name(channel_idx));
+ channel->set_color(theme->get_channel_color(channel_idx));
+ }
+ string contents;
+ google::protobuf::util::MessageToJsonString(ret, &contents); // Ignore any errors.
+ return make_pair(contents, "text/json");
+}
+
+pair<string, string> Mixer::get_channel_color_http(unsigned channel_idx)
+{
+ return make_pair(theme->get_channel_color(channel_idx), "text/plain");
+}
Mixer::OutputFrameInfo Mixer::get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS])
{
goto start;
}
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
CaptureCard *card = &cards[card_index];
if (card->new_frames.empty()) { // Starvation.
++card->metric_input_duped_frames;
output_jitter_history.frame_arrived(output_frame_info.frame_timestamp, output_frame_info.frame_duration, output_frame_info.dropped_frames);
}
- for (unsigned card_index = 0; card_index < num_cards + num_video_inputs; ++card_index) {
+ for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
CaptureCard *card = &cards[card_index];
if (has_new_frame[card_index] &&
!input_card_is_master_clock(card_index, master_card_index) &&
//theme_main_chain.chain->enable_phase_timing(true);
// The theme can't (or at least shouldn't!) call connect_signal() on
- // each FFmpeg input, so we'll do it here.
- for (const pair<LiveInputWrapper *, FFmpegCapture *> &conn : theme->get_signal_connections()) {
+ // each FFmpeg or CEF input, so we'll do it here.
+ for (const pair<LiveInputWrapper *, FFmpegCapture *> &conn : theme->get_video_signal_connections()) {
+ conn.first->connect_signal_raw(conn.second->get_card_index(), input_state);
+ }
+#ifdef HAVE_CEF
+ for (const pair<LiveInputWrapper *, CEFCapture *> &conn : theme->get_html_signal_connections()) {
conn.first->connect_signal_raw(conn.second->get_card_index(), input_state);
}
+#endif
// If HDMI/SDI output is active and the user has requested auto mode,
// its mode overrides the existing Y'CbCr setting for the chain.
live_frame.ready_fence = fence;
live_frame.input_frames = {};
live_frame.temp_textures = { y_display_tex, cbcr_display_tex };
- output_channel[OUTPUT_LIVE].output_frame(live_frame);
+ output_channel[OUTPUT_LIVE].output_frame(move(live_frame));
// Set up preview and any additional channels.
for (int i = 1; i < theme->get_num_channels() + 2; ++i) {
DisplayFrame display_frame;
Theme::Chain chain = theme->get_chain(i, pts(), global_flags.width, global_flags.height, input_state); // FIXME: dimensions
- display_frame.chain = chain.chain;
- display_frame.setup_chain = chain.setup_chain;
+ display_frame.chain = move(chain.chain);
+ display_frame.setup_chain = move(chain.setup_chain);
display_frame.ready_fence = fence;
- display_frame.input_frames = chain.input_frames;
+ display_frame.input_frames = move(chain.input_frames);
display_frame.temp_textures = {};
- output_channel[i].output_frame(display_frame);
+ output_channel[i].output_frame(move(display_frame));
}
}
}
}
-void Mixer::OutputChannel::output_frame(DisplayFrame frame)
+void Mixer::OutputChannel::output_frame(DisplayFrame &&frame)
{
// Store this frame for display. Remove the ready frame if any
// (it was seemingly never used).
if (has_ready_frame) {
parent->release_display_frame(&ready_frame);
}
- ready_frame = frame;
+ ready_frame = move(frame);
has_ready_frame = true;
// Call the callbacks under the mutex (they should be short),
}
if (has_ready_frame) {
assert(!has_current_frame);
- current_frame = ready_frame;
+ current_frame = move(ready_frame);
ready_frame.ready_fence.reset(); // Drop the refcount.
ready_frame.input_frames.clear(); // Drop the refcounts.
has_current_frame = true;