X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=mixer.cpp;h=70175a163c4a2c9a51be1690a9c9c408ac16f81f;hb=9e47a2f661b9d292598ef0277e507458e3dad62f;hp=31277f1cd87af1bf2e03a67a1f169dcdf8f8c03a;hpb=fa0b850bb90894ae9686e0ad7a17ed1b2aafb5d1;p=nageru

diff --git a/mixer.cpp b/mixer.cpp
index 31277f1..70175a1 100644
--- a/mixer.cpp
+++ b/mixer.cpp
@@ -14,7 +14,6 @@
 #include 
 #include 
 #include 
-#include 
 #include 
 #include 
 #include 
@@ -31,6 +30,7 @@
 #include "DeckLinkAPI.h"
 #include "LinuxCOM.h"
 #include "alsa_output.h"
+#include "basic_stats.h"
 #include "bmusb/bmusb.h"
 #include "bmusb/fake_capture.h"
 #include "chroma_subsampler.h"
@@ -51,6 +51,10 @@
 #include "v210_converter.h"
 #include "video_encoder.h"
 
+#undef Status
+#include 
+#include "json.pb.h"
+
 class IDeckLink;
 class QOpenGLContext;
 
@@ -61,7 +65,6 @@ using namespace std::placeholders;
 using namespace bmusb;
 
 Mixer *global_mixer = nullptr;
-bool uses_mlock = false;
 
 namespace {
 
@@ -240,7 +243,7 @@ double JitterHistory::estimate_max_jitter() const
 	if (percentile <= 0.5) {
 		return *next(orders.begin(), elem_idx) * multiplier;
 	} else {
-		return *prev(orders.end(), elem_idx + 1) * multiplier;
+		return *prev(orders.end(), orders.size() - elem_idx) * multiplier;
 	}
 }
 
@@ -256,10 +259,12 @@ void QueueLengthPolicy::unregister_metrics(const vector> &l
 
 void QueueLengthPolicy::update_policy(steady_clock::time_point now,
                                       steady_clock::time_point expected_next_frame,
+                                      int64_t input_frame_duration,
                                       int64_t master_frame_duration,
                                       double max_input_card_jitter_seconds,
                                       double max_master_card_jitter_seconds)
 {
+	double input_frame_duration_seconds = input_frame_duration / double(TIMEBASE);
 	double master_frame_duration_seconds = master_frame_duration / double(TIMEBASE);
 
 	// Figure out when we can expect the next frame for this card, assuming
@@ -279,8 +284,8 @@ void QueueLengthPolicy::update_policy(steady_clock::time_point now,
 	// We account for this by looking at the situation five frames ahead,
 	// assuming everything else is the same.
 	double frames_allowed;
-	if (max_master_card_jitter_seconds < max_input_card_jitter_seconds) {
-		frames_allowed = frames_needed + 5 * (max_input_card_jitter_seconds - max_master_card_jitter_seconds) / master_frame_duration_seconds;
+	if (master_frame_duration < input_frame_duration) {
+		frames_allowed = frames_needed + 5 * (input_frame_duration_seconds - master_frame_duration_seconds) / master_frame_duration_seconds;
 	} else {
 		frames_allowed = frames_needed;
 	}
@@ -351,8 +356,15 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
 	// Must be instantiated after VideoEncoder has initialized global_flags.use_zerocopy.
 	theme.reset(new Theme(global_flags.theme_filename, global_flags.theme_dirs, resource_pool.get(), num_cards));
 
+	httpd.add_endpoint("/channels", bind(&Mixer::get_channels_json, this));
+	for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+		char url[256];
+		snprintf(url, sizeof(url), "/channels/%d/color", channel_idx);
+		httpd.add_endpoint(url, bind(&Mixer::get_channel_color_http, this, unsigned(channel_idx)));
+	}
+
 	// Start listening for clients only once VideoEncoder has written its header, if any.
-	httpd.start(9095);
+	httpd.start(global_flags.http_port);
 
 	// First try initializing the then PCI devices, then USB, then
 	// fill up with fake cards until we have the desired number of cards.
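
The estimate_max_jitter() change in the hunk at line 240 above fixes which element is read when the requested percentile lies in the upper half of the sorted multiset: prev(orders.end(), elem_idx + 1) dereferences the element at index orders.size() - elem_idx - 1, whereas the new prev(orders.end(), orders.size() - elem_idx) lands on index elem_idx, the same element that next(orders.begin(), elem_idx) yields in the lower-half branch (the two branches presumably exist only so the walk starts from whichever end of the multiset is nearer). A small standalone sketch of that iterator arithmetic; the container contents and elem_idx below are invented for illustration and are not taken from Nageru:

#include <cassert>
#include <cstddef>
#include <iterator>
#include <set>

int main()
{
	std::multiset<int> orders = { 10, 20, 30, 40, 50 };
	std::size_t elem_idx = 3;

	// Element at index elem_idx, counted from the front.
	int from_front = *std::next(orders.begin(), elem_idx);               // 40
	// Same element, reached from the back: size() - elem_idx steps before end().
	int from_back = *std::prev(orders.end(), orders.size() - elem_idx);  // 40
	// The old expression picks index size() - elem_idx - 1 instead.
	int old_expr = *std::prev(orders.end(), elem_idx + 1);               // 20

	assert(from_front == from_back);
	assert(from_front != old_expr);
	return 0;
}
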
@@ -370,7 +382,10 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
 
 				DeckLinkCapture *capture = new DeckLinkCapture(decklink, card_index);
 				DeckLinkOutput *output = new DeckLinkOutput(resource_pool.get(), decklink_output_surface, global_flags.width, global_flags.height, card_index);
-				output->set_device(decklink);
+				if (!output->set_device(decklink)) {
+					delete output;
+					output = nullptr;
+				}
 				configure_card(card_index, capture, CardType::LIVE_CARD, output);
 				++num_pci_devices;
 			}
@@ -458,14 +473,7 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
 		set_output_card_internal(global_flags.output_card);
 	}
 
-	metric_start_time_seconds = get_timestamp_for_metrics();
-
 	output_jitter_history.register_metrics({{ "card", "output" }});
-	global_metrics.add("frames_output_total", &metric_frames_output_total);
-	global_metrics.add("frames_output_dropped", &metric_frames_output_dropped);
-	global_metrics.add("start_time_seconds", &metric_start_time_seconds, Metrics::TYPE_GAUGE);
-	global_metrics.add("memory_used_bytes", &metrics_memory_used_bytes);
-	global_metrics.add("memory_locked_limit_bytes", &metrics_memory_locked_limit_bytes);
 }
 
 Mixer::~Mixer()
@@ -729,7 +737,9 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 		} while (!success);
 	}
 
-	audio_mixer.add_audio(device, audio_frame.data + audio_offset, num_samples, audio_format, frame_length, audio_frame.received_timestamp);
+	if (num_samples > 0) {
+		audio_mixer.add_audio(device, audio_frame.data + audio_offset, num_samples, audio_format, frame_length, audio_frame.received_timestamp);
+	}
 
 	// Done with the audio, so release it.
 	if (audio_frame.owner) {
@@ -738,11 +748,6 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 
 	card->last_timecode = timecode;
 
-	// Calculate jitter for this card here. We do it on arrival so that we
-	// make sure every frame counts, even the dropped ones -- and it will also
-	// make sure the jitter number is as recent as possible, should it change.
-	card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
-
 	PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)video_frame.userdata;
 
 	size_t cbcr_width, cbcr_height, cbcr_offset, y_offset;
@@ -786,8 +791,9 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 			new_frame.dropped_frames = dropped_frames;
 			new_frame.received_timestamp = video_frame.received_timestamp;
 			card->new_frames.push_back(move(new_frame));
-			card->new_frames_changed.notify_all();
+			card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
 		}
+		card->new_frames_changed.notify_all();
 
 		return;
 	}
@@ -911,8 +917,9 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 			new_frame.dropped_frames = dropped_frames;
 			new_frame.received_timestamp = video_frame.received_timestamp; // Ignore the audio timestamp.
 			card->new_frames.push_back(move(new_frame));
-			card->new_frames_changed.notify_all();
+			card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
 		}
+		card->new_frames_changed.notify_all();
 	}
 }
 
@@ -946,9 +953,7 @@ void Mixer::thread_func()
 		}
 	}
 
-	steady_clock::time_point start, now;
-	start = steady_clock::now();
-
+	BasicStats basic_stats(/*verbose=*/true);
 	int stats_dropped_frames = 0;
 
 	while (!should_quit) {
@@ -1025,54 +1030,8 @@
 		++frame_num;
 		pts_int += frame_duration;
 
-		now = steady_clock::now();
-		double elapsed = duration<double>(now - start).count();
-
-		metric_frames_output_total = frame_num;
-		metric_frames_output_dropped = stats_dropped_frames;
-
-		if (frame_num % 100 == 0) {
-			printf("%d frames (%d dropped) in %.3f seconds = %.1f fps (%.1f ms/frame)",
-				frame_num, stats_dropped_frames, elapsed, frame_num / elapsed,
-				1e3 * elapsed / frame_num);
-			// chain->print_phase_timing();
-
-			// Check our memory usage, to see if we are close to our mlockall()
-			// limit (if at all set).
-			rusage used;
-			if (getrusage(RUSAGE_SELF, &used) == -1) {
-				perror("getrusage(RUSAGE_SELF)");
-				assert(false);
-			}
-
-			if (uses_mlock) {
-				rlimit limit;
-				if (getrlimit(RLIMIT_MEMLOCK, &limit) == -1) {
-					perror("getrlimit(RLIMIT_MEMLOCK)");
-					assert(false);
-				}
-
-				if (limit.rlim_cur == 0) {
-					printf(", using %ld MB memory (locked)",
-						long(used.ru_maxrss / 1024));
-				} else {
-					printf(", using %ld / %ld MB lockable memory (%.1f%%)",
-						long(used.ru_maxrss / 1024),
-						long(limit.rlim_cur / 1048576),
-						float(100.0 * (used.ru_maxrss * 1024.0) / limit.rlim_cur));
-				}
-				metrics_memory_locked_limit_bytes = limit.rlim_cur;
-			} else {
-				printf(", using %ld MB memory (not locked)",
-					long(used.ru_maxrss / 1024));
-				metrics_memory_locked_limit_bytes = 0.0 / 0.0;
-			}
-
-			printf("\n");
-
-			metrics_memory_used_bytes = used.ru_maxrss * 1024;
-		}
-
+		basic_stats.update(frame_num, stats_dropped_frames);
+		// if (frame_num % 100 == 0) chain->print_phase_timing();
 
 		if (should_cut.exchange(false)) { // Test and clear.
 			video_encoder->do_cut(frame_num);
@@ -1148,6 +1107,24 @@ void Mixer::trim_queue(CaptureCard *card, size_t safe_queue_length)
 #endif
 }
 
+pair<string, string> Mixer::get_channels_json()
+{
+	Channels ret;
+	for (int channel_idx = 2; channel_idx < theme->get_num_channels(); ++channel_idx) {
+		Channel *channel = ret.add_channel();
+		channel->set_index(channel_idx);
+		channel->set_name(theme->get_channel_name(channel_idx));
+		channel->set_color(theme->get_channel_color(channel_idx));
+	}
+	string contents;
+	google::protobuf::util::MessageToJsonString(ret, &contents); // Ignore any errors.
+	return make_pair(contents, "text/json");
+}
+
+pair<string, string> Mixer::get_channel_color_http(unsigned channel_idx)
+{
+	return make_pair(theme->get_channel_color(channel_idx), "text/plain");
+}
 
 Mixer::OutputFrameInfo Mixer::get_one_frame_from_each_card(unsigned master_card_index, bool master_card_is_output, CaptureCard::NewFrame new_frames[MAX_VIDEO_CARDS], bool has_new_frame[MAX_VIDEO_CARDS])
 {
@@ -1207,6 +1184,7 @@ start:
 		card->queue_length_policy.update_policy(
 			output_frame_info.frame_timestamp,
 			card->jitter_history.get_expected_next_frame(),
+			new_frames[master_card_index].length,
 			output_frame_info.frame_duration,
 			card->jitter_history.estimate_max_jitter(),
 			output_jitter_history.estimate_max_jitter());
@@ -1441,18 +1419,18 @@ void Mixer::render_one_frame(int64_t duration)
 	live_frame.ready_fence = fence;
 	live_frame.input_frames = {};
 	live_frame.temp_textures = { y_display_tex, cbcr_display_tex };
-	output_channel[OUTPUT_LIVE].output_frame(live_frame);
+	output_channel[OUTPUT_LIVE].output_frame(move(live_frame));
 
 	// Set up preview and any additional channels.
 	for (int i = 1; i < theme->get_num_channels() + 2; ++i) {
 		DisplayFrame display_frame;
 		Theme::Chain chain = theme->get_chain(i, pts(), global_flags.width, global_flags.height, input_state); // FIXME: dimensions
-		display_frame.chain = chain.chain;
-		display_frame.setup_chain = chain.setup_chain;
+		display_frame.chain = move(chain.chain);
+		display_frame.setup_chain = move(chain.setup_chain);
 		display_frame.ready_fence = fence;
-		display_frame.input_frames = chain.input_frames;
+		display_frame.input_frames = move(chain.input_frames);
 		display_frame.temp_textures = {};
-		output_channel[i].output_frame(display_frame);
+		output_channel[i].output_frame(move(display_frame));
 	}
 }
 
@@ -1572,7 +1550,7 @@ Mixer::OutputChannel::~OutputChannel()
 	}
 }
 
-void Mixer::OutputChannel::output_frame(DisplayFrame frame)
+void Mixer::OutputChannel::output_frame(DisplayFrame &&frame)
 {
 	// Store this frame for display. Remove the ready frame if any
 	// (it was seemingly never used).
@@ -1581,7 +1559,7 @@ void Mixer::OutputChannel::output_frame(DisplayFrame frame)
 	if (has_ready_frame) {
 		parent->release_display_frame(&ready_frame);
 	}
-	ready_frame = frame;
+	ready_frame = move(frame);
 	has_ready_frame = true;
 
 	// Call the callbacks under the mutex (they should be short),
@@ -1644,7 +1622,7 @@ bool Mixer::OutputChannel::get_display_frame(DisplayFrame *frame)
 	}
 	if (has_ready_frame) {
 		assert(!has_current_frame);
-		current_frame = ready_frame;
+		current_frame = move(ready_frame);
 		ready_frame.ready_fence.reset(); // Drop the refcount.
 		ready_frame.input_frames.clear(); // Drop the refcounts.
 		has_current_frame = true;
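
The output_frame() changes in the last three hunks switch the frame handoff from pass-by-value to pass-by-rvalue-reference, and get_display_frame() correspondingly moves ready_frame into current_frame, so the refcounted members of DisplayFrame (the ready fence and the input-frame list) are stolen rather than copied and re-released on every handoff; the callers in render_one_frame() pass move(live_frame) and move(display_frame) to match. A minimal standalone sketch of the pattern, using stand-in types rather than the real DisplayFrame/OutputChannel:

#include <memory>
#include <utility>
#include <vector>

// Stand-ins for the refcounted members a display frame carries.
struct Frame {
	std::shared_ptr<int> ready_fence;
	std::vector<std::shared_ptr<int>> input_frames;
};

class Channel {
public:
	// Taking an rvalue reference lets the channel steal the members
	// instead of bumping and dropping refcounts on a temporary copy.
	void output_frame(Frame &&frame)
	{
		ready_frame = std::move(frame);
		has_ready_frame = true;
	}

private:
	Frame ready_frame;
	bool has_ready_frame = false;
};

int main()
{
	Channel channel;
	Frame frame;
	frame.ready_fence = std::make_shared<int>(1);
	frame.input_frames.push_back(std::make_shared<int>(2));
	channel.output_frame(std::move(frame));  // callers now have to move() explicitly
	return 0;
}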