X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=mixer.cpp;h=b26e097e9e49bffc208dc890b047cdf1ea1f3978;hb=d41e4825e6e02a693661ae7b055b081411e8b1dc;hp=73a59d5a3a0745b60e5e327cb866895f30f3ac11;hpb=c6c5e2ca4ea51426e32608f8b8e2cbcd5f1ab74f;p=nageru

diff --git a/mixer.cpp b/mixer.cpp
index 73a59d5..b26e097 100644
--- a/mixer.cpp
+++ b/mixer.cpp
@@ -499,14 +499,10 @@ Mixer::Mixer(const QSurfaceFormat &format, unsigned num_cards)
 
 Mixer::~Mixer()
 {
+	httpd.stop();
 	BMUSBCapture::stop_bm_thread();
 
 	for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
-		{
-			unique_lock<mutex> lock(card_mutex);
-			cards[card_index].should_quit = true;  // Unblock thread.
-			cards[card_index].new_frames_changed.notify_all();
-		}
 		cards[card_index].capture->stop_dequeue_thread();
 		if (cards[card_index].output) {
 			cards[card_index].output->end_output();
@@ -689,18 +685,22 @@ int unwrap_timecode(uint16_t current_wrapped, int last)
 	}
 }
 
+DeviceSpec card_index_to_device(unsigned card_index, unsigned num_cards)
+{
+	if (card_index >= num_cards) {
+		return DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
+	} else {
+		return DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
+	}
+}
+
 }  // namespace
 
 void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
                      FrameAllocator::Frame video_frame, size_t video_offset, VideoFormat video_format,
                      FrameAllocator::Frame audio_frame, size_t audio_offset, AudioFormat audio_format)
 {
-	DeviceSpec device;
-	if (card_index >= num_cards) {
-		device = DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
-	} else {
-		device = DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
-	}
+	DeviceSpec device = card_index_to_device(card_index, num_cards);
 	CaptureCard *card = &cards[card_index];
 
 	++card->metric_input_received_frames;
@@ -731,9 +731,6 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 		}
 	}
 
-	int64_t frame_length = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
-	assert(frame_length > 0);
-
 	size_t num_samples = (audio_frame.len > audio_offset) ? (audio_frame.len - audio_offset) / audio_format.num_channels / (audio_format.bits_per_sample / 8) : 0;
 	if (num_samples > OUTPUT_FREQUENCY / 10 && card->type != CardType::FFMPEG_INPUT) {
 		printf("%s: Dropping frame with implausible audio length (len=%d, offset=%d) [timecode=0x%04x video_len=%d video_offset=%d video_format=%x)\n",
@@ -748,6 +745,17 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 		return;
 	}
 
+	int64_t frame_length;
+	bool audio_only_frame = false;
+	if (video_frame.len - video_offset == 0 && num_samples > 0) {
+		// Audio-only frame (probably from FFmpegCapture).
+		frame_length = int64_t(TIMEBASE) * num_samples / OUTPUT_FREQUENCY;
+		audio_only_frame = true;
+	} else {
+		frame_length = int64_t(TIMEBASE) * video_format.frame_rate_den / video_format.frame_rate_nom;
+	}
+	assert(frame_length > 0);
+
 	int dropped_frames = 0;
 	if (card->last_timecode != -1) {
 		dropped_frames = unwrap_timecode(timecode, card->last_timecode) - card->last_timecode - 1;
@@ -819,7 +827,7 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 	}
 
 	// Still send on the information that we _had_ a frame, even though it's corrupted,
-	// so that pts can go up accordingly.
+	// so that pts can go up accordingly. (This is also used for audio-only frames.)
 	{
 		unique_lock<mutex> lock(card_mutex);
 		CaptureCard::NewFrame new_frame;
@@ -827,6 +835,7 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
 		new_frame.length = frame_length;
 		new_frame.interlaced = false;
 		new_frame.dropped_frames = dropped_frames;
+		new_frame.audio_only = audio_only_frame;
 		new_frame.received_timestamp = video_frame.received_timestamp;
 		card->new_frames.push_back(move(new_frame));
 		card->jitter_history.frame_arrived(video_frame.received_timestamp, frame_length, dropped_frames);
@@ -992,7 +1001,7 @@ void Mixer::thread_func()
 		}
 	}
 
-	BasicStats basic_stats(/*verbose=*/true);
+	BasicStats basic_stats(/*verbose=*/true, /*use_opengl=*/true);
 	int stats_dropped_frames = 0;
 
 	while (!should_quit) {
@@ -1028,15 +1037,17 @@ void Mixer::thread_func()
 		handle_hotplugged_cards();
 
 		for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
+			DeviceSpec device = card_index_to_device(card_index, num_cards);
 			if (card_index == master_card_index || !has_new_frame[card_index]) {
 				continue;
 			}
-			if (new_frames[card_index].frame->len == 0) {
+			if (new_frames[card_index].frame->len == 0 &&
+			    !new_frames[card_index].audio_only) {
 				++new_frames[card_index].dropped_frames;
 			}
 			if (new_frames[card_index].dropped_frames > 0) {
-				printf("Card %u dropped %d frames before this\n",
-					card_index, int(new_frames[card_index].dropped_frames));
+				printf("%s dropped %d frames before this\n",
+					spec_to_string(device).c_str(), int(new_frames[card_index].dropped_frames));
			}
 		}
 
@@ -1066,7 +1077,11 @@ void Mixer::thread_func()
 
 		int64_t frame_duration = output_frame_info.frame_duration;
 		render_one_frame(frame_duration);
-		++frame_num;
+		{
+			lock_guard<mutex> lock(frame_num_mutex);
+			++frame_num;
+		}
+		frame_num_updated.notify_all();
 		pts_int += frame_duration;
 
 		basic_stats.update(frame_num, stats_dropped_frames);
@@ -1587,6 +1602,25 @@ map<uint32_t, VideoMode> Mixer::get_available_output_video_modes() const
 	return cards[desired_output_card_index].output->get_available_video_modes();
 }
 
+string Mixer::get_ffmpeg_filename(unsigned card_index) const
+{
+	assert(card_index >= num_cards && card_index < num_cards + num_video_inputs);
+	return ((FFmpegCapture *)(cards[card_index].capture.get()))->get_filename();
+}
+
+void Mixer::set_ffmpeg_filename(unsigned card_index, const string &filename) {
+	assert(card_index >= num_cards && card_index < num_cards + num_video_inputs);
+	((FFmpegCapture *)(cards[card_index].capture.get()))->change_filename(filename);
+}
+
+void Mixer::wait_for_next_frame()
+{
+	unique_lock<mutex> lock(frame_num_mutex);
+	unsigned old_frame_num = frame_num;
+	frame_num_updated.wait_for(lock, seconds(1),  // Timeout is just in case.
+		[old_frame_num, this]{ return this->frame_num > old_frame_num; });
+}
+
 Mixer::OutputChannel::~OutputChannel()
 {
 	if (has_current_frame) {
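Note: the frame_num_mutex/frame_num_updated pair added above is a standard C++11 condition-variable handshake: the mixer thread bumps frame_num under the mutex and notifies, while wait_for_next_frame() blocks until the counter advances, with a one-second timeout as a safety net. The standalone sketch below illustrates the same pattern outside of Nageru; it is not the project's code, and all names in it (FrameCounter, frame_done, render_loop) are invented for illustration.

// Minimal sketch of the wait-for-next-frame handshake, assuming nothing from Nageru.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

class FrameCounter {
public:
	// Called by the rendering thread after each finished frame.
	void frame_done()
	{
		{
			std::lock_guard<std::mutex> lock(mu);
			++frame_num;
		}
		updated.notify_all();  // Notify outside the lock, like the diff does.
	}

	// Called by other threads; returns once at least one new frame has been
	// rendered, or after the timeout (mirroring the one-second safety timeout).
	void wait_for_next_frame()
	{
		std::unique_lock<std::mutex> lock(mu);
		unsigned old_frame_num = frame_num;
		updated.wait_for(lock, std::chrono::seconds(1),
		                 [this, old_frame_num] { return frame_num > old_frame_num; });
	}

private:
	std::mutex mu;
	std::condition_variable updated;
	unsigned frame_num = 0;
};

int main()
{
	FrameCounter counter;
	std::thread render_loop([&counter] {
		for (int i = 0; i < 3; ++i) {
			std::this_thread::sleep_for(std::chrono::milliseconds(100));  // Pretend to render.
			counter.frame_done();
		}
	});
	for (int i = 0; i < 3; ++i) {
		counter.wait_for_next_frame();
		printf("Got frame %d\n", i);
	}
	render_loop.join();
	return 0;
}

Reading the counter and waiting on the condition variable under the same mutex is what makes the predicate race-free; the timeout only guards against a stalled producer, as the "Timeout is just in case" comment in the diff suggests.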