card->last_timecode = timecode;
PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)video_frame.userdata;
+ if (card->type == CardType::FFMPEG_INPUT) {
+ FFmpegCapture *ffmpeg_capture = static_cast<FFmpegCapture *>(card->capture.get());
+ userdata->has_last_subtitle = ffmpeg_capture->get_has_last_subtitle();
+ userdata->last_subtitle = ffmpeg_capture->get_last_subtitle();
+ }
size_t cbcr_width, cbcr_height, cbcr_offset, y_offset;
size_t expected_length = video_format.stride * (video_format.height + video_format.extra_lines_top + video_format.extra_lines_bottom);
// Still send on the information that we _had_ a frame, even though it's corrupted,
// so that pts can go up accordingly.
{
- unique_lock<mutex> lock(card_mutex);
+ lock_guard<mutex> lock(card_mutex);
CaptureCard::NewFrame new_frame;
new_frame.frame = RefCountedFrame(FrameAllocator::Frame());
new_frame.length = frame_length;
}
frame_upload_start = steady_clock::now();
}
+ assert(userdata != nullptr);
userdata->last_interlaced = video_format.interlaced;
userdata->last_has_signal = video_format.has_signal;
userdata->last_is_connected = video_format.is_connected;
}
{
- unique_lock<mutex> lock(card_mutex);
+ lock_guard<mutex> lock(card_mutex);
CaptureCard::NewFrame new_frame;
new_frame.frame = frame;
new_frame.length = frame_length;
// non-dropped frame; perhaps we should just discard that as well,
// since dropped frames are expected to be rare, and it might be
// better to just wait until we have a slightly more normal situation).
- unique_lock<mutex> lock(audio_mutex);
+ lock_guard<mutex> lock(audio_mutex);
bool adjust_rate = !dropped_frame && !is_preroll;
audio_task_queue.push(AudioTask{pts_int, num_samples_per_frame, adjust_rate, frame_timestamp});
audio_task_queue_changed.notify_one();
// Update Y'CbCr settings for all cards.
{
- unique_lock<mutex> lock(card_mutex);
+ lock_guard<mutex> lock(card_mutex);
for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
YCbCrInterpretation *interpretation = &ycbcr_interpretation[card_index];
input_state.ycbcr_coefficients_auto[card_index] = interpretation->ycbcr_coefficients_auto;
// Return the current Y'CbCr interpretation settings for the given card.
// Returns by value (a copy) so the caller can use the result without
// holding card_mutex.
YCbCrInterpretation Mixer::get_input_ycbcr_interpretation(unsigned card_index) const
{
	// card_mutex guards ycbcr_interpretation[]; lock_guard suffices since
	// the lock is held for the whole scope and never released early.
	lock_guard<mutex> lock(card_mutex);
	return ycbcr_interpretation[card_index];
}
// Replace the Y'CbCr interpretation settings for the given card.
// Thread-safe with respect to get_input_ycbcr_interpretation(): both
// serialize on card_mutex.
void Mixer::set_input_ycbcr_interpretation(unsigned card_index, const YCbCrInterpretation &interpretation)
{
	// lock_guard rather than unique_lock: no condition variable or early
	// unlock is needed here, so the cheaper scope-bound guard is enough.
	lock_guard<mutex> lock(card_mutex);
	ycbcr_interpretation[card_index] = interpretation;
}
// List the video modes the currently selected output card can produce,
// keyed by mode id. Only valid once an output card has been chosen;
// calling this with no output card selected is a programming error.
map<uint32_t, VideoMode> Mixer::get_available_output_video_modes() const
{
	assert(desired_output_card_index != -1);
	// card_mutex guards cards[]; hold it while dereferencing the card's
	// output object.
	lock_guard<mutex> lock(card_mutex);
	return cards[desired_output_card_index].output->get_available_video_modes();
}
// Store this frame for display. Remove the ready frame if any
// (it was seemingly never used).
{
- unique_lock<mutex> lock(frame_mutex);
+ lock_guard<mutex> lock(frame_mutex);
if (has_ready_frame) {
parent->release_display_frame(&ready_frame);
}
bool Mixer::OutputChannel::get_display_frame(DisplayFrame *frame)
{
- unique_lock<mutex> lock(frame_mutex);
+ lock_guard<mutex> lock(frame_mutex);
if (!has_current_frame && !has_ready_frame) {
return false;
}
// Register a callback to be invoked when a new frame is ready on this
// channel. `key` identifies the registration; registering again with the
// same key replaces the previous callback (map assignment semantics).
void Mixer::OutputChannel::add_frame_ready_callback(void *key, Mixer::new_frame_ready_callback_t callback)
{
	// frame_mutex guards new_frame_ready_callbacks against concurrent
	// notification/removal.
	lock_guard<mutex> lock(frame_mutex);
	new_frame_ready_callbacks[key] = callback;
}
// Unregister the frame-ready callback previously added under `key`.
// A no-op if no callback is registered for that key (map::erase semantics).
void Mixer::OutputChannel::remove_frame_ready_callback(void *key)
{
	// Same lock as add_frame_ready_callback(); keeps registration and
	// removal serialized against frame delivery.
	lock_guard<mutex> lock(frame_mutex);
	new_frame_ready_callbacks.erase(key);
}