last_is_connected[signal_num] = userdata->last_is_connected;
last_frame_rate_nom[signal_num] = userdata->last_frame_rate_nom;
last_frame_rate_den[signal_num] = userdata->last_frame_rate_den;
+ last_pixel_format[signal_num] = userdata->pixel_format;
has_last_subtitle[signal_num] = userdata->has_last_subtitle;
last_subtitle[signal_num] = userdata->last_subtitle;
}
{ "new", Scene_new },
{ "__gc", Scene_gc },
{ "add_input", Scene::add_input },
- { "add_auto_white_balance", Scene::add_auto_white_balance },
+ { "add_white_balance", Scene::add_white_balance },
{ "add_effect", Scene::add_effect },
{ "add_optional_effect", Scene::add_optional_effect },
{ "finalize", Scene::finalize },
}
int card_idx = theme->map_signal_to_card(signal_num);
- connect_signal_raw(card_idx, *theme->input_state);
+ connect_card(card_idx, *theme->input_state);
return true;
}
-void LiveInputWrapper::connect_signal_raw(int card_idx, const InputState &input_state)
+void LiveInputWrapper::connect_card(int card_idx, const InputState &input_state)
{
BufferedFrame first_frame = input_state.buffered_frames[card_idx][0];
if (first_frame.frame == nullptr) {
ycbcr_inputs[i]->set_texture_num(0, userdata->tex_y[frame.field_number]);
ycbcr_inputs[i]->set_texture_num(1, userdata->tex_cb[frame.field_number]);
ycbcr_inputs[i]->set_texture_num(2, userdata->tex_cr[frame.field_number]);
- ycbcr_inputs[i]->change_ycbcr_format(userdata->ycbcr_format);
+ // YCbCrPlanar is used for video streams, where we can have metadata from the mux.
+ // Prefer that if there's no override. (Overrides are only available when using
+ // video as SRT cards.)
+ if (input_state.ycbcr_coefficients_auto[card_idx]) {
+ ycbcr_inputs[i]->change_ycbcr_format(userdata->ycbcr_format);
+ } else {
+ ycbcr_inputs[i]->change_ycbcr_format(input_ycbcr_format);
+ }
ycbcr_inputs[i]->set_width(width);
ycbcr_inputs[i]->set_height(height);
break;
// each FFmpeg or CEF input, so we'll do it here.
if (video_signal_connections.count(effect_chain)) {
for (const VideoSignalConnection &conn : video_signal_connections[effect_chain]) {
- conn.wrapper->connect_signal_raw(conn.source->get_card_index(), input_state);
+ conn.wrapper->connect_card(conn.source->get_card_index(), input_state);
}
}
#ifdef HAVE_CEF
if (html_signal_connections.count(effect_chain)) {
for (const CEFSignalConnection &conn : html_signal_connections[effect_chain]) {
- conn.wrapper->connect_signal_raw(conn.source->get_card_index(), input_state);
+ conn.wrapper->connect_card(conn.source->get_card_index(), input_state);
}
}
#endif
lock_guard<mutex> lock(m);
if (signal != -1) {
- white_balance_for_signal[signal] = RGBTriplet{ r, g, b };
+ int card_idx = map_signal_to_card(signal);
+ white_balance_for_card[card_idx] = RGBTriplet{ r, g, b };
}
call_lua_wb_callback(channel, r, g, b);
}
-void Theme::set_wb_for_signal(int signal, float r, float g, float b)
+// Renamed from set_wb_for_signal: white balance is now keyed by card index
+// rather than by signal number. Stores the RGB neutral-color triplet for the
+// card, then invokes the Lua white-balance callback for every channel whose
+// signal maps to this card (previously: only channels bound to the exact
+// signal). NOTE(review): this changes the public method name — confirm all
+// call sites are updated in the same change.
+void Theme::set_wb_for_card(int card_idx, float r, float g, float b)
{
+ // Guards white_balance_for_card and channel_signals against concurrent access.
lock_guard<mutex> lock(m);
- white_balance_for_signal[signal] = RGBTriplet{ r, g, b };
+ white_balance_for_card[card_idx] = RGBTriplet{ r, g, b };
+ // channel_signals maps channel -> signal; translate each signal to its card
+ // so every channel currently showing this card gets the callback.
for (const auto &channel_and_signal : channel_signals) {
- if (channel_and_signal.second == signal) {
+ if (map_signal_to_card(channel_and_signal.second) == card_idx) {
call_lua_wb_callback(channel_and_signal.first, r, g, b);
}
}
+ // The Lua callback must leave the Lua stack balanced.
assert(lua_gettop(L) == 0);
}
-RGBTriplet Theme::get_white_balance_for_signal(int signal)
+RGBTriplet Theme::get_white_balance_for_card(int card_idx)
{
- if (white_balance_for_signal.count(signal)) {
- return white_balance_for_signal[signal];
+ if (white_balance_for_card.count(card_idx)) {
+ return white_balance_for_card[card_idx];
} else {
return RGBTriplet{ 1.0, 1.0, 1.0 };
}