#include "defs.h"
#include "deinterlace_effect.h"
+#include "ffmpeg_capture.h"
#include "flags.h"
#include "image_input.h"
#include "input.h"
bool override_bounce = checkbool(L, 2);
bool deinterlace = checkbool(L, 3);
bmusb::PixelFormat pixel_format = global_flags.ten_bit_input ? bmusb::PixelFormat_10BitYCbCr : bmusb::PixelFormat_8BitYCbCr;
- return wrap_lua_object<LiveInputWrapper>(L, "LiveInputWrapper", theme, chain, pixel_format, override_bounce, deinterlace);
+
+ // Needs to be nonowned to match add_video_input (see below).
+ return wrap_lua_object_nonowned<LiveInputWrapper>(L, "LiveInputWrapper", theme, chain, pixel_format, override_bounce, deinterlace);
+}
+
+int EffectChain_add_video_input(lua_State* L)
+{
+ assert(lua_gettop(L) == 3);
+ Theme *theme = get_theme_updata(L);
+ EffectChain *chain = (EffectChain *)luaL_checkudata(L, 1, "EffectChain");
+ FFmpegCapture **capture = (FFmpegCapture **)luaL_checkudata(L, 2, "VideoInput");
+ bool deinterlace = checkbool(L, 3);
+
+ // These need to be nonowned, so that the LiveInputWrapper still exists
+ // and can feed frames to the right EffectChain even if the Lua code
+ // doesn't care about the object anymore. (If we change this, we'd need
+ // to also unregister the signal connection on __gc.)
+ int ret = wrap_lua_object_nonowned<LiveInputWrapper>(
+ L, "LiveInputWrapper", theme, chain, (*capture)->get_current_pixel_format(),
+ /*override_bounce=*/false, deinterlace);
+ if (ret == 1) {
+ Theme *theme = get_theme_updata(L);
+ LiveInputWrapper **live_input = (LiveInputWrapper **)lua_touserdata(L, -1);
+ theme->register_signal_connection(*live_input, *capture);
+ }
+ return ret;
}
int EffectChain_add_effect(lua_State* L)
vector<Effect *> inputs;
for (int idx = 3; idx <= lua_gettop(L); ++idx) {
if (luaL_testudata(L, idx, "LiveInputWrapper")) {
- LiveInputWrapper *input = (LiveInputWrapper *)lua_touserdata(L, idx);
- inputs.push_back(input->get_effect());
+ LiveInputWrapper **input = (LiveInputWrapper **)lua_touserdata(L, idx);
+ inputs.push_back((*input)->get_effect());
} else {
inputs.push_back(get_effect(L, idx));
}
// Lua binding: LiveInputWrapper:connect_signal(signal_num).
// The userdata created by wrap_lua_object_nonowned holds a pointer to the
// wrapper (LiveInputWrapper *), so we must dereference twice; using the
// userdata address directly as a LiveInputWrapper* would be wrong.
int LiveInputWrapper_connect_signal(lua_State* L)
{
	assert(lua_gettop(L) == 2);
	LiveInputWrapper **input = (LiveInputWrapper **)luaL_checkudata(L, 1, "LiveInputWrapper");
	int signal_num = luaL_checknumber(L, 2);
	(*input)->connect_signal(signal_num);
	return 0;
}
return wrap_lua_object_nonowned<ImageInput>(L, "ImageInput", filename);
}
+int VideoInput_new(lua_State* L)
+{
+ assert(lua_gettop(L) == 2);
+ string filename = checkstdstring(L, 1);
+ int pixel_format = luaL_checknumber(L, 2);
+ if (pixel_format != bmusb::PixelFormat_8BitYCbCrPlanar &&
+ pixel_format != bmusb::PixelFormat_8BitBGRA) {
+ fprintf(stderr, "WARNING: Invalid enum %d used for video format, choosing Y'CbCr.\n",
+ pixel_format);
+ pixel_format = bmusb::PixelFormat_8BitYCbCrPlanar;
+ }
+ int ret = wrap_lua_object_nonowned<FFmpegCapture>(L, "VideoInput", filename, global_flags.width, global_flags.height);
+ if (ret == 1) {
+ FFmpegCapture **capture = (FFmpegCapture **)lua_touserdata(L, -1);
+ (*capture)->set_pixel_format(bmusb::PixelFormat(pixel_format));
+
+ Theme *theme = get_theme_updata(L);
+ theme->register_video_input(*capture);
+ }
+ return ret;
+}
+
+int VideoInput_rewind(lua_State* L)
+{
+ assert(lua_gettop(L) == 1);
+ FFmpegCapture **video_input = (FFmpegCapture **)luaL_checkudata(L, 1, "VideoInput");
+ (*video_input)->rewind();
+ return 0;
+}
+
+int VideoInput_change_rate(lua_State* L)
+{
+ assert(lua_gettop(L) == 2);
+ FFmpegCapture **video_input = (FFmpegCapture **)luaL_checkudata(L, 1, "VideoInput");
+ double new_rate = luaL_checknumber(L, 2);
+ (*video_input)->change_rate(new_rate);
+ return 0;
+}
+
+int VideoInput_get_signal_num(lua_State* L)
+{
+ assert(lua_gettop(L) == 1);
+ FFmpegCapture **video_input = (FFmpegCapture **)luaL_checkudata(L, 1, "VideoInput");
+ lua_pushnumber(L, -1 - (*video_input)->get_card_index());
+ return 1;
+}
+
int WhiteBalanceEffect_new(lua_State* L)
{
assert(lua_gettop(L) == 0);
{ "new", EffectChain_new },
{ "__gc", EffectChain_gc },
{ "add_live_input", EffectChain_add_live_input },
+ { "add_video_input", EffectChain_add_video_input },
{ "add_effect", EffectChain_add_effect },
{ "finalize", EffectChain_finalize },
{ NULL, NULL }
{ NULL, NULL }
};
+const luaL_Reg VideoInput_funcs[] = {
+ { "new", VideoInput_new },
+ { "rewind", VideoInput_rewind },
+ { "change_rate", VideoInput_change_rate },
+ { "get_signal_num", VideoInput_get_signal_num },
+ { NULL, NULL }
+};
+
const luaL_Reg WhiteBalanceEffect_funcs[] = {
{ "new", WhiteBalanceEffect_new },
{ "set_float", Effect_set_float },
num_inputs = 1;
}
- if (pixel_format == bmusb::PixelFormat_8BitRGBA) {
+ if (pixel_format == bmusb::PixelFormat_8BitBGRA) {
for (unsigned i = 0; i < num_inputs; ++i) {
- rgba_inputs.push_back(new FlatInput(inout_format, FORMAT_RGBA_POSTMULTIPLIED_ALPHA, GL_UNSIGNED_BYTE, global_flags.width, global_flags.height));
+ // We upload our textures ourselves, and Movit swaps
+ // R and B in the shader if we specify BGRA, so lie and say RGBA.
+ if (global_flags.can_disable_srgb_decoder) {
+ rgba_inputs.push_back(new sRGBSwitchingFlatInput(inout_format, FORMAT_RGBA_POSTMULTIPLIED_ALPHA, GL_UNSIGNED_BYTE, global_flags.width, global_flags.height));
+ } else {
+ rgba_inputs.push_back(new NonsRGBCapableFlatInput(inout_format, FORMAT_RGBA_POSTMULTIPLIED_ALPHA, GL_UNSIGNED_BYTE, global_flags.width, global_flags.height));
+ }
chain->add_input(rgba_inputs.back());
}
chain->add_effect(deinterlace_effect, reverse_inputs);
}
} else {
- assert(pixel_format == bmusb::PixelFormat_8BitYCbCr || pixel_format == bmusb::PixelFormat_10BitYCbCr);
- // The Blackmagic driver docs claim that the device outputs Y'CbCr
- // according to Rec. 601, but practical testing indicates it definitely
- // is Rec. 709 (at least up to errors attributable to rounding errors).
- // Perhaps 601 was only to indicate the subsampling positions, not the
- // colorspace itself? Tested with a Lenovo X1 gen 3 as input.
- YCbCrFormat input_ycbcr_format;
+ assert(pixel_format == bmusb::PixelFormat_8BitYCbCr ||
+ pixel_format == bmusb::PixelFormat_10BitYCbCr ||
+ pixel_format == bmusb::PixelFormat_8BitYCbCrPlanar);
+
+ // Most of these settings will be overridden later if using PixelFormat_8BitYCbCrPlanar.
input_ycbcr_format.chroma_subsampling_x = (pixel_format == bmusb::PixelFormat_10BitYCbCr) ? 1 : 2;
input_ycbcr_format.chroma_subsampling_y = 1;
input_ycbcr_format.num_levels = (pixel_format == bmusb::PixelFormat_10BitYCbCr) ? 1024 : 256;
input_ycbcr_format.cr_x_position = 0.0;
input_ycbcr_format.cb_y_position = 0.5;
input_ycbcr_format.cr_y_position = 0.5;
- input_ycbcr_format.luma_coefficients = YCBCR_REC_709;
- input_ycbcr_format.full_range = false;
+ input_ycbcr_format.luma_coefficients = YCBCR_REC_709; // Will be overridden later even if not planar.
+ input_ycbcr_format.full_range = false; // Will be overridden later even if not planar.
for (unsigned i = 0; i < num_inputs; ++i) {
// When using 10-bit input, we're converting to interleaved through v210Converter.
- YCbCrInputSplitting splitting = (pixel_format == bmusb::PixelFormat_10BitYCbCr) ? YCBCR_INPUT_INTERLEAVED : YCBCR_INPUT_SPLIT_Y_AND_CBCR;
+ YCbCrInputSplitting splitting;
+ if (pixel_format == bmusb::PixelFormat_10BitYCbCr) {
+ splitting = YCBCR_INPUT_INTERLEAVED;
+ } else if (pixel_format == bmusb::PixelFormat_8BitYCbCr) {
+ splitting = YCBCR_INPUT_SPLIT_Y_AND_CBCR;
+ } else {
+ splitting = YCBCR_INPUT_PLANAR;
+ }
if (override_bounce) {
ycbcr_inputs.push_back(new NonBouncingYCbCrInput(inout_format, input_ycbcr_format, global_flags.width, global_flags.height, splitting));
} else {
}
signal_num = theme->map_signal(signal_num);
+ connect_signal_raw(signal_num);
+}
+void LiveInputWrapper::connect_signal_raw(int signal_num)
+{
BufferedFrame first_frame = theme->input_state->buffered_frames[signal_num][0];
if (first_frame.frame == nullptr) {
// No data yet.
height = userdata->last_height[first_frame.field_number];
}
+ movit::YCbCrLumaCoefficients ycbcr_coefficients = theme->input_state->ycbcr_coefficients[signal_num];
+ bool full_range = theme->input_state->full_range[signal_num];
+
+ if (theme->input_state->ycbcr_coefficients_auto[signal_num]) {
+ full_range = false;
+
+ // The Blackmagic driver docs claim that the device outputs Y'CbCr
+ // according to Rec. 601, but this seems to indicate the subsampling
+ // positions only, as they publish Y'CbCr → RGB formulas that are
+ // different for HD and SD (corresponding to Rec. 709 and 601, respectively),
+ // and a Lenovo X1 gen 3 I used to test definitely outputs Rec. 709
+ // (at least up to rounding error). Other devices seem to use Rec. 601
+ // even on HD resolutions. Nevertheless, Rec. 709 _is_ the right choice
+ // for HD, so we default to that if the user hasn't set anything.
+ if (height >= 720) {
+ ycbcr_coefficients = YCBCR_REC_709;
+ } else {
+ ycbcr_coefficients = YCBCR_REC_601;
+ }
+ }
+
+ // This is a global, but it doesn't really matter.
+ input_ycbcr_format.luma_coefficients = ycbcr_coefficients;
+ input_ycbcr_format.full_range = full_range;
+
BufferedFrame last_good_frame = first_frame;
for (unsigned i = 0; i < max(ycbcr_inputs.size(), rgba_inputs.size()); ++i) {
BufferedFrame frame = theme->input_state->buffered_frames[signal_num][i];
case bmusb::PixelFormat_8BitYCbCr:
ycbcr_inputs[i]->set_texture_num(0, userdata->tex_y[frame.field_number]);
ycbcr_inputs[i]->set_texture_num(1, userdata->tex_cbcr[frame.field_number]);
- ycbcr_inputs[i]->set_width(this_width);
- ycbcr_inputs[i]->set_height(this_height);
+ ycbcr_inputs[i]->change_ycbcr_format(input_ycbcr_format);
+ ycbcr_inputs[i]->set_width(width);
+ ycbcr_inputs[i]->set_height(height);
+ break;
+ case bmusb::PixelFormat_8BitYCbCrPlanar:
+ ycbcr_inputs[i]->set_texture_num(0, userdata->tex_y[frame.field_number]);
+ ycbcr_inputs[i]->set_texture_num(1, userdata->tex_cb[frame.field_number]);
+ ycbcr_inputs[i]->set_texture_num(2, userdata->tex_cr[frame.field_number]);
+ ycbcr_inputs[i]->change_ycbcr_format(userdata->ycbcr_format);
+ ycbcr_inputs[i]->set_width(width);
+ ycbcr_inputs[i]->set_height(height);
break;
case bmusb::PixelFormat_10BitYCbCr:
ycbcr_inputs[i]->set_texture_num(0, userdata->tex_444[frame.field_number]);
- ycbcr_inputs[i]->set_width(this_width);
- ycbcr_inputs[i]->set_height(this_height);
+ ycbcr_inputs[i]->change_ycbcr_format(input_ycbcr_format);
+ ycbcr_inputs[i]->set_width(width);
+ ycbcr_inputs[i]->set_height(height);
break;
- case bmusb::PixelFormat_8BitRGBA:
+ case bmusb::PixelFormat_8BitBGRA:
rgba_inputs[i]->set_texture_num(userdata->tex_rgba[frame.field_number]);
- rgba_inputs[i]->set_width(this_width);
- rgba_inputs[i]->set_height(this_height);
+ rgba_inputs[i]->set_width(width);
+ rgba_inputs[i]->set_height(height);
break;
default:
assert(false);
L = luaL_newstate();
luaL_openlibs(L);
+ register_constants();
register_class("EffectChain", EffectChain_funcs);
register_class("LiveInputWrapper", LiveInputWrapper_funcs);
register_class("ImageInput", ImageInput_funcs);
+ register_class("VideoInput", VideoInput_funcs);
register_class("WhiteBalanceEffect", WhiteBalanceEffect_funcs);
register_class("ResampleEffect", ResampleEffect_funcs);
register_class("PaddingEffect", PaddingEffect_funcs);
lua_close(L);
}
+void Theme::register_constants()
+{
+ // Set Nageru.VIDEO_FORMAT_BGRA = bmusb::PixelFormat_8BitBGRA, etc.
+ const vector<pair<string, int>> constants = {
+ { "VIDEO_FORMAT_BGRA", bmusb::PixelFormat_8BitBGRA },
+ { "VIDEO_FORMAT_YCBCR", bmusb::PixelFormat_8BitYCbCrPlanar },
+ };
+
+ lua_newtable(L); // t = {}
+
+ for (const pair<string, int> &constant : constants) {
+ lua_pushstring(L, constant.first.c_str());
+ lua_pushinteger(L, constant.second);
+ lua_settable(L, 1); // t[key] = value
+ }
+
+ lua_setglobal(L, "Nageru"); // Nageru = t
+ assert(lua_gettop(L) == 0);
+}
+
void Theme::register_class(const char *class_name, const luaL_Reg *funcs)
{
assert(lua_gettop(L) == 0);
int Theme::map_signal(int signal_num)
{
+ // Negative numbers map to raw signals.
+ if (signal_num < 0) {
+ return -1 - signal_num;
+ }
+
unique_lock<mutex> lock(map_m);
if (signal_to_card_mapping.count(signal_num)) {
return signal_to_card_mapping[signal_num];