}
}
-void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned field, unsigned width, unsigned height)
+void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned field, unsigned width, unsigned height, unsigned v210_width)
{
bool first;
if (global_flags.ten_bit_input) {
// a new object. Note that since each card has its own PBOFrameAllocator,
// we don't need to worry about these flip-flopping between resolutions.
if (global_flags.ten_bit_input) {
- const size_t v210_width = v210Converter::get_minimum_v210_texture_width(width);
-
- glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
- check_error();
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
- check_error();
glBindTexture(GL_TEXTURE_2D, userdata->tex_444[field]);
check_error();
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
userdata->last_width[field] = width;
userdata->last_height[field] = height;
}
+ if (global_flags.ten_bit_input &&
+ (first || v210_width != userdata->last_v210_width[field])) {
+ // Same as above; we need to recreate the texture.
+ glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
+ check_error();
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
+ check_error();
+ userdata->last_v210_width[field] = v210_width;
+ }
}
void upload_texture(GLuint tex, GLuint width, GLuint height, GLuint stride, bool interlaced_stride, GLenum format, GLenum type, GLintptr offset)
field_start_line = video_format.extra_lines_top;
}
- ensure_texture_resolution(userdata, field, video_format.width, video_format.height);
+ // For 8-bit input, v210_width will be nonsensical but not used.
+ size_t v210_width = video_format.stride / sizeof(uint32_t);
+ ensure_texture_resolution(userdata, field, video_format.width, video_format.height, v210_width);
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, userdata->pbo);
check_error();
if (global_flags.ten_bit_input) {
size_t field_start = video_offset + video_format.stride * field_start_line;
- upload_texture(userdata->tex_v210[field], video_format.stride / sizeof(uint32_t), video_format.height, video_format.stride, interlaced_stride, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, field_start);
+ upload_texture(userdata->tex_v210[field], v210_width, video_format.height, video_format.stride, interlaced_stride, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, field_start);
v210_converter->convert(userdata->tex_v210[field], userdata->tex_444[field], video_format.width, video_format.height);
} else {
size_t field_y_start = y_offset + video_format.width * field_start_line;
theme_main_chain.setup_chain();
//theme_main_chain.chain->enable_phase_timing(true);
+ const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE); // Corresponds to the delay in ResamplingQueue.
GLuint y_tex, cbcr_tex;
- bool got_frame = video_encoder->begin_frame(&y_tex, &cbcr_tex);
+ bool got_frame = video_encoder->begin_frame(pts_int + av_delay, duration, theme_main_chain.input_frames, &y_tex, &cbcr_tex);
assert(got_frame);
// Render main chain.
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE); // Corresponds to the delay in ResamplingQueue.
- RefCountedGLsync fence = video_encoder->end_frame(pts_int + av_delay, duration, theme_main_chain.input_frames);
+ RefCountedGLsync fence = video_encoder->end_frame();
// The live frame just shows the RGBA texture we just rendered.
// It owns rgba_tex now.