git.sesse.net Git - nageru/blobdiff - mixer.cpp
Make the API for begin_frame()/end_frame() in VideoEncoder a bit more sensible.
[nageru] / mixer.cpp
index a5ceeaf052b66edb95966446c3d5741ce925fb14..79d8e97eb9b9391cda7ceaf2ad686719a9f00caf 100644 (file)
--- a/mixer.cpp
+++ b/mixer.cpp
@@ -78,7 +78,7 @@ void insert_new_frame(RefCountedFrame frame, unsigned field_num, bool interlaced
        }
 }
 
-void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned field, unsigned width, unsigned height)
+void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned field, unsigned width, unsigned height, unsigned v210_width)
 {
        bool first;
        if (global_flags.ten_bit_input) {
@@ -94,12 +94,6 @@ void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned f
                // a new object. Note that since each card has its own PBOFrameAllocator,
                // we don't need to worry about these flip-flopping between resolutions.
                if (global_flags.ten_bit_input) {
-                       const size_t v210_width = v210Converter::get_minimum_v210_texture_width(width);
-
-                       glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
-                       check_error();
-                       glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
-                       check_error();
                        glBindTexture(GL_TEXTURE_2D, userdata->tex_444[field]);
                        check_error();
                        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
@@ -119,6 +113,15 @@ void ensure_texture_resolution(PBOFrameAllocator::Userdata *userdata, unsigned f
                userdata->last_width[field] = width;
                userdata->last_height[field] = height;
        }
+       if (global_flags.ten_bit_input &&
+           (first || v210_width != userdata->last_v210_width[field])) {
+               // Same as above; we need to recreate the texture.
+               glBindTexture(GL_TEXTURE_2D, userdata->tex_v210[field]);
+               check_error();
+               glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB10_A2, v210_width, height, 0, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, nullptr);
+               check_error();
+               userdata->last_v210_width[field] = v210_width;
+       }
 }
 
 void upload_texture(GLuint tex, GLuint width, GLuint height, GLuint stride, bool interlaced_stride, GLenum format, GLenum type, GLintptr offset)
@@ -563,14 +566,16 @@ void Mixer::bm_frame(unsigned card_index, uint16_t timecode,
                                field_start_line = video_format.extra_lines_top;
                        }
 
-                       ensure_texture_resolution(userdata, field, video_format.width, video_format.height);
+                       // For 8-bit input, v210_width will be nonsensical but not used.
+                       size_t v210_width = video_format.stride / sizeof(uint32_t);
+                       ensure_texture_resolution(userdata, field, video_format.width, video_format.height, v210_width);
 
                        glBindBuffer(GL_PIXEL_UNPACK_BUFFER, userdata->pbo);
                        check_error();
 
                        if (global_flags.ten_bit_input) {
                                size_t field_start = video_offset + video_format.stride * field_start_line;
-                               upload_texture(userdata->tex_v210[field], video_format.stride / sizeof(uint32_t), video_format.height, video_format.stride, interlaced_stride, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, field_start);
+                               upload_texture(userdata->tex_v210[field], v210_width, video_format.height, video_format.stride, interlaced_stride, GL_RGBA, GL_UNSIGNED_INT_2_10_10_10_REV, field_start);
                                v210_converter->convert(userdata->tex_v210[field], userdata->tex_444[field], video_format.width, video_format.height);
                        } else {
                                size_t field_y_start = y_offset + video_format.width * field_start_line;
@@ -995,8 +1000,9 @@ void Mixer::render_one_frame(int64_t duration)
        theme_main_chain.setup_chain();
        //theme_main_chain.chain->enable_phase_timing(true);
 
+       const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE);  // Corresponds to the delay in ResamplingQueue.
        GLuint y_tex, cbcr_tex;
-       bool got_frame = video_encoder->begin_frame(&y_tex, &cbcr_tex);
+       bool got_frame = video_encoder->begin_frame(pts_int + av_delay, duration, theme_main_chain.input_frames, &y_tex, &cbcr_tex);
        assert(got_frame);
 
        // Render main chain.
@@ -1026,8 +1032,7 @@ void Mixer::render_one_frame(int64_t duration)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
 
-       const int64_t av_delay = lrint(global_flags.audio_queue_length_ms * 0.001 * TIMEBASE);  // Corresponds to the delay in ResamplingQueue.
-       RefCountedGLsync fence = video_encoder->end_frame(pts_int + av_delay, duration, theme_main_chain.input_frames);
+       RefCountedGLsync fence = video_encoder->end_frame();
 
        // The live frame just shows the RGBA texture we just rendered.
        // It owns rgba_tex now.