Fix so that make clean removes all objects.
diff --git a/video_stream.cpp b/video_stream.cpp
index f5ea6f4472e41b361722d5d8652b10161787c262..2951ff10ced84b0ae8f9fa735f930b7ee4075032 100644
--- a/video_stream.cpp
+++ b/video_stream.cpp
@@ -255,6 +255,8 @@ void VideoStream::stop()
 
 void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
 {
+       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, input_pts);
+
        QueuedFrame qf;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
@@ -268,6 +270,8 @@ void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_id
 
 void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha)
 {
+       fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, input_first_pts, input_second_pts, alpha);
+
        // Get the temporary OpenGL resources we need for doing the interpolation.
        InterpolatedFrameResources resources;
        {
@@ -291,7 +295,12 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        // Convert frame0 and frame1 to OpenGL textures.
        // TODO: Deduplicate against JPEGFrameView::setDecodedFrame?
        for (size_t frame_no = 0; frame_no < 2; ++frame_no) {
-               shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(stream_idx, frame_no == 1 ? input_second_pts : input_first_pts));
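+               // Fetch the input frame from the JPEG cache, decoding it only if it is not already there.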
+               JPEGID jpeg_id;
+               jpeg_id.stream_idx = stream_idx;
+               jpeg_id.pts = frame_no == 1 ? input_second_pts : input_first_pts;
+               bool did_decode;
+               shared_ptr<Frame> frame = decode_jpeg_with_cache(jpeg_id, DECODE_IF_NOT_IN_CACHE, &did_decode);
                ycbcr_format.chroma_subsampling_x = frame->chroma_subsampling_x;
                ycbcr_format.chroma_subsampling_y = frame->chroma_subsampling_y;
                ycbcr_input->change_ycbcr_format(ycbcr_format);
@@ -314,12 +323,16 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        check_error();
        glGenerateTextureMipmap(resources.gray_tex);
        check_error();
-       GLuint flow_tex = compute_flow->exec(resources.gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
+       qf.flow_tex = compute_flow->exec(resources.gray_tex, DISComputeFlow::FORWARD_AND_BACKWARD, DISComputeFlow::DO_NOT_RESIZE_FLOW);
        check_error();
 
-       qf.output_tex = interpolate->exec(resources.input_tex, flow_tex, 1280, 720, alpha);
+       qf.output_tex = interpolate->exec(resources.input_tex, qf.flow_tex, 1280, 720, alpha);
        check_error();
 
+       // We could release qf.flow_tex here, but if it were handed out again for the
+       // next frame while the GPU was still reading from it, we would risk a stall.
+       // Instead, we hold on to it and release it only after the readback is done.
+
        // Read it down (asynchronously) to the CPU.
        glPixelStorei(GL_PACK_ROW_LENGTH, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, resources.pbo);
@@ -341,6 +354,8 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
 
 void VideoStream::encode_thread_func()
 {
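+       // Name the encode thread so it is easier to identify in a debugger and in tools like top.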
+       pthread_setname_np(pthread_self(), "VideoStream");
        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        bool ok = make_current(context, surface);
@@ -373,6 +388,9 @@ void VideoStream::encode_thread_func()
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
                        vector<uint8_t> jpeg = encode_jpeg((const uint8_t *)qf.resources.pbo_contents, 1280, 720);
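+                       // The readback is complete, so the flow and output textures can now be released for reuse.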
+                       compute_flow->release_texture(qf.flow_tex);
+                       interpolate->release_texture(qf.output_tex);
 
                        AVPacket pkt;
                        av_init_packet(&pkt);