git.sesse.net Git - nageru/blobdiff - video_stream.cpp
Send refresh frames every 100 ms or so, so that the client does not think we are...
[nageru] / video_stream.cpp
index 69dfbe6f0fea07ff22a09f60fabf09122b0e93e4..39b5b6c28985452d412406c0371fb16654f2b336 100644 (file)
@@ -228,6 +228,13 @@ VideoStream::VideoStream()
        interpolate_no_split.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/false));
        chroma_subsampler.reset(new ChromaSubsampler);
        check_error();
+
+       // The “last frame” is initially black.
+       unique_ptr<uint8_t[]> y(new uint8_t[1280 * 720]);
+       unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[640 * 720]);
+       memset(y.get(), 16, 1280 * 720);
+       memset(cb_or_cr.get(), 128, 640 * 720);
+       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), 1280, 720);
 }
 
 VideoStream::~VideoStream() {}  // Empty, but defined out-of-line — presumably so unique_ptr members (Interpolate, ChromaSubsampler, ...) are destroyed where their types are complete; confirm against video_stream.h.
@@ -355,13 +362,20 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
                fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, input_first_pts, input_second_pts, alpha);
        }
 
+       JPEGID id;
+       if (secondary_stream_idx == -1) {
+               id = JPEGID{ stream_idx, output_pts, /*interpolated=*/true };
+       } else {
+               id = create_jpegid_for_interpolated_fade(stream_idx, output_pts, secondary_stream_idx, secondary_input_pts);
+       }
+
        // Get the temporary OpenGL resources we need for doing the interpolation.
        InterpolatedFrameResources resources;
        {
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
-                       JPEGFrameView::insert_interpolated_frame(stream_idx, output_pts, nullptr);
+                       JPEGFrameView::insert_interpolated_frame(id, nullptr);
                        return;
                }
                resources = interpolate_resources.front();
@@ -373,6 +387,7 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.resources = resources;
+       qf.id = id;
 
        check_error();
 
@@ -454,6 +469,16 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        queue_nonempty.notify_all();
 }
 
+// Re-send the most recent JPEG (last_frame: either the initial black frame
+// built in the constructor, or whatever encode_thread_func last muxed) with a
+// fresh pts, so the client keeps receiving data during idle periods.
+void VideoStream::schedule_refresh_frame(int64_t output_pts)
+{
+       AVPacket pkt;
+       // NOTE(review): av_init_packet() is deprecated in newer FFmpeg;
+       // av_packet_unref()/zero-init or AVPacket allocation APIs replace it.
+       av_init_packet(&pkt);
+       pkt.stream_index = 0;
+       // pkt borrows last_frame's buffer (no copy here). Assumes add_packet()
+       // copies/consumes the data before last_frame is next reassigned, and
+       // that this runs on the same thread as encode_thread_func (which
+       // writes last_frame) — TODO confirm; otherwise this is a data race.
+       // NOTE(review): every (M)JPEG frame is a keyframe; consider setting
+       // pkt.flags |= AV_PKT_FLAG_KEY so muxers/clients can seek on it.
+       pkt.data = (uint8_t *)last_frame.data();
+       pkt.size = last_frame.size();
+       // Same value passed for both pts and dts (no reordering in this stream).
+       stream_mux->add_packet(pkt, output_pts, output_pts);
+}
+
 namespace {
 
 shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
@@ -516,6 +541,8 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+
+                       last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
                } else if (qf.type == QueuedFrame::FADED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
@@ -530,6 +557,7 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       last_frame = move(jpeg);
 
                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
@@ -539,7 +567,7 @@ void VideoStream::encode_thread_func()
 
                        // Send a copy of the frame on to display.
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources.pbo_contents, 1280, 720);
-                       JPEGFrameView::insert_interpolated_frame(qf.stream_idx, qf.output_pts, frame);  // TODO: this is wrong for fades
+                       JPEGFrameView::insert_interpolated_frame(qf.id, frame);
 
                        // Now JPEG encode it, and send it on to the stream.
                        vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), 1280, 720);
@@ -555,6 +583,7 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       last_frame = move(jpeg);
 
                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);