Fix loading of existing frames with -d.
diff --git a/video_stream.cpp b/video_stream.cpp
index 6142aedf6cdf2d0f0bceb73dceca2e8e467ed1e8..015fa66613d473bc860841c41ea37598efd3b22a 100644
--- a/video_stream.cpp
+++ b/video_stream.cpp
@@ -5,11 +5,9 @@ extern "C" {
 #include <libavformat/avio.h>
 }
 
-#include <jpeglib.h>
-#include <unistd.h>
-
 #include "chroma_subsampler.h"
 #include "context.h"
+#include "flags.h"
 #include "flow.h"
 #include "httpd.h"
 #include "jpeg_frame_view.h"
@@ -20,6 +18,8 @@ extern "C" {
 #include "ycbcr_converter.h"
 
 #include <epoxy/glx.h>
+#include <jpeglib.h>
+#include <unistd.h>
 
 using namespace std;
 
@@ -134,8 +134,8 @@ vector<uint8_t> encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const
        for (unsigned y = 0; y < height; y += 8) {
                for (unsigned yy = 0; yy < 8; ++yy) {
                        yptr[yy] = const_cast<JSAMPROW>(&y_data[(y + yy) * width]);
-                       cbptr[yy] = const_cast<JSAMPROW>(&cb_data[(y + yy) * width/2]);
-                       crptr[yy] = const_cast<JSAMPROW>(&cr_data[(y + yy) * width/2]);
+                       cbptr[yy] = const_cast<JSAMPROW>(&cb_data[(y + yy) * width / 2]);
+                       crptr[yy] = const_cast<JSAMPROW>(&cr_data[(y + yy) * width / 2]);
                }
 
                jpeg_write_raw_data(&cinfo, data, /*num_lines=*/8);
@@ -217,17 +217,37 @@ VideoStream::VideoStream()
                check_error();
                glNamedBufferStorage(resource.pbo, width * height * 4, nullptr, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                check_error();
-               resource.pbo_contents = glMapNamedBufferRange(resource.pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT); 
+               resource.pbo_contents = glMapNamedBufferRange(resource.pbo, 0, width * height * 4, GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
                interpolate_resources.push_back(resource);
        }
 
        check_error();
 
-       compute_flow.reset(new DISComputeFlow(width, height, operating_point2));
-       interpolate.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/true));
-       interpolate_no_split.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/false));
+       OperatingPoint op;
+       if (global_flags.interpolation_quality == 1) {
+               op = operating_point1;
+       } else if (global_flags.interpolation_quality == 2) {
+               op = operating_point2;
+       } else if (global_flags.interpolation_quality == 3) {
+               op = operating_point3;
+       } else if (global_flags.interpolation_quality == 4) {
+               op = operating_point4;
+       } else {
+               assert(false);
+       }
+
+       compute_flow.reset(new DISComputeFlow(width, height, op));
+       interpolate.reset(new Interpolate(op, /*split_ycbcr_output=*/true));
+       interpolate_no_split.reset(new Interpolate(op, /*split_ycbcr_output=*/false));
        chroma_subsampler.reset(new ChromaSubsampler);
        check_error();
+
+       // The “last frame” is initially black.
+       unique_ptr<uint8_t[]> y(new uint8_t[1280 * 720]);
+       unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[640 * 720]);
+       memset(y.get(), 16, 1280 * 720);
+       memset(cb_or_cr.get(), 128, 640 * 720);
+       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), 1280, 720);
 }
 
 VideoStream::~VideoStream() {}
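Note: the operating-point selection above reads global_flags.interpolation_quality from the newly included flags.h, which is not part of this diff. A rough sketch of the dependency, inferred from the usage in this file (names and the default are assumptions, not taken from the actual header):

        // flags.h (hypothetical excerpt, inferred from this diff):
        // interpolation_quality selects one of operating_point1..operating_point4,
        // presumably declared in flow.h (1 = fastest, 4 = highest quality).
        struct Flags {
                int interpolation_quality = 2;
                // ...
        };
        extern Flags global_flags;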
@@ -269,7 +289,7 @@ void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_id
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
-       qf.input_first_pts = input_pts; 
+       qf.input_first_pts = input_pts;
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
@@ -430,7 +450,6 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
                chroma_subsampler->subsample_chroma(qf.cbcr_tex, 1280, 720, resources.cb_tex, resources.cr_tex);
        }
 
-
        // We could have released qf.flow_tex here, but to make sure we don't cause a stall
        // when trying to reuse it for the next frame, we can just as well hold on to it
        // and release it only when the readback is done.
@@ -462,6 +481,16 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        queue_nonempty.notify_all();
 }
 
+void VideoStream::schedule_refresh_frame(int64_t output_pts)
+{
+       AVPacket pkt;
+       av_init_packet(&pkt);
+       pkt.stream_index = 0;
+       pkt.data = (uint8_t *)last_frame.data();
+       pkt.size = last_frame.size();
+       stream_mux->add_packet(pkt, output_pts, output_pts);
+}
+
 namespace {
 
 shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
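Note: schedule_refresh_frame() and last_frame are defined and used in this file, but their declarations live in video_stream.h, which is not shown in this diff. A minimal sketch of what the class presumably gains, inferred from the usage here (hypothetical, not the actual header):

        // video_stream.h (hypothetical excerpt):
        class VideoStream {
                // ...
                // Re-send the most recently muxed JPEG with a new output pts,
                // so the stream keeps flowing when nothing new is scheduled.
                void schedule_refresh_frame(int64_t output_pts);

                // JPEG bytes of the last frame we sent; starts out as the
                // black 1280x720 frame built in the constructor.
                std::vector<uint8_t> last_frame;
        };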
@@ -524,6 +553,8 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+
+                       last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
                } else if (qf.type == QueuedFrame::FADED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
@@ -538,6 +569,7 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       last_frame = move(jpeg);
 
                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
@@ -563,6 +595,7 @@ void VideoStream::encode_thread_func()
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
                        stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       last_frame = move(jpeg);
 
                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
@@ -597,4 +630,3 @@ int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType ty
        }
        return buf_size;
 }
-
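A possible call site for the new refresh path, sketched under the assumption that the playback logic owns a VideoStream and an output pts clock (the names below are illustrative and not taken from this patch):

        // Hypothetical caller: when the output clock advances but no original,
        // faded or interpolated frame is due, repeat the last frame so the
        // MJPEG stream does not stall.
        if (!frame_due_at(output_pts)) {
                video_stream->schedule_refresh_frame(output_pts);
        }

Here frame_due_at() and video_stream stand in for whatever the surrounding player code actually uses.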