Preload original JPEGs from disk.
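The new (void)read_file(filename_for_frame(stream_idx, input_pts)) call in schedule_original_frame() exists only for its side effect: it pulls the JPEG into the OS page cache so that the encoder thread does not stall on disk I/O later. Both read_file() and filename_for_frame() are existing nageru helpers whose definitions are outside this diff; a minimal sketch of what such a reader could look like, assuming a plain ifstream-based implementation, is:

    #include <fstream>
    #include <iterator>
    #include <string>

    // Hypothetical stand-in for read_file(): slurp the whole file so the
    // kernel caches its blocks. When only preloading is wanted, the caller
    // simply discards the returned bytes, as the (void) cast in the patch does.
    std::string read_file(const std::string &filename)
    {
            std::ifstream in(filename, std::ios::binary);
            return std::string(std::istreambuf_iterator<char>(in),
                               std::istreambuf_iterator<char>());
    }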
diff --git a/video_stream.cpp b/video_stream.cpp
index e89839a6314f963f410f577dc7607f135499a579..c645ce30f33ca86bdbf649e0de293fdd35f1253d 100644
--- a/video_stream.cpp
+++ b/video_stream.cpp
@@ -7,6 +7,7 @@ extern "C" {
 
 #include "chroma_subsampler.h"
 #include "context.h"
+#include "flags.h"
 #include "flow.h"
 #include "httpd.h"
 #include "jpeg_frame_view.h"
@@ -21,6 +22,7 @@ extern "C" {
 #include <unistd.h>
 
 using namespace std;
+using namespace std::chrono;
 
 extern HTTPD *global_httpd;
 
@@ -155,12 +157,12 @@ VideoStream::VideoStream()
        GLuint fade_y_output_tex[num_interpolate_slots], fade_cbcr_output_tex[num_interpolate_slots];
        GLuint cb_tex[num_interpolate_slots], cr_tex[num_interpolate_slots];
 
-       glCreateTextures(GL_TEXTURE_2D_ARRAY, 10, input_tex);
-       glCreateTextures(GL_TEXTURE_2D_ARRAY, 10, gray_tex);
-       glCreateTextures(GL_TEXTURE_2D, 10, fade_y_output_tex);
-       glCreateTextures(GL_TEXTURE_2D, 10, fade_cbcr_output_tex);
-       glCreateTextures(GL_TEXTURE_2D, 10, cb_tex);
-       glCreateTextures(GL_TEXTURE_2D, 10, cr_tex);
+       glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, input_tex);
+       glCreateTextures(GL_TEXTURE_2D_ARRAY, num_interpolate_slots, gray_tex);
+       glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_y_output_tex);
+       glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, fade_cbcr_output_tex);
+       glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cb_tex);
+       glCreateTextures(GL_TEXTURE_2D, num_interpolate_slots, cr_tex);
        check_error();
 
        constexpr size_t width = 1280, height = 720;  // FIXME: adjustable width, height
@@ -222,9 +224,22 @@ VideoStream::VideoStream()
 
        check_error();
 
-       compute_flow.reset(new DISComputeFlow(width, height, operating_point2));
-       interpolate.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/true));
-       interpolate_no_split.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/false));
+       OperatingPoint op;
+       if (global_flags.interpolation_quality == 1) {
+               op = operating_point1;
+       } else if (global_flags.interpolation_quality == 2) {
+               op = operating_point2;
+       } else if (global_flags.interpolation_quality == 3) {
+               op = operating_point3;
+       } else if (global_flags.interpolation_quality == 4) {
+               op = operating_point4;
+       } else {
+               assert(false);
+       }
+
+       compute_flow.reset(new DISComputeFlow(width, height, op));
+       interpolate.reset(new Interpolate(op, /*split_ycbcr_output=*/true));
+       interpolate_no_split.reset(new Interpolate(op, /*split_ycbcr_output=*/false));
        chroma_subsampler.reset(new ChromaSubsampler);
        check_error();
 
@@ -267,22 +282,34 @@ void VideoStream::stop()
        encode_thread.join();
 }
 
-void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
+void VideoStream::clear_queue()
+{
+       unique_lock<mutex> lock(queue_lock);
+       frame_queue.clear();
+}
+
+void VideoStream::schedule_original_frame(steady_clock::time_point local_pts, int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts)
 {
        fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, input_pts);
 
+       // Preload the file from disk, so that the encoder thread does not get stalled.
+       // TODO: Consider sending it through the queue instead.
+       (void)read_file(filename_for_frame(stream_idx, input_pts));
+
        QueuedFrame qf;
+       qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.input_first_pts = input_pts;
+       qf.display_func = move(display_func);
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
-       queue_nonempty.notify_all();
+       queue_changed.notify_all();
 }
 
-void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+bool VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
 {
        fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, input_pts, secondary_input_pts, fade_alpha);
 
@@ -295,7 +322,7 @@ void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx,
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
-                       return;
+                       return false;
                }
                resources = interpolate_resources.front();
                interpolate_resources.pop_front();
@@ -318,11 +345,13 @@ void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx,
        ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources.fade_fbo, 1280, 720);
 
        QueuedFrame qf;
+       qf.local_pts = local_pts;
        qf.type = QueuedFrame::FADED;
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.resources = resources;
        qf.input_first_pts = input_pts;
+       qf.display_func = move(display_func);
 
        qf.secondary_stream_idx = secondary_stream_idx;
        qf.secondary_input_pts = secondary_input_pts;
@@ -350,10 +379,11 @@ void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx,
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
-       queue_nonempty.notify_all();
+       queue_changed.notify_all();
+       return true;
 }
 
-void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+bool VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts, int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
 {
        if (secondary_stream_idx != -1) {
                fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, input_first_pts, input_second_pts, alpha, secondary_input_pts, fade_alpha);
@@ -374,8 +404,7 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
-                       JPEGFrameView::insert_interpolated_frame(id, nullptr);
-                       return;
+                       return false;
                }
                resources = interpolate_resources.front();
                interpolate_resources.pop_front();
@@ -387,6 +416,7 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        qf.stream_idx = stream_idx;
        qf.resources = resources;
        qf.id = id;
+       qf.display_func = move(display_func);
 
        check_error();
 
@@ -464,17 +494,20 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
-       queue_nonempty.notify_all();
+       queue_changed.notify_all();
+       return true;
 }
 
-void VideoStream::schedule_refresh_frame(int64_t output_pts)
+void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts, int64_t output_pts, function<void()> &&display_func)
 {
-       AVPacket pkt;
-       av_init_packet(&pkt);
-       pkt.stream_index = 0;
-       pkt.data = (uint8_t *)last_frame.data();
-       pkt.size = last_frame.size();
-       stream_mux->add_packet(pkt, output_pts, output_pts);
+       QueuedFrame qf;
+       qf.type = QueuedFrame::REFRESH;
+       qf.output_pts = output_pts;
+       qf.display_func = move(display_func);
+
+       unique_lock<mutex> lock(queue_lock);
+       frame_queue.push_back(qf);
+       queue_changed.notify_all();
 }
 
 namespace {
@@ -523,9 +556,22 @@ void VideoStream::encode_thread_func()
                QueuedFrame qf;
                {
                        unique_lock<mutex> lock(queue_lock);
-                       queue_nonempty.wait(lock, [this]{
+
+                       // Wait until we have a frame to play.
+                       queue_changed.wait(lock, [this]{
                                return !frame_queue.empty();
                        });
+                       steady_clock::time_point frame_start = frame_queue.front().local_pts;
+
+                       // Now sleep until the frame is supposed to start (the usual case),
+                       // _or_ clear_queue() happened.
+                       bool aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+                               return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+                       });
+                       if (aborted) {
+                               // clear_queue() happened, so don't play this frame after all.
+                               continue;
+                       }
                        qf = frame_queue.front();
                        frame_queue.pop_front();
                }
@@ -586,9 +632,19 @@ void VideoStream::encode_thread_func()
                        // Put the frame resources back.
                        unique_lock<mutex> lock(queue_lock);
                        interpolate_resources.push_back(qf.resources);
+               } else if (qf.type == QueuedFrame::REFRESH) {
+                       AVPacket pkt;
+                       av_init_packet(&pkt);
+                       pkt.stream_index = 0;
+                       pkt.data = (uint8_t *)last_frame.data();
+                       pkt.size = last_frame.size();
+                       stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                } else {
                        assert(false);
                }
+               if (qf.display_func != nullptr) {
+                       qf.display_func();
+               }
        }
 }
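
The reworked wait in encode_thread_func() first blocks until the queue is nonempty, then does a timed wait until the frame's scheduled local_pts, aborting if the queue is cleared (or its head replaced) in the meantime. A self-contained sketch of that pattern, using a bare deque<Item> instead of the real QueuedFrame, could look like this:

    #include <chrono>
    #include <condition_variable>
    #include <deque>
    #include <mutex>

    using namespace std;
    using namespace std::chrono;

    struct Item { steady_clock::time_point local_pts; };

    mutex queue_lock;
    condition_variable queue_changed;
    deque<Item> frame_queue;

    // Returns true if an item was popped at its scheduled time,
    // false if the queue was cleared or changed while we slept.
    bool pop_at_scheduled_time(Item *out)
    {
            unique_lock<mutex> lock(queue_lock);

            // Wait until there is something to play.
            queue_changed.wait(lock, []{ return !frame_queue.empty(); });
            steady_clock::time_point frame_start = frame_queue.front().local_pts;

            // Sleep until the frame is supposed to start, unless the queue is
            // emptied or reordered underneath us; the predicate makes
            // wait_until() return true in that case.
            bool aborted = queue_changed.wait_until(lock, frame_start, [frame_start]{
                    return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
            });
            if (aborted) {
                    return false;
            }
            *out = frame_queue.front();
            frame_queue.pop_front();
            return true;
    }

In this sketch, anything that clears or reorders the queue should also call queue_changed.notify_all() so the timed wait wakes up promptly; without a notification, the change is only noticed once frame_start is reached.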