Send refresh frames every 100 ms or so, so that the client does not think we are...
diff --git a/video_stream.h b/video_stream.h
index 2d1e8f12ba90e866a6196881dd570e142f6de388..8ac3bcf70efa1854b6d57dbcdc962fb0453df18f 100644
--- a/video_stream.h
+++ b/video_stream.h
@@ -15,8 +15,10 @@ extern "C" {
 #include <thread>
 
 #include <movit/effect_chain.h>
+#include <movit/mix_effect.h>
 #include <movit/ycbcr_input.h>
 
+#include "jpeg_frame_view.h"
 #include "ref_counted_gl_sync.h"
 
 class ChromaSubsampler;
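
The new movit/mix_effect.h include hints at how the fades added below are realized: two Y'CbCr inputs blended on the GPU by a movit::MixEffect. A minimal sketch of that pattern follows, assuming inputs that have already been added to the chain; the helper name and the direction of the fade_alpha weighting are illustrative, not taken from this commit (the actual chain setup lives in the .cpp/YCbCrConverter):

#include <movit/effect_chain.h>
#include <movit/mix_effect.h>
#include <movit/ycbcr_input.h>

// Sketch: blend two Y'CbCr inputs already added to the chain, weighted by fade_alpha.
void add_fade(movit::EffectChain *chain, movit::YCbCrInput *first,
              movit::YCbCrInput *second, float fade_alpha)
{
        movit::MixEffect *mix = new movit::MixEffect;  // The chain takes ownership.
        chain->add_effect(mix, first, second);
        mix->set_float("strength_first", 1.0f - fade_alpha);
        mix->set_float("strength_second", fade_alpha);
}
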
@@ -25,6 +27,7 @@ class Interpolate;
 class Mux;
 class QSurface;
 class QSurfaceFormat;
+class YCbCrConverter;
 
 class VideoStream {
 public:
@@ -34,7 +37,9 @@ public:
        void stop();
 
        void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
-       void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);
+       void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
+       void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_input_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
+       void schedule_refresh_frame(int64_t output_pts);
 
 private:
 
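
For context, a rough sketch of how a caller could drive the new scheduling entry points declared above. The pts variables, stream indices and the video_stream pointer are made-up placeholders, not code from this commit:

// Cross-fade between a frame from stream 0 and one from stream 2.
video_stream->schedule_faded_frame(output_pts, /*stream_idx=*/0, input_pts,
                                   /*secondary_stream_idx=*/2, secondary_input_pts,
                                   /*fade_alpha=*/0.7f);

// Interpolate halfway between two frames of stream 1, with no fade
// (the defaulted secondary_* parameters mean "no secondary frame").
video_stream->schedule_interpolated_frame(output_pts, /*stream_idx=*/1,
                                          input_first_pts, input_second_pts,
                                          /*alpha=*/0.5f);

// Re-send the last frame under a new timestamp (see the commit message).
video_stream->schedule_refresh_frame(output_pts);
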
@@ -47,10 +52,16 @@ private:
        // Allocated at the very start; if we're empty, we start dropping frames
        // (so that we don't build up an infinite interpolation backlog).
        struct InterpolatedFrameResources {
-               GLuint input_tex;  // Layered (contains both input frames).
-               GLuint gray_tex;  // Same.
-               GLuint cb_tex, cr_tex;
+               GLuint input_tex;  // Layered (contains both input frames), Y'CbCr.
+               GLuint gray_tex;  // Same, but Y only.
                GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
+
+               // Destination textures and FBO if there is a fade.
+               GLuint fade_y_output_tex, fade_cbcr_output_tex;
+               GLuint fade_fbo;
+
+               GLuint cb_tex, cr_tex;  // Subsampled, final output.
+
                GLuint pbo;  // For reading the data back.
                void *pbo_contents;  // Persistently mapped.
        };
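
A sketch of how one InterpolatedFrameResources set could be allocated up front, mainly to show the persistently mapped PBO used for readback. The function name, texture formats, buffer size and the epoxy include are assumptions, and the FBO/fade-texture setup is omitted; the real allocation is in the .cpp file:

#include <epoxy/gl.h>

void VideoStream::allocate_interpolated_frame_resources(unsigned width, unsigned height)  // Hypothetical helper.
{
        InterpolatedFrameResources resource;

        // Layered 2D-array textures holding both input frames (formats assumed).
        glCreateTextures(GL_TEXTURE_2D_ARRAY, 1, &resource.input_tex);
        glTextureStorage3D(resource.input_tex, 1, GL_RGBA8, width, height, 2);
        glCreateTextures(GL_TEXTURE_2D_ARRAY, 1, &resource.gray_tex);
        glTextureStorage3D(resource.gray_tex, 1, GL_R8, width, height, 2);

        // Persistently mapped PBO for reading the finished frame back to the CPU;
        // the fence in QueuedFrame signals when the copy is done.
        glCreateBuffers(1, &resource.pbo);
        glNamedBufferStorage(resource.pbo, width * height * 2, nullptr,
                             GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);
        resource.pbo_contents = glMapNamedBufferRange(resource.pbo, 0, width * height * 2,
                                                      GL_MAP_READ_BIT | GL_MAP_PERSISTENT_BIT);

        // ... create input_fbos, fade/output textures etc., then stash the resource in the free pool (not shown).
}
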
@@ -59,16 +70,21 @@ private:
 
        struct QueuedFrame {
                int64_t output_pts;
-               enum Type { ORIGINAL, INTERPOLATED } type;
+               enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED } type;
                unsigned stream_idx;
                int64_t input_first_pts;  // The only pts for original frames.  
 
+               // For fades only (including fades against interpolated frames).
+               int secondary_stream_idx = -1;
+               int64_t secondary_input_pts;
+
                // For interpolated frames only.
                int64_t input_second_pts;
                float alpha;
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
+               JPEGID id;
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
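
The queue above implies the usual producer/consumer split: the schedule_*() calls push QueuedFrame entries, and an encoder thread pops them, waits for the GPU readback fence where needed, and then muxes the result. A hypothetical consumer loop is sketched below; it assumes a condition variable (queue_changed) alongside queue_lock, that RefCountedGLsync::get() exposes the raw GLsync, and an illustrative function name. The real loop in the .cpp also performs the actual encoding and handles all four frame types:

void VideoStream::encode_thread_func()  // Name assumed for illustration.
{
        for ( ;; ) {
                QueuedFrame qf;
                {
                        std::unique_lock<std::mutex> lock(queue_lock);
                        queue_changed.wait(lock, [this] { return !frame_queue.empty(); });
                        qf = frame_queue.front();
                        frame_queue.pop_front();
                }
                if (qf.type == QueuedFrame::INTERPOLATED ||
                    qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        // Block until the GPU has copied the finished frame into the PBO.
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, /*timeout_ns=*/UINT64_MAX);
                }
                // ... encode/mux the frame at qf.output_pts (not shown) ...
        }
}
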
@@ -78,19 +94,15 @@ private:
        std::string stream_mux_header;
        bool seen_sync_markers = false;
 
-       QSurface *gl_surface;
-
-       // Effectively only converts from 4:2:2 to 4:4:4.
-       // TODO: Have a separate version with ResampleEffect, for scaling?
-       std::unique_ptr<movit::EffectChain> ycbcr_convert_chain;
-
-       movit::YCbCrInput *ycbcr_input;
-       movit::YCbCrFormat ycbcr_format;
+       std::unique_ptr<YCbCrConverter> ycbcr_converter;
+       std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
 
        // Frame interpolation.
        std::unique_ptr<DISComputeFlow> compute_flow;
-       std::unique_ptr<Interpolate> interpolate;
+       std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
+
+       std::vector<uint8_t> last_frame;
 };
 
 #endif  // !defined(_VIDEO_STREAM_H)
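
The new last_frame member and schedule_refresh_frame() back the behavior described in the commit message: periodically re-send the most recently encoded frame so the client does not conclude that the stream has stalled. A hypothetical driver for that, using only the public API declared above; the 100 ms interval comes from the commit message, while the timebase and the quit flag are assumptions (the real trigger logic sits with the caller, not in this header):

#include <atomic>
#include <chrono>
#include <cstdint>
#include <thread>

void send_periodic_refresh(VideoStream *video_stream, const std::atomic<bool> &should_quit)
{
        const int64_t kAssumedTimebase = 90000;  // Assumed 90 kHz pts timebase.
        int64_t output_pts = 0;
        while (!should_quit) {
                std::this_thread::sleep_for(std::chrono::milliseconds(100));
                output_pts += kAssumedTimebase / 10;  // Advance by 100 ms.
                video_stream->schedule_refresh_frame(output_pts);
        }
}
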