Do not try to show a frame until we've computed it (do it in a callback instead).
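For illustration only (not part of this commit): a caller sketch of the new interface, assuming a hypothetical show_in_ui() helper standing in for the real UI update. The point of the change is that the update now happens inside display_func, i.e. only after VideoStream has computed the frame (if needed) and sent it out to the stream.

#include <cstdint>
#include <functional>

#include "video_stream.h"

// Hypothetical caller (a sketch, not taken from this commit): schedule a frame
// and defer showing it to display_func, which VideoStream invokes once the
// frame has gone out to the stream.
void schedule_and_show(VideoStream *stream, int64_t output_pts,
                       unsigned stream_idx, int64_t input_pts,
                       std::function<void(unsigned, int64_t)> show_in_ui)
{
	stream->schedule_original_frame(
		output_pts,
		[=] { show_in_ui(stream_idx, input_pts); },  // display_func
		stream_idx, input_pts);
}
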
diff --git a/video_stream.h b/video_stream.h
index 146df0bedbae2abbb4887869b21d536319dc98ac..c1b808e9baf069e0b6d65258fb6fe063ea1e60f2 100644
--- a/video_stream.h
+++ b/video_stream.h
@@ -1,24 +1,25 @@
 #ifndef _VIDEO_STREAM_H
 #define _VIDEO_STREAM_H 1
 
-#include <stdint.h>
 #include <epoxy/gl.h>
+#include <stdint.h>
 
 extern "C" {
 #include <libavformat/avio.h>
 }
 
+#include "jpeg_frame_view.h"
+#include "ref_counted_gl_sync.h"
+
 #include <condition_variable>
 #include <deque>
-#include <mutex>
-#include <string>
-#include <thread>
-
+#include <functional>
 #include <movit/effect_chain.h>
 #include <movit/mix_effect.h>
 #include <movit/ycbcr_input.h>
-
-#include "ref_counted_gl_sync.h"
+#include <mutex>
+#include <string>
+#include <thread>
 
 class ChromaSubsampler;
 class DISComputeFlow;
@@ -35,12 +36,14 @@ public:
        void start();
        void stop();
 
-       void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
-       void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
-       void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f); // -1 = no secondary frame.
+       // “display_func” is called after the frame has been calculated (if needed)
+       // and has gone out to the stream.
+       void schedule_original_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
+       void schedule_faded_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
+       void schedule_interpolated_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
+       void schedule_refresh_frame(int64_t output_pts, std::function<void()> &&display_func);
 
 private:
-
        void encode_thread_func();
        std::thread encode_thread;
 
@@ -68,9 +71,9 @@ private:
 
        struct QueuedFrame {
                int64_t output_pts;
-               enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED } type;
+               enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
                unsigned stream_idx;
-               int64_t input_first_pts;  // The only pts for original frames.  
+               int64_t input_first_pts;  // The only pts for original frames.
 
                // For fades only (including fades against interpolated frames).
                int secondary_stream_idx = -1;
@@ -82,6 +85,9 @@ private:
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
+               JPEGID id;
+
+               std::function<void()> display_func;  // Called when the image is done decoding.
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
@@ -98,6 +104,8 @@ private:
        std::unique_ptr<DISComputeFlow> compute_flow;
        std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
+
+       std::vector<uint8_t> last_frame;
 };
 
 #endif  // !defined(_VIDEO_STREAM_H)
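
Also for illustration (an assumption, not stated in the diff itself): the new schedule_refresh_frame() entry point, the REFRESH queue type and the last_frame byte buffer suggest that a refresh frame simply re-sends the most recently emitted frame when there is nothing new to show. A caller under that assumption might pace output like this:

#include <cstdint>

#include "video_stream.h"

// Hypothetical pacing function (a sketch): emit either a fresh original frame
// or, if nothing new is due at this output_pts, a refresh of the last frame.
void emit_tick(VideoStream *stream, int64_t output_pts, bool have_new_frame,
               unsigned stream_idx, int64_t input_pts)
{
	if (have_new_frame) {
		stream->schedule_original_frame(
			output_pts,
			[] { /* update the preview here */ },
			stream_idx, input_pts);
	} else {
		// Presumably served from the cached last_frame bytes, so no
		// decode or interpolation is needed.
		stream->schedule_refresh_frame(
			output_pts,
			[] { /* preview already shows this frame */ });
	}
}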