[nageru] video_stream.h: Add a queue of frames going into VideoStream.
#ifndef _VIDEO_STREAM_H
#define _VIDEO_STREAM_H 1

#include <epoxy/gl.h>
#include <stdint.h>

extern "C" {
#include <libavformat/avio.h>
}

#include "jpeg_frame_view.h"
#include "ref_counted_gl_sync.h"

#include <chrono>
#include <condition_variable>
#include <deque>
#include <functional>
#include <memory>
#include <movit/effect_chain.h>
#include <movit/mix_effect.h>
#include <movit/ycbcr_input.h>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

class ChromaSubsampler;
class DISComputeFlow;
class Interpolate;
class Mux;
class QSurface;
class QSurfaceFormat;
class YCbCrConverter;

class VideoStream {
public:
        VideoStream();
        ~VideoStream();
        void start();
        void stop();
        void clear_queue();

        // “display_func” is called after the frame has been calculated (if needed)
        // and has gone out to the stream. The bool-returning variants return false
        // on failure (i.e., the frame could not be scheduled due to lack of resources).
        void schedule_original_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
        bool schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
        bool schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_input_pts = -1, float fade_alpha = 0.0f);  // secondary_stream_idx = -1 means no secondary (fade) frame.
        void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func);
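
        // Illustrative usage sketch, not part of the original header: a caller
        // (presumably the playback code) is assumed to drive the stream roughly
        // like this; output_pts, stream_idx, input_pts, first_pts and second_pts
        // are placeholder names.
        //
        //   VideoStream stream;
        //   stream.start();
        //   stream.schedule_original_frame(std::chrono::steady_clock::now(), output_pts,
        //                                  [] { /* e.g., update the UI */ }, stream_idx, input_pts);
        //   if (!stream.schedule_interpolated_frame(std::chrono::steady_clock::now(), output_pts,
        //                                           [] {}, stream_idx, first_pts, second_pts,
        //                                           /*alpha=*/0.5f)) {
        //           // Could not be scheduled; out of interpolation resources.
        //   }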

private:
        void encode_thread_func();
        std::thread encode_thread;

        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
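
        // Sketch of the standard FFmpeg callback-thunk pattern that this pair of
        // functions suggests; the actual wiring happens in the .cpp file, so treat
        // the details below as an assumption (buf and buf_size are placeholders).
        // The AVIOContext is created with “this” as the opaque pointer, and the
        // static thunk forwards to the member function:
        //
        //   AVIOContext *avio_ctx = avio_alloc_context(buf, buf_size, /*write_flag=*/1,
        //                                              /*opaque=*/this, nullptr, nullptr, nullptr);
        //   avio_ctx->write_data_type = &VideoStream::write_packet2_thunk;
        //
        //   int VideoStream::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size,
        //                                        AVIODataMarkerType type, int64_t time)
        //   {
        //           return ((VideoStream *)opaque)->write_packet2(buf, buf_size, type, time);
        //   }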

        // Allocated at the very start; if we run out of free slots, we start dropping
        // frames (so that we don't build up an infinite interpolation backlog).
        struct InterpolatedFrameResources {
                GLuint input_tex;  // Layered (contains both input frames), Y'CbCr.
                GLuint gray_tex;  // Same, but Y only.
                GLuint input_fbos[2];  // For rendering to the two layers of input_tex.

                // Destination textures and FBO if there is a fade.
                GLuint fade_y_output_tex, fade_cbcr_output_tex;
                GLuint fade_fbo;

                GLuint cb_tex, cr_tex;  // Subsampled, final output.

                GLuint pbo;  // For reading the data back.
                void *pbo_contents;  // Persistently mapped.
        };
        std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
        static constexpr size_t num_interpolate_slots = 15;  // Should be larger than Player::max_queued_frames, or we risk mass-dropping frames.
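
        // Presumed lifecycle of a slot (the actual code lives in the .cpp file):
        // all num_interpolate_slots slots are created up front, one is taken under
        // <queue_lock> when an interpolated frame is scheduled, and it is pushed
        // back once the frame has been read back and sent out. Roughly:
        //
        //   std::unique_lock<std::mutex> lock(queue_lock);
        //   if (interpolate_resources.empty()) {
        //           return false;  // Drop the frame rather than grow the backlog.
        //   }
        //   InterpolatedFrameResources resources = interpolate_resources.front();
        //   interpolate_resources.pop_front();
        //   ...
        //   interpolate_resources.push_back(resources);  // Later, when the frame is done.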

        struct QueuedFrame {
                std::chrono::steady_clock::time_point local_pts;

                int64_t output_pts;
                enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
                unsigned stream_idx;
                int64_t input_first_pts;  // The only pts for original frames.

                // For fades only (including fades against interpolated frames).
                int secondary_stream_idx = -1;
                int64_t secondary_input_pts;

                // For interpolated frames only.
                int64_t input_second_pts;
                float alpha;
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
                JPEGID id;

                std::function<void()> display_func;  // Called when the image is done decoding.
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
        std::condition_variable queue_changed;
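
        // Assumed shape of the consumer side in encode_thread_func() (illustrative
        // only; the real loop is in the .cpp file):
        //
        //   for ( ;; ) {
        //           QueuedFrame qf;
        //           {
        //                   std::unique_lock<std::mutex> lock(queue_lock);
        //                   queue_changed.wait(lock, [this] { return !frame_queue.empty(); });
        //                   qf = std::move(frame_queue.front());
        //                   frame_queue.pop_front();
        //           }
        //           // Wait for <fence> if the frame was rendered on the GPU, mux the
        //           // resulting bytes to <stream_mux>, then report back to the caller.
        //           if (qf.display_func != nullptr) {
        //                   qf.display_func();
        //           }
        //   }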

        std::unique_ptr<Mux> stream_mux;  // To HTTP.
        std::string stream_mux_header;
        bool seen_sync_markers = false;

        std::unique_ptr<YCbCrConverter> ycbcr_converter;
        std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;

        // Frame interpolation.
        std::unique_ptr<DISComputeFlow> compute_flow;
        std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
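
        // Presumed data flow for one interpolated frame, pieced together from the
        // members above and from InterpolatedFrameResources (the actual call sites
        // are in the .cpp file): the two source frames are rendered into the layered
        // input_tex, compute_flow estimates optical flow between the two layers,
        // interpolate renders the in-between frame at the requested alpha (optionally
        // faded against a secondary stream), chroma_subsampler produces the final
        // subsampled Cb/Cr planes, and the result is read back through the
        // persistently mapped PBO before being muxed out to HTTP.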

        std::vector<uint8_t> last_frame;
};

#endif  // !defined(_VIDEO_STREAM_H)