// video_stream.h — part of Nageru (git.sesse.net/nageru),
// from commit c1b808e9baf069e0b6d65258fb6fe063ea1e60f2.
1 #ifndef _VIDEO_STREAM_H
2 #define _VIDEO_STREAM_H 1
3
4 #include <epoxy/gl.h>
5 #include <stdint.h>
6
7 extern "C" {
8 #include <libavformat/avio.h>
9 }
10
11 #include "jpeg_frame_view.h"
12 #include "ref_counted_gl_sync.h"
13
#include <condition_variable>
#include <deque>
#include <functional>
#include <memory>
#include <movit/effect_chain.h>
#include <movit/mix_effect.h>
#include <movit/ycbcr_input.h>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
24 class ChromaSubsampler;
25 class DISComputeFlow;
26 class Interpolate;
27 class Mux;
28 class QSurface;
29 class QSurfaceFormat;
30 class YCbCrConverter;
31
32 class VideoStream {
33 public:
34         VideoStream();
35         ~VideoStream();
36         void start();
37         void stop();
38
39         // “display_func” is called after the frame has been calculated (if needed)
40         // and has gone out to the stream.
41         void schedule_original_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
42         void schedule_faded_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
43         void schedule_interpolated_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
44         void schedule_refresh_frame(int64_t output_pts, std::function<void()> &&display_func);
45
46 private:
47         void encode_thread_func();
48         std::thread encode_thread;
49
50         static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
51         int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
52
53         // Allocated at the very start; if we're empty, we start dropping frames
54         // (so that we don't build up an infinite interpolation backlog).
55         struct InterpolatedFrameResources {
56                 GLuint input_tex;  // Layered (contains both input frames), Y'CbCr.
57                 GLuint gray_tex;  // Same, but Y only.
58                 GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
59
60                 // Destination textures and FBO if there is a fade.
61                 GLuint fade_y_output_tex, fade_cbcr_output_tex;
62                 GLuint fade_fbo;
63
64                 GLuint cb_tex, cr_tex;  // Subsampled, final output.
65
66                 GLuint pbo;  // For reading the data back.
67                 void *pbo_contents;  // Persistently mapped.
68         };
69         std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
70         static constexpr size_t num_interpolate_slots = 10;
71
72         struct QueuedFrame {
73                 int64_t output_pts;
74                 enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
75                 unsigned stream_idx;
76                 int64_t input_first_pts;  // The only pts for original frames.
77
78                 // For fades only (including fades against interpolated frames).
79                 int secondary_stream_idx = -1;
80                 int64_t secondary_input_pts;
81
82                 // For interpolated frames only.
83                 int64_t input_second_pts;
84                 float alpha;
85                 InterpolatedFrameResources resources;
86                 RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
87                 GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
88                 JPEGID id;
89
90                 std::function<void()> display_func;  // Called when the image is done decoding.
91         };
92         std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
93         std::mutex queue_lock;
94         std::condition_variable queue_nonempty;
95
96         std::unique_ptr<Mux> stream_mux;  // To HTTP.
97         std::string stream_mux_header;
98         bool seen_sync_markers = false;
99
100         std::unique_ptr<YCbCrConverter> ycbcr_converter;
101         std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
102
103         // Frame interpolation.
104         std::unique_ptr<DISComputeFlow> compute_flow;
105         std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
106         std::unique_ptr<ChromaSubsampler> chroma_subsampler;
107
108         std::vector<uint8_t> last_frame;
109 };
110
111 #endif  // !defined(_VIDEO_STREAM_H)