#ifndef _VIDEO_STREAM_H
#define _VIDEO_STREAM_H 1

#include <stdint.h>
#include <epoxy/gl.h>

extern "C" {
#include <libavformat/avio.h>
}

#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

#include <movit/effect_chain.h>
#include <movit/mix_effect.h>
#include <movit/ycbcr_input.h>

#include "jpeg_frame_view.h"
#include "ref_counted_gl_sync.h"

class ChromaSubsampler;
class DISComputeFlow;
class Interpolate;
class Mux;
class QSurface;
class QSurfaceFormat;
class YCbCrConverter;

class VideoStream {
public:
        VideoStream();
        ~VideoStream();
        void start();
        void stop();

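        // Scheduling API. <output_pts> is the pts of the frame in the outgoing
        // stream; <stream_idx> selects the input stream, and the input pts values
        // refer to frames already received on that stream. Faded frames mix in
        // <secondary_stream_idx> with the given <fade_alpha>; interpolated frames
        // are synthesized between <input_first_pts> and <input_second_pts> at
        // relative position <alpha>. Refresh frames re-send the last frame, so
        // that clients do not think the stream has stalled.
        //
        // Rough usage sketch (the pts values are made up, purely for illustration):
        //
        //   VideoStream stream;
        //   stream.start();
        //   stream.schedule_original_frame(/*output_pts=*/0, /*stream_idx=*/0, /*input_pts=*/12345);
        //   stream.schedule_interpolated_frame(/*output_pts=*/1800, /*stream_idx=*/0,
        //                                      /*input_first_pts=*/12345, /*input_second_pts=*/15945,
        //                                      /*alpha=*/0.5f);
        //   stream.stop();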
        void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
        void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
        void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_input_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
        void schedule_refresh_frame(int64_t output_pts);

private:

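        // Worker thread that drains <frame_queue>: it presumably waits for any
        // pending GPU work on each frame (see QueuedFrame::fence below) and then
        // hands the finished frame to <stream_mux>.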
        void encode_thread_func();
        std::thread encode_thread;

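        // Write callback for the mux's AVIOContext (same signature as the
        // write_data_type callback); the static thunk just forwards to the member
        // function, which presumably pushes the muxed bytes on to the HTTP clients.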
        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);

        // Allocated at the very start; if we're empty, we start dropping frames
        // (so that we don't build up an infinite interpolation backlog).
        struct InterpolatedFrameResources {
                GLuint input_tex;  // Layered (contains both input frames), Y'CbCr.
                GLuint gray_tex;  // Same, but Y only.
                GLuint input_fbos[2];  // For rendering to the two layers of input_tex.

                // Destination textures and FBO if there is a fade.
                GLuint fade_y_output_tex, fade_cbcr_output_tex;
                GLuint fade_fbo;

                GLuint cb_tex, cr_tex;  // Subsampled, final output.

                GLuint pbo;  // For reading the data back.
                void *pbo_contents;  // Persistently mapped.
        };
        std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
        static constexpr size_t num_interpolate_slots = 10;

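        // A frame that has been scheduled but not yet sent to the mux. The
        // schedule_*() methods push these onto <frame_queue> under <queue_lock>
        // and signal <queue_nonempty>; the encode thread pops them off again.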
        struct QueuedFrame {
                int64_t output_pts;
                enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED } type;
                unsigned stream_idx;
                int64_t input_first_pts;  // The only pts for original frames.

                // For fades only (including fades against interpolated frames).
                int secondary_stream_idx = -1;
                int64_t secondary_input_pts;

                // For interpolated frames only.
                int64_t input_second_pts;
                float alpha;
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
                JPEGID id;
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
        std::condition_variable queue_nonempty;

        std::unique_ptr<Mux> stream_mux;  // To HTTP.
        std::string stream_mux_header;
        bool seen_sync_markers = false;

        std::unique_ptr<YCbCrConverter> ycbcr_converter;
        std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;

        // Frame interpolation.
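        // <compute_flow> estimates dense optical flow between the two input frames
        // (DIS = Dense Inverse Search), and <interpolate> warps/blends them along
        // that flow to synthesize the in-between frame; <chroma_subsampler> then
        // produces the subsampled Cb/Cr planes for output. The _no_split variant
        // presumably keeps luma and chroma together for frames that are faded
        // against another stream afterwards.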
        std::unique_ptr<DISComputeFlow> compute_flow;
        std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;

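        // Presumably the last frame that was sent to the mux, kept around so that
        // schedule_refresh_frame() can re-send it.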
        std::vector<uint8_t> last_frame;
};

#endif  // !defined(_VIDEO_STREAM_H)