#ifndef _VIDEO_STREAM_H
#define _VIDEO_STREAM_H 1

#include <stdint.h>
#include <epoxy/gl.h>

extern "C" {
#include <libavformat/avio.h>
}

#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <string>
#include <thread>

#include <movit/effect_chain.h>
#include <movit/ycbcr_input.h>

#include "ref_counted_gl_sync.h"

class ChromaSubsampler;
class DISComputeFlow;
class Interpolate;
class Mux;
class QSurface;
class QSurfaceFormat;
class YCbCrConverter;

class VideoStream {
public:
        VideoStream();
        ~VideoStream();
        void start();
        void stop();

        // Schedule the original frame with the given <input_pts> from input stream
        // <stream_idx> to be sent out with output timestamp <output_pts>.
        void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);

        // Schedule a frame interpolated between the input frames at <input_first_pts>
        // and <input_second_pts>, where <alpha> gives the position between them
        // (0 = first frame, 1 = second frame). Output timestamp is <output_pts>.
        void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);
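        // Rough usage sketch (illustrative only; <frame1_pts>, <frame2_pts> and
        // <mid_pts> are placeholder timestamps, not part of this API):
        //
        //   VideoStream stream;
        //   stream.start();
        //   // Pass stream 0's frame at <frame1_pts> through unchanged.
        //   stream.schedule_original_frame(frame1_pts, 0, frame1_pts);
        //   // Synthesize a frame halfway between the two inputs (alpha = 0.5).
        //   stream.schedule_interpolated_frame(mid_pts, 0, frame1_pts, frame2_pts, 0.5f);
        //   stream.stop();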

private:

        void encode_thread_func();
        std::thread encode_thread;

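        // Write callback for the AVIOContext that the muxed stream goes out through
        // (presumably the one behind <stream_mux>); the static thunk recovers the
        // VideoStream pointer from <opaque> and forwards to write_packet2().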
        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);

        // Allocated at the very start; if we're empty, we start dropping frames
        // (so that we don't build up an infinite interpolation backlog).
        struct InterpolatedFrameResources {
                GLuint input_tex;  // Layered (contains both input frames).
                GLuint gray_tex;  // Same.
                GLuint cb_tex, cr_tex;
                GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
                GLuint pbo;  // For reading the data back.
                void *pbo_contents;  // Persistently mapped.
        };
        std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
        static constexpr size_t num_interpolate_slots = 10;
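
        // A sketch of how the pool above is meant to be cycled, inferred from the
        // comments (not the actual implementation):
        //
        //   std::unique_lock<std::mutex> lock(queue_lock);
        //   if (interpolate_resources.empty()) {
        //           // All num_interpolate_slots slots are busy; drop this frame
        //           // rather than queue up more interpolation work.
        //           return;
        //   }
        //   InterpolatedFrameResources resources = interpolate_resources.front();
        //   interpolate_resources.pop_front();
        //   ... render into <resources>, push a QueuedFrame referencing it ...
        //   // When the readback fence has signaled, the receiving thread returns
        //   // <resources> to interpolate_resources.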

        struct QueuedFrame {
                int64_t output_pts;
                enum Type { ORIGINAL, INTERPOLATED } type;
                unsigned stream_idx;
                int64_t input_first_pts;  // The only pts for original frames.

                // For interpolated frames only.
                int64_t input_second_pts;
                float alpha;
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
        std::condition_variable queue_nonempty;
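
        // Roughly how the encode thread drains <frame_queue>, as a sketch based on the
        // member comments above (not the actual implementation):
        //
        //   std::unique_lock<std::mutex> lock(queue_lock);
        //   queue_nonempty.wait(lock, [this] { return !frame_queue.empty(); });
        //   QueuedFrame qf = std::move(frame_queue.front());
        //   frame_queue.pop_front();
        //   lock.unlock();
        //   if (qf.type == QueuedFrame::INTERPOLATED) {
        //           // Wait for <fence> so the GPU readback into pbo_contents is done,
        //           // then mux the frame and return qf.resources to the pool.
        //   }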

        std::unique_ptr<Mux> stream_mux;  // To HTTP.
        std::string stream_mux_header;
        bool seen_sync_markers = false;

        std::unique_ptr<YCbCrConverter> ycbcr_converter;

        // Frame interpolation.
        std::unique_ptr<DISComputeFlow> compute_flow;
        std::unique_ptr<Interpolate> interpolate;
        std::unique_ptr<ChromaSubsampler> chroma_subsampler;
};

#endif  // !defined(_VIDEO_STREAM_H)