]> git.sesse.net Git - nageru/blob - video_stream.h
Do the interpolation in Y'CbCr instead of RGBA; saves some conversions back and forth...
[nageru] / video_stream.h
1 #ifndef _VIDEO_STREAM_H
2 #define _VIDEO_STREAM_H 1
3
#include <stddef.h>
#include <stdint.h>

#include <epoxy/gl.h>

extern "C" {
#include <libavformat/avio.h>
}

#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <string>
#include <thread>

#include <movit/effect_chain.h>
#include <movit/ycbcr_input.h>

#include "ref_counted_gl_sync.h"
21
22 class DISComputeFlow;
23 class Interpolate;
24 class Mux;
25 class QSurface;
26 class QSurfaceFormat;
27
// Takes input frames (identified by pts), optionally synthesizes new frames
// between pairs of them, and feeds the result to an encode thread that muxes
// the output stream (to HTTP, via <stream_mux>). Frames are scheduled by
// output pts through the schedule_*_frame() calls below.
class VideoStream {
public:
        VideoStream();
        ~VideoStream();

        // Start/stop the worker thread (see encode_thread_func()).
        void start();
        void stop();

        // Queue the input frame at <input_pts> to be output unchanged at <output_pts>.
        void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);

        // Queue a frame interpolated between the input frames at <input_first_pts>
        // and <input_second_pts>, to be output at <output_pts>. <alpha> is the
        // interpolation weight between the two (presumably 0.0 = first frame,
        // 1.0 = second frame -- confirm against the interpolation code).
        void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);

private:

        // Worker thread; consumes <frame_queue> and encodes/muxes the frames.
        void encode_thread_func();
        std::thread encode_thread;

        // AVIO write callback (signature matches AVIOContext's write_data_type
        // callback); <opaque> is the VideoStream instance, which the thunk
        // forwards to the member function below.
        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);

        // GPU-side resources needed to produce one interpolated frame.
        // Allocated at the very start; if we're empty, we start dropping frames
        // (so that we don't build up an infinite interpolation backlog).
        struct InterpolatedFrameResources {
                GLuint input_tex;  // Layered (contains both input frames).
                GLuint gray_tex;  // Same.
                GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
                GLuint pbo;  // For reading the data back.
                void *pbo_contents;  // Persistently mapped.
        };
        std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
        static constexpr size_t num_interpolate_slots = 10;

        // A frame scheduled for output: either an input frame passed through
        // as-is (ORIGINAL) or one synthesized between two inputs (INTERPOLATED).
        struct QueuedFrame {
                int64_t output_pts;
                enum Type { ORIGINAL, INTERPOLATED } type;
                unsigned stream_idx;
                int64_t input_first_pts;  // The only pts for original frames.

                // For interpolated frames only.
                int64_t input_second_pts;
                float alpha;  // Interpolation weight, as given to schedule_interpolated_frame().
                InterpolatedFrameResources resources;
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex;  // Released in the receiving thread; not really used for anything else.
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
        std::condition_variable queue_nonempty;  // Presumably signaled when <frame_queue> gains an element (name suggests so) -- confirm in the .cpp.

        std::unique_ptr<Mux> stream_mux;  // To HTTP.
        std::string stream_mux_header;
        bool seen_sync_markers = false;

        // NOTE(review): raw pointer; ownership is not visible from this header -- confirm.
        QSurface *gl_surface;

        // Effectively only converts from 4:2:2 to 4:4:4.
        // TODO: Have a separate version with ResampleEffect, for scaling?
        std::unique_ptr<movit::EffectChain> ycbcr_convert_chain;

        movit::YCbCrInput *ycbcr_input;  // Presumably owned by <ycbcr_convert_chain> (Movit chains own their inputs) -- confirm.
        movit::YCbCrFormat ycbcr_format;

        // Frame interpolation.
        std::unique_ptr<DISComputeFlow> compute_flow;
        std::unique_ptr<Interpolate> interpolate;
};
92
93 #endif  // !defined(_VIDEO_STREAM_H)