// video_stream.h — part of Nageru (git.sesse.net/nageru).
// From commit: "Start hacking in support for interpolated frames in the
// main application."
1 #ifndef _VIDEO_STREAM_H
2 #define _VIDEO_STREAM_H 1
3
4 #include <stdint.h>
5 #include <epoxy/gl.h>
6
7 extern "C" {
8 #include <libavformat/avio.h>
9 }
10
#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
16
17 #include <movit/effect_chain.h>
18 #include <movit/ycbcr_input.h>
19
20 #include "ref_counted_gl_sync.h"
21
22 class DISComputeFlow;
23 class GrayscaleConversion;
24 class Interpolate;
25 class Mux;
26 class QSurface;
27 class QSurfaceFormat;
28
// Accepts scheduled output frames — either passed through from an input
// stream or interpolated between two input frames — and muxes them into a
// single outgoing stream (see <stream_mux>, which goes to HTTP). The actual
// encode/mux work happens on a dedicated thread (<encode_thread>); the
// schedule_* calls only enqueue work onto <frame_queue>.
class VideoStream {
public:
	VideoStream();
	~VideoStream();

	// Start/stop the background encode thread.
	void start();
	void stop();

	// Schedule a frame that exists verbatim in input stream <stream_idx>;
	// <input_pts> selects which input frame, <output_pts> is its pts in
	// the output stream.
	void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);

	// Schedule a frame synthesized between the input frames at
	// <input_first_pts> and <input_second_pts>. <alpha> is the blend
	// factor between the two (presumably 0.0 = first frame and
	// 1.0 = second — confirm against the implementation).
	void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);

private:

	// Body of <encode_thread>; consumes <frame_queue>.
	void encode_thread_func();
	std::thread encode_thread;

	// AVIO write callback pair; the signature matches AVIOContext's
	// write_data_type callback, and the static thunk forwards to the
	// member function via <opaque>.
	static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
	int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);

	// GL resources needed to produce one interpolated frame.
	// Allocated at the very start; if we're empty, we start dropping frames
	// (so that we don't build up an infinite interpolation backlog).
	struct InterpolatedFrameResources {
		GLuint input_tex;  // Layered (contains both input frames).
		GLuint gray_tex;  // Same.
		GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
		GLuint pbo;  // For reading the data back.
		void *pbo_contents;  // Persistently mapped.
	};
	std::deque<InterpolatedFrameResources> interpolate_resources;  // Under <queue_lock>.
	static constexpr size_t num_interpolate_slots = 10;  // Size of the resource pool above.

	// One unit of work for the encode thread.
	struct QueuedFrame {
		int64_t output_pts;
		enum Type { ORIGINAL, INTERPOLATED } type;
		unsigned stream_idx;
		int64_t input_first_pts;  // The only pts for original frames.

		// For interpolated frames only.
		int64_t input_second_pts;
		float alpha;
		InterpolatedFrameResources resources;
		GLuint output_tex;
		RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
	};
	std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
	std::mutex queue_lock;
	// Presumably signaled when <frame_queue> gains an element — confirm
	// against encode_thread_func().
	std::condition_variable queue_nonempty;

	std::unique_ptr<Mux> stream_mux;  // To HTTP.
	std::string stream_mux_header;
	bool seen_sync_markers = false;  // NOTE(review): looks related to AVIO data markers in write_packet2 — confirm.

	// GL surface plus the Movit chain used to convert frames to Y'CbCr.
	// <gl_surface> is a raw pointer; ownership is not visible here — confirm
	// it is owned (or leaked deliberately) elsewhere.
	QSurface *gl_surface;
	std::unique_ptr<movit::EffectChain> ycbcr_convert_chain;  // TODO: Have a separate version with resample, for scaling?
	movit::YCbCrInput *ycbcr_input;  // Owned by <ycbcr_convert_chain>? Raw pointer — confirm.
	movit::YCbCrFormat ycbcr_format;

	// Frame interpolation pipeline: grayscale conversion -> optical flow ->
	// interpolation.
	std::unique_ptr<GrayscaleConversion> gray;
	std::unique_ptr<DISComputeFlow> compute_flow;
	std::unique_ptr<Interpolate> interpolate;
};
90
91 #endif  // !defined(_VIDEO_STREAM_H)