1 #ifndef _VIDEO_STREAM_H
2 #define _VIDEO_STREAM_H 1
8 #include <libavformat/avio.h>
11 #include "jpeg_frame_view.h"
12 #include "ref_counted_gl_sync.h"
15 #include <condition_variable>
18 #include <movit/effect_chain.h>
19 #include <movit/mix_effect.h>
20 #include <movit/ycbcr_input.h>
// Forward declaration (owned via unique_ptr below; see <chroma_subsampler>),
// so this header does not need to pull in its full definition.
class ChromaSubsampler;
// “display_func” is called after the frame has been calculated (if needed)
// and has gone out to the stream. Returns false on failure (ie., couldn't
// schedule the frame due to lack of resources).
//
// NOTE(review): the "returns false" part applies to the bool-returning
// schedule_*_frame() variants below; this overload returns void, presumably
// because sending an already-encoded original frame cannot run out of
// interpolation resources — confirm against the .cpp.
void schedule_original_frame(std::chrono::steady_clock::time_point,
int64_t output_pts, std::function<void()> &&display_func,
unsigned stream_idx, int64_t input_pts);
// Schedules a fade: the frame at <input_pts> on <stream_idx> blended against
// the frame at <secondary_input_pts> on <secondary_stream_idx> with the given
// <fade_alpha>. Returns false on failure (couldn't schedule the frame due to
// lack of resources).
bool schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func, unsigned stream_idx,
int64_t input_pts, int secondary_stream_idx,
int64_t secondary_input_pts, float fade_alpha);
// Schedules a frame interpolated between <input_first_pts> and
// <input_second_pts> at blend factor <alpha>, optionally faded against a
// secondary stream afterwards. Returns false on failure (couldn't schedule
// the frame due to lack of resources).
//
// Fixed here: the fourth-from-last parameter was misspelled
// "secondary_inputs_pts"; renamed to secondary_input_pts for consistency
// with schedule_faded_frame() and QueuedFrame::secondary_input_pts
// (declaration-only rename, so no caller or definition breaks), and the
// "-1 = no secondary frame" comment now sits on the parameter it describes.
bool schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func, unsigned stream_idx,
int64_t input_first_pts, int64_t input_second_pts, float alpha,
int secondary_stream_idx = -1,  // -1 = no secondary frame (no fade).
int64_t secondary_input_pts = -1,
float fade_alpha = 0.0f);
// Re-sends a previously produced frame (see <last_frame> below).
// NOTE(review): purpose inferred from the name and the last_frame member —
// confirm exact semantics in the .cpp.
void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func);
// Worker that drains <frame_queue> (presumably producing/encoding frames and
// handing them to <stream_mux> — confirm in the .cpp); runs on encode_thread.
void encode_thread_func();
std::thread encode_thread;
// AVIO write callback pair: the static thunk matches the C callback signature
// (presumably recovering the instance from <opaque> — confirm in the .cpp)
// and forwards to the member function. <type> carries the sync-marker
// metadata consumed via <seen_sync_markers> below.
static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
// Allocated at the very start; if we're empty, we start dropping frames
// (so that we don't build up an infinite interpolation backlog).
// One set of GL objects needed to interpolate (and optionally fade) a single
// output frame and read it back to the CPU. Pooled in <interpolate_resources>.
struct InterpolatedFrameResources {
GLuint input_tex; // Layered (contains both input frames), Y'CbCr.
GLuint gray_tex; // Same, but Y only.
GLuint input_fbos[2]; // For rendering to the two layers of input_tex.
// Destination textures and FBO if there is a fade.
GLuint fade_y_output_tex, fade_cbcr_output_tex;
GLuint cb_tex, cr_tex; // Subsampled, final output.
GLuint pbo; // For reading the data back.
void *pbo_contents; // Persistently mapped.
// Free pool of interpolation resource sets (see struct above).
std::deque<InterpolatedFrameResources> interpolate_resources; // Under <queue_lock>.
static constexpr size_t num_interpolate_slots = 15; // Should be larger than Player::max_queued_frames, or we risk mass-dropping frames.
std::chrono::steady_clock::time_point local_pts;
// Which kind of output frame this is; determines which of the fields
// below are meaningful.
enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
int64_t input_first_pts; // The only pts for original frames.
// For fades only (including fades against interpolated frames).
int secondary_stream_idx = -1;
int64_t secondary_input_pts;
// For interpolated frames only.
int64_t input_second_pts;
// GL resources in use by this frame; presumably returned to the
// <interpolate_resources> pool once readback completes — confirm in the .cpp.
InterpolatedFrameResources resources;
RefCountedGLsync fence; // Set when the interpolated image is read back to the CPU.
GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else.
std::function<void()> display_func; // Called when the image is done decoding.
// Pending output frames; producer schedules, encode_thread_func() consumes.
// <queue_lock> guards both <frame_queue> and <interpolate_resources>;
// <queue_changed> signals new work.
std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
std::mutex queue_lock;
std::condition_variable queue_changed;
std::unique_ptr<Mux> stream_mux; // To HTTP.
// Mux header bytes kept around separately — presumably to prime late-joining
// HTTP clients; confirm against the Mux/HTTP server code.
std::string stream_mux_header;
bool seen_sync_markers = false; // Set once AVIO sync markers arrive (see write_packet2).
// Colorspace conversion chains (planar vs. semiplanar Y'CbCr output).
std::unique_ptr<YCbCrConverter> ycbcr_converter;
std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
// Frame interpolation.
std::unique_ptr<DISComputeFlow> compute_flow; // Optical-flow computation.
std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
std::unique_ptr<ChromaSubsampler> chroma_subsampler;
// NOTE(review): presumably the last frame sent, re-emitted by
// schedule_refresh_frame() — confirm contents (encoded bytes vs. raw pixels)
// in the .cpp.
std::vector<uint8_t> last_frame;
126 #endif // !defined(_VIDEO_STREAM_H)