1 #ifndef _VIDEO_STREAM_H
2 #define _VIDEO_STREAM_H 1
8 #include <libavformat/avio.h>
11 #include "jpeg_frame_view.h"
12 #include "ref_counted_gl_sync.h"
15 #include <condition_variable>
18 #include <movit/effect_chain.h>
19 #include <movit/mix_effect.h>
20 #include <movit/ycbcr_input.h>
25 class ChromaSubsampler;
41 // “display_func” is called after the frame has been calculated (if needed)
42 // and has gone out to the stream. The bool-returning variants return false
43 // on failure (i.e., the frame couldn't be scheduled due to lack of resources);
44 // schedule_original_frame() and schedule_refresh_frame() cannot fail.
44 void schedule_original_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
45 bool schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
46 bool schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f); // -1 = no secondary frame.
47 void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts, std::function<void()> &&display_func);
50 void encode_thread_func();
51 std::thread encode_thread;
53 static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
54 int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
56 // Allocated at the very start; if we're empty, we start dropping frames
57 // (so that we don't build up an infinite interpolation backlog).
58 struct InterpolatedFrameResources {
59 GLuint input_tex; // Layered (contains both input frames), Y'CbCr.
60 GLuint gray_tex; // Same, but Y only.
61 GLuint input_fbos[2]; // For rendering to the two layers of input_tex.
63 // Destination textures and FBO if there is a fade.
64 GLuint fade_y_output_tex, fade_cbcr_output_tex;
67 GLuint cb_tex, cr_tex; // Subsampled, final output.
69 GLuint pbo; // For reading the data back.
70 void *pbo_contents; // Persistently mapped.
72 std::deque<InterpolatedFrameResources> interpolate_resources; // Under <queue_lock>.
73 static constexpr size_t num_interpolate_slots = 15; // Should be larger than Player::max_queued_frames, or we risk mass-dropping frames.
76 std::chrono::steady_clock::time_point local_pts;
79 enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
81 int64_t input_first_pts; // The only pts for original frames.
83 // For fades only (including fades against interpolated frames).
84 int secondary_stream_idx = -1;
85 int64_t secondary_input_pts;
87 // For interpolated frames only.
88 int64_t input_second_pts;
90 InterpolatedFrameResources resources;
91 RefCountedGLsync fence; // Set when the interpolated image is read back to the CPU.
92 GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else.
95 std::function<void()> display_func; // Called when the image is done decoding.
97 std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
98 std::mutex queue_lock;
99 std::condition_variable queue_changed;
101 std::unique_ptr<Mux> stream_mux; // To HTTP.
102 std::string stream_mux_header;
103 bool seen_sync_markers = false;
105 std::unique_ptr<YCbCrConverter> ycbcr_converter;
106 std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
108 // Frame interpolation.
109 std::unique_ptr<DISComputeFlow> compute_flow;
110 std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
111 std::unique_ptr<ChromaSubsampler> chroma_subsampler;
113 std::vector<uint8_t> last_frame;
116 #endif // !defined(_VIDEO_STREAM_H)