X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=video_stream.h;h=1439eb0db1ef1377fa920feadb2cfdae5bd8e8c5;hb=3035f82f46c322a9ba8f091b162ee36985688de9;hp=925cace4c62f5465c7bfb4d21bd0e64aca17a3c3;hpb=8deea0eee4f9b18540463ba253747c232873527b;p=nageru

diff --git a/video_stream.h b/video_stream.h
index 925cace..1439eb0 100644
--- a/video_stream.h
+++ b/video_stream.h
@@ -15,15 +15,19 @@ extern "C" {
 
 #include 
 #include 
+#include 
 #include 
 
+#include "jpeg_frame_view.h"
 #include "ref_counted_gl_sync.h"
 
+class ChromaSubsampler;
 class DISComputeFlow;
 class Interpolate;
 class Mux;
 class QSurface;
 class QSurfaceFormat;
+class YCbCrConverter;
 
 class VideoStream {
 public:
@@ -33,7 +37,8 @@ public:
 	void stop();
 
 	void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
-	void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha);
+	void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
+	void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
 
 private:
 
@@ -46,9 +51,16 @@ private:
 	// Allocated at the very start; if we're empty, we start dropping frames
 	// (so that we don't build up an infinite interpolation backlog).
 	struct InterpolatedFrameResources {
-		GLuint input_tex;  // Layered (contains both input frames).
-		GLuint gray_tex;  // Same.
+		GLuint input_tex;  // Layered (contains both input frames), Y'CbCr.
+		GLuint gray_tex;  // Same, but Y only.
 		GLuint input_fbos[2];  // For rendering to the two layers of input_tex.
+
+		// Destination textures and FBO if there is a fade.
+		GLuint fade_y_output_tex, fade_cbcr_output_tex;
+		GLuint fade_fbo;
+
+		GLuint cb_tex, cr_tex;  // Subsampled, final output.
+
 		GLuint pbo;  // For reading the data back.
 		void *pbo_contents;  // Persistently mapped.
 	};
@@ -57,16 +69,21 @@ private:
 
 	struct QueuedFrame {
 		int64_t output_pts;
-		enum Type { ORIGINAL, INTERPOLATED } type;
+		enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED } type;
 		unsigned stream_idx;
 		int64_t input_first_pts;  // The only pts for original frames.
 
+		// For fades only (including fades against interpolated frames).
+		int secondary_stream_idx = -1;
+		int64_t secondary_input_pts;
+
 		// For interpolated frames only.
 		int64_t input_second_pts;
 		float alpha;
 		InterpolatedFrameResources resources;
 		RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
-		GLuint flow_tex, output_tex;  // Released in the receiving thread; not really used for anything else.
+		GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
+		JPEGID id;
 	};
 	std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
 	std::mutex queue_lock;
@@ -76,18 +93,13 @@ private:
 	std::string stream_mux_header;
 	bool seen_sync_markers = false;
 
-	QSurface *gl_surface;
-
-	// Effectively only converts from 4:2:2 to 4:4:4.
-	// TODO: Have a separate version with ResampleEffect, for scaling?
-	std::unique_ptr<movit::EffectChain> ycbcr_convert_chain;
-
-	movit::YCbCrInput *ycbcr_input;
-	movit::YCbCrFormat ycbcr_format;
+	std::unique_ptr<YCbCrConverter> ycbcr_converter;
+	std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
 
 	// Frame interpolation.
 	std::unique_ptr<DISComputeFlow> compute_flow;
-	std::unique_ptr<Interpolate> interpolate;
+	std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
+	std::unique_ptr<ChromaSubsampler> chroma_subsampler;
 };
 
 #endif  // !defined(_VIDEO_STREAM_H)
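
A minimal caller-side sketch of the three scheduling entry points declared in the new version of the header. Only the schedule_*_frame() signatures come from the diff above; the function name, pts values and stream indices below are invented for illustration and do not correspond to any real call site.

	// Hypothetical illustration only -- not part of the patch.
	#include "video_stream.h"

	void schedule_example(VideoStream *stream)
	{
		// Emit an original (already encoded) frame unchanged.
		stream->schedule_original_frame(/*output_pts=*/1000, /*stream_idx=*/0, /*input_pts=*/1000);

		// Fade the primary stream against stream 1, with fade_alpha = 0.3.
		stream->schedule_faded_frame(/*output_pts=*/2000, /*stream_idx=*/0, /*input_pts=*/2000,
		                             /*secondary_stream_idx=*/1, /*secondary_input_pts=*/2000,
		                             /*fade_alpha=*/0.3f);

		// Interpolate halfway between two input frames; the secondary
		// parameters keep their -1 defaults, so no fade is applied on top.
		stream->schedule_interpolated_frame(/*output_pts=*/3000, /*stream_idx=*/0,
		                                    /*input_first_pts=*/2500, /*input_second_pts=*/3500,
		                                    /*alpha=*/0.5f);
	}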