#ifndef _VIDEO_STREAM_H
#define _VIDEO_STREAM_H 1

#include <stdint.h>

extern "C" {
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}

#include "frame_on_disk.h"
#include "jpeg_frame_view.h"
#include "queue_spot_holder.h"
#include "shared/mux.h"
#include "shared/ref_counted_gl_sync.h"

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <deque>
#include <functional>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
// Forward declarations for types we only hold by pointer/reference,
// so this header stays light; their full headers are pulled in by the .cpp.
// (Mux is no longer forward-declared here: we need its full definition from
// "shared/mux.h" for the nested Mux::WithSubtitles type used below.)
class ChromaSubsampler;
class DISComputeFlow;
class Interpolate;
class QSurface;
class QSurfaceFormat;
class YCbCrConverter;
class VideoStream {
public:
- VideoStream();
+ VideoStream(AVFormatContext *file_avctx); // nullptr if output to stream.
~VideoStream();
void start();
void stop();
void schedule_original_frame(std::chrono::steady_clock::time_point,
int64_t output_pts, std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- FrameOnDisk frame);
+ FrameOnDisk frame, const std::string &subtitle,
+ bool include_audio);
void schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
FrameOnDisk frame1, FrameOnDisk frame2,
- float fade_alpha);
+ float fade_alpha, const std::string &subtitle); // Always no audio.
void schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
- std::function<void(std::shared_ptr<Frame>)> &&display_func,
- QueueSpotHolder &&queue_spot_holder,
- FrameOnDisk frame1, FrameOnDisk frame2,
- float alpha, FrameOnDisk secondary_frame = {}, // Empty = no secondary (fade) frame.
- float fade_alpha = 0.0f);
+ std::function<void(std::shared_ptr<Frame>)> &&display_func,
+ QueueSpotHolder &&queue_spot_holder,
+ FrameOnDisk frame1, FrameOnDisk frame2,
+ float alpha, FrameOnDisk secondary_frame, // Empty = no secondary (fade) frame.
+ float fade_alpha, const std::string &subtitle,
+ bool include_audio);
void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
- QueueSpotHolder &&queue_spot_holder);
+ QueueSpotHolder &&queue_spot_holder, const std::string &subtitle); // Always no audio.
+ void schedule_silence(std::chrono::steady_clock::time_point, int64_t output_pts,
+ int64_t length_pts, QueueSpotHolder &&queue_spot_holder);
private:
+ struct QueuedFrame;
+
FrameReader frame_reader;
void encode_thread_func();
std::thread encode_thread;
+ std::atomic<bool> should_quit{ false };
static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
+ void add_silence(int64_t pts, int64_t length_pts);
+ void add_audio_or_silence(const QueuedFrame &qf);
// Allocated at the very start; if we're empty, we start dropping frames
// (so that we don't build up an infinite interpolation backlog).
static constexpr size_t num_interpolate_slots = 15; // Should be larger than Player::max_queued_frames, or we risk mass-dropping frames.
struct IFRReleaser {
- void operator() (InterpolatedFrameResources *ifr) const
+ void operator()(InterpolatedFrameResources *ifr) const
{
if (ifr != nullptr) {
- std::unique_lock<std::mutex> lock(ifr->owner->queue_lock);
+ std::lock_guard<std::mutex> lock(ifr->owner->queue_lock);
ifr->owner->interpolate_resources.emplace_back(ifr);
}
}
std::chrono::steady_clock::time_point local_pts;
int64_t output_pts;
- enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH } type;
- FrameOnDisk frame1; // The only frame for original frames.
+ enum Type { ORIGINAL, FADED, INTERPOLATED, FADED_INTERPOLATED, REFRESH, SILENCE } type;
+
+ // For original frames only. Made move-only so we know explicitly
+ // we don't copy these ~200 kB files around inadvertedly.
+ std::unique_ptr<std::string> encoded_jpeg;
+
+ // For everything except original frames and silence.
+ FrameOnDisk frame1;
// For fades only (including fades against interpolated frames).
FrameOnDisk secondary_frame;
float alpha;
BorrowedInterpolatedFrameResources resources;
RefCountedGLsync fence; // Set when the interpolated image is read back to the CPU.
- GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else.
+ GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else. flow_tex will typically even be from a previous frame.
FrameOnDisk id;
std::function<void()> display_func; // Called when the image is done decoding.
std::function<void(std::shared_ptr<Frame>)> display_decoded_func; // Same, except for INTERPOLATED and FADED_INTERPOLATED.
+ std::string subtitle; // Blank for none.
+
+ // Audio, in stereo interleaved 32-bit PCM. If empty and not of type SILENCE, one frame's worth of silence samples
+ // is synthesized.
+ std::string audio;
+
+ // For silence frames only.
+ int64_t silence_length_pts;
+
QueueSpotHolder queue_spot_holder;
};
std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
std::condition_variable queue_changed;
- std::unique_ptr<Mux> stream_mux; // To HTTP.
- std::string stream_mux_header;
+ AVFormatContext *avctx;
+ std::unique_ptr<Mux> mux; // To HTTP, or to file.
+ std::string stream_mux_header; // Only used in HTTP.
bool seen_sync_markers = false;
+ bool output_fast_forward;
std::unique_ptr<YCbCrConverter> ycbcr_converter;
std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;
std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
std::unique_ptr<ChromaSubsampler> chroma_subsampler;
- std::vector<uint8_t> last_frame;
+ // Cached flow computation from previous frame, if any.
+ GLuint last_flow_tex = 0;
+ FrameOnDisk last_frame1, last_frame2;
+
+ std::string last_frame;
+ Mux::WithSubtitles with_subtitles; // true for streaming, false for export to file.
};
#endif // !defined(_VIDEO_STREAM_H)