double next_clip_fade_time = -1.0;
for ( ;; ) {
+ bool clip_ready;
+ steady_clock::time_point before_sleep = steady_clock::now();
+
// Wait until we're supposed to play something.
{
unique_lock<mutex> lock(queue_state_mu);
- new_clip_changed.wait(lock, [this]{
+ clip_ready = new_clip_changed.wait_for(lock, milliseconds(100), [this]{
return new_clip_ready && current_clip.pts_in != -1;
});
new_clip_ready = false;
playing = true;
}
+ steady_clock::duration time_slept = steady_clock::now() - before_sleep;
+ pts += duration_cast<duration<size_t, TimebaseRatio>>(time_slept).count();
+
+ if (!clip_ready) {
+ if (video_stream != nullptr) {
+ video_stream->schedule_refresh_frame(pts);
+ }
+ continue;
+ }
+
Clip clip;
unsigned stream_idx;
{
interpolate_no_split.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/false));
chroma_subsampler.reset(new ChromaSubsampler);
check_error();
+
+ // The “last frame” is initially black.
+ unique_ptr<uint8_t[]> y(new uint8_t[1280 * 720]);
+ unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[640 * 720]);
+ memset(y.get(), 16, 1280 * 720);
+ memset(cb_or_cr.get(), 128, 640 * 720);
+ last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), 1280, 720);
}
// Empty out-of-line destructor. NOTE(review): presumably defined here (rather
// than defaulted in the header) so the unique_ptr<> members declared with
// incomplete types in the header can be destroyed in this TU — confirm.
VideoStream::~VideoStream() {}
queue_nonempty.notify_all();
}
+// Re-send the last frame we muxed (the cached JPEG in last_frame, which is
+// initialized to a black 1280x720 frame) with a new output timestamp, so the
+// output stream keeps advancing even when no new clip is ready to play.
+// Called from the playback loop when the 100 ms wait for a new clip times out.
+//
+// output_pts: mux-side presentation timestamp for the refreshed frame; used
+// as both pts and dts since JPEG frames have no reordering.
+void VideoStream::schedule_refresh_frame(int64_t output_pts)
+{
+	AVPacket pkt;
+	av_init_packet(&pkt);
+	pkt.stream_index = 0;
+	// The packet borrows last_frame's buffer; add_packet must consume or copy
+	// it before last_frame is next reassigned (NOTE(review): not verifiable
+	// from this fragment — confirm Mux::add_packet copies the data).
+	pkt.data = (uint8_t *)last_frame.data();
+	pkt.size = last_frame.size();
+	stream_mux->add_packet(pkt, output_pts, output_pts);
+}
+
namespace {
shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+
+ last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
} else if (qf.type == QueuedFrame::FADED) {
glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ last_frame = move(jpeg);
// Put the frame resources back.
unique_lock<mutex> lock(queue_lock);
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ last_frame = move(jpeg);
// Put the frame resources back.
unique_lock<mutex> lock(queue_lock);
void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f); // -1 = no secondary frame.
+ void schedule_refresh_frame(int64_t output_pts);
private:
std::unique_ptr<DISComputeFlow> compute_flow;
std::unique_ptr<Interpolate> interpolate, interpolate_no_split;
std::unique_ptr<ChromaSubsampler> chroma_subsampler;
+
+ std::vector<uint8_t> last_frame;
};
#endif // !defined(_VIDEO_STREAM_H)