interpolate_no_split.reset(new Interpolate(operating_point2, /*split_ycbcr_output=*/false));
chroma_subsampler.reset(new ChromaSubsampler);
check_error();
+
+ // The “last frame” is initially black.
+ unique_ptr<uint8_t[]> y(new uint8_t[1280 * 720]);
+ unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[640 * 720]);
+ memset(y.get(), 16, 1280 * 720);
+ memset(cb_or_cr.get(), 128, 640 * 720);
+ last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), 1280, 720);
}
VideoStream::~VideoStream() {}  // Empty; members release their own resources (presumably defined out-of-line so unique_ptr members' deleters see complete types — TODO confirm).
fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, input_first_pts, input_second_pts, alpha);
}
+ JPEGID id;
+ if (secondary_stream_idx == -1) {
+ id = JPEGID{ stream_idx, output_pts, /*interpolated=*/true };
+ } else {
+ id = create_jpegid_for_interpolated_fade(stream_idx, output_pts, secondary_stream_idx, secondary_input_pts);
+ }
+
// Get the temporary OpenGL resources we need for doing the interpolation.
InterpolatedFrameResources resources;
{
unique_lock<mutex> lock(queue_lock);
if (interpolate_resources.empty()) {
fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
- JPEGFrameView::insert_interpolated_frame(stream_idx, output_pts, nullptr);
+ JPEGFrameView::insert_interpolated_frame(id, nullptr);
return;
}
resources = interpolate_resources.front();
qf.output_pts = output_pts;
qf.stream_idx = stream_idx;
qf.resources = resources;
+ qf.id = id;
check_error();
queue_nonempty.notify_all();
}
+// Re-emit the most recent JPEG (kept in last_frame; initialized to a black
+// 1280x720 frame in the constructor, and updated after each encoded frame)
+// as a new packet stamped with output_pts, so the output stream can show a
+// frame even when there is nothing new to encode.
+void VideoStream::schedule_refresh_frame(int64_t output_pts)
+{
+	AVPacket pkt;
+	av_init_packet(&pkt);
+	pkt.stream_index = 0;
+	// NOTE(review): pkt.data points into last_frame without copying; this
+	// assumes add_packet() copies the payload (or finishes with it) before
+	// last_frame is next reassigned — confirm against Mux::add_packet.
+	pkt.data = (uint8_t *)last_frame.data();
+	pkt.size = last_frame.size();
+	// pts and dts are both output_pts (no B-frames in an MJPEG stream).
+	stream_mux->add_packet(pkt, output_pts, output_pts);
+}
+
namespace {
shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+
+ last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
} else if (qf.type == QueuedFrame::FADED) {
glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ last_frame = move(jpeg);
// Put the frame resources back.
unique_lock<mutex> lock(queue_lock);
// Send a copy of the frame on to display.
shared_ptr<Frame> frame = frame_from_pbo(qf.resources.pbo_contents, 1280, 720);
- JPEGFrameView::insert_interpolated_frame(qf.stream_idx, qf.output_pts, frame); // TODO: this is wrong for fades
+ JPEGFrameView::insert_interpolated_frame(qf.id, frame);
// Now JPEG encode it, and send it on to the stream.
vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), 1280, 720);
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ last_frame = move(jpeg);
// Put the frame resources back.
unique_lock<mutex> lock(queue_lock);