Fix a dangling reference (found by GCC 14).

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index da57ebb9d9f0cc882065fa1658bf55bc4d467194..c12acdf28f19163f7c62d0c4174b0c9496e111b5 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -3,16 +3,22 @@
 extern "C" {
 #include <libavformat/avformat.h>
 #include <libavformat/avio.h>
+#include <libavutil/channel_layout.h>
 }
 
 #include "chroma_subsampler.h"
+#include "exif_parser.h"
 #include "flags.h"
 #include "flow.h"
 #include "jpeg_frame_view.h"
 #include "movit/util.h"
+#include "pbo_pool.h"
 #include "player.h"
 #include "shared/context.h"
+#include "shared/ffmpeg_raii.h"
 #include "shared/httpd.h"
+#include "shared/metrics.h"
+#include "shared/shared_defs.h"
 #include "shared/mux.h"
 #include "util.h"
 #include "ycbcr_converter.h"
@@ -21,9 +27,29 @@ extern "C" {
 #include <jpeglib.h>
 #include <unistd.h>
 
+using namespace movit;
 using namespace std;
 using namespace std::chrono;
 
+namespace {
+
+once_flag video_metrics_inited;
+Summary metric_jpeg_encode_time_seconds;
+Summary metric_fade_latency_seconds;
+Summary metric_interpolation_latency_seconds;
+Summary metric_fade_fence_wait_time_seconds;
+Summary metric_interpolation_fence_wait_time_seconds;
+
+void wait_for_upload(shared_ptr<Frame> &frame)
+{
+       if (frame->uploaded_interpolation != nullptr) {
+               glWaitSync(frame->uploaded_interpolation.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+               frame->uploaded_interpolation.reset();
+       }
+}
+
+}  // namespace
+
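wait_for_upload() queues a wait on the GPU timeline, while the readback path further down blocks the CPU with glClientWaitSync(). A minimal standalone sketch of the two fence flavors, mirroring how this file uses them (not taken from the change itself):

    // Create a fence after queuing GPU work (upload, render, readback).
    GLsync fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);

    // GPU-side wait (as in wait_for_upload()): later GL commands are ordered
    // after the fence, but the calling thread does not block.
    glWaitSync(fence, /*flags=*/0, GL_TIMEOUT_IGNORED);

    // CPU-side wait (as in encode_thread_func() before reading the mapped PBO):
    // blocks this thread until the fence has signaled.
    glClientWaitSync(fence, /*flags=*/0, GL_TIMEOUT_IGNORED);
    glDeleteSync(fence);
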
 extern HTTPD *global_httpd;
 
 struct VectorDestinationManager {
@@ -78,8 +104,9 @@ struct VectorDestinationManager {
 };
 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
-string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
 {
+       steady_clock::time_point start = steady_clock::now();
        VectorDestinationManager dest;
 
        jpeg_compress_struct cinfo;
@@ -111,6 +138,10 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
        // (and nothing else).
        jpeg_write_marker(&cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));
 
+       if (!exif_data.empty()) {
+               jpeg_write_marker(&cinfo, JPEG_APP0 + 1, (const JOCTET *)exif_data.data(), exif_data.size());
+       }
+
        JSAMPROW yptr[8], cbptr[8], crptr[8];
        JSAMPARRAY data[3] = { yptr, cbptr, crptr };
        for (unsigned y = 0; y < height; y += 8) {
@@ -126,12 +157,39 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);
 
+       steady_clock::time_point stop = steady_clock::now();
+       metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());
+
        return move(dest.dest);
 }
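
The APP1 marker written above is where Exif data lives in a JPEG; the payload handed to jpeg_write_marker() is expected to already start with the "Exif\0\0" identifier followed by the serialized TIFF block (here it arrives pre-built, via exif_parser.h). A hedged sketch of building such a payload; make_exif_app1_payload() is a hypothetical helper, not something this change adds:

    // Hypothetical helper: wrap an already-serialized TIFF/IFD block into an
    // Exif APP1 payload of the kind encode_jpeg() expects in exif_data.
    std::string make_exif_app1_payload(const std::string &tiff_block)
    {
        std::string payload("Exif\0\0", 6);  // 6 bytes: "Exif" plus two NULs
        payload += tiff_block;
        return payload;
    }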
 
+string encode_jpeg_from_pbo(void *contents, unsigned width, unsigned height, const string exif_data)
+{
+       unsigned chroma_width = width / 2;
+
+       const uint8_t *y = (const uint8_t *)contents;
+       const uint8_t *cb = (const uint8_t *)contents + width * height;
+       const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;
+       return encode_jpeg(y, cb, cr, width, height, move(exif_data));
+}
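+
+// Annotation: encode_jpeg_from_pbo() assumes the readback buffer is planar
+// 4:2:2, laid out as a full-resolution Y plane followed by half-width Cb and
+// Cr planes. A sketch of the size arithmetic under that assumption:
+//
+//     constexpr size_t pbo_bytes_for_422(size_t width, size_t height)
+//     {
+//         const size_t chroma_width = width / 2;
+//         return width * height               // Y
+//              + chroma_width * height        // Cb
+//              + chroma_width * height;       // Cr  (total: 2 * width * height)
+//     }
+//     // E.g. 1280x720: 921600 + 460800 + 460800 = 1843200 bytes.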
+
 VideoStream::VideoStream(AVFormatContext *file_avctx)
        : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
 {
+       call_once(video_metrics_inited, [] {
+               vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
+               metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
+               metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
+               metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
+               metric_fade_latency_seconds.init(quantiles, 60.0);
+               global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
+               metric_interpolation_latency_seconds.init(quantiles, 60.0);
+               global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
+       });
+
        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
        ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
 
@@ -233,7 +291,13 @@ VideoStream::VideoStream(AVFormatContext *file_avctx)
        unique_ptr<uint8_t[]> cb_or_cr(new uint8_t[(global_flags.width / 2) * global_flags.height]);
        memset(y.get(), 16, global_flags.width * global_flags.height);
        memset(cb_or_cr.get(), 128, (global_flags.width / 2) * global_flags.height);
-       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height);
+       last_frame = encode_jpeg(y.get(), cb_or_cr.get(), cb_or_cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
+
+       if (file_avctx != nullptr) {
+               with_subtitles = Mux::WITHOUT_SUBTITLES;
+       } else {
+               with_subtitles = Mux::WITH_SUBTITLES;
+       }
 }
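
The memset values above give limited-range ("video range") Y'CbCr black, luma 16 with chroma centered at 128, so the initial last_frame encodes as a plain black picture. The same thing written out as a small reference helper:

    // Fill a planar 4:2:2 buffer set with video-range black (Y = 16, Cb = Cr = 128).
    void fill_black_422(uint8_t *y, uint8_t *cb, uint8_t *cr, unsigned width, unsigned height)
    {
        memset(y, 16, width * height);
        memset(cb, 128, (width / 2) * height);
        memset(cr, 128, (width / 2) * height);
    }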
 
 VideoStream::~VideoStream()
@@ -286,10 +350,20 @@ void VideoStream::start()
                avctx->flags = AVFMT_FLAG_CUSTOM_IO;
        }
 
+       AVCodecParameters *audio_codecpar = avcodec_parameters_alloc();
+
+       audio_codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
+       audio_codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
+       audio_codecpar->ch_layout.order = AV_CHANNEL_ORDER_NATIVE;
+       audio_codecpar->ch_layout.nb_channels = 2;
+       audio_codecpar->ch_layout.u.mask = AV_CH_LAYOUT_STEREO;
+       audio_codecpar->sample_rate = OUTPUT_FREQUENCY;
+
        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
-       mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
-                         AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, Mux::WITH_SUBTITLES));
+       mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", audio_codecpar,
+                         AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, with_subtitles));
 
+       avcodec_parameters_free(&audio_codecpar);
        encode_thread = thread(&VideoStream::encode_thread_func, this);
 }
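
The ch_layout fields above use FFmpeg's AVChannelLayout API (FFmpeg 5.1 and newer), which is why libavutil/channel_layout.h is now included. An equivalent way to obtain the same native-order stereo layout, shown only as a sketch (this change fills the fields directly instead):

    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libavutil/channel_layout.h>
    }

    // Sketch: stereo PCM S32LE parameters via av_channel_layout_default().
    AVCodecParameters *make_stereo_pcm_parameters(int sample_rate)
    {
        AVCodecParameters *par = avcodec_parameters_alloc();
        par->codec_type = AVMEDIA_TYPE_AUDIO;
        par->codec_id = AV_CODEC_ID_PCM_S32LE;
        av_channel_layout_default(&par->ch_layout, /*nb_channels=*/2);  // native-order stereo
        par->sample_rate = sample_rate;
        return par;  // caller releases with avcodec_parameters_free(&par)
    }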
 
@@ -331,9 +405,9 @@ void VideoStream::clear_queue()
 void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                           int64_t output_pts, function<void()> &&display_func,
                                           QueueSpotHolder &&queue_spot_holder,
-                                          FrameOnDisk frame, const string &subtitle)
+                                          FrameOnDisk frame, const string &subtitle, bool include_audio)
 {
-       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, frame.pts);
+       fprintf(stderr, "output_pts=%" PRId64 "  original      input_pts=%" PRId64 "\n", output_pts, frame.pts);
 
        QueuedFrame qf;
        qf.local_pts = local_pts;
@@ -342,7 +416,9 @@ void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;
-       qf.encoded_jpeg.reset(new string(frame_reader.read_frame(frame)));
+       FrameReader::Frame read_frame = frame_reader.read_frame(frame, /*read_video=*/true, include_audio);
+       qf.encoded_jpeg.reset(new string(move(read_frame.video)));
+       qf.audio = move(read_frame.audio);
 
        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
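
The logging changes in this and the surrounding hunks swap %ld for PRId64, the portable conversion specifier for int64_t (%ld is wrong on 32-bit and LLP64 targets). Minimal sketch, which needs <cinttypes>:

    #include <cinttypes>
    #include <cstdio>

    void log_pts(int64_t output_pts, int64_t input_pts)
    {
        // PRId64 expands to the right printf specifier for int64_t on every platform.
        fprintf(stderr, "output_pts=%" PRId64 "  input_pts=%" PRId64 "\n", output_pts, input_pts);
    }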
@@ -355,7 +431,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
                                        FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
                                        float fade_alpha, const string &subtitle)
 {
-       fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
+       fprintf(stderr, "output_pts=%" PRId64 "  faded         input_pts=%" PRId64 ",%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
 
        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
@@ -376,6 +452,8 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
 
        shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
        shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+       wait_for_upload(frame1);
+       wait_for_upload(frame2);
 
        ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
@@ -408,6 +486,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
+       qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);
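
fence_created is recorded here so that encode_thread_func() can report two different numbers per frame, matching the metrics this change registers:

    // fence wait time = stop - start            : how long the encode thread actually
    //                                             blocked in glClientWaitSync().
    // latency         = stop - qf.fence_created : wall-clock age of the GPU work by the
    //                                             time the frame was ready to encode.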
@@ -422,12 +501,13 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                                               int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
                                               QueueSpotHolder &&queue_spot_holder,
                                               FrameOnDisk frame1, FrameOnDisk frame2,
-                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle)
+                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle,
+                                              bool play_audio)
 {
        if (secondary_frame.pts != -1) {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f  secondary_pts=%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }
 
        // Get the temporary OpenGL resources we need for doing the interpolation.
@@ -450,6 +530,10 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        qf.local_pts = local_pts;
        qf.subtitle = subtitle;
 
+       if (play_audio) {
+               qf.audio = frame_reader.read_frame(frame1, /*read_video=*/false, /*read_audio=*/true).audio;
+       }
+
        check_error();
 
        // Convert frame0 and frame1 to OpenGL textures.
@@ -457,7 +541,11 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
                bool did_decode;
                shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+               wait_for_upload(frame);
                ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
+               if (frame_no == 1) {
+                       qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
+               }
        }
 
        glGenerateTextureMipmap(resources->input_tex);
@@ -494,14 +582,19 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                // Now decode the image we are fading against.
                bool did_decode;
                shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+               wait_for_upload(frame2);
 
                // Then fade against it, putting it into the fade Y' and CbCr textures.
-               ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+               RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+               ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
 
                interpolate_no_split->release_texture(qf.output_tex);
+
+               // We already applied the white balance, so don't have the client redo it.
+               qf.exif_data.clear();
        } else {
                tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();
@@ -536,6 +629,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
+       qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);
@@ -561,33 +655,43 @@ void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts,
        queue_changed.notify_all();
 }
 
+void VideoStream::schedule_silence(steady_clock::time_point local_pts, int64_t output_pts,
+                                   int64_t length_pts, QueueSpotHolder &&queue_spot_holder)
+{
+       QueuedFrame qf;
+       qf.type = QueuedFrame::SILENCE;
+       qf.output_pts = output_pts;
+       qf.queue_spot_holder = move(queue_spot_holder);
+       qf.silence_length_pts = length_pts;
+
+       lock_guard<mutex> lock(queue_lock);
+       frame_queue.push_back(move(qf));
+       queue_changed.notify_all();
+}
+
 namespace {
 
-shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
+RefCountedTexture clone_r8_texture(GLuint src_tex, unsigned width, unsigned height)
 {
-       size_t chroma_width = width / 2;
-
-       const uint8_t *y = (const uint8_t *)contents;
-       const uint8_t *cb = (const uint8_t *)contents + width * height;
-       const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;
+       GLuint tex;
+       glCreateTextures(GL_TEXTURE_2D, 1, &tex);
+       check_error();
+       glTextureStorage2D(tex, 1, GL_R8, width, height);
+       check_error();
+       glCopyImageSubData(src_tex, GL_TEXTURE_2D, 0, 0, 0, 0,
+                          tex, GL_TEXTURE_2D, 0, 0, 0, 0,
+                          width, height, 1);
+       check_error();
+       glTextureParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+       check_error();
+       glTextureParameteri(tex, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+       check_error();
+       glTextureParameteri(tex, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+       check_error();
+       glTextureParameteri(tex, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+       check_error();
 
-       shared_ptr<Frame> frame(new Frame);
-       frame->y.reset(new uint8_t[width * height]);
-       frame->cb.reset(new uint8_t[chroma_width * height]);
-       frame->cr.reset(new uint8_t[chroma_width * height]);
-       for (unsigned yy = 0; yy < height; ++yy) {
-               memcpy(frame->y.get() + width * yy, y + width * yy, width);
-               memcpy(frame->cb.get() + chroma_width * yy, cb + chroma_width * yy, chroma_width);
-               memcpy(frame->cr.get() + chroma_width * yy, cr + chroma_width * yy, chroma_width);
-       }
-       frame->is_semiplanar = false;
-       frame->width = width;
-       frame->height = height;
-       frame->chroma_subsampling_x = 2;
-       frame->chroma_subsampling_y = 1;
-       frame->pitch_y = width;
-       frame->pitch_chroma = chroma_width;
-       return frame;
+       return RefCountedTexture(new GLuint(tex), TextureDeleter());
 }
 
 }  // namespace
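
RefCountedTexture and TextureDeleter are defined elsewhere in the tree; clone_r8_texture() only hands the freshly created texture to them. A hedged guess at their shape, purely to make the ownership explicit (the real definitions may differ):

    // Assumed shape (not from this change): a refcounted GL texture handle whose
    // last owner deletes the texture name.
    struct TextureDeleter {
        void operator()(GLuint *tex) const
        {
            glDeleteTextures(1, tex);
            delete tex;
        }
    };
    using RefCountedTexture = std::shared_ptr<GLuint>;  // built as RefCountedTexture(new GLuint(tex), TextureDeleter())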
@@ -600,9 +704,11 @@ void VideoStream::encode_thread_func()
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
-               exit(1);
+               abort();
        }
 
+       init_pbo_pool();
+
        while (!should_quit) {
                QueuedFrame qf;
                {
@@ -637,56 +743,75 @@ void VideoStream::encode_thread_func()
 
                // Hack: We mux the subtitle packet one time unit before the actual frame,
                // so that Nageru is sure to get it first.
-               if (!qf.subtitle.empty()) {
-                       AVPacket pkt;
-                       av_init_packet(&pkt);
-                       pkt.stream_index = mux->get_subtitle_stream_idx();
-                       assert(pkt.stream_index != -1);
-                       pkt.data = (uint8_t *)qf.subtitle.data();
-                       pkt.size = qf.subtitle.size();
-                       pkt.flags = 0;
-                       pkt.duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
-                       mux->add_packet(pkt, qf.output_pts - 1, qf.output_pts - 1);
+               if (!qf.subtitle.empty() && with_subtitles == Mux::WITH_SUBTITLES) {
+                       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+                       pkt->stream_index = mux->get_subtitle_stream_idx();
+                       assert(pkt->stream_index != -1);
+                       pkt->data = (uint8_t *)qf.subtitle.data();
+                       pkt->size = qf.subtitle.size();
+                       pkt->flags = 0;
+                       pkt->duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
+                       mux->add_packet(*pkt, qf.output_pts - 1, qf.output_pts - 1);
                }
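
All the packet sends in this function now go through av_packet_alloc_unique()/AVPacketWithDeleter from shared/ffmpeg_raii.h instead of a stack AVPacket initialized with the deprecated av_init_packet(). A hedged sketch of what such a wrapper typically looks like (the actual header may differ):

    extern "C" {
    #include <libavcodec/packet.h>
    }
    #include <memory>

    struct AVPacketDeleter {
        void operator()(AVPacket *pkt) const { av_packet_free(&pkt); }
    };
    using AVPacketWithDeleter = std::unique_ptr<AVPacket, AVPacketDeleter>;

    AVPacketWithDeleter av_packet_alloc_unique()
    {
        return AVPacketWithDeleter(av_packet_alloc());
    }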
 
                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
                        string jpeg = move(*qf.encoded_jpeg);
-                       AVPacket pkt;
-                       av_init_packet(&pkt);
-                       pkt.stream_index = 0;
-                       pkt.data = (uint8_t *)jpeg.data();
-                       pkt.size = jpeg.size();
-                       pkt.flags = AV_PKT_FLAG_KEY;
-                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+                       pkt->stream_index = 0;
+                       pkt->data = (uint8_t *)jpeg.data();
+                       pkt->size = jpeg.size();
+                       pkt->flags = AV_PKT_FLAG_KEY;
+                       mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);
+
+                       add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::FADED) {
+                       steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
-
-                       shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+                       steady_clock::time_point stop = steady_clock::now();
+                       metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+                       metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
                        // Now JPEG encode it, and send it on to the stream.
-                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
-
-                       AVPacket pkt;
-                       av_init_packet(&pkt);
-                       pkt.stream_index = 0;
-                       pkt.data = (uint8_t *)jpeg.data();
-                       pkt.size = jpeg.size();
-                       pkt.flags = AV_PKT_FLAG_KEY;
-                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, /*exif_data=*/"");
+
+                       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+                       pkt->stream_index = 0;
+                       pkt->data = (uint8_t *)jpeg.data();
+                       pkt->size = jpeg.size();
+                       pkt->flags = AV_PKT_FLAG_KEY;
+                       mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);
+
+                       add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
+                       steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+                       steady_clock::time_point stop = steady_clock::now();
+                       metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+                       metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
                        // Send it on to display.
-                       shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
                        if (qf.display_decoded_func != nullptr) {
-                               qf.display_decoded_func(frame);
+                               shared_ptr<Frame> frame(new Frame);
+                               if (qf.type == QueuedFrame::FADED_INTERPOLATED) {
+                                       frame->y = clone_r8_texture(qf.resources->fade_y_output_tex, global_flags.width, global_flags.height);
+                               } else {
+                                       frame->y = clone_r8_texture(qf.output_tex, global_flags.width, global_flags.height);
+                               }
+                               frame->cb = clone_r8_texture(qf.resources->cb_tex, global_flags.width / 2, global_flags.height);
+                               frame->cr = clone_r8_texture(qf.resources->cr_tex, global_flags.width / 2, global_flags.height);
+                               frame->width = global_flags.width;
+                               frame->height = global_flags.height;
+                               frame->chroma_subsampling_x = 2;
+                               frame->chroma_subsampling_y = 1;
+                               frame->uploaded_ui_thread = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
+                               qf.display_decoded_func(move(frame));
                        }
 
                        // Now JPEG encode it, and send it on to the stream.
-                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, move(qf.exif_data));
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
@@ -695,22 +820,26 @@ void VideoStream::encode_thread_func()
                                interpolate->release_texture(qf.cbcr_tex);
                        }
 
-                       AVPacket pkt;
-                       av_init_packet(&pkt);
-                       pkt.stream_index = 0;
-                       pkt.data = (uint8_t *)jpeg.data();
-                       pkt.size = jpeg.size();
-                       pkt.flags = AV_PKT_FLAG_KEY;
-                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+                       pkt->stream_index = 0;
+                       pkt->data = (uint8_t *)jpeg.data();
+                       pkt->size = jpeg.size();
+                       pkt->flags = AV_PKT_FLAG_KEY;
+                       mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);
+
+                       add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::REFRESH) {
-                       AVPacket pkt;
-                       av_init_packet(&pkt);
-                       pkt.stream_index = 0;
-                       pkt.data = (uint8_t *)last_frame.data();
-                       pkt.size = last_frame.size();
-                       pkt.flags = AV_PKT_FLAG_KEY;
-                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+                       pkt->stream_index = 0;
+                       pkt->data = (uint8_t *)last_frame.data();
+                       pkt->size = last_frame.size();
+                       pkt->flags = AV_PKT_FLAG_KEY;
+                       mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
+
+                       add_audio_or_silence(qf);  // Definitely silence.
+               } else if (qf.type == QueuedFrame::SILENCE) {
+                       add_silence(qf.output_pts, qf.silence_length_pts);
                } else {
                        assert(false);
                }
@@ -736,11 +865,45 @@ int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType ty
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }
 
+       HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
-               global_httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
+               global_httpd->set_header(stream_id, stream_mux_header);
        } else {
-               global_httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+               global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
 }
+
+void VideoStream::add_silence(int64_t pts, int64_t length_pts)
+{
+       // At 59.94, this will never quite add up (even discounting refresh frames,
+       // which have unpredictable length), but hopefully, the player in the other
+       // end should be able to stretch silence easily enough.
+       long num_samples = lrint(length_pts * double(OUTPUT_FREQUENCY) / double(TIMEBASE)) * 2;
+       uint8_t *zero = (uint8_t *)calloc(num_samples, sizeof(int32_t));
+
+       AVPacketWithDeleter pkt = av_packet_alloc_unique();
+       pkt->stream_index = 1;
+       pkt->data = zero;
+       pkt->size = num_samples * sizeof(int32_t);
+       pkt->flags = AV_PKT_FLAG_KEY;
+       mux->add_packet(*pkt, pts, pts);
+
+       free(zero);
+}
+
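+// Worked example of the arithmetic above, assuming OUTPUT_FREQUENCY is 48000 Hz:
+// a frame lasting 1/60 s maps to 48000 / 60 = 800 samples per channel, 1600
+// samples after the "* 2" for stereo, and 1600 * sizeof(int32_t) = 6400 bytes
+// of zeroed PCM in the packet.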
+void VideoStream::add_audio_or_silence(const QueuedFrame &qf)
+{
+       if (qf.audio.empty()) {
+               int64_t frame_length = lrint(double(TIMEBASE) / global_flags.output_framerate);
+               add_silence(qf.output_pts, frame_length);
+       } else {
+               AVPacketWithDeleter pkt = av_packet_alloc_unique();
+               pkt->stream_index = 1;
+               pkt->data = (uint8_t *)qf.audio.data();
+               pkt->size = qf.audio.size();
+               pkt->flags = AV_PKT_FLAG_KEY;
+               mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
+       }
+}