Make multitrack export include audio.

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index dcf44f8e452fb9277e25bd08eafd681d0ed286b4..06acfd2601a43ebe4ff17af8b0f7fffee788fc25 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -6,14 +6,14 @@ extern "C" {
 }
 
 #include "chroma_subsampler.h"
-#include "shared/context.h"
 #include "flags.h"
 #include "flow.h"
-#include "shared/httpd.h"
 #include "jpeg_frame_view.h"
 #include "movit/util.h"
-#include "shared/mux.h"
 #include "player.h"
+#include "shared/context.h"
+#include "shared/httpd.h"
+#include "shared/mux.h"
 #include "util.h"
 #include "ycbcr_converter.h"
 
@@ -28,7 +28,7 @@ extern HTTPD *global_httpd;
 
 struct VectorDestinationManager {
        jpeg_destination_mgr pub;
-       std::vector<uint8_t> dest;
+       string dest;
 
        VectorDestinationManager()
        {
@@ -62,7 +62,7 @@ struct VectorDestinationManager {
        {
                dest.resize(bytes_used + 4096);
                dest.resize(dest.capacity());
-               pub.next_output_byte = dest.data() + bytes_used;
+               pub.next_output_byte = (uint8_t *)dest.data() + bytes_used;
                pub.free_in_buffer = dest.size() - bytes_used;
        }
 
@@ -78,7 +78,7 @@ struct VectorDestinationManager {
 };
 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
-vector<uint8_t> encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
 {
        VectorDestinationManager dest;
 
@@ -241,6 +241,30 @@ VideoStream::~VideoStream()
        if (last_flow_tex != 0) {
                compute_flow->release_texture(last_flow_tex);
        }
+
+       for (const unique_ptr<InterpolatedFrameResources> &resource : interpolate_resources) {
+               glUnmapNamedBuffer(resource->pbo);
+               check_error();
+               glDeleteBuffers(1, &resource->pbo);
+               check_error();
+               glDeleteFramebuffers(2, resource->input_fbos);
+               check_error();
+               glDeleteFramebuffers(1, &resource->fade_fbo);
+               check_error();
+               glDeleteTextures(1, &resource->input_tex);
+               check_error();
+               glDeleteTextures(1, &resource->gray_tex);
+               check_error();
+               glDeleteTextures(1, &resource->fade_y_output_tex);
+               check_error();
+               glDeleteTextures(1, &resource->fade_cbcr_output_tex);
+               check_error();
+               glDeleteTextures(1, &resource->cb_tex);
+               check_error();
+               glDeleteTextures(1, &resource->cr_tex);
+               check_error();
+       }
+       assert(interpolate_resources.size() == num_interpolate_slots);
 }
 
 void VideoStream::start()
@@ -264,7 +288,7 @@ void VideoStream::start()
 
        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
        mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
-               AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
+                         AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, Mux::WITH_SUBTITLES));
 
        encode_thread = thread(&VideoStream::encode_thread_func, this);
 }
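
Side note: the new Mux::WITH_SUBTITLES flag (Mux is Nageru/Futatabi-internal) makes the mux expose an extra stream for the subtitle packets muxed further down. As a hedged sketch of what declaring such a stream looks like at the FFmpeg layer, assuming nothing about Mux's actual internals, and with the helper name and codec choice being illustrative assumptions:

    extern "C" {
    #include <libavformat/avformat.h>
    }

    // Hypothetical helper: declare a plain-text subtitle stream on a mux.
    AVStream *add_subtitle_stream(AVFormatContext *avctx, int timebase)
    {
        AVStream *stream = avformat_new_stream(avctx, nullptr);
        if (stream == nullptr) {
            return nullptr;
        }
        stream->codecpar->codec_type = AVMEDIA_TYPE_SUBTITLE;
        stream->codecpar->codec_id = AV_CODEC_ID_TEXT;  // Raw text payloads.
        stream->time_base = AVRational{ 1, timebase };
        return stream;
    }
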
@@ -282,7 +306,7 @@ void VideoStream::clear_queue()
        deque<QueuedFrame> q;
 
        {
-               unique_lock<mutex> lock(queue_lock);
+               lock_guard<mutex> lock(queue_lock);
                q = move(frame_queue);
        }
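
Side note: this commit's many unique_lock → lock_guard swaps are an intent fix, not a behavior change: lock_guard cannot be unlocked early, re-locked, or moved, so it documents that the mutex is held for the entire scope (and avoids unique_lock's small bookkeeping overhead). encode_thread_func() below keeps unique_lock, since condition_variable::wait() needs to unlock and re-lock. A minimal sketch of the distinction:

    #include <condition_variable>
    #include <mutex>

    std::mutex queue_lock;
    std::condition_variable queue_changed;
    bool ready = false;

    void plain_critical_section()
    {
        std::lock_guard<std::mutex> lock(queue_lock);  // Held to scope exit; no unlock().
        ready = true;
        queue_changed.notify_all();
    }

    void waiting_critical_section()
    {
        std::unique_lock<std::mutex> lock(queue_lock);  // wait() must unlock/relock.
        queue_changed.wait(lock, [] { return ready; });
    }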
 
@@ -307,23 +331,22 @@ void VideoStream::clear_queue()
 void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                           int64_t output_pts, function<void()> &&display_func,
                                           QueueSpotHolder &&queue_spot_holder,
-                                          FrameOnDisk frame)
+                                          FrameOnDisk frame, const string &subtitle)
 {
-       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, frame.pts);
+       fprintf(stderr, "output_pts=%" PRId64 "  original      input_pts=%" PRId64 "\n", output_pts, frame.pts);
 
-       // Preload the file from disk, so that the encoder thread does not get stalled.
-       // TODO: Consider sending it through the queue instead.
-       (void)frame_reader.read_frame(frame);
+       // TODO: Write audio if at the right speed.
 
        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
-       qf.frame1 = frame;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
+       qf.subtitle = subtitle;
+       qf.encoded_jpeg.reset(new string(frame_reader.read_frame(frame, /*read_audio=*/false).video));
 
-       unique_lock<mutex> lock(queue_lock);
+       lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
 }
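
Side note: the frame's JPEG bytes are now read from disk once, here on the scheduling thread, and carried through the queue in qf.encoded_jpeg, instead of being preloaded and then read a second time in the encoder thread. Judging from the ".video" member and the read_audio flag at the call site, together with the commit message, read_frame() now returns video and (optionally) audio together; an assumed sketch of that return type, not taken from this diff:

    // Assumed shape of FrameReader::read_frame()'s result.
    struct ReadFrameResult {
        std::string video;  // JPEG bytes for the frame.
        std::string audio;  // Audio payload; empty when read_audio=false.
    };
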
@@ -332,9 +355,9 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
                                        function<void()> &&display_func,
                                        QueueSpotHolder &&queue_spot_holder,
                                        FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
-                                       float fade_alpha)
+                                       float fade_alpha, const string &subtitle)
 {
-       fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
+       fprintf(stderr, "output_pts=%" PRId64 "  faded         input_pts=%" PRId64 ",%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
 
        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
@@ -342,7 +365,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        // separate pools around.)
        BorrowedInterpolatedFrameResources resources;
        {
-               unique_lock<mutex> lock(queue_lock);
+               lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
@@ -365,6 +388,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        qf.frame1 = frame1_spec;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
+       qf.subtitle = subtitle;
 
        qf.secondary_frame = frame2_spec;
 
@@ -391,7 +415,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        qf.resources = move(resources);
        qf.local_pts = local_pts;
 
-       unique_lock<mutex> lock(queue_lock);
+       lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
 }
@@ -400,18 +424,18 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                                               int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
                                               QueueSpotHolder &&queue_spot_holder,
                                               FrameOnDisk frame1, FrameOnDisk frame2,
-                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha)
+                                              float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle)
 {
        if (secondary_frame.pts != -1) {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f  secondary_pts=%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }
 
        // Get the temporary OpenGL resources we need for doing the interpolation.
        BorrowedInterpolatedFrameResources resources;
        {
-               unique_lock<mutex> lock(queue_lock);
+               lock_guard<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
                        return;
@@ -426,6 +450,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        qf.display_decoded_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.local_pts = local_pts;
+       qf.subtitle = subtitle;
 
        check_error();
 
@@ -517,22 +542,23 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        check_error();
        qf.resources = move(resources);
 
-       unique_lock<mutex> lock(queue_lock);
+       lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
 }
 
 void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts,
                                          int64_t output_pts, function<void()> &&display_func,
-                                         QueueSpotHolder &&queue_spot_holder)
+                                         QueueSpotHolder &&queue_spot_holder, const string &subtitle)
 {
        QueuedFrame qf;
        qf.type = QueuedFrame::REFRESH;
        qf.output_pts = output_pts;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
+       qf.subtitle = subtitle;
 
-       unique_lock<mutex> lock(queue_lock);
+       lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
        queue_changed.notify_all();
 }
@@ -576,7 +602,7 @@ void VideoStream::encode_thread_func()
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
-               exit(1);
+               abort();
        }
 
        while (!should_quit) {
@@ -585,7 +611,7 @@ void VideoStream::encode_thread_func()
                        unique_lock<mutex> lock(queue_lock);
 
                        // Wait until we have a frame to play.
-                       queue_changed.wait(lock, [this]{
+                       queue_changed.wait(lock, [this] {
                                return !frame_queue.empty() || should_quit;
                        });
                        if (should_quit) {
@@ -599,7 +625,7 @@ void VideoStream::encode_thread_func()
                        if (output_fast_forward) {
                                aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                        } else {
-                               aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+                               aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start] {
                                        return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
                                });
                        }
@@ -611,9 +637,23 @@ void VideoStream::encode_thread_func()
                        frame_queue.pop_front();
                }
 
+               // Hack: We mux the subtitle packet one time unit before the actual frame,
+               // so that Nageru is sure to get it first.
+               if (!qf.subtitle.empty()) {
+                       AVPacket pkt;
+                       av_init_packet(&pkt);
+                       pkt.stream_index = mux->get_subtitle_stream_idx();
+                       assert(pkt.stream_index != -1);
+                       pkt.data = (uint8_t *)qf.subtitle.data();
+                       pkt.size = qf.subtitle.size();
+                       pkt.flags = 0;
+                       pkt.duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
+                       mux->add_packet(pkt, qf.output_pts - 1, qf.output_pts - 1);
+               }
+
                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
-                       string jpeg = frame_reader.read_frame(qf.frame1);
+                       string jpeg = move(*qf.encoded_jpeg);
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
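
Side note on the subtitle hack above: muxing the packet at output_pts - 1 makes it sort strictly before the frame it describes, so the consumer (Nageru) is guaranteed to see it first. pkt.data merely borrows qf.subtitle's buffer, which is safe as long as add_packet() copies or writes the payload before returning, as the WRITE_FOREGROUND mux set up earlier does. In FFmpeg 4.4 and newer, av_init_packet() is deprecated; an equivalent hedged sketch using the allocating API (assuming the same Mux::add_packet() interface):

    AVPacket *pkt = av_packet_alloc();
    pkt->stream_index = mux->get_subtitle_stream_idx();
    pkt->data = (uint8_t *)qf.subtitle.data();  // Borrowed; pkt->buf stays null,
    pkt->size = qf.subtitle.size();             // so av_packet_free() won't free it.
    pkt->duration = lrint(TIMEBASE / global_flags.output_framerate);
    mux->add_packet(*pkt, qf.output_pts - 1, qf.output_pts - 1);
    av_packet_free(&pkt);
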
@@ -621,15 +661,14 @@ void VideoStream::encode_thread_func()
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
-
-                       last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
+                       last_frame = move(jpeg);
                } else if (qf.type == QueuedFrame::FADED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
 
                        // Now JPEG encode it, and send it on to the stream.
-                       vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
 
                        AVPacket pkt;
                        av_init_packet(&pkt);
@@ -649,7 +688,7 @@ void VideoStream::encode_thread_func()
                        }
 
                        // Now JPEG encode it, and send it on to the stream.
-                       vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }