Make multitrack export include audio.
diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 8b29794abe2ff7aca21eff8267b8e6a76092c13c..06acfd2601a43ebe4ff17af8b0f7fffee788fc25 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -28,7 +28,7 @@ extern HTTPD *global_httpd;
 
 struct VectorDestinationManager {
        jpeg_destination_mgr pub;
-       std::vector<uint8_t> dest;
+       string dest;
 
        VectorDestinationManager()
        {
@@ -62,7 +62,7 @@ struct VectorDestinationManager {
        {
                dest.resize(bytes_used + 4096);
                dest.resize(dest.capacity());
-               pub.next_output_byte = dest.data() + bytes_used;
+               pub.next_output_byte = (uint8_t *)dest.data() + bytes_used;
                pub.free_in_buffer = dest.size() - bytes_used;
        }
 
@@ -78,7 +78,7 @@ struct VectorDestinationManager {
 };
 static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
-vector<uint8_t> encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
+string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height)
 {
        VectorDestinationManager dest;
 
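The std::vector<uint8_t>-to-std::string switch above runs through the whole encode path, which is why the destination manager, encode_jpeg() and the packet code later in this diff all change together. The trick the struct relies on is that jpeg_destination_mgr is its first member and the type is standard layout (hence the static_assert), so libjpeg's callbacks can cast cinfo->dest back to the wrapper. A minimal stand-alone sketch of a string-backed destination, following the usual libjpeg callback contract; the names are illustrative, not Futatabi's:

    #include <jpeglib.h>
    #include <string>
    #include <type_traits>

    // Sketch of a string-backed libjpeg destination manager (illustrative names).
    struct StringDestination {
        jpeg_destination_mgr pub;  // must stay the first member: libjpeg hands us back &pub
        std::string dest;

        StringDestination()
        {
            pub.init_destination = [](j_compress_ptr cinfo) {
                ((StringDestination *)cinfo->dest)->grow(0);
            };
            pub.empty_output_buffer = [](j_compress_ptr cinfo) -> boolean {
                StringDestination *self = (StringDestination *)cinfo->dest;
                self->grow(self->dest.size());  // buffer full: the whole buffer counts as written
                return TRUE;
            };
            pub.term_destination = [](j_compress_ptr cinfo) {
                StringDestination *self = (StringDestination *)cinfo->dest;
                // Trim the unused tail so dest holds exactly the encoded bytes.
                self->dest.resize(self->dest.size() - self->pub.free_in_buffer);
            };
        }

        // Grow the buffer and point libjpeg at the free space after bytes_used.
        void grow(size_t bytes_used)
        {
            dest.resize(bytes_used + 4096);
            pub.next_output_byte = (uint8_t *)dest.data() + bytes_used;
            pub.free_in_buffer = dest.size() - bytes_used;
        }
    };
    static_assert(std::is_standard_layout<StringDestination>::value, "");

The encoder points cinfo.dest at &dest.pub before jpeg_start_compress(), and once jpeg_finish_compress() returns, the finished JPEG can be moved straight out of the string. That is presumably what motivates the change here: the frames preloaded by frame_reader are already std::strings, so everything downstream can share one type and be moved rather than copied.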
@@ -333,20 +333,18 @@ void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
                                           QueueSpotHolder &&queue_spot_holder,
                                           FrameOnDisk frame, const string &subtitle)
 {
-       fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, frame.pts);
+       fprintf(stderr, "output_pts=%" PRId64 "  original      input_pts=%" PRId64 "\n", output_pts, frame.pts);
 
-       // Preload the file from disk, so that the encoder thread does not get stalled.
-       // TODO: Consider sending it through the queue instead.
-       (void)frame_reader.read_frame(frame);
+       // TODO: Write audio if at the right speed.
 
        QueuedFrame qf;
        qf.local_pts = local_pts;
        qf.type = QueuedFrame::ORIGINAL;
        qf.output_pts = output_pts;
-       qf.frame1 = frame;
        qf.display_func = move(display_func);
        qf.queue_spot_holder = move(queue_spot_holder);
        qf.subtitle = subtitle;
+       qf.encoded_jpeg.reset(new string(frame_reader.read_frame(frame, /*read_audio=*/false).video));
 
        lock_guard<mutex> lock(queue_lock);
        frame_queue.push_back(move(qf));
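Previously this function read the frame once just to warm the page cache and threw the bytes away; the encoder thread then read the same file again. After this change the JPEG is read exactly once, here, and travels to the encoder thread inside the QueuedFrame (see the ORIGINAL branch further down); the TODO presumably means that original-speed playback could later request the audio as well and mux it. Only the .video member of read_frame()'s return value is visible in this diff; a plausible shape for the new signature, with the audio member name being an assumption, is:

    // Assumed shape of FrameReader::read_frame() after this commit; only the
    // .video member and the read_audio flag are confirmed by the call sites here.
    struct ReadFrameResult {
        std::string video;  // the JPEG bytes exactly as stored on disk
        std::string audio;  // audio for the frame, filled in only when read_audio is true
    };

    class FrameReader {
    public:
        ReadFrameResult read_frame(const FrameOnDisk &frame, bool read_audio);
    };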
@@ -359,7 +357,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
                                        FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
                                        float fade_alpha, const string &subtitle)
 {
-       fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
+       fprintf(stderr, "output_pts=%" PRId64 "  faded         input_pts=%" PRId64 ",%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
 
        // Get the temporary OpenGL resources we need for doing the fade.
        // (We share these with interpolated frames, which is slightly
@@ -429,9 +427,9 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                                               float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle)
 {
        if (secondary_frame.pts != -1) {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f  secondary_pts=%" PRId64 "  fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
        } else {
-               fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
+               fprintf(stderr, "output_pts=%" PRId64 "  interpolated  input_pts1=%" PRId64 " input_pts2=%" PRId64 " alpha=%.3f\n", output_pts, frame1.pts, frame2.pts, alpha);
        }
 
        // Get the temporary OpenGL resources we need for doing the interpolation.
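The fprintf changes in these hunks are a portability fix riding along with the commit: the pts fields are int64_t, and %ld only matches int64_t on LP64 platforms (it is wrong on 32-bit Linux and on Windows). The <cinttypes> macros expand to the correct conversion specifier everywhere, e.g.:

    #include <cinttypes>
    #include <cstdio>

    int main()
    {
        int64_t output_pts = 90000;
        // PRId64 expands to "ld", "lld", etc., to match the platform's int64_t.
        printf("output_pts=%" PRId64 "\n", output_pts);
    }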
@@ -604,7 +602,7 @@ void VideoStream::encode_thread_func()
        bool ok = make_current(context, surface);
        if (!ok) {
                fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
-               exit(1);
+               abort();
        }
 
        while (!should_quit) {
@@ -655,7 +653,7 @@ void VideoStream::encode_thread_func()
 
                if (qf.type == QueuedFrame::ORIGINAL) {
                        // Send the JPEG frame on, unchanged.
-                       string jpeg = frame_reader.read_frame(qf.frame1);
+                       string jpeg = move(*qf.encoded_jpeg);
                        AVPacket pkt;
                        av_init_packet(&pkt);
                        pkt.stream_index = 0;
@@ -663,15 +661,14 @@ void VideoStream::encode_thread_func()
                        pkt.size = jpeg.size();
                        pkt.flags = AV_PKT_FLAG_KEY;
                        mux->add_packet(pkt, qf.output_pts, qf.output_pts);
-
-                       last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
+                       last_frame = move(jpeg);
                } else if (qf.type == QueuedFrame::FADED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
 
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
 
                        // Now JPEG encode it, and send it on to the stream.
-                       vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
 
                        AVPacket pkt;
                        av_init_packet(&pkt);
@@ -691,7 +688,7 @@ void VideoStream::encode_thread_func()
                        }
 
                        // Now JPEG encode it, and send it on to the stream.
-                       vector<uint8_t> jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height);
                        if (qf.flow_tex != 0) {
                                compute_flow->release_texture(qf.flow_tex);
                        }
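The ORIGINAL branch above now just moves the preloaded JPEG out of the QueuedFrame and wraps it in an AVPacket, with no decode or re-encode; the FADED and interpolated branches still go through encode_jpeg(), which is why its return type had to change to std::string as well. A sketch of the packet wrapping, where Mux::add_packet is Futatabi's own wrapper (shared/mux.h in the Nageru tree) rather than an FFmpeg call:

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <string>
    #include "shared/mux.h"  // Mux wrapper; include path as in the Nageru source tree

    // Illustrative helper mirroring the ORIGINAL branch: send pre-encoded JPEG
    // bytes as one keyframe packet on video stream 0.
    void send_jpeg_packet(Mux *mux, const std::string &jpeg, int64_t pts)
    {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.stream_index = 0;
        pkt.data = (uint8_t *)jpeg.data();  // no copy; the string must outlive add_packet()
        pkt.size = jpeg.size();
        pkt.flags = AV_PKT_FLAG_KEY;        // all-intra (M)JPEG, so every frame is a keyframe
        mux->add_packet(pkt, pts, pts);     // pts == dts, since there are no B-frames
    }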