git.sesse.net Git - nageru/commitdiff
Support exporting interpolated singletrack video. Probably tickles leaks in Player...
author Steinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 13 Dec 2018 19:27:52 +0000 (20:27 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 13 Dec 2018 19:27:52 +0000 (20:27 +0100)
futatabi/export.cpp
futatabi/export.h
futatabi/mainwindow.cpp
futatabi/mainwindow.h
futatabi/mainwindow.ui
futatabi/player.cpp
futatabi/player.h
futatabi/video_stream.cpp
futatabi/video_stream.h

index bed2643547a03335662bbda0f60dec36cd5fa981..3371404d15e0d7a176f62197821758739a593bdf 100644 (file)
@@ -3,12 +3,14 @@
 #include "export.h"
 #include "flags.h"
 #include "frame_on_disk.h"
+#include "player.h"
 #include "shared/ffmpeg_raii.h"
 #include "shared/timebase.h"
 
 #include <QMessageBox>
 #include <QProgressDialog>
 
+#include <future>
 #include <vector>
 
 #include <unistd.h>
@@ -194,3 +196,53 @@ void export_multitrack_clip(const string &filename, const Clip &clip)
        frames_written += buffered_jpegs.size();
        progress.setValue(frames_written);
 }
+
+void export_interpolated_clip(const string &filename, const Clip &clip)
+{
+       AVFormatContext *avctx = nullptr;
+       avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
+       if (avctx == nullptr) {
+               QMessageBox msgbox;
+               msgbox.setText("Could not allocate FFmpeg context");
+               msgbox.exec();
+               return;
+       }
+       AVFormatContextWithCloser closer(avctx);
+
+       int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
+       if (ret < 0) {
+               QMessageBox msgbox;
+               msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
+               msgbox.exec();
+               return;
+       }
+
+       QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
+       progress.setWindowTitle("Futatabi");
+       progress.setWindowModality(Qt::WindowModal);
+       progress.setMinimumDuration(1000);
+       progress.setMaximum(100000);
+       progress.setValue(0);
+
+       promise<void> done_promise;
+       future<void> done = done_promise.get_future();
+       std::atomic<double> current_value{0.0};
+
+       unique_ptr<Player> player(new Player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release()));
+       player->set_done_callback([&done_promise] {
+               done_promise.set_value();
+       });
+       player->set_progress_callback([&current_value] (const std::map<size_t, double> &player_progress) {
+               assert(player_progress.size() == 1);
+               current_value = player_progress.begin()->second;
+       });
+       player->play_clip(clip, /*clip_idx=*/0, clip.stream_idx);
+       while (done.wait_for(std::chrono::milliseconds(100)) != future_status::ready && !progress.wasCanceled()) {
+               progress.setValue(lrint(100000.0 * current_value));
+       }
+       if (progress.wasCanceled()) {
+               unlink(filename.c_str());
+               player.reset();
+               return;
+       }
+}
index 2349db73565fb2cb82818eb03d4e4ad9ba8e045b..934937c72017f8effd11c437a0792a65a2648c78 100644 (file)
@@ -4,5 +4,6 @@
 #include <string>
 
 void export_multitrack_clip(const std::string &filename, const Clip &clip);
+void export_interpolated_clip(const std::string &filename, const Clip &clip);
 
 #endif
index 6d0f7c9767bf8e3f0f2d47716f97a6b0ed2b5b3f..54efa1c1de221c107f3b57cbcc15a74418096fd5 100644 (file)
@@ -42,6 +42,7 @@ MainWindow::MainWindow()
        // The menus.
        connect(ui->exit_action, &QAction::triggered, this, &MainWindow::exit_triggered);
        connect(ui->export_cliplist_clip_multitrack_action, &QAction::triggered, this, &MainWindow::export_cliplist_clip_multitrack_triggered);
+       connect(ui->export_playlist_clip_interpolated_action, &QAction::triggered, this, &MainWindow::export_playlist_clip_interpolated_triggered);
        connect(ui->manual_action, &QAction::triggered, this, &MainWindow::manual_triggered);
        connect(ui->about_action, &QAction::triggered, this, &MainWindow::about_triggered);
 
@@ -129,8 +130,8 @@ MainWindow::MainWindow()
                this, &MainWindow::playlist_selection_changed);
        playlist_selection_changed();  // First time set-up.
 
-       preview_player.reset(new Player(ui->preview_display, /*also_output_to_stream=*/false));
-       live_player.reset(new Player(ui->live_display, /*also_output_to_stream=*/true));
+       preview_player.reset(new Player(ui->preview_display, Player::NO_STREAM_OUTPUT));
+       live_player.reset(new Player(ui->live_display, Player::HTTPD_STREAM_OUTPUT));
        live_player->set_done_callback([this]{
                post_to_main_thread([this]{
                        live_player_clip_done();
@@ -781,6 +782,30 @@ void MainWindow::export_cliplist_clip_multitrack_triggered()
        export_multitrack_clip(filename.toStdString(), clip);
 }
 
+void MainWindow::export_playlist_clip_interpolated_triggered()
+{
+       QItemSelectionModel *selected = ui->playlist->selectionModel();
+       if (!selected->hasSelection()) {
+               QMessageBox msgbox;
+               msgbox.setText("No clip selected in the playlist. Select one and try exporting again.");
+               msgbox.exec();
+               return;
+       }
+
+       QModelIndex index = selected->currentIndex();
+       Clip clip = *playlist_clips->clip(index.row());
+       QString filename = QFileDialog::getSaveFileName(this,
+               "Export interpolated clip", QString(), tr("Matroska video files (*.mkv)"));
+       if (filename.isNull()) {
+               // Cancel.
+               return;
+       }
+       if (!filename.endsWith(".mkv")) {
+               filename += ".mkv";
+       }
+       export_interpolated_clip(filename.toStdString(), clip);
+}
+
 void MainWindow::manual_triggered()
 {
        if (!QDesktopServices::openUrl(QUrl("https://nageru.sesse.net/doc/"))) {
index a0c938d03574c06aa29f855759645135f827109f..e21b3f38c181ef994f5120cffe4fd12b83a75812 100644 (file)
@@ -103,6 +103,7 @@ private:
        void report_disk_space(off_t free_bytes, double estimated_seconds_left);
        void exit_triggered();
        void export_cliplist_clip_multitrack_triggered();
+       void export_playlist_clip_interpolated_triggered();
        void manual_triggered();
        void about_triggered();
 
index 0af53fe67f907db0a2593ea69c472c60106c351b..81167d8e8cf6ffcdf8d9ee9676eb7d6b41df2dee 100644 (file)
       <string>&amp;Export</string>
      </property>
      <addaction name="export_cliplist_clip_multitrack_action"/>
+     <addaction name="export_playlist_clip_interpolated_action"/>
     </widget>
     <addaction name="menu_Export"/>
     <addaction name="exit_action"/>
    <string>Selected clip list clip as raw &amp;multitrack…</string>
    </property>
   </action>
+  <action name="export_playlist_clip_interpolated_action">
+   <property name="text">
+    <string>Selected playlist clip as &amp;interpolated single track…</string>
+   </property>
+  </action>
  </widget>
  <customwidgets>
   <customwidget>
index a5d093ade2bba23422d092b382233f24f8cb9539..80e77d1bda6fccf2faf2bb276e740cff05b20635 100644 (file)
@@ -26,7 +26,7 @@ using namespace std::chrono;
 
 extern HTTPD *global_httpd;
 
-void Player::thread_func(bool also_output_to_stream)
+void Player::thread_func(Player::StreamOutput stream_output, AVFormatContext *file_avctx)
 {
        pthread_setname_np(pthread_self(), "Player");
 
@@ -40,8 +40,8 @@ void Player::thread_func(bool also_output_to_stream)
        check_error();
 
        // Create the VideoStream object, now that we have an OpenGL context.
-       if (also_output_to_stream) {
-               video_stream.reset(new VideoStream);
+       if (stream_output != NO_STREAM_OUTPUT) {
+               video_stream.reset(new VideoStream(file_avctx));
                video_stream->start();
        }
 
@@ -126,7 +126,7 @@ got_clip:
                        }
 
                        steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
-                       if (time_behind >= milliseconds(200)) {
+                       if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                continue;
@@ -238,7 +238,9 @@ got_clip:
 
                        if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0) {
                                auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
-                                       destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+                                       if (destination != nullptr) {
+                                               destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+                                       }
                                };
                                if (video_stream == nullptr) {
                                        display_func();
@@ -265,7 +267,9 @@ got_clip:
                                double snap_pts_as_frameno = (snap_frame.pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
                                if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
                                        auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
-                                               destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+                                               if (destination != nullptr) {
+                                                       destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+                                               }
                                        };
                                        if (video_stream == nullptr) {
                                                display_func();
@@ -290,7 +294,7 @@ got_clip:
                                continue;
                        }
 
-                       if (time_behind >= milliseconds(100)) {
+                       if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                continue;
@@ -301,10 +305,14 @@ got_clip:
                        if (video_stream == nullptr) {
                                // Previews don't do any interpolation.
                                assert(secondary_stream_idx == -1);
-                               destination->setFrame(primary_stream_idx, frame_lower);
+                               if (destination != nullptr) {
+                                       destination->setFrame(primary_stream_idx, frame_lower);
+                               }
                        } else {
                                auto display_func = [this](shared_ptr<Frame> frame) {
-                                       destination->setFrame(frame);
+                                       if (destination != nullptr) {
+                                               destination->setFrame(frame);
+                                       }
                                };
                                video_stream->schedule_interpolated_frame(
                                        next_frame_start, pts, display_func, QueueSpotHolder(this),
@@ -377,10 +385,10 @@ bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *f
        return true;
 }
 
-Player::Player(JPEGFrameView *destination, bool also_output_to_stream)
+Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
        : destination(destination)
 {
-       player_thread = thread(&Player::thread_func, this, also_output_to_stream);
+       player_thread = thread(&Player::thread_func, this, stream_output, file_avctx);
 }
 
 Player::~Player()
index b57bada647f2cb48439265cf8e534a6179c4d8f3..03da29e364b80db3c2a503fc3e3d98b25ee90a84 100644 (file)
@@ -6,6 +6,7 @@
 #include "queue_spot_holder.h"
 
 extern "C" {
+#include <libavformat/avformat.h>
 #include <libavformat/avio.h>
 }
 
@@ -21,7 +22,12 @@ class QSurfaceFormat;
 
 class Player : public QueueInterface {
 public:
-       Player(JPEGFrameView *destination, bool also_output_to_stream);
+       enum StreamOutput {
+               NO_STREAM_OUTPUT,
+               HTTPD_STREAM_OUTPUT,  // Output to global_httpd.
+               FILE_STREAM_OUTPUT    // Output to file_avctx.
+       };
+       Player(JPEGFrameView *destination, StreamOutput stream_output, AVFormatContext *file_avctx = nullptr);
        ~Player();
 
        void play_clip(const Clip &clip, size_t clip_idx, unsigned stream_idx);
@@ -48,7 +54,7 @@ public:
        void release_queue_spot() override;
 
 private:
-       void thread_func(bool also_output_to_stream);
+       void thread_func(StreamOutput stream_output, AVFormatContext *file_avctx);
        void open_output_stream();
        static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
        int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
index 6749f0d8df7887a0eeb9cc57b8c50fa0289d41a4..ef56ee4a96711a703b900b3ab60121298803160d 100644 (file)
@@ -129,7 +129,8 @@ vector<uint8_t> encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const
        return move(dest.dest);
 }
 
-VideoStream::VideoStream()
+VideoStream::VideoStream(AVFormatContext *file_avctx)
+       : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
 {
        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
        ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
@@ -240,27 +241,25 @@ VideoStream::~VideoStream() {}
 
 void VideoStream::start()
 {
-       AVFormatContext *avctx = avformat_alloc_context();
+       if (avctx == nullptr) {
+               avctx = avformat_alloc_context();
 
-       // We use Matroska, because it's pretty much the only mux where FFmpeg
-       // allows writing chroma location to override JFIF's default center placement.
-       // (Note that at the time of writing, however, FFmpeg does not correctly
-       // _read_ this information!)
-       avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
+               // We use Matroska, because it's pretty much the only mux where FFmpeg
+               // allows writing chroma location to override JFIF's default center placement.
+               // (Note that at the time of writing, however, FFmpeg does not correctly
+               // _read_ this information!)
+               avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
 
-       uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-       avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
-       avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
-       avctx->pb->ignore_boundary_point = 1;
+               uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+               avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+               avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
+               avctx->pb->ignore_boundary_point = 1;
 
-       Mux::Codec video_codec = Mux::CODEC_MJPEG;
-
-       avctx->flags = AVFMT_FLAG_CUSTOM_IO;
-
-       string video_extradata;
+               avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+       }
 
        size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
-       stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr,
+       mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
                AVCOL_SPC_BT709, Mux::WITHOUT_AUDIO,
                COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
 
@@ -567,9 +566,14 @@ void VideoStream::encode_thread_func()
 
                        // Now sleep until the frame is supposed to start (the usual case),
                        // _or_ clear_queue() happened.
-                       bool aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
-                               return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
-                       });
+                       bool aborted;
+                       if (output_fast_forward) {
+                               aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+                       } else {
+                               aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+                                       return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+                               });
+                       }
                        if (aborted) {
                                // clear_queue() happened, so don't play this frame after all.
                                continue;
@@ -586,7 +590,7 @@ void VideoStream::encode_thread_func()
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
-                       stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
 
                        last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
                } else if (qf.type == QueuedFrame::FADED) {
@@ -602,7 +606,7 @@ void VideoStream::encode_thread_func()
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
-                       stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);
                } else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
@@ -626,7 +630,7 @@ void VideoStream::encode_thread_func()
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)jpeg.data();
                        pkt.size = jpeg.size();
-                       stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                        last_frame = move(jpeg);
                } else if (qf.type == QueuedFrame::REFRESH) {
                        AVPacket pkt;
@@ -634,7 +638,7 @@ void VideoStream::encode_thread_func()
                        pkt.stream_index = 0;
                        pkt.data = (uint8_t *)last_frame.data();
                        pkt.size = last_frame.size();
-                       stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+                       mux->add_packet(pkt, qf.output_pts, qf.output_pts);
                } else {
                        assert(false);
                }
index d4cb18eb7e91e758fc53d6e99adb90e7a2741e45..d522ab1e8f5a98f9494004599b9c1aa2a74a76c0 100644 (file)
@@ -5,6 +5,7 @@
 #include <stdint.h>
 
 extern "C" {
+#include <libavformat/avformat.h>
 #include <libavformat/avio.h>
 }
 
@@ -35,7 +36,7 @@ class YCbCrConverter;
 
 class VideoStream {
 public:
-       VideoStream();
+       VideoStream(AVFormatContext *file_avctx);  // nullptr if output to stream.
        ~VideoStream();
        void start();
        void stop();
@@ -131,9 +132,11 @@ private:
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::condition_variable queue_changed;
 
-       std::unique_ptr<Mux> stream_mux;  // To HTTP.
-       std::string stream_mux_header;
+       AVFormatContext *avctx;
+       std::unique_ptr<Mux> mux;  // To HTTP, or to file.
+       std::string stream_mux_header;  // Only used in HTTP.
        bool seen_sync_markers = false;
+       bool output_fast_forward;
 
        std::unique_ptr<YCbCrConverter> ycbcr_converter;
        std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;