#include "export.h"
#include "flags.h"
#include "frame_on_disk.h"
+#include "player.h"
#include "shared/ffmpeg_raii.h"
#include "shared/timebase.h"
#include <QMessageBox>
#include <QProgressDialog>
+#include <future>
#include <vector>
#include <unistd.h>
frames_written += buffered_jpegs.size();
progress.setValue(frames_written);
}
+
+void export_interpolated_clip(const string &filename, const Clip &clip)
+{
+ AVFormatContext *avctx = nullptr;
+ avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
+ if (avctx == nullptr) {
+ QMessageBox msgbox;
+ msgbox.setText("Could not allocate FFmpeg context");
+ msgbox.exec();
+ return;
+ }
+ AVFormatContextWithCloser closer(avctx);
+
+ int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
+ if (ret < 0) {
+ QMessageBox msgbox;
+ msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
+ msgbox.exec();
+ return;
+ }
+
+ QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
+ progress.setWindowTitle("Futatabi");
+ progress.setWindowModality(Qt::WindowModal);
+ progress.setMinimumDuration(1000);
+ progress.setMaximum(100000);
+ progress.setValue(0);
+
+ promise<void> done_promise;
+ future<void> done = done_promise.get_future();
+ std::atomic<double> current_value{0.0};
+
+ unique_ptr<Player> player(new Player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release()));
+ player->set_done_callback([&done_promise] {
+ done_promise.set_value();
+ });
+ player->set_progress_callback([¤t_value] (const std::map<size_t, double> &player_progress) {
+ assert(player_progress.size() == 1);
+ current_value = player_progress.begin()->second;
+ });
+ player->play_clip(clip, /*clip_idx=*/0, clip.stream_idx);
+ while (done.wait_for(std::chrono::milliseconds(100)) != future_status::ready && !progress.wasCanceled()) {
+ progress.setValue(lrint(100000.0 * current_value));
+ }
+ if (progress.wasCanceled()) {
+ unlink(filename.c_str());
+ player.reset();
+ return;
+ }
+}
#include <string>
void export_multitrack_clip(const std::string &filename, const Clip &clip);
+void export_interpolated_clip(const std::string &filename, const Clip &clip);
#endif
// The menus.
connect(ui->exit_action, &QAction::triggered, this, &MainWindow::exit_triggered);
connect(ui->export_cliplist_clip_multitrack_action, &QAction::triggered, this, &MainWindow::export_cliplist_clip_multitrack_triggered);
+ connect(ui->export_playlist_clip_interpolated_action, &QAction::triggered, this, &MainWindow::export_playlist_clip_interpolated_triggered);
connect(ui->manual_action, &QAction::triggered, this, &MainWindow::manual_triggered);
connect(ui->about_action, &QAction::triggered, this, &MainWindow::about_triggered);
this, &MainWindow::playlist_selection_changed);
playlist_selection_changed(); // First time set-up.
- preview_player.reset(new Player(ui->preview_display, /*also_output_to_stream=*/false));
- live_player.reset(new Player(ui->live_display, /*also_output_to_stream=*/true));
+ preview_player.reset(new Player(ui->preview_display, Player::NO_STREAM_OUTPUT));
+ live_player.reset(new Player(ui->live_display, Player::HTTPD_STREAM_OUTPUT));
live_player->set_done_callback([this]{
post_to_main_thread([this]{
live_player_clip_done();
export_multitrack_clip(filename.toStdString(), clip);
}
+void MainWindow::export_playlist_clip_interpolated_triggered()
+{
+ QItemSelectionModel *selected = ui->playlist->selectionModel();
+ if (!selected->hasSelection()) {
+ QMessageBox msgbox;
+ msgbox.setText("No clip selected in the playlist. Select one and try exporting again.");
+ msgbox.exec();
+ return;
+ }
+
+ QModelIndex index = selected->currentIndex();
+ Clip clip = *playlist_clips->clip(index.row());
+ QString filename = QFileDialog::getSaveFileName(this,
+ "Export interpolated clip", QString(), tr("Matroska video files (*.mkv)"));
+ if (filename.isNull()) {
+ // Cancel.
+ return;
+ }
+ if (!filename.endsWith(".mkv")) {
+ filename += ".mkv";
+ }
+ export_interpolated_clip(filename.toStdString(), clip);
+}
+
void MainWindow::manual_triggered()
{
if (!QDesktopServices::openUrl(QUrl("https://nageru.sesse.net/doc/"))) {
void report_disk_space(off_t free_bytes, double estimated_seconds_left);
void exit_triggered();
void export_cliplist_clip_multitrack_triggered();
+ void export_playlist_clip_interpolated_triggered();
void manual_triggered();
void about_triggered();
<string>&amp;Export</string>
</property>
<addaction name="export_cliplist_clip_multitrack_action"/>
+ <addaction name="export_playlist_clip_interpolated_action"/>
</widget>
<addaction name="menu_Export"/>
<addaction name="exit_action"/>
<string>Selected clip list clip as raw &amp;multitrack…</string>
</property>
</action>
+ <action name="export_playlist_clip_interpolated_action">
+ <property name="text">
+    <string>Selected playlist clip as &amp;interpolated single track…</string>
+ </property>
+ </action>
</widget>
<customwidgets>
<customwidget>
extern HTTPD *global_httpd;
-void Player::thread_func(bool also_output_to_stream)
+void Player::thread_func(Player::StreamOutput stream_output, AVFormatContext *file_avctx)
{
pthread_setname_np(pthread_self(), "Player");
check_error();
// Create the VideoStream object, now that we have an OpenGL context.
- if (also_output_to_stream) {
- video_stream.reset(new VideoStream);
+ if (stream_output != NO_STREAM_OUTPUT) {
+ video_stream.reset(new VideoStream(file_avctx));
video_stream->start();
}
}
steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
- if (time_behind >= milliseconds(200)) {
+ if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
lrint(1e3 * duration<double>(time_behind).count()));
continue;
if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0) {
auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
- destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+ }
};
if (video_stream == nullptr) {
display_func();
double snap_pts_as_frameno = (snap_frame.pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
- destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+ }
};
if (video_stream == nullptr) {
display_func();
continue;
}
- if (time_behind >= milliseconds(100)) {
+ if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
lrint(1e3 * duration<double>(time_behind).count()));
continue;
if (video_stream == nullptr) {
// Previews don't do any interpolation.
assert(secondary_stream_idx == -1);
- destination->setFrame(primary_stream_idx, frame_lower);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, frame_lower);
+ }
} else {
auto display_func = [this](shared_ptr<Frame> frame) {
- destination->setFrame(frame);
+ if (destination != nullptr) {
+ destination->setFrame(frame);
+ }
};
video_stream->schedule_interpolated_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
return true;
}
-Player::Player(JPEGFrameView *destination, bool also_output_to_stream)
+Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
: destination(destination)
{
- player_thread = thread(&Player::thread_func, this, also_output_to_stream);
+ player_thread = thread(&Player::thread_func, this, stream_output, file_avctx);
}
Player::~Player()
#include "queue_spot_holder.h"
extern "C" {
+#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}
class Player : public QueueInterface {
public:
- Player(JPEGFrameView *destination, bool also_output_to_stream);
+ enum StreamOutput {
+ NO_STREAM_OUTPUT,
+ HTTPD_STREAM_OUTPUT, // Output to global_httpd.
+ FILE_STREAM_OUTPUT // Output to file_avctx.
+ };
+ Player(JPEGFrameView *destination, StreamOutput stream_output, AVFormatContext *file_avctx = nullptr);
~Player();
void play_clip(const Clip &clip, size_t clip_idx, unsigned stream_idx);
void release_queue_spot() override;
private:
- void thread_func(bool also_output_to_stream);
+ void thread_func(StreamOutput stream_output, AVFormatContext *file_avctx);
void open_output_stream();
static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
return move(dest.dest);
}
-VideoStream::VideoStream()
+VideoStream::VideoStream(AVFormatContext *file_avctx)
+ : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
{
ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
void VideoStream::start()
{
- AVFormatContext *avctx = avformat_alloc_context();
+ if (avctx == nullptr) {
+ avctx = avformat_alloc_context();
- // We use Matroska, because it's pretty much the only mux where FFmpeg
- // allows writing chroma location to override JFIF's default center placement.
- // (Note that at the time of writing, however, FFmpeg does not correctly
- // _read_ this information!)
- avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
+ // We use Matroska, because it's pretty much the only mux where FFmpeg
+ // allows writing chroma location to override JFIF's default center placement.
+ // (Note that at the time of writing, however, FFmpeg does not correctly
+ // _read_ this information!)
+ avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
- uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
- avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
- avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
- avctx->pb->ignore_boundary_point = 1;
+ uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+ avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+ avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
+ avctx->pb->ignore_boundary_point = 1;
- Mux::Codec video_codec = Mux::CODEC_MJPEG;
-
- avctx->flags = AVFMT_FLAG_CUSTOM_IO;
-
- string video_extradata;
+ avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+ }
size_t width = global_flags.width, height = global_flags.height; // Doesn't matter for MJPEG.
- stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr,
+ mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
AVCOL_SPC_BT709, Mux::WITHOUT_AUDIO,
COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
// Now sleep until the frame is supposed to start (the usual case),
// _or_ clear_queue() happened.
- bool aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
- return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
- });
+ bool aborted;
+ if (output_fast_forward) {
+ aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+ } else {
+ aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+ return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+ });
+ }
if (aborted) {
// clear_queue() happened, so don't play this frame after all.
continue;
pkt.stream_index = 0;
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
- stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ mux->add_packet(pkt, qf.output_pts, qf.output_pts);
last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
} else if (qf.type == QueuedFrame::FADED) {
pkt.stream_index = 0;
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
- stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ mux->add_packet(pkt, qf.output_pts, qf.output_pts);
last_frame = move(jpeg);
} else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
pkt.stream_index = 0;
pkt.data = (uint8_t *)jpeg.data();
pkt.size = jpeg.size();
- stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ mux->add_packet(pkt, qf.output_pts, qf.output_pts);
last_frame = move(jpeg);
} else if (qf.type == QueuedFrame::REFRESH) {
AVPacket pkt;
pkt.stream_index = 0;
pkt.data = (uint8_t *)last_frame.data();
pkt.size = last_frame.size();
- stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ mux->add_packet(pkt, qf.output_pts, qf.output_pts);
} else {
assert(false);
}
#include <stdint.h>
extern "C" {
+#include <libavformat/avformat.h>
#include <libavformat/avio.h>
}
class VideoStream {
public:
- VideoStream();
+ VideoStream(AVFormatContext *file_avctx); // nullptr if output to stream.
~VideoStream();
void start();
void stop();
std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
std::condition_variable queue_changed;
- std::unique_ptr<Mux> stream_mux; // To HTTP.
- std::string stream_mux_header;
+ AVFormatContext *avctx;
+ std::unique_ptr<Mux> mux; // To HTTP, or to file.
+ std::string stream_mux_header; // Only used in HTTP.
bool seen_sync_markers = false;
+ bool output_fast_forward;
std::unique_ptr<YCbCrConverter> ycbcr_converter;
std::unique_ptr<YCbCrConverter> ycbcr_semiplanar_converter;