futatabi/export.cpp
#include "clip_list.h"
#include "defs.h"
#include "export.h"
#include "flags.h"
#include "frame_on_disk.h"
#include "player.h"
#include "shared/ffmpeg_raii.h"
#include "shared/timebase.h"

#include <QMessageBox>
#include <QProgressDialog>

#include <atomic>
#include <chrono>
#include <future>
#include <map>
#include <mutex>
#include <string>
#include <vector>

#include <math.h>
#include <unistd.h>

extern "C" {
#include <libavformat/avformat.h>
}

using namespace std;

namespace {

// Only used by the multitrack export path (export_multitrack_clip() below).
struct BufferedJPEG {
        int64_t pts;
        unsigned stream_idx;
        string jpeg;
};

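// Mux each buffered JPEG as one keyframe packet on its stream.
// Returns false as soon as av_write_frame() reports an error.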
bool write_buffered_jpegs(AVFormatContext *avctx, const vector<BufferedJPEG> &buffered_jpegs)
{
        for (const BufferedJPEG &jpeg : buffered_jpegs) {
                AVPacket pkt;
                av_init_packet(&pkt);
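                // Point the packet straight at the JPEG bytes; av_write_frame()
                // does not take ownership and writes the packet before returning,
                // so no copy is needed.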
                pkt.stream_index = jpeg.stream_idx;
                pkt.data = (uint8_t *)jpeg.jpeg.data();
                pkt.size = jpeg.jpeg.size();
                pkt.pts = jpeg.pts;
                pkt.dts = jpeg.pts;
                pkt.flags = AV_PKT_FLAG_KEY;

                if (av_write_frame(avctx, &pkt) < 0) {
                        return false;
                }
        }
        return true;
}

}  // namespace

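// Export the clip [pts_in, pts_out) as a multi-track file: each camera's
// original JPEG frames are muxed into their own MJPEG video stream, with no
// re-encoding. The container is chosen from the filename extension by
// avformat_alloc_output_context2().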
void export_multitrack_clip(const string &filename, const Clip &clip)
{
        AVFormatContext *avctx = nullptr;
        avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
        if (avctx == nullptr) {
                QMessageBox msgbox;
                msgbox.setText("Could not allocate FFmpeg context");
                msgbox.exec();
                return;
        }
        AVFormatContextWithCloser closer(avctx);

        int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0) {
                QMessageBox msgbox;
                msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
                msgbox.exec();
                return;
        }

        // Find the first frame for each stream.
        size_t num_frames = 0;
        size_t num_streams_with_frames_left = 0;
        size_t last_stream_idx = 0;
        FrameReader readers[MAX_STREAMS];
        bool has_frames[MAX_STREAMS];
        size_t first_frame_idx[MAX_STREAMS], last_frame_idx[MAX_STREAMS];  // Inclusive, exclusive.
        {
                lock_guard<mutex> lock(frame_mu);
                for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
                        // Find the first frame such that frame.pts >= pts_in.
                        auto it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_in);
                        first_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
                        has_frames[stream_idx] = (it != frames[stream_idx].end());

                        // Find the first frame such that frame.pts >= pts_out.
                        it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_out);
                        last_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
                        num_frames += last_frame_idx[stream_idx] - first_frame_idx[stream_idx];

                        if (has_frames[stream_idx]) {
                                ++num_streams_with_frames_left;
                                last_stream_idx = stream_idx;
                        }
                }
        }

        // Create the streams. Note that some of them could be without frames
        // (we try to maintain the stream indexes in the export).
        vector<AVStream *> video_streams;
        for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
                AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
                if (avstream_video == nullptr) {
                        fprintf(stderr, "avformat_new_stream() failed\n");
                        exit(1);
                }
                avstream_video->time_base = AVRational{1, TIMEBASE};
                avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
                avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
                avstream_video->codecpar->width = global_flags.width;  // Might be wrong, but doesn't matter all that much.
                avstream_video->codecpar->height = global_flags.height;

                // TODO: Deduplicate this against Mux.
                avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709;  // RGB colorspace (inout_format.color_space).
                avstream_video->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1;  // Gamma curve (inout_format.gamma_curve).
                // YUV colorspace (output_ycbcr_format.luma_coefficients).
                avstream_video->codecpar->color_space = AVCOL_SPC_BT709;
                avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG;  // Full vs. limited range (output_ycbcr_format.full_range).
                avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT;  // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
                avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;
                video_streams.push_back(avstream_video);
        }

        if (avformat_write_header(avctx, nullptr) < 0) {
                QMessageBox msgbox;
                msgbox.setText("Writing header failed");
                msgbox.exec();
                unlink(filename.c_str());
                return;
        }

        QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
        progress.setWindowTitle("Futatabi");
        progress.setWindowModality(Qt::WindowModal);
        progress.setMinimumDuration(1000);
        progress.setMaximum(num_frames);
        progress.setValue(0);

        // We buffer up to 1000 frames at a time, in the hope that we can reduce
        // the amount of seeking needed on rotational media.
        vector<BufferedJPEG> buffered_jpegs;
        size_t frames_written = 0;
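        // Interleave the streams in pts order (effectively a k-way merge):
        // each iteration pulls the earliest remaining frame across all streams
        // and queues it for writing.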
        while (num_streams_with_frames_left > 0) {
                // Find the stream whose next frame has the lowest pts. Lower stream indexes win ties.
                FrameOnDisk first_frame;
                unsigned first_frame_stream_idx = 0;
                {
                        lock_guard<mutex> lock(frame_mu);
                        for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
                                if (!has_frames[stream_idx]) {
                                        continue;
                                }
                                if (first_frame.pts == -1 || frames[stream_idx][first_frame_idx[stream_idx]].pts < first_frame.pts) {
                                        first_frame = frames[stream_idx][first_frame_idx[stream_idx]];
                                        first_frame_stream_idx = stream_idx;
                                }
                        }
                        ++first_frame_idx[first_frame_stream_idx];
                        if (first_frame_idx[first_frame_stream_idx] >= last_frame_idx[first_frame_stream_idx]) {
                                has_frames[first_frame_stream_idx] = false;
                                --num_streams_with_frames_left;
                        }
                }
                string jpeg = readers[first_frame_stream_idx].read_frame(first_frame);
                int64_t scaled_pts = av_rescale_q(first_frame.pts, AVRational{1, TIMEBASE},
                        video_streams[first_frame_stream_idx]->time_base);
                buffered_jpegs.emplace_back(BufferedJPEG{ scaled_pts, first_frame_stream_idx, std::move(jpeg) });
                if (buffered_jpegs.size() >= 1000) {
                        if (!write_buffered_jpegs(avctx, buffered_jpegs)) {
                                QMessageBox msgbox;
                                msgbox.setText("Writing frames failed");
                                msgbox.exec();
                                unlink(filename.c_str());
                                return;
                        }
                        frames_written += buffered_jpegs.size();
                        progress.setValue(frames_written);
                        buffered_jpegs.clear();
                }
                if (progress.wasCanceled()) {
                        unlink(filename.c_str());
                        return;
                }
        }

        if (!write_buffered_jpegs(avctx, buffered_jpegs)) {
                QMessageBox msgbox;
                msgbox.setText("Writing frames failed");
                msgbox.exec();
                unlink(filename.c_str());
                return;
        }
        frames_written += buffered_jpegs.size();
        progress.setValue(frames_written);
}

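// Export a single interpolated track by handing the already-opened output
// context to a Player in FILE_STREAM_OUTPUT mode; the player plays the clip
// (with interpolation) straight into the file, while this function drives a
// progress dialog from the player's progress callback and handles cancellation.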
void export_interpolated_clip(const string &filename, const Clip &clip)
{
        AVFormatContext *avctx = nullptr;
        avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
        if (avctx == nullptr) {
                QMessageBox msgbox;
                msgbox.setText("Could not allocate FFmpeg context");
                msgbox.exec();
                return;
        }
        AVFormatContextWithCloser closer(avctx);

        int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0) {
                QMessageBox msgbox;
                msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
                msgbox.exec();
                return;
        }

        QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
        progress.setWindowTitle("Futatabi");
        progress.setWindowModality(Qt::WindowModal);
        progress.setMinimumDuration(1000);
        progress.setMaximum(100000);
        progress.setValue(0);

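        // The player finishes asynchronously; it signals completion through
        // done_promise and reports playback progress (used as a 0–1 fraction)
        // through current_value.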
        promise<void> done_promise;
        future<void> done = done_promise.get_future();
        std::atomic<double> current_value{0.0};

        unique_ptr<Player> player(new Player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release()));
        player->set_done_callback([&done_promise] {
                done_promise.set_value();
        });
        player->set_progress_callback([&current_value] (const std::map<size_t, double> &player_progress) {
                assert(player_progress.size() == 1);
                current_value = player_progress.begin()->second;
        });
        player->play_clip(clip, /*clip_idx=*/0, clip.stream_idx);
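        // Poll for completion every 100 ms so the Qt progress dialog stays
        // responsive and cancellation is noticed promptly.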
        while (done.wait_for(std::chrono::milliseconds(100)) != future_status::ready && !progress.wasCanceled()) {
                progress.setValue(lrint(100000.0 * current_value));
        }
        if (progress.wasCanceled()) {
                unlink(filename.c_str());
                player.reset();
                return;
        }
}