// futatabi/export.cpp — from the nageru project (git.sesse.net/nageru)
1 #include "export.h"
2
3 #include "clip_list.h"
4 #include "defs.h"
5 #include "flags.h"
6 #include "frame_on_disk.h"
7 #include "player.h"
8 #include "shared/ffmpeg_raii.h"
9 #include "shared/shared_defs.h"
10 #include "shared/timebase.h"
11
12 #include <QMessageBox>
13 #include <QProgressDialog>
14 #include <future>
15 #include <unistd.h>
16 #include <vector>
17
18 extern "C" {
19 #include <libavformat/avformat.h>
20 #include <libavutil/channel_layout.h>
21 }
22
23 using namespace std;
24
25 namespace {
26
// A muxer packet queued for deferred writing, so we can flush frames to
// disk in large batches (see write_buffered_frames() below).
// Only used by export_multitrack_clip() and write_buffered_frames().
struct BufferedFrame {
	int64_t pts;  // Already rescaled to the destination stream's time base.
	// Despite the name, this is the output *muxer* stream index; for audio
	// packets it is offset past all the video streams (see the caller).
	unsigned video_stream_idx;
	string data;  // Raw packet payload (MJPEG frame or PCM audio).
};
33
34 bool write_buffered_frames(AVFormatContext *avctx, const vector<BufferedFrame> &buffered_frames)
35 {
36         for (const BufferedFrame &frame : buffered_frames) {
37                 AVPacket pkt;
38                 av_init_packet(&pkt);
39                 pkt.stream_index = frame.video_stream_idx;
40                 pkt.data = (uint8_t *)frame.data.data();
41                 pkt.size = frame.data.size();
42                 pkt.pts = frame.pts;
43                 pkt.dts = frame.pts;
44                 pkt.flags = AV_PKT_FLAG_KEY;
45
46                 if (av_write_frame(avctx, &pkt) < 0) {
47                         return false;
48                 }
49         }
50         return true;
51 }
52
53 }  // namespace
54
// Export the clip [clip.pts_in, clip.pts_out) to <filename> without any
// re-encoding: every input stream becomes its own MJPEG video track, and
// each stream's embedded audio becomes a matching PCM track (audio track i
// lives at muxer stream index num_video_streams + i). Frames are read from
// the on-disk frame cache and written in pts order, interleaved across
// streams. On any error or user cancel, the half-written output file is
// removed with unlink().
void export_multitrack_clip(const string &filename, const Clip &clip)
{
	AVFormatContext *avctx = nullptr;
	// NULL format/format_name: let FFmpeg deduce the container from the
	// filename extension.
	avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
	if (avctx == nullptr) {
		QMessageBox msgbox;
		msgbox.setText("Could not allocate FFmpeg context");
		msgbox.exec();
		return;
	}
	// RAII guard: closes and frees avctx on every return path below.
	AVFormatContextWithCloser closer(avctx);

	int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
	if (ret < 0) {
		QMessageBox msgbox;
		msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
		msgbox.exec();
		return;
	}

	// Find the first frame for each stream.
	size_t num_frames = 0;  // Total video frames across all streams; used as the progress bar maximum.
	size_t num_streams_with_frames_left = 0;
	size_t last_stream_idx = 0;  // Highest stream index that has any frames; bounds stream creation below.
	FrameReader readers[MAX_STREAMS];
	bool has_frames[MAX_STREAMS];
	size_t first_frame_idx[MAX_STREAMS], last_frame_idx[MAX_STREAMS];  // Inclusive, exclusive.
	{
		// frames[] is shared state, so hold frame_mu while computing the
		// per-stream index ranges.
		lock_guard<mutex> lock(frame_mu);
		for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
			// Find the first frame such that frame.pts >= pts_in.
			auto it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_in);
			first_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
			// NOTE(review): if this first frame is already at/after pts_out,
			// the range below is empty but has_frames is still set, and the
			// main loop writes that one stray frame before noticing —
			// confirm whether an empty range can occur in practice.
			has_frames[stream_idx] = (it != frames[stream_idx].end());

			// Find the first frame such that frame.pts >= pts_out.
			it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_out);
			last_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
			num_frames += last_frame_idx[stream_idx] - first_frame_idx[stream_idx];

			if (has_frames[stream_idx]) {
				++num_streams_with_frames_left;
				last_stream_idx = stream_idx;
			}
		}
	}

	// Create the streams. Note that some of them could be without frames
	// (we try to maintain the stream indexes in the export).
	vector<AVStream *> video_streams;
	for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
		AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
		if (avstream_video == nullptr) {
			fprintf(stderr, "avformat_new_stream() failed\n");
			abort();
		}
		avstream_video->time_base = AVRational{ 1, TIMEBASE };
		avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
		avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
		avstream_video->codecpar->width = global_flags.width;  // Might be wrong, but doesn't matter all that much.
		avstream_video->codecpar->height = global_flags.height;

		// TODO: Deduplicate this against Mux.
		avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709;  // RGB colorspace (inout_format.color_space).
		avstream_video->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1;  // Gamma curve (inout_format.gamma_curve).
		// YUV colorspace (output_ycbcr_format.luma_coefficients).
		avstream_video->codecpar->color_space = AVCOL_SPC_BT709;
		avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG;  // Full vs. limited range (output_ycbcr_format.full_range).
		avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT;  // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
		avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;
		video_streams.push_back(avstream_video);
	}

	// Similar, for audio streams. One stereo PCM track per video track,
	// so audio for input stream i ends up at muxer index
	// video_streams.size() + i.
	vector<AVStream *> audio_streams;
	for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
		AVStream *avstream_audio = avformat_new_stream(avctx, nullptr);
		if (avstream_audio == nullptr) {
			fprintf(stderr, "avformat_new_stream() failed\n");
			abort();
		}
		avstream_audio->time_base = AVRational{ 1, TIMEBASE };
		avstream_audio->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
		// presumably matches the raw audio format stored in the frame
		// cache (FrameReader hands it back untouched) — TODO confirm.
		avstream_audio->codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
		avstream_audio->codecpar->channel_layout = AV_CH_LAYOUT_STEREO;
		avstream_audio->codecpar->channels = 2;
		avstream_audio->codecpar->sample_rate = OUTPUT_FREQUENCY;
		audio_streams.push_back(avstream_audio);
	}

	if (avformat_write_header(avctx, nullptr) < 0) {
		QMessageBox msgbox;
		msgbox.setText("Writing header failed");
		msgbox.exec();
		unlink(filename.c_str());  // Don't leave a partial file behind.
		return;
	}

	QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
	progress.setWindowTitle("Futatabi");
	progress.setWindowModality(Qt::WindowModal);
	progress.setMinimumDuration(1000);
	// NOTE(review): the maximum counts video frames only, but
	// frames_written below counts buffered audio packets too, so
	// setValue() can be called with a value above the maximum — confirm
	// this is acceptable for QProgressDialog.
	progress.setMaximum(num_frames);
	progress.setValue(0);

	// We buffer up to 1000 frames at a time, in a hope that we can reduce
	// the amount of seeking needed on rotational media.
	vector<BufferedFrame> buffered_frames;
	size_t frames_written = 0;
	while (num_streams_with_frames_left > 0) {
		// Find the stream with the lowest frame. Lower stream indexes win.
		// NOTE(review): relies on a default-constructed FrameOnDisk having
		// pts == -1 as the "not set yet" sentinel — confirm in frame_on_disk.h.
		FrameOnDisk first_frame;
		unsigned first_frame_stream_idx = 0;
		{
			lock_guard<mutex> lock(frame_mu);
			for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
				if (!has_frames[stream_idx]) {
					continue;
				}
				if (first_frame.pts == -1 || frames[stream_idx][first_frame_idx[stream_idx]].pts < first_frame.pts) {
					first_frame = frames[stream_idx][first_frame_idx[stream_idx]];
					first_frame_stream_idx = stream_idx;
				}
			}
			// Consume the chosen frame; retire the stream once its range is done.
			++first_frame_idx[first_frame_stream_idx];
			if (first_frame_idx[first_frame_stream_idx] >= last_frame_idx[first_frame_stream_idx]) {
				has_frames[first_frame_stream_idx] = false;
				--num_streams_with_frames_left;
			}
		}

		// Read outside the lock; FrameReader does the actual disk I/O.
		FrameReader::Frame frame = readers[first_frame_stream_idx].read_frame(first_frame, /*read_video=*/true, /*read_audio=*/true);

		// Write audio. (Before video, since that's what we expect on input.)
		if (!frame.audio.empty()) {
			// Audio tracks come after all the video tracks in the muxer.
			unsigned audio_stream_idx = first_frame_stream_idx + video_streams.size();
			int64_t scaled_audio_pts = av_rescale_q(first_frame.pts, AVRational{ 1, TIMEBASE },
			                                        audio_streams[first_frame_stream_idx]->time_base);
			buffered_frames.emplace_back(BufferedFrame{ scaled_audio_pts, audio_stream_idx, std::move(frame.audio) });
		}

		// Write video.
		unsigned video_stream_idx = first_frame_stream_idx;
		int64_t scaled_video_pts = av_rescale_q(first_frame.pts, AVRational{ 1, TIMEBASE },
		                                        video_streams[first_frame_stream_idx]->time_base);
		buffered_frames.emplace_back(BufferedFrame{ scaled_video_pts, video_stream_idx, std::move(frame.video) });

		// Flush to disk if required.
		if (buffered_frames.size() >= 1000) {
			if (!write_buffered_frames(avctx, buffered_frames)) {
				QMessageBox msgbox;
				msgbox.setText("Writing frames failed");
				msgbox.exec();
				unlink(filename.c_str());
				return;
			}
			frames_written += buffered_frames.size();
			progress.setValue(frames_written);
			buffered_frames.clear();
		}
		if (progress.wasCanceled()) {
			unlink(filename.c_str());
			return;
		}
	}

	// Flush whatever is left in the buffer (always fewer than 1000 entries).
	if (!write_buffered_frames(avctx, buffered_frames)) {
		QMessageBox msgbox;
		msgbox.setText("Writing frames failed");
		msgbox.exec();
		unlink(filename.c_str());
		return;
	}
	frames_written += buffered_frames.size();
	progress.setValue(frames_written);
}
231
232 void export_interpolated_clip(const string &filename, const vector<Clip> &clips)
233 {
234         AVFormatContext *avctx = nullptr;
235         avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
236         if (avctx == nullptr) {
237                 QMessageBox msgbox;
238                 msgbox.setText("Could not allocate FFmpeg context");
239                 msgbox.exec();
240                 return;
241         }
242         AVFormatContextWithCloser closer(avctx);
243
244         int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
245         if (ret < 0) {
246                 QMessageBox msgbox;
247                 msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
248                 msgbox.exec();
249                 return;
250         }
251
252         QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
253         progress.setWindowTitle("Futatabi");
254         progress.setWindowModality(Qt::WindowModal);
255         progress.setMinimumDuration(1000);
256         progress.setMaximum(100000);
257         progress.setValue(0);
258
259         vector<ClipWithID> clips_with_id;
260         for (const Clip &clip : clips) {
261                 clips_with_id.emplace_back(ClipWithID{ clip, 0 });
262         }
263         TimeRemaining total_length = compute_total_time(clips_with_id);
264
265         promise<void> done_promise;
266         future<void> done = done_promise.get_future();
267         std::atomic<double> current_value{ 0.0 };
268
269         Player player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release());
270         player.set_done_callback([&done_promise] {
271                 done_promise.set_value();
272         });
273         player.set_progress_callback([&current_value, total_length](const std::map<uint64_t, double> &player_progress, TimeRemaining time_remaining) {
274                 current_value = 1.0 - time_remaining.t / total_length.t;  // Nothing to do about the infinite clips.
275         });
276         player.play(clips_with_id);
277         while (done.wait_for(std::chrono::milliseconds(100)) != future_status::ready && !progress.wasCanceled()) {
278                 progress.setValue(lrint(100000.0 * current_value));
279         }
280         if (progress.wasCanceled()) {
281                 unlink(filename.c_str());
282                 // Destroying player on scope exit will abort the render job.
283         }
284 }