6 #include "frame_on_disk.h"
8 #include "shared/ffmpeg_raii.h"
9 #include "shared/shared_defs.h"
10 #include "shared/timebase.h"
12 #include <QMessageBox>
13 #include <QProgressDialog>
19 #include <libavformat/avformat.h>
20 #include <libavutil/channel_layout.h>
27 // Only used in export_cliplist_clip_multitrack_triggered.
// One queued output packet: a frame's payload plus the muxer stream it
// belongs to, buffered so writes can be batched (see write_buffered_frames()).
// NOTE(review): this excerpt shows only part of the struct — the pts and
// data members referenced by write_buffered_frames() and the brace-init at
// the emplace_back() call sites are not visible here.
28 struct BufferedFrame {
// Index into avctx's streams; audio packets use first_frame_stream_idx +
// video_streams.size() (see the export loop below).
30 unsigned video_stream_idx;
// Flush a batch of queued packets to the muxer, in queue order.
// Returns true on success, false if an av_write_frame() call failed.
// NOTE(review): interior lines are missing from this excerpt (the AVPacket
// declaration/initialization and the return statements), so only the visible
// statements are documented.
34 bool write_buffered_frames(AVFormatContext *avctx, const vector<BufferedFrame> &buffered_frames)
36 for (const BufferedFrame &frame : buffered_frames) {
// Route the packet to the stream recorded when the frame was buffered.
39 pkt.stream_index = frame.video_stream_idx;
// The packet borrows the buffered payload; it is not copied, so
// buffered_frames must stay alive until av_write_frame() returns.
40 pkt.data = (uint8_t *)frame.data.data();
41 pkt.size = frame.data.size();
// Every frame is marked as a keyframe — consistent with MJPEG/PCM
// streams set up in export_multitrack_clip(), where each packet is
// independently decodable.
44 pkt.flags = AV_PKT_FLAG_KEY;
// av_write_frame() does not take ownership of pkt.data here.
46 if (av_write_frame(avctx, &pkt) < 0) {
// Export the given clip as a multi-track file (one MJPEG video stream and one
// PCM audio stream per input camera stream), interleaved by pts. Reports
// progress through a modal QProgressDialog and deletes the partial output
// file on any error or user cancel.
// NOTE(review): interior lines (error-message boxes' declarations, early
// returns, closing braces) are elided from this excerpt; only the visible
// statements are documented.
55 void export_multitrack_clip(const string &filename, const Clip &clip)
57 AVFormatContext *avctx = nullptr;
// Let FFmpeg guess the container format from the filename extension.
58 avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
59 if (avctx == nullptr) {
61 msgbox.setText("Could not allocate FFmpeg context");
// RAII wrapper; closes/frees avctx on scope exit (project type).
65 AVFormatContextWithCloser closer(avctx);
67 int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
70 msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
75 // Find the first frame for each stream.
76 size_t num_frames = 0;
77 size_t num_streams_with_frames_left = 0;
78 size_t last_stream_idx = 0;
79 FrameReader readers[MAX_STREAMS];
80 bool has_frames[MAX_STREAMS];
81 size_t first_frame_idx[MAX_STREAMS], last_frame_idx[MAX_STREAMS]; // Inclusive, exclusive.
// frames[] is shared state guarded by frame_mu (declared elsewhere).
83 lock_guard<mutex> lock(frame_mu);
84 for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
85 // Find the first frame such that frame.pts <= pts_in.
86 auto it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_in);
87 first_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
88 has_frames[stream_idx] = (it != frames[stream_idx].end());
90 // Find the first frame such that frame.pts >= pts_out.
91 it = find_first_frame_at_or_after(frames[stream_idx], clip.pts_out);
92 last_frame_idx[stream_idx] = distance(frames[stream_idx].begin(), it);
// Total across all streams; used as the progress-dialog maximum.
93 num_frames += last_frame_idx[stream_idx] - first_frame_idx[stream_idx];
95 if (has_frames[stream_idx]) {
96 ++num_streams_with_frames_left;
// Highest stream index with any frames; streams 0..last_stream_idx
// are all created below even if some are empty.
97 last_stream_idx = stream_idx;
102 // Create the streams. Note that some of them could be without frames
103 // (we try to maintain the stream indexes in the export).
104 vector<AVStream *> video_streams;
105 for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
106 AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
107 if (avstream_video == nullptr) {
108 fprintf(stderr, "avformat_new_stream() failed\n");
111 avstream_video->time_base = AVRational{ 1, TIMEBASE };
112 avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
113 avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
114 avstream_video->codecpar->width = global_flags.width; // Might be wrong, but doesn't matter all that much.
115 avstream_video->codecpar->height = global_flags.height;
117 // TODO: Deduplicate this against Mux.
118 avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709; // RGB colorspace (inout_format.color_space).
119 avstream_video->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1; // Gamma curve (inout_format.gamma_curve).
120 // YUV colorspace (output_ycbcr_format.luma_coefficients).
121 avstream_video->codecpar->color_space = AVCOL_SPC_BT709;
122 avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG; // Full vs. limited range (output_ycbcr_format.full_range).
123 avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT; // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
124 avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;
125 video_streams.push_back(avstream_video);
128 // Similar, for audio streams.
129 vector<AVStream *> audio_streams;
130 for (unsigned stream_idx = 0; stream_idx <= last_stream_idx; ++stream_idx) {
131 AVStream *avstream_audio = avformat_new_stream(avctx, nullptr);
132 if (avstream_audio == nullptr) {
133 fprintf(stderr, "avformat_new_stream() failed\n");
136 avstream_audio->time_base = AVRational{ 1, TIMEBASE };
137 avstream_audio->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
// Uncompressed 32-bit PCM, stereo (FFmpeg >= 5.1 ch_layout API).
138 avstream_audio->codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
139 avstream_audio->codecpar->ch_layout.order = AV_CHANNEL_ORDER_NATIVE;
140 avstream_audio->codecpar->ch_layout.nb_channels = 2;
141 avstream_audio->codecpar->ch_layout.u.mask = AV_CH_LAYOUT_STEREO;
142 avstream_audio->codecpar->sample_rate = OUTPUT_FREQUENCY;
143 audio_streams.push_back(avstream_audio);
146 if (avformat_write_header(avctx, nullptr) < 0) {
148 msgbox.setText("Writing header failed");
// Don't leave a truncated file behind on error.
150 unlink(filename.c_str());
154 QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
155 progress.setWindowTitle("Futatabi");
156 progress.setWindowModality(Qt::WindowModal);
// Only pop up the dialog if the export takes more than a second.
157 progress.setMinimumDuration(1000);
158 progress.setMaximum(num_frames);
159 progress.setValue(0);
161 // We buffer up to 1000 frames at a time, in a hope that we can reduce
162 // the amount of seeking needed on rotational media.
163 vector<BufferedFrame> buffered_frames;
164 size_t frames_written = 0;
// K-way merge across streams: repeatedly pick the globally lowest-pts
// frame so the output is interleaved in presentation order.
165 while (num_streams_with_frames_left > 0) {
166 // Find the stream with the lowest frame. Lower stream indexes win.
167 FrameOnDisk first_frame;
168 unsigned first_frame_stream_idx = 0;
// Re-acquire the lock each iteration; frames[] may be shared.
170 lock_guard<mutex> lock(frame_mu);
171 for (size_t stream_idx = 0; stream_idx < MAX_STREAMS; ++stream_idx) {
172 if (!has_frames[stream_idx]) {
// pts == -1 means "no candidate picked yet" — presumably
// FrameOnDisk default-initializes pts to -1; verify in its decl.
175 if (first_frame.pts == -1 || frames[stream_idx][first_frame_idx[stream_idx]].pts < first_frame.pts) {
176 first_frame = frames[stream_idx][first_frame_idx[stream_idx]];
177 first_frame_stream_idx = stream_idx;
// Consume the chosen frame; retire the stream when it runs out.
180 ++first_frame_idx[first_frame_stream_idx];
181 if (first_frame_idx[first_frame_stream_idx] >= last_frame_idx[first_frame_stream_idx]) {
182 has_frames[first_frame_stream_idx] = false;
183 --num_streams_with_frames_left;
187 FrameReader::Frame frame = readers[first_frame_stream_idx].read_frame(first_frame, /*read_video=*/true, /*read_audio=*/true);
189 // Write audio. (Before video, since that's what we expect on input.)
190 if (!frame.audio.empty()) {
// Audio streams follow all video streams in avctx's stream order.
191 unsigned audio_stream_idx = first_frame_stream_idx + video_streams.size();
192 int64_t scaled_audio_pts = av_rescale_q(first_frame.pts, AVRational{ 1, TIMEBASE },
193 audio_streams[first_frame_stream_idx]->time_base);
194 buffered_frames.emplace_back(BufferedFrame{ scaled_audio_pts, audio_stream_idx, std::move(frame.audio) });
198 unsigned video_stream_idx = first_frame_stream_idx;
199 int64_t scaled_video_pts = av_rescale_q(first_frame.pts, AVRational{ 1, TIMEBASE },
200 video_streams[first_frame_stream_idx]->time_base);
201 buffered_frames.emplace_back(BufferedFrame{ scaled_video_pts, video_stream_idx, std::move(frame.video) });
203 // Flush to disk if required.
204 if (buffered_frames.size() >= 1000) {
205 if (!write_buffered_frames(avctx, buffered_frames)) {
207 msgbox.setText("Writing frames failed");
209 unlink(filename.c_str());
212 frames_written += buffered_frames.size();
213 progress.setValue(frames_written);
214 buffered_frames.clear();
216 if (progress.wasCanceled()) {
// User abort: remove the partial file and bail out.
217 unlink(filename.c_str());
// Final flush of any remaining buffered packets after the merge loop.
222 if (!write_buffered_frames(avctx, buffered_frames)) {
224 msgbox.setText("Writing frames failed");
226 unlink(filename.c_str());
229 frames_written += buffered_frames.size();
230 progress.setValue(frames_written);
// Export the given clip list as a single rendered (interpolated) stream by
// driving a Player in FILE_STREAM_OUTPUT mode, polling its progress into a
// modal QProgressDialog until the render finishes or the user cancels.
// NOTE(review): this excerpt is cut off before the function's end, and some
// interior lines (message-box declarations, early returns, closing braces of
// the lambdas) are elided; only the visible statements are documented.
233 void export_interpolated_clip(const string &filename, const vector<Clip> &clips)
235 AVFormatContext *avctx = nullptr;
// Container format is guessed from the filename extension.
236 avformat_alloc_output_context2(&avctx, NULL, NULL, filename.c_str());
237 if (avctx == nullptr) {
239 msgbox.setText("Could not allocate FFmpeg context");
// RAII wrapper (project type); ownership is handed to Player below via
// closer.release(), so the wrapper only guards the error paths above it.
243 AVFormatContextWithCloser closer(avctx);
245 int ret = avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE);
248 msgbox.setText(QString::fromStdString("Could not open output file '" + filename + "'"));
253 QProgressDialog progress(QString::fromStdString("Exporting to " + filename + "..."), "Abort", 0, 1);
254 progress.setWindowTitle("Futatabi");
255 progress.setWindowModality(Qt::WindowModal);
// Only show the dialog if the export takes more than a second.
256 progress.setMinimumDuration(1000);
// Fixed-point progress scale: fraction-done is mapped onto 0..100000.
257 progress.setMaximum(100000);
258 progress.setValue(0);
260 vector<ClipWithID> clips_with_id;
261 for (const Clip &clip : clips) {
262 clips_with_id.emplace_back(ClipWithID{ clip, 0 });
264 TimeRemaining total_length = compute_total_time(clips_with_id);
// Rendezvous with the Player's render thread: the done callback fulfills
// the promise, and the loop below polls the matching future.
266 promise<void> done_promise;
267 future<void> done = done_promise.get_future();
// Written from the Player's progress callback, read from this (GUI)
// thread — hence atomic.
268 std::atomic<double> current_value{ 0.0 };
270 Player player(/*destination=*/nullptr, Player::FILE_STREAM_OUTPUT, closer.release());
271 player.set_done_callback([&done_promise] {
272 done_promise.set_value();
// NOTE(review): "[¤t_value, ...]" looks like encoding corruption — the
// capture was presumably "[&current_value, total_length]" ("&curren" got
// rendered as the '¤' sign). Fix before compiling; verify against upstream.
274 player.set_progress_callback([¤t_value, total_length](const std::map<uint64_t, double> &player_progress, TimeRemaining time_remaining) {
275 current_value = 1.0 - time_remaining.t / total_length.t; // Nothing to do about the infinite clips.
277 player.play(clips_with_id);
// Poll every 100 ms: update the dialog and check for user cancel until
// the render thread signals completion.
278 while (done.wait_for(std::chrono::milliseconds(100)) != future_status::ready && !progress.wasCanceled()) {
279 progress.setValue(lrint(100000.0 * current_value));
281 if (progress.wasCanceled()) {
// User abort: remove the partial output file.
282 unlink(filename.c_str());
283 // Destroying player on scope exit will abort the render job.