// nageru: shared/mux.cpp
#include "shared/mux.h"

#include <algorithm>
#include <assert.h>
#include <mutex>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>
#include <utility>
#include <vector>

extern "C" {
#include <libavformat/avio.h>
#include <libavutil/avutil.h>
#include <libavutil/dict.h>
#include <libavutil/mathematics.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libavutil/rational.h>
}

#include "shared/metrics.h"
#include "shared/shared_defs.h"
#include "shared/timebase.h"

using namespace std;

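// Comparator used when flushing the plugged packet queue: orders packets by DTS
// (falling back to PTS where DTS is missing), with PTS as a tie-breaker, comparing
// across streams through each stream's own time base.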
struct PacketBefore {
	PacketBefore(const AVFormatContext *ctx) : ctx(ctx) {}

	bool operator() (const Mux::QueuedPacket &a_qp, const Mux::QueuedPacket &b_qp) const {
		const AVPacket *a = a_qp.pkt;
		const AVPacket *b = b_qp.pkt;
		int64_t a_dts = (a->dts == AV_NOPTS_VALUE ? a->pts : a->dts);
		int64_t b_dts = (b->dts == AV_NOPTS_VALUE ? b->pts : b->dts);
		AVRational a_timebase = ctx->streams[a->stream_index]->time_base;
		AVRational b_timebase = ctx->streams[b->stream_index]->time_base;
		if (av_compare_ts(a_dts, a_timebase, b_dts, b_timebase) != 0) {
			return av_compare_ts(a_dts, a_timebase, b_dts, b_timebase) < 0;
		} else {
			return av_compare_ts(a->pts, a_timebase, b->pts, b_timebase) < 0;
		}
	}

	const AVFormatContext * const ctx;
};

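// Sets up the output streams (video, plus optional audio and subtitles), writes the
// stream header with the options from MUX_OPTS, and optionally starts the background
// writer thread. Any failure during setup is treated as fatal and aborts the process.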
Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, function<void(int64_t)> write_callback, WriteStrategy write_strategy, const vector<MuxMetrics *> &metrics, WithSubtitles with_subtitles)
	: write_strategy(write_strategy), avctx(avctx), write_callback(write_callback), metrics(metrics)
{
	AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
	if (avstream_video == nullptr) {
		fprintf(stderr, "avformat_new_stream() failed\n");
		abort();
	}
	avstream_video->time_base = AVRational{1, time_base};
	avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
	if (video_codec == CODEC_H264) {
		avstream_video->codecpar->codec_id = AV_CODEC_ID_H264;
	} else if (video_codec == CODEC_NV12) {
		avstream_video->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
		avstream_video->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag(AV_PIX_FMT_NV12);
	} else {
		assert(video_codec == CODEC_MJPEG);
		avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
	}
	avstream_video->codecpar->width = width;
	avstream_video->codecpar->height = height;

	// Colorspace details. Closely correspond to settings in EffectChain_finalize,
	// as noted in each comment.
	// Note that the H.264 stream also contains this information and depending on the
	// mux, this might simply get ignored. See sps_rbsp().
	// Note that there's no way to change this per-frame as the H.264 stream
	// would like to be able to.
	avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709;  // RGB colorspace (inout_format.color_space).
	avstream_video->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1;  // Gamma curve (inout_format.gamma_curve).
	// YUV colorspace (output_ycbcr_format.luma_coefficients).
	avstream_video->codecpar->color_space = color_space;
	avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG;  // Full vs. limited range (output_ycbcr_format.full_range).
	avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT;  // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
	avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;

	if (!video_extradata.empty()) {
		avstream_video->codecpar->extradata = (uint8_t *)av_malloc(video_extradata.size());
		avstream_video->codecpar->extradata_size = video_extradata.size();
		memcpy(avstream_video->codecpar->extradata, video_extradata.data(), video_extradata.size());
	}
	streams.push_back(avstream_video);

	if (audio_codecpar != nullptr) {
		AVStream *avstream_audio = avformat_new_stream(avctx, nullptr);
		if (avstream_audio == nullptr) {
			fprintf(stderr, "avformat_new_stream() failed\n");
			abort();
		}
		avstream_audio->time_base = AVRational{1, time_base};
		if (avcodec_parameters_copy(avstream_audio->codecpar, audio_codecpar) < 0) {
			fprintf(stderr, "avcodec_parameters_copy() failed\n");
			abort();
		}
		streams.push_back(avstream_audio);
	}

	if (with_subtitles == WITH_SUBTITLES) {
		AVStream *avstream_subtitles = avformat_new_stream(avctx, nullptr);
		if (avstream_subtitles == nullptr) {
			fprintf(stderr, "avformat_new_stream() failed\n");
			abort();
		}
		avstream_subtitles->time_base = AVRational{1, time_base};
		avstream_subtitles->codecpar->codec_type = AVMEDIA_TYPE_SUBTITLE;
		avstream_subtitles->codecpar->codec_id = AV_CODEC_ID_WEBVTT;
		avstream_subtitles->disposition = AV_DISPOSITION_METADATA;
		streams.push_back(avstream_subtitles);
		subtitle_stream_idx = streams.size() - 1;
	}

	AVDictionary *options = NULL;
	vector<pair<string, string>> opts = MUX_OPTS;
	for (pair<string, string> opt : opts) {
		av_dict_set(&options, opt.first.c_str(), opt.second.c_str(), 0);
	}
	if (avformat_write_header(avctx, &options) < 0) {
		fprintf(stderr, "avformat_write_header() failed\n");
		abort();
	}
	for (MuxMetrics *metric : metrics) {
		metric->metric_written_bytes += avctx->pb->pos;
	}

	// Make sure the header is written before the constructor exits.
	avio_flush(avctx->pb);

	if (write_strategy == WRITE_BACKGROUND) {
		writer_thread = thread(&Mux::thread_func, this);
	}
}

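// Stops the background writer thread (if any), writes the trailer, accounts the
// final bytes in the metrics, and closes the output unless the caller supplied
// custom I/O.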
Mux::~Mux()
{
	assert(plug_count == 0);
	if (write_strategy == WRITE_BACKGROUND) {
		writer_thread_should_quit = true;
		packet_queue_ready.notify_all();
		writer_thread.join();
	}
	int64_t old_pos = avctx->pb->pos;
	av_write_trailer(avctx);
	for (MuxMetrics *metric : metrics) {
		metric->metric_written_bytes += avctx->pb->pos - old_pos;
	}

	if (!(avctx->oformat->flags & AVFMT_NOFILE) &&
	    !(avctx->flags & AVFMT_FLAG_CUSTOM_IO)) {
		avio_closep(&avctx->pb);
	}
	avformat_free_context(avctx);
}

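// Takes a reference to the packet, rescales its timestamps from the caller's time
// base to the destination stream's time base, and then either queues it (when
// writing in the background, or while plugged) or writes it out immediately.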
void Mux::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, AVRational timebase, int stream_index_override)
{
	AVPacket pkt_copy;
	av_init_packet(&pkt_copy);
	if (av_packet_ref(&pkt_copy, &pkt) < 0) {
		fprintf(stderr, "av_packet_ref() failed\n");
		abort();
	}
	if (stream_index_override != -1) {
		pkt_copy.stream_index = stream_index_override;
	}
	assert(size_t(pkt_copy.stream_index) < streams.size());
	AVRational time_base = streams[pkt_copy.stream_index]->time_base;
	pkt_copy.pts = av_rescale_q(pts, timebase, time_base);
	pkt_copy.dts = av_rescale_q(dts, timebase, time_base);
	pkt_copy.duration = av_rescale_q(pkt.duration, timebase, time_base);

	{
		lock_guard<mutex> lock(mu);
		if (write_strategy == WriteStrategy::WRITE_BACKGROUND) {
			packet_queue.push_back(QueuedPacket{ av_packet_clone(&pkt_copy), pts });
			if (plug_count == 0)
				packet_queue_ready.notify_all();
		} else if (plug_count > 0) {
			packet_queue.push_back(QueuedPacket{ av_packet_clone(&pkt_copy), pts });
		} else {
			write_packet_or_die(pkt_copy, pts);
		}
	}

	av_packet_unref(&pkt_copy);
}

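// Writes one packet through av_interleaved_write_frame(), updating the per-stream
// and total written-byte metrics, and invokes write_callback for video packets.
// Any write error is considered fatal and aborts the process.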
void Mux::write_packet_or_die(const AVPacket &pkt, int64_t unscaled_pts)
{
	for (MuxMetrics *metric : metrics) {
		if (pkt.stream_index == 0) {
			metric->metric_video_bytes += pkt.size;
		} else if (pkt.stream_index == 1) {
			metric->metric_audio_bytes += pkt.size;
		} else {
			assert(false);
		}
	}
	int64_t old_pos = avctx->pb->pos;
	if (av_interleaved_write_frame(avctx, const_cast<AVPacket *>(&pkt)) < 0) {
		fprintf(stderr, "av_interleaved_write_frame() failed\n");
		abort();
	}
	avio_flush(avctx->pb);
	for (MuxMetrics *metric : metrics) {
		metric->metric_written_bytes += avctx->pb->pos - old_pos;
	}

	if (pkt.stream_index == 0 && write_callback != nullptr) {
		write_callback(unscaled_pts);
	}
}

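// plug() holds back all subsequent packets; unplug() releases them again. While
// plugged, packets accumulate in packet_queue, and on the final unplug() they are
// re-sorted into mux order and either handed to the writer thread or written out
// directly, depending on the write strategy. Calls may be nested.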
void Mux::plug()
{
	lock_guard<mutex> lock(mu);
	++plug_count;
}

void Mux::unplug()
{
	lock_guard<mutex> lock(mu);
	if (--plug_count > 0) {
		return;
	}
	assert(plug_count >= 0);

	sort(packet_queue.begin(), packet_queue.end(), PacketBefore(avctx));

	if (write_strategy == WRITE_BACKGROUND) {
		packet_queue_ready.notify_all();
	} else {
		for (QueuedPacket &qp : packet_queue) {
			write_packet_or_die(*qp.pkt, qp.unscaled_pts);
			av_packet_free(&qp.pkt);
		}
		packet_queue.clear();
	}
}

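// Background writer: waits until there are queued packets (and no plug is active)
// or shutdown has been requested, then drains the queue and writes the packets with
// the mutex released, so that callers of add_packet() are not blocked on disk I/O.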
void Mux::thread_func()
{
	pthread_setname_np(pthread_self(), "Mux");

	unique_lock<mutex> lock(mu);
	for ( ;; ) {
		packet_queue_ready.wait(lock, [this]() {
			return writer_thread_should_quit || (!packet_queue.empty() && plug_count == 0);
		});
		if (writer_thread_should_quit && packet_queue.empty()) {
			// All done.
			break;
		}

		assert(!packet_queue.empty() && plug_count == 0);
		vector<QueuedPacket> packets;
		swap(packets, packet_queue);

		lock.unlock();
		for (QueuedPacket &qp : packets) {
			write_packet_or_die(*qp.pkt, qp.unscaled_pts);
			av_packet_free(&qp.pkt);
		}
		lock.lock();
	}
}

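// Registers this instance's byte counters with the global metrics registry; the same
// labels are used for all counters, with an extra "stream" label distinguishing
// video bytes from audio bytes.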
void MuxMetrics::init(const vector<pair<string, string>> &labels)
{
	vector<pair<string, string>> labels_video = labels;
	labels_video.emplace_back("stream", "video");
	global_metrics.add("mux_stream_bytes", labels_video, &metric_video_bytes);

	vector<pair<string, string>> labels_audio = labels;
	labels_audio.emplace_back("stream", "audio");
	global_metrics.add("mux_stream_bytes", labels_audio, &metric_audio_bytes);

	global_metrics.add("mux_written_bytes", labels, &metric_written_bytes);
}