#include "shared/mux.h"

#include <algorithm>
#include <assert.h>
#include <mutex>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>
#include <utility>
#include <vector>

extern "C" {
#include <libavformat/avio.h>
#include <libavutil/avutil.h>
#include <libavutil/dict.h>
#include <libavutil/mathematics.h>
#include <libavutil/mem.h>
#include <libavutil/pixfmt.h>
#include <libavutil/rational.h>
}

#include "shared/metrics.h"
#include "shared/shared_defs.h"
#include "shared/timebase.h"

using namespace std;

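// Orders queued packets by DTS (falling back to PTS when DTS is unset),
// comparing correctly across streams with different timebases; ties are
// broken by PTS.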
struct PacketBefore {
        PacketBefore(const AVFormatContext *ctx) : ctx(ctx) {}

        bool operator() (const Mux::QueuedPacket &a_qp, const Mux::QueuedPacket &b_qp) const {
                const AVPacket *a = a_qp.pkt;
                const AVPacket *b = b_qp.pkt;
                int64_t a_dts = (a->dts == AV_NOPTS_VALUE ? a->pts : a->dts);
                int64_t b_dts = (b->dts == AV_NOPTS_VALUE ? b->pts : b->dts);
                AVRational a_timebase = ctx->streams[a->stream_index]->time_base;
                AVRational b_timebase = ctx->streams[b->stream_index]->time_base;
                if (av_compare_ts(a_dts, a_timebase, b_dts, b_timebase) != 0) {
                        return av_compare_ts(a_dts, a_timebase, b_dts, b_timebase) < 0;
                } else {
                        return av_compare_ts(a->pts, a_timebase, b->pts, b_timebase) < 0;
                }
        }

        const AVFormatContext * const ctx;
};

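// Sets up the output streams (video, plus audio if codec parameters are given),
// copies extradata and colorspace metadata, writes the header using MUX_OPTS,
// and starts the background writer thread if WRITE_BACKGROUND was requested.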
Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, function<void(int64_t)> write_callback, WriteStrategy write_strategy, const vector<MuxMetrics *> &metrics)
        : write_strategy(write_strategy), avctx(avctx), write_callback(write_callback), metrics(metrics)
{
        AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
        if (avstream_video == nullptr) {
                fprintf(stderr, "avformat_new_stream() failed\n");
                exit(1);
        }
        avstream_video->time_base = AVRational{1, time_base};
        avstream_video->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        if (video_codec == CODEC_H264) {
                avstream_video->codecpar->codec_id = AV_CODEC_ID_H264;
        } else if (video_codec == CODEC_NV12) {
                avstream_video->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
                avstream_video->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag(AV_PIX_FMT_NV12);
        } else {
                assert(video_codec == CODEC_MJPEG);
                avstream_video->codecpar->codec_id = AV_CODEC_ID_MJPEG;
        }
        avstream_video->codecpar->width = width;
        avstream_video->codecpar->height = height;

        // Colorspace details. Closely correspond to settings in EffectChain_finalize,
        // as noted in each comment.
        // Note that the H.264 stream also contains this information and depending on the
        // mux, this might simply get ignored. See sps_rbsp().
        // Note that there's no way to change this per-frame as the H.264 stream
        // would like to be able to.
        avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709;  // RGB colorspace (inout_format.color_space).
        avstream_video->codecpar->color_trc = AVCOL_TRC_IEC61966_2_1;  // Gamma curve (inout_format.gamma_curve).
        // YUV colorspace (output_ycbcr_format.luma_coefficients).
        avstream_video->codecpar->color_space = color_space;
        avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG;  // Full vs. limited range (output_ycbcr_format.full_range).
        avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT;  // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
        avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;

        if (!video_extradata.empty()) {
                avstream_video->codecpar->extradata = (uint8_t *)av_malloc(video_extradata.size());
                avstream_video->codecpar->extradata_size = video_extradata.size();
                memcpy(avstream_video->codecpar->extradata, video_extradata.data(), video_extradata.size());
        }
        streams.push_back(avstream_video);

        if (audio_codecpar != nullptr) {
                AVStream *avstream_audio = avformat_new_stream(avctx, nullptr);
                if (avstream_audio == nullptr) {
                        fprintf(stderr, "avformat_new_stream() failed\n");
                        exit(1);
                }
                avstream_audio->time_base = AVRational{1, time_base};
                if (avcodec_parameters_copy(avstream_audio->codecpar, audio_codecpar) < 0) {
                        fprintf(stderr, "avcodec_parameters_copy() failed\n");
                        exit(1);
                }
                streams.push_back(avstream_audio);
        }

        AVDictionary *options = NULL;
        vector<pair<string, string>> opts = MUX_OPTS;
        for (pair<string, string> opt : opts) {
                av_dict_set(&options, opt.first.c_str(), opt.second.c_str(), 0);
        }
        if (avformat_write_header(avctx, &options) < 0) {
                fprintf(stderr, "avformat_write_header() failed\n");
                exit(1);
        }
        for (MuxMetrics *metric : metrics) {
                metric->metric_written_bytes += avctx->pb->pos;
        }

        // Make sure the header is written before the constructor exits.
        avio_flush(avctx->pb);

        if (write_strategy == WRITE_BACKGROUND) {
                writer_thread = thread(&Mux::thread_func, this);
        }
}

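// Shuts down the background writer (if any), writes the trailer, updates the
// written-bytes metrics, and closes the output unless the format is NOFILE
// or the context uses custom I/O.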
Mux::~Mux()
{
        assert(plug_count == 0);
        if (write_strategy == WRITE_BACKGROUND) {
                writer_thread_should_quit = true;
                packet_queue_ready.notify_all();
                writer_thread.join();
        }
        int64_t old_pos = avctx->pb->pos;
        av_write_trailer(avctx);
        for (MuxMetrics *metric : metrics) {
                metric->metric_written_bytes += avctx->pb->pos - old_pos;
        }

        if (!(avctx->oformat->flags & AVFMT_NOFILE) &&
            !(avctx->flags & AVFMT_FLAG_CUSTOM_IO)) {
                avio_closep(&avctx->pb);
        }
        avformat_free_context(avctx);
}

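// Takes a reference-counted copy of the packet, rescales pts/dts/duration
// from the caller's timebase to the stream's timebase, and then either
// queues it (background writing, or while plugged) or writes it directly.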
void Mux::add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, AVRational timebase, int stream_index_override)
{
        AVPacket pkt_copy;
        av_init_packet(&pkt_copy);
        if (av_packet_ref(&pkt_copy, &pkt) < 0) {
                fprintf(stderr, "av_packet_ref() failed\n");
                exit(1);
        }
        if (stream_index_override != -1) {
                pkt_copy.stream_index = stream_index_override;
        }
        assert(size_t(pkt_copy.stream_index) < streams.size());
        AVRational time_base = streams[pkt_copy.stream_index]->time_base;
        pkt_copy.pts = av_rescale_q(pts, timebase, time_base);
        pkt_copy.dts = av_rescale_q(dts, timebase, time_base);
        pkt_copy.duration = av_rescale_q(pkt.duration, timebase, time_base);

        {
                lock_guard<mutex> lock(mu);
                if (write_strategy == WriteStrategy::WRITE_BACKGROUND) {
                        packet_queue.push_back(QueuedPacket{ av_packet_clone(&pkt_copy), pts });
                        if (plug_count == 0)
                                packet_queue_ready.notify_all();
                } else if (plug_count > 0) {
                        packet_queue.push_back(QueuedPacket{ av_packet_clone(&pkt_copy), pts });
                } else {
                        write_packet_or_die(pkt_copy, pts);
                }
        }

        av_packet_unref(&pkt_copy);
}

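// Writes a single packet, updating the per-stream and total written-bytes
// metrics, and aborts on write errors. The write callback is invoked for
// video packets only, with the packet's unscaled pts.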
void Mux::write_packet_or_die(const AVPacket &pkt, int64_t unscaled_pts)
{
        for (MuxMetrics *metric : metrics) {
                if (pkt.stream_index == 0) {
                        metric->metric_video_bytes += pkt.size;
                } else if (pkt.stream_index == 1) {
                        metric->metric_audio_bytes += pkt.size;
                } else {
                        assert(false);
                }
        }
        int64_t old_pos = avctx->pb->pos;
        if (av_interleaved_write_frame(avctx, const_cast<AVPacket *>(&pkt)) < 0) {
                fprintf(stderr, "av_interleaved_write_frame() failed\n");
                abort();
        }
        avio_flush(avctx->pb);
        for (MuxMetrics *metric : metrics) {
                metric->metric_written_bytes += avctx->pb->pos - old_pos;
        }

        if (pkt.stream_index == 0 && write_callback != nullptr) {
                write_callback(unscaled_pts);
        }
}

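// plug() and unplug() let the caller hold back output temporarily; while the
// plug count is nonzero, packets are only queued. When the last plug is
// released, the queue is sorted by timestamp and flushed (either directly,
// or by waking the writer thread).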
void Mux::plug()
{
        lock_guard<mutex> lock(mu);
        ++plug_count;
}

void Mux::unplug()
{
        lock_guard<mutex> lock(mu);
        if (--plug_count > 0) {
                return;
        }
        assert(plug_count >= 0);

        sort(packet_queue.begin(), packet_queue.end(), PacketBefore(avctx));

        if (write_strategy == WRITE_BACKGROUND) {
                packet_queue_ready.notify_all();
        } else {
                for (QueuedPacket &qp : packet_queue) {
                        write_packet_or_die(*qp.pkt, qp.unscaled_pts);
                        av_packet_free(&qp.pkt);
                }
                packet_queue.clear();
        }
}

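// Background writer: waits until packets are queued (and we are not plugged),
// then drains the queue and writes the packets with the mutex released.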
void Mux::thread_func()
{
        pthread_setname_np(pthread_self(), "Mux");

        unique_lock<mutex> lock(mu);
        for ( ;; ) {
                packet_queue_ready.wait(lock, [this]() {
                        return writer_thread_should_quit || (!packet_queue.empty() && plug_count == 0);
                });
                if (writer_thread_should_quit && packet_queue.empty()) {
                        // All done.
                        break;
                }

                assert(!packet_queue.empty() && plug_count == 0);
                vector<QueuedPacket> packets;
                swap(packets, packet_queue);

                lock.unlock();
                for (QueuedPacket &qp : packets) {
                        write_packet_or_die(*qp.pkt, qp.unscaled_pts);
                        av_packet_free(&qp.pkt);
                }
                lock.lock();
        }
}

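// Registers the byte counters with the global metrics registry, labeled per
// stream (video/audio), plus a total for bytes actually written to the mux.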
void MuxMetrics::init(const vector<pair<string, string>> &labels)
{
        vector<pair<string, string>> labels_video = labels;
        labels_video.emplace_back("stream", "video");
        global_metrics.add("mux_stream_bytes", labels_video, &metric_video_bytes);

        vector<pair<string, string>> labels_audio = labels;
        labels_audio.emplace_back("stream", "audio");
        global_metrics.add("mux_stream_bytes", labels_audio, &metric_audio_bytes);

        global_metrics.add("mux_written_bytes", labels, &metric_written_bytes);
}