-#include <assert.h>
+#include "mux.h"
+#include <assert.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
#include <algorithm>
#include <mutex>
#include <string>
+#include <utility>
#include <vector>
+extern "C" {
+#include <libavformat/avio.h>
+#include <libavutil/avutil.h>
+#include <libavutil/dict.h>
+#include <libavutil/mathematics.h>
+#include <libavutil/mem.h>
+#include <libavutil/pixfmt.h>
+#include <libavutil/rational.h>
+}
+
#include "defs.h"
-#include "mux.h"
+#include "flags.h"
+#include "metrics.h"
#include "timebase.h"
using namespace std;
const AVFormatContext * const ctx;
};
-Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, int time_base)
-	: avctx(avctx)
+// Takes over ownership of <avctx> (it is freed in the destructor).
+// <write_callback>, if non-null, is invoked with the pts of every video
+// packet written; every MuxMetrics in <metrics> gets its byte counters
+// updated as data flows through the mux.
+Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, int time_base, std::function<void(int64_t)> write_callback, const vector<MuxMetrics *> &metrics)
+	: avctx(avctx), write_callback(write_callback), metrics(metrics)
{
	avstream_video = avformat_new_stream(avctx, nullptr);
	if (avstream_video == nullptr) {
	// as noted in each comment.
	// Note that the H.264 stream also contains this information and depending on the
	// mux, this might simply get ignored. See sps_rbsp().
+	// Note that this cannot be changed per-frame, even though the H.264
+	// stream itself would be capable of signaling it per-frame.
	avstream_video->codecpar->color_primaries = AVCOL_PRI_BT709; // RGB colorspace (inout_format.color_space).
	avstream_video->codecpar->color_trc = AVCOL_TRC_UNSPECIFIED; // Gamma curve (inout_format.gamma_curve).
-	avstream_video->codecpar->color_space = AVCOL_SPC_SMPTE170M; // YUV colorspace (output_ycbcr_format.luma_coefficients).
+	// YUV colorspace (output_ycbcr_format.luma_coefficients).
+	if (global_flags.ycbcr_rec709_coefficients) {
+		avstream_video->codecpar->color_space = AVCOL_SPC_BT709;
+	} else {
+		avstream_video->codecpar->color_space = AVCOL_SPC_SMPTE170M;
+	}
	avstream_video->codecpar->color_range = AVCOL_RANGE_MPEG; // Full vs. limited range (output_ycbcr_format.full_range).
	avstream_video->codecpar->chroma_location = AVCHROMA_LOC_LEFT; // Chroma sample location. See chroma_offset_0[] in Mixer::subsample_chroma().
	avstream_video->codecpar->field_order = AV_FIELD_PROGRESSIVE;
	fprintf(stderr, "avformat_write_header() failed\n");
	exit(1);
	}
+	// The muxer header has now been written; credit its size (the current
+	// byte position of the output) to all registered metrics.
+	for (MuxMetrics *metric : metrics) {
+		metric->metric_written_bytes += avctx->pb->pos;
+	}
	// Make sure the header is written before the constructor exits.
	avio_flush(avctx->pb);
Mux::~Mux()
{
+	// Remember where the output ended before the trailer, so that the
+	// trailer bytes can be credited to the metrics below.
+	// NOTE(review): this assumes avctx->pb is non-null here; presumably
+	// true for the muxers in use, but confirm for AVFMT_NOFILE outputs
+	// (which the close-guard below explicitly allows for).
+	int64_t old_pos = avctx->pb->pos;
	av_write_trailer(avctx);
-	avio_closep(&avctx->pb);
+	for (MuxMetrics *metric : metrics) {
+		metric->metric_written_bytes += avctx->pb->pos - old_pos;
+	}
+
+	// Only close the AVIOContext if we actually own it: NOFILE muxers do
+	// not use one, and with custom I/O, the caller is responsible for it.
+	if (!(avctx->oformat->flags & AVFMT_NOFILE) &&
+	    !(avctx->flags & AVFMT_FLAG_CUSTOM_IO)) {
+		avio_closep(&avctx->pb);
+	}
	avformat_free_context(avctx);
}
}
	av_packet_unref(&pkt_copy);
+
+	// Note: This will be wrong in the case of plugged packets, but that only happens
+	// for network streams, not for files, and write callbacks are only really relevant
+	// for files. (We don't want to do this from write_packet_or_die, as it only has
+	// the rescaled pts, which is unsuitable for callback.)
+	// Stream index 0 is the video stream (cf. write_packet_or_die).
+	if (pkt.stream_index == 0 && write_callback != nullptr) {
+		write_callback(pts);
+	}
}
+// Write a single packet to the mux, exiting the process on failure.
+// Also updates the per-stream and total byte metrics as a side effect.
void Mux::write_packet_or_die(const AVPacket &pkt)
{
+	// Account the packet payload to the right per-stream counter.
+	// Stream 0 is video and stream 1 is audio; anything else is a bug.
+	for (MuxMetrics *metric : metrics) {
+		if (pkt.stream_index == 0) {
+			metric->metric_video_bytes += pkt.size;
+		} else if (pkt.stream_index == 1) {
+			metric->metric_audio_bytes += pkt.size;
+		} else {
+			assert(false);
+		}
+	}
+	// Measure how much actually hit the output (container overhead
+	// included) by comparing the byte position before and after; the
+	// flush below makes sure the position reflects this packet.
+	int64_t old_pos = avctx->pb->pos;
	if (av_interleaved_write_frame(avctx, const_cast<AVPacket *>(&pkt)) < 0) {
		fprintf(stderr, "av_interleaved_write_frame() failed\n");
		exit(1);
	}
	avio_flush(avctx->pb);
+	for (MuxMetrics *metric : metrics) {
+		metric->metric_written_bytes += avctx->pb->pos - old_pos;
+	}
}
void Mux::plug()
}
plugged_packets.clear();
}
+
+// Register this instance's byte counters with the global metrics
+// registry. The caller supplies its own base labels; a "stream" label is
+// appended so that the video and audio counters can be told apart.
+void MuxMetrics::init(const vector<pair<string, string>> &labels)
+{
+	vector<pair<string, string>> video_labels = labels;
+	vector<pair<string, string>> audio_labels = labels;
+	video_labels.emplace_back("stream", "video");
+	audio_labels.emplace_back("stream", "audio");
+
+	global_metrics.add("mux_stream_bytes", video_labels, &metric_video_bytes);
+	global_metrics.add("mux_stream_bytes", audio_labels, &metric_audio_bytes);
+	global_metrics.add("mux_written_bytes", labels, &metric_written_bytes);
+}