X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=nageru%2Fvideo_encoder.cpp;h=29f31050e084a4e9527407cbdfc6823c0a7330d4;hb=e2b654d6a8cc8c64142a9a8ef8bcd82e9d9a9289;hp=7e4f09f78cb2112a81c1d9b246d36c7d6a2e987b;hpb=d9026465e91a6df3dadf62f9e4b0e6df7d2306e3;p=nageru

diff --git a/nageru/video_encoder.cpp b/nageru/video_encoder.cpp
index 7e4f09f..29f3105 100644
--- a/nageru/video_encoder.cpp
+++ b/nageru/video_encoder.cpp
@@ -4,6 +4,9 @@
 #include
 #include
 #include
+#include
+#include
+#include
 #include
 #include

@@ -12,6 +15,9 @@ extern "C" {
 }

 #include "audio_encoder.h"
+#ifdef HAVE_AV1
+#include "av1_encoder.h"
+#endif
 #include "defs.h"
 #include "shared/ffmpeg_raii.h"
 #include "flags.h"
@@ -51,6 +57,10 @@ string generate_local_dump_filename(int frame)
 VideoEncoder::VideoEncoder(ResourcePool *resource_pool, QSurface *surface, const std::string &va_display, int width, int height, HTTPD *httpd, DiskSpaceEstimator *disk_space_estimator)
         : resource_pool(resource_pool), surface(surface), va_display(va_display), width(width), height(height), httpd(httpd), disk_space_estimator(disk_space_estimator)
 {
+        // TODO: If we're outputting AV1, we can't use MPEG-TS currently.
+        srt_oformat = av_guess_format("mpegts", nullptr, nullptr);
+        assert(srt_oformat != nullptr);
+
         oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
         assert(oformat != nullptr);
         if (global_flags.stream_audio_codec_name.empty()) {
@@ -59,24 +69,46 @@ VideoEncoder::VideoEncoder(ResourcePool *resource_pool, QSurface *surface, const
                 stream_audio_encoder.reset(new AudioEncoder(global_flags.stream_audio_codec_name, global_flags.stream_audio_codec_bitrate, oformat));
         }
         if (global_flags.x264_video_to_http || global_flags.x264_video_to_disk) {
-                x264_encoder.reset(new X264Encoder(oformat));
+                x264_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/false));
+        }
+        VideoCodecInterface *http_encoder = x264_encoder.get();
+        VideoCodecInterface *disk_encoder = x264_encoder.get();
+#ifdef HAVE_AV1
+        if (global_flags.av1_video_to_http) {
+                av1_encoder.reset(new AV1Encoder(oformat));
+                http_encoder = av1_encoder.get();
+        }
+#endif
+        if (global_flags.x264_separate_disk_encode) {
+                x264_disk_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/true));
+                disk_encoder = x264_disk_encoder.get();
         }

         string filename = generate_local_dump_filename(/*frame=*/0);
-        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, x264_encoder.get(), disk_space_estimator));
+        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, http_encoder, disk_encoder, disk_space_estimator));

-        open_output_stream();
-        stream_audio_encoder->add_mux(stream_mux.get());
-        quicksync_encoder->set_stream_mux(stream_mux.get());
+        open_output_streams();
+        stream_audio_encoder->add_mux(http_mux.get());
+        stream_audio_encoder->add_mux(srt_mux.get());
+        quicksync_encoder->set_http_mux(http_mux.get());
+        quicksync_encoder->set_srt_mux(srt_mux.get());
         if (global_flags.x264_video_to_http) {
-                x264_encoder->add_mux(stream_mux.get());
+                x264_encoder->add_mux(http_mux.get());
+                x264_encoder->add_mux(srt_mux.get());
+        }
+#ifdef HAVE_AV1
+        if (global_flags.av1_video_to_http) {
+                av1_encoder->add_mux(http_mux.get());
+                av1_encoder->add_mux(srt_mux.get());
         }
+#endif
 }

 VideoEncoder::~VideoEncoder()
 {
         quicksync_encoder->shutdown();
         x264_encoder.reset(nullptr);
+        x264_disk_encoder.reset(nullptr);
         quicksync_encoder->close_file();
         quicksync_encoder.reset(nullptr);
         while (quicksync_encoders_in_shutdown.load() > 0) {
@@ -95,19 +127,24 @@ void VideoEncoder::do_cut(int frame)
         // However, since this means both encoders could be sending packets at
         // the same time, it means pts could come out of order to the stream mux,
         // and we need to plug it until the shutdown is complete.
-        stream_mux->plug();
+        http_mux->plug();
         lock(qs_mu, qs_audio_mu);
         lock_guard lock1(qs_mu, adopt_lock), lock2(qs_audio_mu, adopt_lock);
         QuickSyncEncoder *old_encoder = quicksync_encoder.release();  // When we go C++14, we can use move capture instead.
         X264Encoder *old_x264_encoder = nullptr;
+        X264Encoder *old_x264_disk_encoder = nullptr;
         if (global_flags.x264_video_to_disk) {
                 old_x264_encoder = x264_encoder.release();
         }
-        thread([old_encoder, old_x264_encoder, this]{
+        if (global_flags.x264_separate_disk_encode) {
+                old_x264_disk_encoder = x264_disk_encoder.release();
+        }
+        thread([old_encoder, old_x264_encoder, old_x264_disk_encoder, this]{
                 old_encoder->shutdown();
                 delete old_x264_encoder;
+                delete old_x264_disk_encoder;
                 old_encoder->close_file();
-                stream_mux->unplug();
+                http_mux->unplug();

                 // We cannot delete the encoder here, as this thread has no OpenGL context.
                 // We'll deal with it in begin_frame().
@@ -116,17 +153,24 @@ void VideoEncoder::do_cut(int frame)
         }).detach();

         if (global_flags.x264_video_to_disk) {
-                x264_encoder.reset(new X264Encoder(oformat));
+                x264_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/false));
+                assert(global_flags.x264_video_to_http);
                 if (global_flags.x264_video_to_http) {
-                        x264_encoder->add_mux(stream_mux.get());
+                        x264_encoder->add_mux(http_mux.get());
                 }
                 if (overriding_bitrate != 0) {
                         x264_encoder->change_bitrate(overriding_bitrate);
                 }
         }
+        X264Encoder *http_encoder = x264_encoder.get();
+        X264Encoder *disk_encoder = x264_encoder.get();
+        if (global_flags.x264_separate_disk_encode) {
+                x264_disk_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/true));
+                disk_encoder = x264_disk_encoder.get();
+        }

-        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, x264_encoder.get(), disk_space_estimator));
-        quicksync_encoder->set_stream_mux(stream_mux.get());
+        quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, http_encoder, disk_encoder, disk_space_estimator));
+        quicksync_encoder->set_http_mux(http_mux.get());
 }

 void VideoEncoder::change_x264_bitrate(unsigned rate_kbit)
@@ -169,34 +213,54 @@ RefCountedGLsync VideoEncoder::end_frame()
         return quicksync_encoder->end_frame();
 }

-void VideoEncoder::open_output_stream()
+void VideoEncoder::open_output_streams()
 {
-        AVFormatContext *avctx = avformat_alloc_context();
-        avctx->oformat = oformat;
+        for (bool is_srt : {false, true}) {
+                if (is_srt && global_flags.srt_destination_host.empty()) {
+                        continue;
+                }

-        uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-        avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
-        avctx->pb->write_data_type = &VideoEncoder::write_packet2_thunk;
-        avctx->pb->ignore_boundary_point = 1;
+                AVFormatContext *avctx = avformat_alloc_context();
+                avctx->oformat = is_srt ? srt_oformat : oformat;

-        Mux::Codec video_codec;
-        if (global_flags.uncompressed_video_to_http) {
-                video_codec = Mux::CODEC_NV12;
-        } else {
-                video_codec = Mux::CODEC_H264;
-        }
+                uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+                avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+                if (is_srt) {
+                        avctx->pb->write_packet = &VideoEncoder::write_srt_packet_thunk;
+                } else {
+                        avctx->pb->write_data_type = &VideoEncoder::write_packet2_thunk;
+                        avctx->pb->ignore_boundary_point = 1;
+                }

-        avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+                Mux::Codec video_codec;
+                if (global_flags.av1_video_to_http) {
+                        video_codec = Mux::CODEC_AV1;
+                } else {
+                        video_codec = Mux::CODEC_H264;
+                }

-        string video_extradata;
-        if (global_flags.x264_video_to_http || global_flags.x264_video_to_disk) {
-                video_extradata = x264_encoder->get_global_headers();
-        }
+                avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+
+                string video_extradata;
+                if (global_flags.x264_video_to_http) {
+                        video_extradata = x264_encoder->get_global_headers();
+#ifdef HAVE_AV1
+                } else if (global_flags.av1_video_to_http) {
+                        video_extradata = av1_encoder->get_global_headers();
+#endif
+                }

-        stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, stream_audio_encoder->get_codec_parameters().get(),
-                get_color_space(global_flags.ycbcr_rec709_coefficients), COARSE_TIMEBASE,
-                /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, { &stream_mux_metrics }));
-        stream_mux_metrics.init({{ "destination", "http" }});
+                Mux *mux = new Mux(avctx, width, height, video_codec, video_extradata, stream_audio_encoder->get_codec_parameters().get(),
+                        get_color_space(global_flags.ycbcr_rec709_coefficients), COARSE_TIMEBASE,
+                        /*write_callback=*/nullptr, is_srt ? Mux::WRITE_BACKGROUND : Mux::WRITE_FOREGROUND, { is_srt ? &srt_mux_metrics : &http_mux_metrics });
+                if (is_srt) {
+                        srt_mux.reset(mux);
+                        srt_mux_metrics.init({{ "destination", "srt" }});
+                } else {
+                        http_mux.reset(mux);
+                        http_mux_metrics.init({{ "destination", "http" }});
+                }
+        }
 }

 int VideoEncoder::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
@@ -216,10 +280,134 @@ int VideoEncoder::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType t
         }

         if (type == AVIO_DATA_MARKER_HEADER) {
-                stream_mux_header.append((char *)buf, buf_size);
-                httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
+                http_mux_header.append((char *)buf, buf_size);
+                httpd->set_header(HTTPD::StreamID{ HTTPD::MAIN_STREAM, 0 }, http_mux_header);
+        } else {
+                httpd->add_data(HTTPD::StreamID{ HTTPD::MAIN_STREAM, 0 }, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+        }
+        return buf_size;
+}
+
+int VideoEncoder::write_srt_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
+{
+        VideoEncoder *video_encoder = (VideoEncoder *)opaque;
+        return video_encoder->write_srt_packet(buf, buf_size);
+}
+
+static string print_addrinfo(const addrinfo *ai)
+{
+        char hoststr[NI_MAXHOST], portstr[NI_MAXSERV];
+        if (getnameinfo(ai->ai_addr, ai->ai_addrlen, hoststr, sizeof(hoststr), portstr, sizeof(portstr), NI_DGRAM | NI_NUMERICHOST | NI_NUMERICSERV) != 0) {
+                return "";  // Should basically never happen, since we're not doing DNS lookups.
+        }
+
+        if (ai->ai_family == AF_INET6) {
+                return string("[") + hoststr + "]:" + portstr;
         } else {
-                httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+                return string(hoststr) + ":" + portstr;
+        }
+}
+
+int VideoEncoder::open_srt_socket()
+{
+        int sock = srt_create_socket();
+        if (sock == -1) {
+                fprintf(stderr, "srt_create_socket(): %s\n", srt_getlasterror_str());
+                return -1;
+        }
+
+        SRT_TRANSTYPE live = SRTT_LIVE;
+        if (srt_setsockopt(sock, 0, SRTO_TRANSTYPE, &live, sizeof(live)) < 0) {
+                fprintf(stderr, "srt_setsockopt(SRTO_TRANSTYPE): %s\n", srt_getlasterror_str());
+                srt_close(sock);
+                return -1;
+        }
+
+        if (srt_setsockopt(sock, 0, SRTO_LATENCY, &global_flags.srt_output_latency, sizeof(global_flags.srt_output_latency)) < 0) {
+                fprintf(stderr, "srt_setsockopt(SRTO_LATENCY): %s\n", srt_getlasterror_str());
+                srt_close(sock);
+                return -1;
+        }
+
+        if (!global_flags.srt_streamid.empty()) {
+                if (srt_setsockopt(sock, 0, SRTO_STREAMID, global_flags.srt_streamid.data(), global_flags.srt_streamid.size()) < 0) {
+                        fprintf(stderr, "srt_setsockopt(SRTO_STREAMID): %s\n", srt_getlasterror_str());
+                        srt_close(sock);
+                        return -1;
+                }
+        }
+
+        if (!global_flags.srt_passphrase.empty()) {
+                if (srt_setsockopt(sock, 0, SRTO_PASSPHRASE, global_flags.srt_passphrase.data(), global_flags.srt_passphrase.size()) < 0) {
+                        fprintf(stderr, "srt_setsockopt(SRTO_PASSPHRASE): %s\n", srt_getlasterror_str());
+                        srt_close(sock);
+                        return -1;
+                }
+        }
+
+        return sock;
+}
+
+int VideoEncoder::connect_to_srt()
+{
+        // We need to specify SOCK_DGRAM as a hint, or we'll get all addresses
+        // three times (for each of TCP, UDP, raw).
+        addrinfo hints;
+        memset(&hints, 0, sizeof(hints));
+        hints.ai_flags = AI_ADDRCONFIG;
+        hints.ai_socktype = SOCK_DGRAM;
+
+        addrinfo *ai;
+        int ret = getaddrinfo(global_flags.srt_destination_host.c_str(), global_flags.srt_destination_port.c_str(), &hints, &ai);
+        if (ret != 0) {
+                fprintf(stderr, "getaddrinfo(%s:%s): %s\n", global_flags.srt_destination_host.c_str(), global_flags.srt_destination_port.c_str(), gai_strerror(ret));
+                return -1;
+        }
+
+        for (const addrinfo *cur = ai; cur != nullptr; cur = cur->ai_next) {
+                // Seemingly, srt_create_socket() isn't universal; once we try to connect,
+                // it gets locked to either IPv4 or IPv6. So we need to create a new one
+                // for every address we try.
+                int sock = open_srt_socket();
+                if (sock == -1) {
+                        // Die immediately.
+                        return sock;
+                }
+                if (srt_connect(sock, cur->ai_addr, cur->ai_addrlen) < 0) {
+                        fprintf(stderr, "srt_connect(%s): %s\n", print_addrinfo(cur).c_str(), srt_getlasterror_str());
+                        srt_close(sock);
+                        continue;
+                }
+                fprintf(stderr, "Connected to destination SRT endpoint at %s.\n", print_addrinfo(cur).c_str());
+                freeaddrinfo(ai);
+                return sock;
+        }
+
+        // Out of candidates, so give up.
+        freeaddrinfo(ai);
+        return -1;
+}
+
+int VideoEncoder::write_srt_packet(uint8_t *buf, int buf_size)
+{
+        while (buf_size > 0) {
+                if (srt_sock == -1) {
+                        srt_sock = connect_to_srt();
+                        if (srt_sock == -1) {
+                                usleep(100000);
+                                continue;
+                        }
+                }
+                int to_send = min(buf_size, SRT_LIVE_DEF_PLSIZE);
+                int ret = srt_send(srt_sock, (char *)buf, to_send);
+                if (ret < 0) {
+                        fprintf(stderr, "srt_send(): %s\n", srt_getlasterror_str());
+                        srt_close(srt_sock);
+                        srt_sock = connect_to_srt();
+                        continue;
+                }
+                buf += ret;
+                buf_size -= ret;
         }
         return buf_size;
 }
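
Note (illustrative, not part of the patch above): VideoEncoder::write_srt_packet() treats the muxed MPEG-TS byte stream as opaque data, reconnects lazily whenever the SRT socket is gone, and sends in chunks of at most SRT_LIVE_DEF_PLSIZE bytes per srt_send() call, since live-mode SRT sockets limit the payload size of a single send. The sketch below tries the same send pattern in isolation. It only uses libsrt calls that also appear in the patch (plus srt_startup()/srt_cleanup()); the file name, build line, the 127.0.0.1:9710 destination and the dummy payload are made-up placeholders, and in Nageru the destination comes from global_flags.srt_destination_host / srt_destination_port instead.

// srt_send_sketch.cpp -- hypothetical standalone example (build assumption: g++ srt_send_sketch.cpp -lsrt).
#include <arpa/inet.h>
#include <netinet/in.h>
#include <srt/srt.h>

#include <algorithm>
#include <cstdio>
#include <cstring>
#include <vector>

using namespace std;

int main()
{
        srt_startup();  // Initialize libsrt before any other call.

        int sock = srt_create_socket();
        if (sock == -1) {
                fprintf(stderr, "srt_create_socket(): %s\n", srt_getlasterror_str());
                return 1;
        }

        // Live mode, as in open_srt_socket() in the patch.
        SRT_TRANSTYPE live = SRTT_LIVE;
        srt_setsockopt(sock, 0, SRTO_TRANSTYPE, &live, sizeof(live));

        // Placeholder destination (Nageru resolves global_flags.srt_destination_host/port instead).
        sockaddr_in sa;
        memset(&sa, 0, sizeof(sa));
        sa.sin_family = AF_INET;
        sa.sin_port = htons(9710);
        inet_pton(AF_INET, "127.0.0.1", &sa.sin_addr);

        if (srt_connect(sock, (const sockaddr *)&sa, sizeof(sa)) == SRT_ERROR) {
                fprintf(stderr, "srt_connect(): %s\n", srt_getlasterror_str());
                srt_close(sock);
                srt_cleanup();
                return 1;
        }

        // Send a dummy buffer in SRT_LIVE_DEF_PLSIZE-sized chunks, mirroring the
        // loop in write_srt_packet() (minus its reconnect handling).
        vector<char> payload(10000, 0);
        const char *buf = payload.data();
        int buf_size = (int)payload.size();
        while (buf_size > 0) {
                int to_send = min(buf_size, SRT_LIVE_DEF_PLSIZE);
                int ret = srt_send(sock, buf, to_send);
                if (ret == SRT_ERROR) {
                        fprintf(stderr, "srt_send(): %s\n", srt_getlasterror_str());
                        break;
                }
                buf += ret;
                buf_size -= ret;
        }

        srt_close(sock);
        srt_cleanup();
}

On the receiving side, any SRT listener that can demux MPEG-TS should do; for a quick check, an FFmpeg build with libsrt can play such a stream with something like ffplay 'srt://0.0.0.0:9710?mode=listener', the port being whatever the sender connects to.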