1 #include "video_encoder.h"
8 #include <sys/socket.h>
15 #include <libavutil/mem.h>
18 #include "audio_encoder.h"
20 #include "av1_encoder.h"
23 #include "shared/ffmpeg_raii.h"
25 #include "shared/httpd.h"
26 #include "shared/mux.h"
27 #include "quicksync_encoder.h"
28 #include "shared/timebase.h"
29 #include "x264_encoder.h"
31 class RefCountedFrame;
34 using namespace std::chrono;
35 using namespace movit;
// Construct the filename for a local disk recording:
// <recording_dir>/<LOCAL_DUMP_PREFIX><timestamp>-fNN<LOCAL_DUMP_SUFFIX>.
// The timestamp has one-second resolution, so the frame number (mod 100)
// is mixed in to keep two cuts within the same second distinct.
string generate_local_dump_filename(int frame)
	// Format the current local time, including the UTC offset (%z).
	time_t now = time(NULL);
	localtime_r(&now, &now_tm);
	strftime(timestamp, sizeof(timestamp), "%F-%H%M%S%z", &now_tm);
	// Use the frame number to disambiguate between two cuts starting
	// on the same second.
	snprintf(filename, sizeof(filename), "%s/%s%s-f%02d%s",
		global_flags.recording_dir.c_str(),
		LOCAL_DUMP_PREFIX, timestamp, frame % 100, LOCAL_DUMP_SUFFIX);
// Set up the entire output pipeline: container formats, the stream audio
// encoder, the software video encoders (x264 and/or AV1, per global_flags),
// the QuickSync encoder used for disk recording, and finally the HTTP/SRT
// muxes that all the encoders feed into.
VideoEncoder::VideoEncoder(ResourcePool *resource_pool, QSurface *surface, const std::string &va_display, int width, int height, HTTPD *httpd, DiskSpaceEstimator *disk_space_estimator)
	: resource_pool(resource_pool), surface(surface), va_display(va_display), width(width), height(height), httpd(httpd), disk_space_estimator(disk_space_estimator)
	// The SRT output always uses MPEG-TS, independent of the HTTP stream mux.
	// TODO: If we're outputting AV1, we can't use MPEG-TS currently.
	srt_oformat = av_guess_format("mpegts", nullptr, nullptr);
	assert(srt_oformat != nullptr);
	oformat = av_guess_format(global_flags.stream_mux_name.c_str(), nullptr, nullptr);
	assert(oformat != nullptr);
	// Default audio codec/bitrate unless overridden on the command line.
	if (global_flags.stream_audio_codec_name.empty()) {
		stream_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, oformat));
		stream_audio_encoder.reset(new AudioEncoder(global_flags.stream_audio_codec_name, global_flags.stream_audio_codec_bitrate, oformat));
	if (global_flags.x264_video_to_http || global_flags.x264_video_to_disk) {
		x264_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/false));
	// By default, the same x264 instance feeds both the HTTP stream and disk;
	// the flags below can redirect either of them.
	VideoCodecInterface *http_encoder = x264_encoder.get();
	VideoCodecInterface *disk_encoder = x264_encoder.get();
	if (global_flags.av1_video_to_http) {
		av1_encoder.reset(new AV1Encoder(oformat));
		http_encoder = av1_encoder.get();
	if (global_flags.x264_separate_disk_encode) {
		x264_disk_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/true));
		disk_encoder = x264_disk_encoder.get();
	string filename = generate_local_dump_filename(/*frame=*/0);
	quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, http_encoder, disk_encoder, disk_space_estimator));
	// Create the muxes, then hook every encoder up to the HTTP mux, and to
	// the SRT mux when SRT output is enabled (srt_mux is null otherwise).
	open_output_streams();
	stream_audio_encoder->add_mux(http_mux.get());
	if (srt_mux != nullptr) {
		stream_audio_encoder->add_mux(srt_mux.get());
	quicksync_encoder->set_http_mux(http_mux.get());
	if (srt_mux != nullptr) {
		quicksync_encoder->set_srt_mux(srt_mux.get());
	if (global_flags.x264_video_to_http) {
		x264_encoder->add_mux(http_mux.get());
		if (srt_mux != nullptr) {
			x264_encoder->add_mux(srt_mux.get());
	if (global_flags.av1_video_to_http) {
		av1_encoder->add_mux(http_mux.get());
		if (srt_mux != nullptr) {
			av1_encoder->add_mux(srt_mux.get());
// Tear down the pipeline in order, then wait for any background encoder
// shutdowns started by do_cut() to finish.
VideoEncoder::~VideoEncoder()
	quicksync_encoder->shutdown();
	// Destroy the x264 encoders before closing the recording file.
	// NOTE(review): the ordering here (x264 teardown between shutdown() and
	// close_file()) looks deliberate — presumably they still flush packets
	// into the file on destruction; confirm before reordering.
	x264_encoder.reset(nullptr);
	x264_disk_encoder.reset(nullptr);
	quicksync_encoder->close_file();
	quicksync_encoder.reset(nullptr);
	// do_cut() shuts down replaced encoders on background threads; spin
	// until they have all completed so we don't tear state out from under them.
	while (quicksync_encoders_in_shutdown.load() > 0) {
// Start a new recording file ("cut"): hand the old QuickSync/x264 encoders
// off to a background thread for shutdown, and immediately create fresh
// encoders under the new filename so the mixer can keep feeding frames.
void VideoEncoder::do_cut(int frame)
	string filename = generate_local_dump_filename(frame);
	printf("Starting new recording: %s\n", filename.c_str());
	// Do the shutdown of the old encoder in a separate thread, since it can
	// take some time (it needs to wait for all the frames in the queue to be
	// done encoding, for one) and we are running on the main mixer thread.
	// However, since this means both encoders could be sending packets at
	// the same time, it means pts could come out of order to the stream mux,
	// and we need to plug it until the shutdown is complete.
	// Take both locks together (std::lock avoids deadlock against other
	// orderings), then adopt them into lock_guards.
	lock(qs_mu, qs_audio_mu);
	lock_guard<mutex> lock1(qs_mu, adopt_lock), lock2(qs_audio_mu, adopt_lock);
	QuickSyncEncoder *old_encoder = quicksync_encoder.release(); // When we go C++14, we can use move capture instead.
	X264Encoder *old_x264_encoder = nullptr;
	X264Encoder *old_x264_disk_encoder = nullptr;
	if (global_flags.x264_video_to_disk) {
		old_x264_encoder = x264_encoder.release();
	if (global_flags.x264_separate_disk_encode) {
		old_x264_disk_encoder = x264_disk_encoder.release();
	// Background shutdown of everything we just released.
	thread([old_encoder, old_x264_encoder, old_x264_disk_encoder, this]{
		old_encoder->shutdown();
		delete old_x264_encoder;
		delete old_x264_disk_encoder;
		old_encoder->close_file();
		// We cannot delete the encoder here, as this thread has no OpenGL context.
		// We'll deal with it in begin_frame().
		lock_guard<mutex> lock(qs_mu);
		qs_needing_cleanup.emplace_back(old_encoder);
	// Recreate the x264 encoder(s) for the new cut, mirroring the setup in
	// the constructor.
	if (global_flags.x264_video_to_disk) {
		x264_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/false));
		// NOTE(review): with this assert in place, the if below is always
		// taken on this path — presumably kept for symmetry; confirm.
		assert(global_flags.x264_video_to_http);
		if (global_flags.x264_video_to_http) {
			x264_encoder->add_mux(http_mux.get());
		// Re-apply any bitrate override set via change_x264_bitrate().
		if (overriding_bitrate != 0) {
			x264_encoder->change_bitrate(overriding_bitrate);
	X264Encoder *http_encoder = x264_encoder.get();
	X264Encoder *disk_encoder = x264_encoder.get();
	if (global_flags.x264_separate_disk_encode) {
		x264_disk_encoder.reset(new X264Encoder(oformat, /*use_separate_disk_params=*/true));
		disk_encoder = x264_disk_encoder.get();
	quicksync_encoder.reset(new QuickSyncEncoder(filename, resource_pool, surface, va_display, width, height, oformat, http_encoder, disk_encoder, disk_space_estimator));
	quicksync_encoder->set_http_mux(http_mux.get());
// Change the x264 stream bitrate (kbit/sec), and remember the override so
// that encoders recreated by do_cut() pick it up again.
void VideoEncoder::change_x264_bitrate(unsigned rate_kbit)
	overriding_bitrate = rate_kbit;
	x264_encoder->change_bitrate(rate_kbit);
// Feed one chunk of interleaved float audio (at the given pts) to both the
// disk (QuickSync) encoder and the stream audio encoder.
void VideoEncoder::add_audio(int64_t pts, std::vector<float> audio)
	// Take only qs_audio_mu, since add_audio() is thread safe
	// (we can only conflict with do_cut(), which takes qs_audio_mu)
	// and we don't want to contend with begin_frame().
	lock_guard<mutex> lock(qs_audio_mu);
	quicksync_encoder->add_audio(pts, audio);
	// The stream side is delayed by the encoder's global delay so that audio
	// and video pts line up in the mux.
	stream_audio_encoder->encode_audio(audio, pts + quicksync_encoder->global_delay());
// Whether the current QuickSync encoder runs in zero-copy mode; forwarded
// unlocked (see below for why that is safe).
bool VideoEncoder::is_zerocopy() const
	// Explicitly do _not_ take qs_mu; this is called from the mixer,
	// and qs_mu might be contended. is_zerocopy() is thread safe
	// and never called in parallel with do_cut() (both happen only
	// from the mixer thread).
	return quicksync_encoder->is_zerocopy();
// Begin encoding a video frame; forwards to the QuickSync encoder, which
// hands back the Y/CbCr textures to render into. Also the place where
// encoders retired by do_cut() are finally deleted, since this thread has
// an OpenGL context.
bool VideoEncoder::begin_frame(int64_t pts, int64_t duration, movit::YCbCrLumaCoefficients ycbcr_coefficients, const std::vector<RefCountedFrame> &input_frames, GLuint *y_tex, GLuint *cbcr_tex)
	lock_guard<mutex> lock(qs_mu);
	qs_needing_cleanup.clear();  // Since we have an OpenGL context here, and are called regularly.
	return quicksync_encoder->begin_frame(pts, duration, ycbcr_coefficients, input_frames, y_tex, cbcr_tex);
// Finish the frame begun by begin_frame(). Also flags that the SRT metrics
// should be refreshed on the next SRT packet write.
RefCountedGLsync VideoEncoder::end_frame()
	want_srt_metric_update = true;
	lock_guard<mutex> lock(qs_mu);
	return quicksync_encoder->end_frame();
// Create the HTTP mux, and the SRT mux if an SRT destination is configured.
// Both use custom AVIO so that muxed bytes are delivered through our own
// write callbacks instead of going to a file.
void VideoEncoder::open_output_streams()
	for (bool is_srt : {false, true}) {
		// Skip the SRT pass entirely when no destination is configured.
		if (is_srt && global_flags.srt_destination_host.empty()) {
		AVFormatContext *avctx = avformat_alloc_context();
		avctx->oformat = is_srt ? srt_oformat : oformat;
		// Custom AVIO context; MUX_BUFFER_SIZE buffer, write-only (1), with
		// `this` as the opaque pointer for the thunks below.
		uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
		avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
		// SRT gets the plain packet callback; HTTP uses write_data_type so we
		// also learn about header/keyframe boundaries.
		avctx->pb->write_packet = &VideoEncoder::write_srt_packet_thunk;
		avctx->pb->write_data_type = &VideoEncoder::write_packet2_thunk;
		avctx->pb->ignore_boundary_point = 1;
		Mux::Codec video_codec;
		if (global_flags.av1_video_to_http) {
			video_codec = Mux::CODEC_AV1;
			video_codec = Mux::CODEC_H264;
		avctx->flags = AVFMT_FLAG_CUSTOM_IO;
		// Global headers (SPS/PPS etc.) from whichever encoder feeds HTTP.
		string video_extradata;
		if (global_flags.x264_video_to_http) {
			video_extradata = x264_encoder->get_global_headers();
		} else if (global_flags.av1_video_to_http) {
			video_extradata = av1_encoder->get_global_headers();
		// SRT writes in the background (network can stall); HTTP writes
		// in the foreground. Each mux reports into its own metrics object.
		Mux *mux = new Mux(avctx, width, height, video_codec, video_extradata, stream_audio_encoder->get_codec_parameters().get(),
			get_color_space(global_flags.ycbcr_rec709_coefficients), COARSE_TIMEBASE,
			/*write_callback=*/nullptr, is_srt ? Mux::WRITE_BACKGROUND : Mux::WRITE_FOREGROUND, { is_srt ? &srt_mux_metrics : &http_mux_metrics });
		srt_mux_metrics.init({{ "destination", "srt" }});
		srt_metrics.init({{ "cardtype", "output" }});
		global_metrics.add("srt_num_connection_attempts", {{ "cardtype", "output" }}, &metric_srt_num_connection_attempts);
		http_mux_metrics.init({{ "destination", "http" }});
// AVIO write_data_type trampoline: recover the VideoEncoder instance from
// the opaque pointer and forward to the member function.
int VideoEncoder::write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
	VideoEncoder *video_encoder = (VideoEncoder *)opaque;
	return video_encoder->write_packet2(buf, buf_size, type, time);
// Receive muxed stream bytes and push them into the HTTP server: header
// bytes are accumulated and installed as the stream header, everything else
// is forwarded as data, with sync points marking keyframe boundaries.
int VideoEncoder::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time)
	if (type == AVIO_DATA_MARKER_SYNC_POINT || type == AVIO_DATA_MARKER_BOUNDARY_POINT) {
		seen_sync_markers = true;
	} else if (type == AVIO_DATA_MARKER_UNKNOWN && !seen_sync_markers) {
		// We don't know if this is a keyframe or not (the muxer could
		// avoid marking it), so we just have to make the best of it.
		type = AVIO_DATA_MARKER_SYNC_POINT;
	if (type == AVIO_DATA_MARKER_HEADER) {
		// Headers can arrive in multiple writes; append, then (re)install.
		http_mux_header.append((char *)buf, buf_size);
		httpd->set_header(HTTPD::StreamID{ HTTPD::MAIN_STREAM, 0 }, http_mux_header);
		httpd->add_data(HTTPD::StreamID{ HTTPD::MAIN_STREAM, 0 }, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
// AVIO write_packet trampoline for the SRT mux: recover the VideoEncoder
// instance from the opaque pointer and forward to the member function.
int VideoEncoder::write_srt_packet_thunk(void *opaque, uint8_t *buf, int buf_size)
	VideoEncoder *video_encoder = (VideoEncoder *)opaque;
	return video_encoder->write_srt_packet(buf, buf_size);
// Format an addrinfo as "host:port" (with brackets around IPv6 hosts),
// purely numerically — NI_NUMERICHOST/NI_NUMERICSERV mean no DNS lookups.
static string print_addrinfo(const addrinfo *ai)
	char hoststr[NI_MAXHOST], portstr[NI_MAXSERV];
	if (getnameinfo(ai->ai_addr, ai->ai_addrlen, hoststr, sizeof(hoststr), portstr, sizeof(portstr), NI_DGRAM | NI_NUMERICHOST | NI_NUMERICSERV) != 0) {
		return "<unknown address>";  // Should basically never happen, since we're not doing DNS lookups.
	if (ai->ai_family == AF_INET6) {
		return string("[") + hoststr + "]:" + portstr;
	return string(hoststr) + ":" + portstr;
// Create an SRT socket configured for live output: SRTT_LIVE transtype,
// the configured output latency, and optional stream ID and passphrase
// (all taken from global_flags). Errors are logged to stderr.
int VideoEncoder::open_srt_socket()
	int sock = srt_create_socket();
	fprintf(stderr, "srt_create_socket(): %s\n", srt_getlasterror_str());
	SRT_TRANSTYPE live = SRTT_LIVE;
	if (srt_setsockopt(sock, 0, SRTO_TRANSTYPE, &live, sizeof(live)) < 0) {
		fprintf(stderr, "srt_setsockopt(SRTO_TRANSTYPE): %s\n", srt_getlasterror_str());
	if (srt_setsockopt(sock, 0, SRTO_LATENCY, &global_flags.srt_output_latency_ms, sizeof(global_flags.srt_output_latency_ms)) < 0) {
		fprintf(stderr, "srt_setsockopt(SRTO_LATENCY): %s\n", srt_getlasterror_str());
	if (!global_flags.srt_streamid.empty()) {
		if (srt_setsockopt(sock, 0, SRTO_STREAMID, global_flags.srt_streamid.data(), global_flags.srt_streamid.size()) < 0) {
			fprintf(stderr, "srt_setsockopt(SRTO_STREAMID): %s\n", srt_getlasterror_str());
	if (!global_flags.srt_passphrase.empty()) {
		if (srt_setsockopt(sock, 0, SRTO_PASSPHRASE, global_flags.srt_passphrase.data(), global_flags.srt_passphrase.size()) < 0) {
			fprintf(stderr, "srt_setsockopt(SRTO_PASSPHRASE): %s\n", srt_getlasterror_str());
// Resolve the configured SRT destination and try each returned address in
// turn. The connect is done non-blocking (SRTO_RCVSYN=0) and then polled
// via SRT's epoll with a 100 ms timeout, so should_quit can be honored
// while waiting. Logs progress/errors to stderr.
//
// Fix: the error message for the SRTO_RCVSYN setsockopt previously named
// the wrong option (SRTO_SNDSYN) — the diagnostic now matches the call.
int VideoEncoder::connect_to_srt()
	// We need to specify SOCK_DGRAM as a hint, or we'll get all addresses
	// three times (for each of TCP, UDP, raw).
	memset(&hints, 0, sizeof(hints));
	hints.ai_flags = AI_ADDRCONFIG;
	hints.ai_socktype = SOCK_DGRAM;
	int ret = getaddrinfo(global_flags.srt_destination_host.c_str(), global_flags.srt_destination_port.c_str(), &hints, &ai);
	fprintf(stderr, "getaddrinfo(%s:%s): %s\n", global_flags.srt_destination_host.c_str(), global_flags.srt_destination_port.c_str(), gai_strerror(ret));
	// RAII cleanup of the addrinfo list, whatever path we leave by.
	unique_ptr<addrinfo, decltype(freeaddrinfo) *> ai_cleanup(ai, &freeaddrinfo);
	for (const addrinfo *cur = ai; cur != nullptr; cur = cur->ai_next) {
		// Seemingly, srt_create_socket() isn't universal; once we try to connect,
		// it gets locked to either IPv4 or IPv6. So we need to create a new one
		// for every address we try.
		int sock = open_srt_socket();
		++metric_srt_num_connection_attempts;
		// We do a non-blocking connect, so that we can check should_quit
		// every now and then.
		if (srt_setsockopt(sock, 0, SRTO_RCVSYN, &blocking, sizeof(blocking)) < 0) {
			fprintf(stderr, "srt_setsockopt(SRTO_RCVSYN=0): %s\n", srt_getlasterror_str());
		if (srt_connect(sock, cur->ai_addr, cur->ai_addrlen) < 0) {
			fprintf(stderr, "srt_connect(%s): %s\n", print_addrinfo(cur).c_str(), srt_getlasterror_str());
		// Watch the socket for writability (connected) or error, with
		// SRT's own epoll mechanism.
		int eid = srt_epoll_create();
			fprintf(stderr, "srt_epoll_create(): %s\n", srt_getlasterror_str());
		int modes = SRT_EPOLL_ERR | SRT_EPOLL_OUT;
		if (srt_epoll_add_usock(eid, sock, &modes) < 0) {
			fprintf(stderr, "srt_epoll_usock(): %s\n", srt_getlasterror_str());
			srt_epoll_release(eid);
		// Poll in 100 ms slices so that should_quit is checked regularly.
		while (!should_quit.load()) {
			SRTSOCKET errfds[1], writefds[1];
			int num_errfds = 1, num_writefds = 1;
			int poll_time_ms = 100;
			int ret = srt_epoll_wait(eid, errfds, &num_errfds, writefds, &num_writefds, poll_time_ms, 0, 0, 0, 0);
			// A timeout just means "nothing yet"; keep waiting.
			if (srt_getlasterror(nullptr) == SRT_ETIMEOUT) {
			fprintf(stderr, "srt_epoll_wait(): %s\n", srt_getlasterror_str());
			srt_epoll_release(eid);
			} else if (ret > 0) {
				// The SRT epoll framework is pretty odd, but seemingly,
				// this is the way. Getting the same error code as srt_connect()
				// would normally return seems to be impossible, though.
				ok = (num_errfds == 0);
				fprintf(stderr, "num_errfds=%d num_writefds=%d last_err=%s\n", num_errfds, num_writefds, srt_getlasterror_str());
		srt_epoll_release(eid);
		if (should_quit.load()) {
		fprintf(stderr, "Connected to destination SRT endpoint at %s.\n", print_addrinfo(cur).c_str());
		fprintf(stderr, "srt_connect(%s): %s\n", print_addrinfo(cur).c_str(), srt_getlasterror_str());
	// Out of candidates, so give up.
// Send one muxed packet over SRT, in SRT_LIVE_DEF_PLSIZE-sized chunks.
// Handles (re)connection: if the socket is down, keep trying to reconnect,
// and if we have been disconnected for longer than the configured SRT
// latency, drop the backlog rather than sending stale data.
int VideoEncoder::write_srt_packet(uint8_t *buf, int buf_size)
	// Refresh the exported SRT statistics at most once per frame
	// (end_frame() arms this flag).
	if (want_srt_metric_update.exchange(false) && srt_sock != -1) {
		srt_metrics.update_srt_stats(srt_sock);
	bool has_drained = false;
	bool trying_reconnect = false;
	steady_clock::time_point first_connect_start;
	while (buf_size > 0 && !should_quit.load()) {
		if (srt_sock == -1) {
			// Remember when this outage started, so we can tell a quick
			// blip from a long disconnect below.
			if (!trying_reconnect) {
				first_connect_start = steady_clock::now();
				trying_reconnect = true;
			srt_sock = connect_to_srt();
			if (srt_sock == -1) {
			if (!has_drained && duration<double>(steady_clock::now() - first_connect_start).count() >= global_flags.srt_output_latency_ms * 1e-3) {
				// The entire concept for SRT is to have fixed, low latency.
				// If we've been out for more than a latency period, we shouldn't
				// try to send the entire backlog. (But we should be tolerant
				// of a quick disconnect and reconnect.) Maybe it would be better
				// to have a sliding window of how much we remove, but it quickly
				// starts getting esoteric, so just drop it all.
				fprintf(stderr, "WARNING: No SRT connection for more than %d ms, dropping data.\n",
					global_flags.srt_output_latency_ms);
			srt_metrics.update_srt_stats(srt_sock);
			// Now that we're reconnected, we can start accepting data again,
			// but discard the rest of this write (it is very old by now).
		// Live SRT payloads are capped; send at most SRT_LIVE_DEF_PLSIZE bytes.
		int to_send = min(buf_size, SRT_LIVE_DEF_PLSIZE);
		int ret = srt_send(srt_sock, (char *)buf, to_send);
		fprintf(stderr, "srt_send(): %s\n", srt_getlasterror_str());
		// 0.0 / 0.0 == NaN, i.e., "uptime unknown/connection down".
		srt_metrics.metric_srt_uptime_seconds = 0.0 / 0.0;
		if (!trying_reconnect) {
			first_connect_start = steady_clock::now();
			trying_reconnect = true;
		srt_sock = connect_to_srt();