X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=nageru%2Fmjpeg_encoder.cpp;h=9ddb9ff15af20598ead26b5b9a6c282f16861e6c;hb=873c8b204ab70622f6e231556cc94d3aab1889ce;hp=3587d78dcf3fa51a61a92af89ec362b3297b2d3e;hpb=eeda8995329601f9f4e35047358400833eeae68e;p=nageru

diff --git a/nageru/mjpeg_encoder.cpp b/nageru/mjpeg_encoder.cpp
index 3587d78..9ddb9ff 100644
--- a/nageru/mjpeg_encoder.cpp
+++ b/nageru/mjpeg_encoder.cpp
@@ -119,8 +119,6 @@ int MJPEGEncoder::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType t
 MJPEGEncoder::MJPEGEncoder(HTTPD *httpd, const string &va_display)
     : httpd(httpd)
 {
-    encoder_thread = thread(&MJPEGEncoder::encoder_thread_func, this);
-
     // Set up the mux. We don't use the Mux wrapper, because it's geared towards
     // a situation with only one video stream (and possibly one audio stream)
     // with known width/height, and we don't need the extra functionality it provides.
@@ -132,7 +130,7 @@ MJPEGEncoder::MJPEGEncoder(HTTPD *httpd, const string &va_display)
     avctx->pb->write_data_type = &MJPEGEncoder::write_packet2_thunk;
     avctx->flags = AVFMT_FLAG_CUSTOM_IO;
 
-    for (int card_idx = 0; card_idx < global_flags.num_cards; ++card_idx) {
+    for (unsigned card_idx = 0; card_idx < global_flags.card_to_mjpeg_stream_export.size(); ++card_idx) {
         AVStream *stream = avformat_new_stream(avctx.get(), nullptr);
         if (stream == nullptr) {
             fprintf(stderr, "avformat_new_stream() failed\n");
@@ -174,9 +172,19 @@ MJPEGEncoder::MJPEGEncoder(HTTPD *httpd, const string &va_display)
         fprintf(stderr, "Could not initialize VA-API for MJPEG encoding: %s. JPEGs will be encoded in software if needed.\n", error.c_str());
     }
 
+    encoder_thread = thread(&MJPEGEncoder::encoder_thread_func, this);
+    if (va_dpy != nullptr) {
+        va_receiver_thread = thread(&MJPEGEncoder::va_receiver_thread_func, this);
+    }
+
     running = true;
 }
 
+MJPEGEncoder::~MJPEGEncoder()
+{
+    av_free(avctx->pb->buffer);
+}
+
 void MJPEGEncoder::stop()
 {
     if (!running) {
@@ -186,6 +194,9 @@ void MJPEGEncoder::stop()
     should_quit = true;
     any_frames_to_be_encoded.notify_all();
     encoder_thread.join();
+    if (va_dpy != nullptr) {
+        va_receiver_thread.join();
+    }
 }
 
 unique_ptr<VADisplayWithCleanup> MJPEGEncoder::try_open_va(const string &va_display, string *error, VAConfigID *config_id)
@@ -253,6 +264,10 @@ void MJPEGEncoder::upload_frame(int64_t pts, unsigned card_index, RefCountedFram
     }
 
     lock_guard<mutex> lock(mu);
+    if (frames_to_be_encoded.size() + frames_encoding.size() > 10) {
+        fprintf(stderr, "WARNING: MJPEG encoding doesn't keep up, discarding frame.\n");
+        return;
+    }
     frames_to_be_encoded.push(QueuedFrame{ pts, card_index, frame, video_format, y_offset, cbcr_offset });
     any_frames_to_be_encoded.notify_all();
 }
@@ -265,29 +280,47 @@ void MJPEGEncoder::encoder_thread_func()
     posix_memalign((void **)&tmp_cb, 4096, 4096 * 8);
     posix_memalign((void **)&tmp_cr, 4096, 4096 * 8);
 
-    unique_lock<mutex> lock(mu);
     for (;;) {
-        any_frames_to_be_encoded.wait(lock, [this] { return !frames_to_be_encoded.empty() || should_quit; });
-        if (should_quit) return;
-        QueuedFrame qf = move(frames_to_be_encoded.front());
-        frames_to_be_encoded.pop();
-
-        vector<uint8_t> jpeg = encode_jpeg(qf);
-
-        AVPacket pkt;
-        memset(&pkt, 0, sizeof(pkt));
-        pkt.buf = nullptr;
-        pkt.data = &jpeg[0];
-        pkt.size = jpeg.size();
-        pkt.stream_index = qf.card_index;
-        pkt.flags = AV_PKT_FLAG_KEY;
-        pkt.pts = pkt.dts = qf.pts;
-
-        if (av_write_frame(avctx.get(), &pkt) < 0) {
-            fprintf(stderr, "av_write_frame() failed\n");
-            exit(1);
+        QueuedFrame qf;
+        {
+            unique_lock<mutex> lock(mu);
+            any_frames_to_be_encoded.wait(lock, [this] { return !frames_to_be_encoded.empty() || should_quit; });
+            if (should_quit) break;
+            qf = move(frames_to_be_encoded.front());
+            frames_to_be_encoded.pop();
+        }
+
+        if (va_dpy != nullptr) {
+            // Will call back in the receiver thread.
+            encode_jpeg_va(move(qf));
+        } else {
+            // Encode synchronously, in the same thread.
+            vector<uint8_t> jpeg = encode_jpeg_libjpeg(qf);
+            write_mjpeg_packet(qf.pts, qf.card_index, jpeg);
         }
     }
+
+    free(tmp_y);
+    free(tmp_cbcr);
+    free(tmp_cb);
+    free(tmp_cr);
+}
+
+void MJPEGEncoder::write_mjpeg_packet(int64_t pts, unsigned card_index, const vector<uint8_t> &jpeg)
+{
+    AVPacket pkt;
+    memset(&pkt, 0, sizeof(pkt));
+    pkt.buf = nullptr;
+    pkt.data = const_cast<uint8_t *>(&jpeg[0]);
+    pkt.size = jpeg.size();
+    pkt.stream_index = card_index;
+    pkt.flags = AV_PKT_FLAG_KEY;
+    pkt.pts = pkt.dts = pts;
+
+    if (av_write_frame(avctx.get(), &pkt) < 0) {
+        fprintf(stderr, "av_write_frame() failed\n");
+        exit(1);
+    }
 }
 
 class VABufferDestroyer {
@@ -389,6 +422,10 @@ void MJPEGEncoder::init_jpeg_422(unsigned width, unsigned height, VectorDestinat
     cinfo->comp_info[2].v_samp_factor = 1;
     cinfo->CCIR601_sampling = true;  // Seems to be mostly ignored by libjpeg, though.
     jpeg_start_compress(cinfo, true);
+
+    // This comment marker is private to FFmpeg. It signals limited Y'CbCr range
+    // (and nothing else).
+    jpeg_write_marker(cinfo, JPEG_COM, (const JOCTET *)"CS=ITU601", strlen("CS=ITU601"));
 }
 
 vector<uint8_t> MJPEGEncoder::get_jpeg_header(unsigned width, unsigned height, jpeg_compress_struct *cinfo)
@@ -535,16 +572,7 @@ MJPEGEncoder::VAData MJPEGEncoder::get_va_data_for_resolution(unsigned width, un
     return ret;
 }
 
-vector<uint8_t> MJPEGEncoder::encode_jpeg(const QueuedFrame &qf)
-{
-    if (va_dpy != nullptr) {
-        return encode_jpeg_va(qf);
-    } else {
-        return encode_jpeg_libjpeg(qf);
-    }
-}
-
-vector<uint8_t> MJPEGEncoder::encode_jpeg_va(const QueuedFrame &qf)
+void MJPEGEncoder::encode_jpeg_va(QueuedFrame &&qf)
 {
     unsigned width = qf.video_format.width;
     unsigned height = qf.video_format.height;
@@ -629,20 +657,42 @@ vector<uint8_t> MJPEGEncoder::encode_jpeg_va(const QueuedFrame &qf)
     va_status = vaEndPicture(va_dpy->va_dpy, resources.context);
     CHECK_VASTATUS(va_status, "vaEndPicture");
 
-    va_status = vaSyncSurface(va_dpy->va_dpy, resources.surface);
-    CHECK_VASTATUS(va_status, "vaSyncSurface");
+    qf.resources = move(resources);
+    qf.resource_releaser = move(release);
 
-    VACodedBufferSegment *segment;
-    va_status = vaMapBuffer(va_dpy->va_dpy, resources.data_buffer, (void **)&segment);
-    CHECK_VASTATUS(va_status, "vaMapBuffer");
+    lock_guard<mutex> lock(mu);
+    frames_encoding.push(move(qf));
+    any_frames_encoding.notify_all();
+}
+
+void MJPEGEncoder::va_receiver_thread_func()
+{
+    pthread_setname_np(pthread_self(), "MJPEG_Receive");
+    for (;;) {
+        QueuedFrame qf;
+        {
+            unique_lock<mutex> lock(mu);
+            any_frames_encoding.wait(lock, [this] { return !frames_encoding.empty() || should_quit; });
+            if (should_quit) return;
+            qf = move(frames_encoding.front());
+            frames_encoding.pop();
+        }
 
-    const char *coded_buf = reinterpret_cast<const char *>(segment->buf);
-    vector<uint8_t> jpeg(coded_buf, coded_buf + segment->size);
+        VAStatus va_status = vaSyncSurface(va_dpy->va_dpy, qf.resources.surface);
+        CHECK_VASTATUS(va_status, "vaSyncSurface");
 
-    va_status = vaUnmapBuffer(va_dpy->va_dpy, resources.data_buffer);
-    CHECK_VASTATUS(va_status, "vaUnmapBuffer");
+        VACodedBufferSegment *segment;
+        va_status = vaMapBuffer(va_dpy->va_dpy, qf.resources.data_buffer, (void **)&segment);
+        CHECK_VASTATUS(va_status, "vaMapBuffer");
+
+        const char *coded_buf = reinterpret_cast<const char *>(segment->buf);
+        vector<uint8_t> jpeg(coded_buf, coded_buf + segment->size);
 
-    return jpeg;
+        va_status = vaUnmapBuffer(va_dpy->va_dpy, qf.resources.data_buffer);
+        CHECK_VASTATUS(va_status, "vaUnmapBuffer");
+
+        write_mjpeg_packet(qf.pts, qf.card_index, jpeg);
+    }
 }
 
 vector<uint8_t> MJPEGEncoder::encode_jpeg_libjpeg(const QueuedFrame &qf)
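The change above splits MJPEG encoding into two stages: encoder_thread_func() now only dequeues frames and submits them (to VA-API, or synchronously to libjpeg as a fallback), while the blocking vaSyncSurface()/vaMapBuffer() work has moved into the new va_receiver_thread_func(), with frames handed over through the frames_encoding queue under the same mutex. Below is a minimal, self-contained sketch of that handoff pattern; the names (Job, submit, receiver_loop) are invented for illustration and are not the Nageru API.

// Minimal sketch (not Nageru code) of the submit/receive queue handoff
// introduced by the diff above.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

struct Job { int id; };  // Stand-in for QueuedFrame with its VA-API resources attached.

std::mutex mu;
std::condition_variable any_jobs;
std::queue<Job> jobs;  // Plays the role of frames_encoding.
bool should_quit = false;

// Called by the submitting thread once the GPU work has been queued
// (after vaEndPicture() in the real code).
void submit(Job job)
{
    std::lock_guard<std::mutex> lock(mu);
    jobs.push(job);
    any_jobs.notify_all();
}

// Plays the role of va_receiver_thread_func(): waits for queued jobs, then
// does the blocking part (sync, map, mux in the real code).
void receiver_loop()
{
    for (;;) {
        Job job;
        {
            std::unique_lock<std::mutex> lock(mu);
            any_jobs.wait(lock, [] { return !jobs.empty() || should_quit; });
            if (should_quit) return;
            job = jobs.front();
            jobs.pop();
        }
        std::printf("job %d done\n", job.id);  // Real code: vaSyncSurface, vaMapBuffer, write_mjpeg_packet().
    }
}

int main()
{
    std::thread receiver(receiver_loop);
    for (int i = 0; i < 3; ++i) {
        submit(Job{i});
    }
    std::this_thread::sleep_for(std::chrono::milliseconds(100));  // Let the receiver drain.
    {
        std::lock_guard<std::mutex> lock(mu);
        should_quit = true;
    }
    any_jobs.notify_all();
    receiver.join();
}

The point of the split is that vaSyncSurface() blocks until the GPU has finished the frame; keeping it off the submitting thread means upload_frame() no longer stalls behind it, and overload is instead handled by the new check that drops a frame (with the "MJPEG encoding doesn't keep up" warning) once more than 10 frames are queued or in flight.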