X-Git-Url: https://git.sesse.net/?p=nageru;a=blobdiff_plain;f=nageru%2Fmjpeg_encoder.cpp;h=a39f1c8adc1a5be2bc11760987fce7457ad0e8fb;hp=867414d44127d5019cca822102de519ef3f6bcf8;hb=e01e8253288b3b706b22ea0c788c2c8f50bfac2e;hpb=6160ed8911e4ed3f7ca3589a5357ae813e27175e

diff --git a/nageru/mjpeg_encoder.cpp b/nageru/mjpeg_encoder.cpp
index 867414d..a39f1c8 100644
--- a/nageru/mjpeg_encoder.cpp
+++ b/nageru/mjpeg_encoder.cpp
@@ -343,19 +343,16 @@ void MJPEGEncoder::upload_frame(int64_t pts, unsigned card_index, RefCountedFram
 	any_frames_to_be_encoded.notify_all();
 }
 
-int MJPEGEncoder::get_mjpeg_stream_for_card(unsigned card_index)
+bool MJPEGEncoder::should_encode_mjpeg_for_card(unsigned card_index)
 {
 	// Only bother doing MJPEG encoding if there are any connected clients
 	// that want the stream.
 	if (httpd->get_num_connected_multicam_clients() == 0) {
-		return -1;
+		return false;
 	}
 
 	auto it = global_flags.card_to_mjpeg_stream_export.find(card_index);
-	if (it == global_flags.card_to_mjpeg_stream_export.end()) {
-		return -1;
-	}
-	return it->second;
+	return (it != global_flags.card_to_mjpeg_stream_export.end());
 }
 
 void MJPEGEncoder::encoder_thread_func()
@@ -376,18 +373,21 @@ void MJPEGEncoder::encoder_thread_func()
 			frames_to_be_encoded.pop();
 		}
 
+		assert(global_flags.card_to_mjpeg_stream_export.count(qf.card_index));  // Or should_encode_mjpeg_for_card() would have returned false.
+		int stream_index = global_flags.card_to_mjpeg_stream_export[qf.card_index];
+
 		if (va_dpy != nullptr) {
 			// Will call back in the receiver thread.
 			encode_jpeg_va(move(qf));
 		} else {
 			// Write audio before video, since Futatabi expects it.
 			if (qf.audio.size() > 0) {
-				write_audio_packet(qf.pts, qf.card_index, qf.audio);
+				write_audio_packet(qf.pts, stream_index, qf.audio);
 			}
 
 			// Encode synchronously, in the same thread.
 			vector<uint8_t> jpeg = encode_jpeg_libjpeg(qf);
-			write_mjpeg_packet(qf.pts, qf.card_index, jpeg.data(), jpeg.size());
+			write_mjpeg_packet(qf.pts, stream_index, jpeg.data(), jpeg.size());
 		}
 	}
 
@@ -890,9 +890,12 @@ void MJPEGEncoder::va_receiver_thread_func()
 			frames_encoding.pop();
 		}
 
+		assert(global_flags.card_to_mjpeg_stream_export.count(qf.card_index));  // Or should_encode_mjpeg_for_card() would have returned false.
+		int stream_index = global_flags.card_to_mjpeg_stream_export[qf.card_index];
+
 		// Write audio before video, since Futatabi expects it.
 		if (qf.audio.size() > 0) {
-			write_audio_packet(qf.pts, qf.card_index, qf.audio);
+			write_audio_packet(qf.pts, stream_index, qf.audio);
 		}
 
 		VAStatus va_status = vaSyncSurface(va_dpy->va_dpy, qf.resources.surface);
@@ -903,7 +906,7 @@ void MJPEGEncoder::va_receiver_thread_func()
 		CHECK_VASTATUS(va_status, "vaMapBuffer");
 
 		const uint8_t *coded_buf = reinterpret_cast<const uint8_t *>(segment->buf);
-		write_mjpeg_packet(qf.pts, qf.card_index, coded_buf, segment->size);
+		write_mjpeg_packet(qf.pts, stream_index, coded_buf, segment->size);
 
 		va_status = vaUnmapBuffer(va_dpy->va_dpy, qf.resources.data_buffer);
 		CHECK_VASTATUS(va_status, "vaUnmapBuffer");
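
Caller-side illustration (not part of this diff): the rename changes the helper's contract from "return the export stream index, or -1 if the card should be skipped" to a plain yes/no check, with the stream index now looked up from global_flags.card_to_mjpeg_stream_export in the encoder and receiver threads, as the added asserts above document. Below is a minimal, hypothetical sketch of how a call site adapts; maybe_queue_frame() and queue_frame() are made-up names for illustration only.

// Hypothetical call site; only should_encode_mjpeg_for_card() and
// global_flags.card_to_mjpeg_stream_export come from the actual code.
void MJPEGEncoder::maybe_queue_frame(int64_t pts, unsigned card_index, RefCountedFrame frame)
{
	// Old contract:
	//   int stream_index = get_mjpeg_stream_for_card(card_index);
	//   if (stream_index == -1) return;  // index had to be carried along
	// New contract: a boolean gate only; the index is resolved at write time.
	if (!should_encode_mjpeg_for_card(card_index)) {
		return;  // No connected multicam clients, or card not exported.
	}
	queue_frame(pts, card_index, move(frame));  // hypothetical enqueue helper
}

Deferring the index lookup keeps the queued-frame state smaller, and the asserts in encoder_thread_func() and va_receiver_thread_func() make the "card is exported" invariant explicit at the point where the index is actually needed.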