git.sesse.net Git - nageru/commitdiff
Make the MJPEG card-to-stream mapping invisible to outside callers.
author Steinar H. Gunderson <sgunderson@bigfoot.com>
Wed, 12 Feb 2020 23:39:52 +0000 (00:39 +0100)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
Wed, 12 Feb 2020 23:39:52 +0000 (00:39 +0100)
As an important side effect, this allows should_encode_mjpeg_for_card()
to contain more complex policy in the future.

nageru/mixer.cpp
nageru/mjpeg_encoder.cpp
nageru/mjpeg_encoder.h
nageru/pbo_frame_allocator.cpp

index 9a86682daa91ca19b2325e3858093f55e80a0a27..aac421126b904642ff38f131252faf50727f2be7 100644 (file)
@@ -1137,12 +1137,9 @@ void Mixer::thread_func()
                                last_received_neutral_color[card_index] = new_frame->neutral_color;
                        }
 
-                       if (new_frame->frame->data_copy != nullptr) {
-                               int mjpeg_card_index = mjpeg_encoder->get_mjpeg_stream_for_card(card_index);
-                               if (mjpeg_card_index != -1) {
-                                       RGBTriplet neutral_color = theme->get_white_balance_for_signal(card_index);
-                                       mjpeg_encoder->upload_frame(pts_int, mjpeg_card_index, new_frame->frame, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset, move(raw_audio[card_index]), neutral_color);
-                               }
+                       if (new_frame->frame->data_copy != nullptr && mjpeg_encoder->should_encode_mjpeg_for_card(card_index)) {
+                               RGBTriplet neutral_color = theme->get_white_balance_for_signal(card_index);
+                               mjpeg_encoder->upload_frame(pts_int, card_index, new_frame->frame, new_frame->video_format, new_frame->y_offset, new_frame->cbcr_offset, move(raw_audio[card_index]), neutral_color);
                        }
 
                }
index 867414d44127d5019cca822102de519ef3f6bcf8..a39f1c8adc1a5be2bc11760987fce7457ad0e8fb 100644 (file)
@@ -343,19 +343,16 @@ void MJPEGEncoder::upload_frame(int64_t pts, unsigned card_index, RefCountedFram
        any_frames_to_be_encoded.notify_all();
 }
 
-int MJPEGEncoder::get_mjpeg_stream_for_card(unsigned card_index)
+bool MJPEGEncoder::should_encode_mjpeg_for_card(unsigned card_index)
 {
        // Only bother doing MJPEG encoding if there are any connected clients
        // that want the stream.
        if (httpd->get_num_connected_multicam_clients() == 0) {
-               return -1;
+               return false;
        }
 
        auto it = global_flags.card_to_mjpeg_stream_export.find(card_index);
-       if (it == global_flags.card_to_mjpeg_stream_export.end()) {
-               return -1;
-       }
-       return it->second;
+       return (it != global_flags.card_to_mjpeg_stream_export.end());
 }
 
 void MJPEGEncoder::encoder_thread_func()
@@ -376,18 +373,21 @@ void MJPEGEncoder::encoder_thread_func()
                        frames_to_be_encoded.pop();
                }
 
+               assert(global_flags.card_to_mjpeg_stream_export.count(qf.card_index));  // Or should_encode_mjpeg_for_card() would have returned false.
+               int stream_index = global_flags.card_to_mjpeg_stream_export[qf.card_index];
+
                if (va_dpy != nullptr) {
                        // Will call back in the receiver thread.
                        encode_jpeg_va(move(qf));
                } else {
                        // Write audio before video, since Futatabi expects it.
                        if (qf.audio.size() > 0) {
-                               write_audio_packet(qf.pts, qf.card_index, qf.audio);
+                               write_audio_packet(qf.pts, stream_index, qf.audio);
                        }
 
                        // Encode synchronously, in the same thread.
                        vector<uint8_t> jpeg = encode_jpeg_libjpeg(qf);
-                       write_mjpeg_packet(qf.pts, qf.card_index, jpeg.data(), jpeg.size());
+                       write_mjpeg_packet(qf.pts, stream_index, jpeg.data(), jpeg.size());
                }
        }
 
@@ -890,9 +890,12 @@ void MJPEGEncoder::va_receiver_thread_func()
                        frames_encoding.pop();
                }
 
+               assert(global_flags.card_to_mjpeg_stream_export.count(qf.card_index));  // Or should_encode_mjpeg_for_card() would have returned false.
+               int stream_index = global_flags.card_to_mjpeg_stream_export[qf.card_index];
+
                // Write audio before video, since Futatabi expects it.
                if (qf.audio.size() > 0) {
-                       write_audio_packet(qf.pts, qf.card_index, qf.audio);
+                       write_audio_packet(qf.pts, stream_index, qf.audio);
                }
 
                VAStatus va_status = vaSyncSurface(va_dpy->va_dpy, qf.resources.surface);
@@ -903,7 +906,7 @@ void MJPEGEncoder::va_receiver_thread_func()
                CHECK_VASTATUS(va_status, "vaMapBuffer");
 
                const uint8_t *coded_buf = reinterpret_cast<uint8_t *>(segment->buf);
-               write_mjpeg_packet(qf.pts, qf.card_index, coded_buf, segment->size);
+               write_mjpeg_packet(qf.pts, stream_index, coded_buf, segment->size);
 
                va_status = vaUnmapBuffer(va_dpy->va_dpy, qf.resources.data_buffer);
                CHECK_VASTATUS(va_status, "vaUnmapBuffer");
index 8a0d8fadb6b198fc8e6e2104eb8da41da2e17ce1..362e93461e2ed92c6a09b73153fe7dfc34b6b307 100644 (file)
@@ -42,8 +42,7 @@ public:
        void upload_frame(int64_t pts, unsigned card_index, RefCountedFrame frame, const bmusb::VideoFormat &video_format, size_t y_offset, size_t cbcr_offset, std::vector<int32_t> audio, const movit::RGBTriplet &white_balance);
        bool using_vaapi() const { return va_dpy != nullptr; }
 
-       // Returns -1 for inactive (ie., don't encode frames for this card right now).
-       int get_mjpeg_stream_for_card(unsigned card_index);
+       bool should_encode_mjpeg_for_card(unsigned card_index);
 
 private:
        static constexpr int quality = 90;
index c4c08a826624316fcca470a9eb4ff225b8aed619..133b65fe3310c2b0c69b27330098286e9f6cb994 100644 (file)
@@ -273,7 +273,7 @@ bmusb::FrameAllocator::Frame PBOFrameAllocator::alloc_frame()
        vf.overflow = 0;
 
        if (mjpeg_encoder != nullptr &&
-           mjpeg_encoder->get_mjpeg_stream_for_card(card_index) != -1 &&
+           mjpeg_encoder->should_encode_mjpeg_for_card(card_index) &&
            vf.userdata != nullptr) {
                Userdata *ud = (Userdata *)vf.userdata;
                vf.data_copy = ud->data_copy_malloc;
@@ -307,7 +307,7 @@ bmusb::FrameAllocator::Frame PBOFrameAllocator::create_frame(size_t width, size_
        Userdata *userdata = (Userdata *)vf.userdata;
 
        if (mjpeg_encoder != nullptr &&
-           mjpeg_encoder->get_mjpeg_stream_for_card(card_index) != -1) {
+           mjpeg_encoder->should_encode_mjpeg_for_card(card_index)) {
                if (mjpeg_encoder->using_vaapi()) {
                        VADisplay va_dpy = mjpeg_encoder->va_dpy->va_dpy;
                        MJPEGEncoder::VAResources resources = mjpeg_encoder->get_va_resources(width, height);