IWYU-fix nageru/*.h.
diff --git a/nageru/ffmpeg_capture.h b/nageru/ffmpeg_capture.h
index 6084b68f398dd6a15727fc380c3ef1207a818db6..122bf86726ba4cb6d73a70a0ca81166d6e7d7431 100644
--- a/nageru/ffmpeg_capture.h
+++ b/nageru/ffmpeg_capture.h
 // see the subtitle until the next frame.
 
 #include <assert.h>
+#include <stddef.h>
 #include <stdint.h>
+#include <time.h>
+
+#include <atomic>
+#include <chrono>
 #include <functional>
 #include <map>
 #include <memory>
 #include <set>
 #include <string>
 #include <thread>
+#include <vector>
 
 #include <movit/effect.h>
 #include <movit/ycbcr.h>
 
 extern "C" {
+#include <libavutil/frame.h>
 #include <libswresample/swresample.h>
+#include <libavutil/channel_layout.h>
 #include <libavutil/pixfmt.h>
 #include <libavutil/rational.h>
 #include <libavutil/samplefmt.h>
@@ -57,6 +65,10 @@ class FFmpegCapture : public bmusb::CaptureInterface
 {
 public:
        FFmpegCapture(const std::string &filename, unsigned width, unsigned height);
+#ifdef HAVE_SRT
+       // Takes ownership of the SRT client socket.
+       FFmpegCapture(int srt_sock, const std::string &stream_id);
+#endif
        ~FFmpegCapture();
 
        void set_card_index(int card_index)
@@ -155,9 +167,13 @@ public:
                        std::placeholders::_11);
        }
 
-       // FFmpegCapture-specific callback that gives the raw audio.
-       typedef std::function<void(const AVPacket *pkt, const AVRational timebase)> audio_callback_t;
-       void set_audio_callback(audio_callback_t callback)
+       // FFmpegCapture-specific callback that gives the raw audio/video.
+       typedef std::function<void(const AVPacket *pkt, const AVRational timebase)> packet_callback_t;
+       void set_video_callback(packet_callback_t callback)
+       {
+               video_callback = callback;
+       }
+       void set_audio_callback(packet_callback_t callback)
        {
                audio_callback = callback;
        }
@@ -195,6 +211,11 @@ public:
                has_dequeue_callbacks = true;
        }
 
+       void set_card_disconnected_callback(bmusb::card_disconnected_callback_t callback)
+       {
+               card_disconnected_callback = callback;
+       }
+
        std::string get_description() const override
        {
                return description;
@@ -203,7 +224,8 @@ public:
        void configure_card() override;
        void start_bm_capture() override;
        void stop_dequeue_thread() override;
-       bool get_disconnected() const override { return false; }  // We never unplug.
+       bool get_disconnected() const override { return disconnected; }  // Only if play_once == true.
+       int get_srt_sock() const { return srt_sock; }
 
        std::map<uint32_t, bmusb::VideoMode> get_available_video_modes() const override;
        void set_video_mode(uint32_t video_mode_id) override {}  // Ignore.
@@ -249,13 +271,22 @@ private:
        bmusb::VideoFormat construct_video_format(const AVFrame *frame, AVRational video_timebase);
        UniqueFrame make_video_frame(const AVFrame *frame, const std::string &pathname, bool *error);
 
-       static int interrupt_cb_thunk(void *unique);
+       static int interrupt_cb_thunk(void *opaque);
        int interrupt_cb();
 
+#ifdef HAVE_SRT
+       static int read_srt_thunk(void *opaque, uint8_t *buf, int buf_size);
+       int read_srt(uint8_t *buf, int buf_size);
+#endif
+
+       inline unsigned frame_width(const AVFrame *frame) const;
+       inline unsigned frame_height(const AVFrame *frame) const;
+
        mutable std::mutex filename_mu;
        std::string description, filename;
+       int srt_sock = -1;
        uint16_t timecode = 0;
-       unsigned width, height;
+       unsigned width, height;  // 0 means keep input size.
        bmusb::PixelFormat pixel_format = bmusb::PixelFormat_8BitBGRA;
        movit::YCbCrFormat current_frame_ycbcr_format;
        bool running = false;
@@ -265,16 +296,25 @@ private:
        std::atomic<bool> should_interrupt{false};
        bool last_frame_was_connected = true;
 
+       // TODO: Replace with std::optional if we go C++17.
+       bool frame_timeout_valid = false;  // If true, will time out any reads after ten seconds.
+       std::chrono::steady_clock::time_point frame_timeout_started;  // Only relevant if frame_timeout_valid == true.
+
        bool has_dequeue_callbacks = false;
        std::function<void()> dequeue_init_callback = nullptr;
        std::function<void()> dequeue_cleanup_callback = nullptr;
 
+       bmusb::card_disconnected_callback_t card_disconnected_callback = nullptr;
+       bool play_once = false;  // End thread after playing. Only for SRT, since the socket is ephemeral.
+       std::atomic<bool> disconnected{false};
+
        bmusb::FrameAllocator *video_frame_allocator = nullptr;
        bmusb::FrameAllocator *audio_frame_allocator = nullptr;
        std::unique_ptr<bmusb::FrameAllocator> owned_video_frame_allocator;
        std::unique_ptr<bmusb::FrameAllocator> owned_audio_frame_allocator;
        frame_callback_t frame_callback = nullptr;
-       audio_callback_t audio_callback = nullptr;
+       packet_callback_t video_callback = nullptr;
+       packet_callback_t audio_callback = nullptr;
 
        SwsContextWithDeleter sws_ctx;
        int sws_last_width = -1, sws_last_height = -1, sws_last_src_format = -1;
@@ -298,14 +338,19 @@ private:
        // Audio resampler.
        SwrContext *resampler = nullptr;
        AVSampleFormat last_src_format, last_dst_format;
-       int64_t last_channel_layout;
+       AVChannelLayout last_channel_layout;
        int last_sample_rate;
 
        // Subtitles (no decoding done, really).
        bool has_last_subtitle = false;
        std::string last_subtitle;
 
-       movit::RGBTriplet last_neutral_color;
+       movit::RGBTriplet last_neutral_color{1.0f, 1.0f, 1.0f};
+
+       // Used for suppressing repeated warnings. Reset when a video starts playing.
+       // -1 is strictly speaking outside the range of the enum, but hopefully, it will be alright.
+       AVColorSpace last_colorspace = static_cast<AVColorSpace>(-1);
+       AVChromaLocation last_chroma_location = static_cast<AVChromaLocation>(-1);
 };
 
 #endif  // !defined(_FFMPEG_CAPTURE_H)
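---

For context, a minimal caller sketch (not part of the patch) of how the new packet-callback API in this header might be wired up. The FFmpegCapture constructor and setter signatures are taken from the diff above; the file name, the lambda bodies, and the assumption that bmusb::card_disconnected_callback_t is a plain void() functor are illustrative only.

	#include <memory>
	#include <string>

	#include "ffmpeg_capture.h"

	void setup_example_capture()
	{
		// Width/height of 0 means "keep the input size", per the new comment above.
		auto capture = std::make_unique<FFmpegCapture>("example.mp4", /*width=*/0, /*height=*/0);

		// Raw demuxed packets can now be observed for both streams;
		// before this change, only the audio packet callback existed.
		capture->set_video_callback([](const AVPacket *pkt, const AVRational timebase) {
			// e.g. hand the compressed video packet to a passthrough mux.
		});
		capture->set_audio_callback([](const AVPacket *pkt, const AVRational timebase) {
			// e.g. hand the compressed audio packet along likewise.
		});

		// New disconnect notification; the exact callback signature is assumed
		// here to be a no-argument functor.
		capture->set_card_disconnected_callback([] {
			// e.g. tear the card down or restart playback.
		});

	#ifdef HAVE_SRT
		// Alternatively, adopt an already-accepted SRT client socket
		// (the new constructor takes ownership of it):
		// auto srt_capture = std::make_unique<FFmpegCapture>(srt_sock, stream_id);
	#endif

		capture->configure_card();
		capture->start_bm_capture();
	}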