qt5deps = dependency('qt5', modules: ['Core', 'Gui', 'Widgets', 'OpenGLExtensions', 'OpenGL', 'Network'])
sdl2_imagedep = dependency('SDL2_image', required: false)
sdl2dep = dependency('sdl2', required: false)
+srtdep = dependency('srt', required: false)
sqlite3dep = dependency('sqlite3')
threaddep = dependency('threads')
vadrmdep = dependency('libva-drm')
add_project_arguments('-DHAVE_CEF=1', language: 'cpp')
endif
+if srtdep.found()
+ # NOTE: Seemingly there's no way to figure out if we have OpenSSL libsrt
+ # or gnutls libsrt, so we cannot check license compatibility here.
+ add_project_arguments('-DHAVE_SRT=1', language: 'cpp')
+endif
+
top_include = include_directories('.')
subdir('shared')
nageru_deps = [shareddep, qt5deps, libjpegdep, movitdep, protobufdep,
vax11dep, vadrmdep, x11dep, libavformatdep, libswresampledep, libavcodecdep, libavutildep,
libswscaledep, libusbdep, luajitdep, dldep, x264dep, alsadep, zitaresamplerdep,
- qcustomplotdep, threaddep, eigendep]
+ qcustomplotdep, threaddep, eigendep, srtdep]
nageru_include_dirs = [include_directories('nageru')]
nageru_link_with = []
nageru_build_rpath = ''
#define LOCAL_DUMP_SUFFIX ".nut"
#define DEFAULT_STREAM_MUX_NAME "nut" // Only for HTTP. Local dump guesses from LOCAL_DUMP_SUFFIX.
#define DEFAULT_HTTPD_PORT 9095
+#define DEFAULT_SRT_PORT 9710
#include "shared/shared_defs.h"
#include "ref_counted_frame.h"
#include "shared/timebase.h"
+#ifdef HAVE_SRT
+#include <srt/srt.h>
+#endif
+
#define FRAME_SIZE (8 << 20) // 8 MB.
using namespace std;
avformat_network_init(); // In case someone wants this.
}
+#ifdef HAVE_SRT
+// Constructor for SRT inputs: wraps an already-accepted SRT client socket
+// (ownership is taken; see the header) instead of a filename/URL. The
+// stream is decoded via the custom AVIO read callback read_srt(), and is
+// fed as planar Y'CbCr. SRT sockets are ephemeral, so play_once is set:
+// the producer thread ends (and reports disconnect) when the stream does.
+FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id)
+	: srt_sock(srt_sock),
+	  width(global_flags.width),
+	  height(global_flags.height),
+	  pixel_format(bmusb::PixelFormat_8BitYCbCrPlanar),
+	  video_timebase{1, 1}
+{
+	// Use the SRT stream ID as the card description if the sender set one.
+	if (stream_id.empty()) {
+		description = "SRT stream";
+	} else {
+		description = stream_id;
+	}
+	play_as_fast_as_possible = true;
+	play_once = true;
+	last_frame = steady_clock::now();
+}
+#endif
+
FFmpegCapture::~FFmpegCapture()
{
if (has_dequeue_callbacks) {
filename_copy = filename;
}
- string pathname = search_for_file(filename_copy);
+ string pathname;
+ if (srt_sock == -1) {
+ pathname = search_for_file(filename_copy);
+ } else {
+ pathname = description;
+ }
if (pathname.empty()) {
- fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename_copy.c_str());
send_disconnected_frame();
+ if (play_once) {
+ break;
+ }
producer_thread_should_quit.sleep_for(seconds(1));
+ fprintf(stderr, "%s not found, sleeping one second and trying again...\n", filename_copy.c_str());
continue;
}
should_interrupt = false;
if (!play_video(pathname)) {
// Error.
- fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
send_disconnected_frame();
+ if (play_once) {
+ break;
+ }
+ fprintf(stderr, "Error when playing %s, sleeping one second and trying again...\n", pathname.c_str());
producer_thread_should_quit.sleep_for(seconds(1));
continue;
}
+ if (play_once) {
+ send_disconnected_frame();
+ break;
+ }
+
// Probably just EOF, will exit the loop above on next test.
}
FrameAllocator::Frame(), /*audio_offset=*/0, AudioFormat());
last_frame_was_connected = false;
}
+
+ if (play_once) {
+ disconnected = true;
+ if (card_disconnected_callback != nullptr) {
+ card_disconnected_callback();
+ }
+ }
}
bool FFmpegCapture::play_video(const string &pathname)
last_modified = buf.st_mtim;
}
- auto format_ctx = avformat_open_input_unique(pathname.c_str(), nullptr, nullptr, AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+ AVFormatContextWithCloser format_ctx;
+ if (srt_sock == -1) {
+ // Regular file.
+ format_ctx = avformat_open_input_unique(pathname.c_str(), /*fmt=*/nullptr,
+ /*options=*/nullptr,
+ AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+ } else {
+#ifdef HAVE_SRT
+ // SRT socket, already opened.
+ AVInputFormat *mpegts_fmt = av_find_input_format("mpegts");
+ format_ctx = avformat_open_input_unique(&FFmpegCapture::read_srt_thunk, this,
+ mpegts_fmt, /*options=*/nullptr,
+ AVIOInterruptCB{ &FFmpegCapture::interrupt_cb_thunk, this });
+#else
+ assert(false);
+#endif
+ }
if (format_ctx == nullptr) {
fprintf(stderr, "%s: Error opening file\n", pathname.c_str());
return false;
return video_frame;
}
+// Static trampoline for FFmpeg's AVIOInterruptCB; the opaque pointer is
+// the FFmpegCapture instance. (Parameter renamed from the misleading
+// "unique" to the conventional "opaque".)
-int FFmpegCapture::interrupt_cb_thunk(void *unique)
+int FFmpegCapture::interrupt_cb_thunk(void *opaque)
{
-	return reinterpret_cast<FFmpegCapture *>(unique)->interrupt_cb();
+	return reinterpret_cast<FFmpegCapture *>(opaque)->interrupt_cb();
}
+// Returns nonzero when blocking I/O inside libavformat should be aborted
+// (set through should_interrupt, e.g. on URL change or shutdown).
int FFmpegCapture::interrupt_cb()
{
	return should_interrupt.load();
}
+
+#ifdef HAVE_SRT
+// Static trampoline matching the avio_alloc_context() read_packet signature;
+// the opaque pointer is the FFmpegCapture instance.
+int FFmpegCapture::read_srt_thunk(void *opaque, uint8_t *buf, int buf_size)
+{
+	return reinterpret_cast<FFmpegCapture *>(opaque)->read_srt(buf, buf_size);
+}
+
+// Reads one message from the SRT socket into FFmpeg's I/O buffer.
+// Returns the number of bytes received, or SRT_ERROR (-1) on failure,
+// which FFmpeg treats as an I/O error / end of stream.
+int FFmpegCapture::read_srt(uint8_t *buf, int buf_size)
+{
+	SRT_MSGCTRL mc = srt_msgctrl_default;
+	return srt_recvmsg2(srt_sock, reinterpret_cast<char *>(buf), buf_size, &mc);
+}
+#endif
{
public:
FFmpegCapture(const std::string &filename, unsigned width, unsigned height);
+#ifdef HAVE_SRT
+ // Takes ownership of the SRT client socket.
+ FFmpegCapture(int srt_sock, const std::string &stream_id);
+#endif
~FFmpegCapture();
void set_card_index(int card_index)
has_dequeue_callbacks = true;
}
+ void set_card_disconnected_callback(bmusb::card_disconnected_callback_t callback)
+ {
+ card_disconnected_callback = callback;
+ }
+
std::string get_description() const override
{
return description;
void configure_card() override;
void start_bm_capture() override;
void stop_dequeue_thread() override;
- bool get_disconnected() const override { return false; } // We never unplug.
+ bool get_disconnected() const override { return disconnected; } // Only if play_once == true.
std::map<uint32_t, bmusb::VideoMode> get_available_video_modes() const override;
void set_video_mode(uint32_t video_mode_id) override {} // Ignore.
bmusb::VideoFormat construct_video_format(const AVFrame *frame, AVRational video_timebase);
UniqueFrame make_video_frame(const AVFrame *frame, const std::string &pathname, bool *error);
- static int interrupt_cb_thunk(void *unique);
+ static int interrupt_cb_thunk(void *opaque);
int interrupt_cb();
+#ifdef HAVE_SRT
+ static int read_srt_thunk(void *opaque, uint8_t *buf, int buf_size);
+ int read_srt(uint8_t *buf, int buf_size);
+#endif
+
mutable std::mutex filename_mu;
std::string description, filename;
+ int srt_sock = -1;
uint16_t timecode = 0;
unsigned width, height;
bmusb::PixelFormat pixel_format = bmusb::PixelFormat_8BitBGRA;
std::function<void()> dequeue_init_callback = nullptr;
std::function<void()> dequeue_cleanup_callback = nullptr;
+ bmusb::card_disconnected_callback_t card_disconnected_callback = nullptr;
+ bool play_once = false; // End thread after playing. Only for SRT, since the socket is ephemeral.
+ std::atomic<bool> disconnected{false};
+
bmusb::FrameAllocator *video_frame_allocator = nullptr;
bmusb::FrameAllocator *audio_frame_allocator = nullptr;
std::unique_ptr<bmusb::FrameAllocator> owned_video_frame_allocator;
OPTION_HTTP_AUDIO_CODEC,
OPTION_HTTP_AUDIO_BITRATE,
OPTION_HTTP_PORT,
+ OPTION_SRT_PORT,
+ OPTION_NO_SRT,
OPTION_NO_TRANSCODE_AUDIO,
OPTION_DISABLE_AUDIO,
OPTION_FLAT_AUDIO,
DEFAULT_AUDIO_OUTPUT_BIT_RATE / 1000);
fprintf(stderr, " --http-port=PORT which port to use for the built-in HTTP server\n");
fprintf(stderr, " (default is %d)\n", DEFAULT_HTTPD_PORT);
+ fprintf(stderr, " --srt-port=PORT which port to use for receiving SRT streams\n");
+ fprintf(stderr, " (default is %d)\n", DEFAULT_SRT_PORT);
+ fprintf(stderr, " --no-srt disable receiving SRT streams\n");
if (program == PROGRAM_KAERU) {
fprintf(stderr, " --no-transcode-audio copy encoded audio raw from the source stream\n");
fprintf(stderr, " (requires --http-audio-codec= to be set)\n");
{ "http-audio-codec", required_argument, 0, OPTION_HTTP_AUDIO_CODEC },
{ "http-audio-bitrate", required_argument, 0, OPTION_HTTP_AUDIO_BITRATE },
{ "http-port", required_argument, 0, OPTION_HTTP_PORT },
+ { "srt-port", required_argument, 0, OPTION_SRT_PORT },
+ { "no-srt", no_argument, 0, OPTION_NO_SRT },
{ "no-transcode-audio", no_argument, 0, OPTION_NO_TRANSCODE_AUDIO },
{ "disable-audio", no_argument, 0, OPTION_DISABLE_AUDIO },
{ "flat-audio", no_argument, 0, OPTION_FLAT_AUDIO },
case OPTION_HTTP_PORT:
global_flags.http_port = atoi(optarg);
break;
+ case OPTION_SRT_PORT:
+ global_flags.srt_port = atoi(optarg);
+ break;
+ case OPTION_NO_SRT:
+ global_flags.srt_port = -1;
+ break;
case OPTION_NO_TRANSCODE_AUDIO:
global_flags.transcode_audio = false;
break;
double output_slop_frames = 0.5;
int max_input_queue_frames = 6;
int http_port = DEFAULT_HTTPD_PORT;
+ int srt_port = DEFAULT_SRT_PORT; // -1 for none.
bool display_timecode_in_stream = false;
bool display_timecode_on_stdout = false;
bool enable_quick_cut_keys = false;
unsigned current_card = global_mixer->map_signal_to_card(signal_num);
bool is_ffmpeg = global_mixer->card_is_ffmpeg(current_card);
- if (!is_ffmpeg) { // FFmpeg inputs are not connected to any card; they're locked to a given input and have a given Y'CbCr interpretatio and have a given Y'CbCr interpretationn.
+	if (!is_ffmpeg) {  // FFmpeg inputs are not connected to any card; they're locked to a given input and have a given Y'CbCr interpretation.
for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
QString description(QString::fromStdString(global_mixer->get_card_description(card_index)));
QAction *action = new QAction(description, &card_group);
if (is_ffmpeg) {
// Add a menu to change the source URL if we're an FFmpeg card.
// (The theme can still override.)
- if (global_mixer->card_is_ffmpeg(current_card)) {
- change_url_action = new QAction("Change source filename/URL…", &menu);
- menu.addAction(change_url_action);
- }
+ change_url_action = new QAction("Change source filename/URL…", &menu);
+ menu.addAction(change_url_action);
} else {
// Add a submenu for selecting video input, with an action for each input.
std::map<uint32_t, string> video_inputs = global_mixer->get_available_video_inputs(current_card);
#include <cef_version.h>
#endif
+#ifdef HAVE_SRT
+#include <srt/srt.h>
+#endif
+
#include "basic_stats.h"
#ifdef HAVE_CEF
#include "nageru_cef_app.h"
av_register_all();
#endif
+#ifdef HAVE_SRT
+ if (global_flags.srt_port >= 0) {
+ srt_startup();
+ }
+#endif
+
QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts, true);
QSurfaceFormat fmt;
int rc = app.exec();
delete global_mixer;
+#ifdef HAVE_SRT
+ if (global_flags.srt_port >= 0) {
+ srt_cleanup();
+ }
+#endif
return rc;
}
#include <google/protobuf/util/json_util.h>
#include "json.pb.h"
+#ifdef HAVE_SRT
+// Must come after CEF, since it includes <syslog.h>, which has #defines
+// that conflict with CEF logging constants.
+#include <srt/srt.h>
+#endif
+
class IDeckLink;
class QOpenGLContext;
BMUSBCapture::set_card_connected_callback(bind(&Mixer::bm_hotplug_add, this, _1));
BMUSBCapture::start_bm_thread();
+#ifdef HAVE_SRT
+ if (global_flags.srt_port >= 0) {
+ start_srt();
+ }
+#endif
+
for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
cards[card_index].queue_length_policy.reset(card_index);
}
video_encoder.reset(nullptr);
}
-void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output)
+void Mixer::configure_card(unsigned card_index, CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool override_card_as_live)
{
printf("Configuring card %d...\n", card_index);
// NOTE: start_bm_capture() happens in thread_func().
+ if (override_card_as_live) {
+ assert(card_type == CardType::FFMPEG_INPUT);
+ }
+
DeviceSpec device;
- if (card_type == CardType::FFMPEG_INPUT) {
+ if (card_type == CardType::FFMPEG_INPUT && !override_card_as_live) {
device = DeviceSpec{InputSourceType::FFMPEG_VIDEO_INPUT, card_index - num_cards};
} else {
device = DeviceSpec{InputSourceType::CAPTURE_CARD, card_index};
assert(master_card_index < num_cards + num_video_inputs);
}
+ handle_hotplugged_cards();
+
vector<int32_t> raw_audio[MAX_VIDEO_CARDS]; // For MJPEG encoding.
OutputFrameInfo output_frame_info = get_one_frame_from_each_card(master_card_index, master_card_is_output, new_frames, has_new_frame, raw_audio);
schedule_audio_resampling_tasks(output_frame_info.dropped_frames, output_frame_info.num_samples, output_frame_info.frame_duration, output_frame_info.is_preroll, output_frame_info.frame_timestamp);
stats_dropped_frames += output_frame_info.dropped_frames;
- handle_hotplugged_cards();
-
for (unsigned card_index = 0; card_index < num_cards + num_video_inputs + num_html_inputs; ++card_index) {
DeviceSpec device = card_index_to_device(card_index, num_cards);
if (card_index == master_card_index || !has_new_frame[card_index]) {
// Check for cards that have been connected since last frame.
vector<libusb_device *> hotplugged_cards_copy;
+#ifdef HAVE_SRT
+ vector<int> hotplugged_srt_cards_copy;
+#endif
{
lock_guard<mutex> lock(hotplug_mutex);
swap(hotplugged_cards, hotplugged_cards_copy);
+#ifdef HAVE_SRT
+ swap(hotplugged_srt_cards, hotplugged_srt_cards_copy);
+#endif
}
for (libusb_device *new_dev : hotplugged_cards_copy) {
// Look for a fake capture card where we can stick this in.
capture->start_bm_capture();
}
}
+
+#ifdef HAVE_SRT
+ // Same, for SRT inputs.
+ // TODO: On disconnect and reconnect, we might want to use the stream ID
+ // to find the slot it used to go into?
+ for (SRTSOCKET sock : hotplugged_srt_cards_copy) {
+ // Look for a fake capture card where we can stick this in.
+ int free_card_index = -1;
+ for (unsigned card_index = 0; card_index < num_cards; ++card_index) {
+ if (cards[card_index].is_fake_capture) {
+ free_card_index = card_index;
+ break;
+ }
+ }
+
+ char name[256];
+ int namelen = sizeof(name);
+ srt_getsockopt(sock, /*ignored=*/0, SRTO_STREAMID, name, &namelen);
+ string stream_id(name, namelen);
+
+ if (free_card_index == -1) {
+ if (stream_id.empty()) {
+ stream_id = "no name";
+ }
+ fprintf(stderr, "New SRT stream connected (%s), but no free slots -- ignoring.\n", stream_id.c_str());
+ srt_close(sock);
+ } else {
+ // FFmpegCapture takes ownership.
+ if (stream_id.empty()) {
+ fprintf(stderr, "New unnamed SRT stream connected, choosing slot %d.\n", free_card_index);
+ } else {
+ fprintf(stderr, "New SRT stream connected (%s), choosing slot %d.\n", stream_id.c_str(), free_card_index);
+ }
+ CaptureCard *card = &cards[free_card_index];
+ FFmpegCapture *capture = new FFmpegCapture(sock, stream_id);
+ capture->set_card_index(free_card_index);
+ configure_card(free_card_index, capture, CardType::FFMPEG_INPUT, /*output=*/nullptr, /*override_card_as_live=*/true);
+ card->queue_length_policy.reset(free_card_index);
+ capture->set_card_disconnected_callback(bind(&Mixer::bm_hotplug_remove, this, free_card_index));
+ capture->start_bm_capture();
+ }
+ }
+#endif
}
audio_task_queue_changed.notify_one();
mixer_thread.join();
audio_thread.join();
+#ifdef HAVE_SRT
+ if (global_flags.srt_port >= 0) {
+ // There's seemingly no other reasonable way to wake up the thread
+ // (libsrt's epoll equivalent is busy-waiting).
+ int sock = srt_socket(AF_INET6, 0, 0);
+ if (sock != -1) {
+ sockaddr_in6 addr;
+ memset(&addr, 0, sizeof(addr));
+ addr.sin6_family = AF_INET6;
+ addr.sin6_addr = IN6ADDR_LOOPBACK_INIT;
+ addr.sin6_port = htons(global_flags.srt_port);
+ srt_connect(sock, (sockaddr *)&addr, sizeof(addr));
+ srt_close(sock);
+ }
+ srt_thread.join();
+ }
+#endif
}
void Mixer::transition_clicked(int transition_num)
color_updated_callback = callback;
}
+#ifdef HAVE_SRT
+// Binds an SRT listening socket on global_flags.srt_port (IPv6 any-address,
+// which on typical dual-stack systems also accepts IPv4 clients) and spawns
+// a thread that accepts incoming streams, handing each client socket to the
+// mixer thread via hotplugged_srt_cards (picked up in
+// handle_hotplugged_cards()). Setup failures are fatal; the accept thread is
+// woken for shutdown by a dummy local connection made in the destructor.
+void Mixer::start_srt()
+{
+	SRTSOCKET sock = srt_socket(AF_INET6, 0, 0);
+	if (sock == -1) {
+		fprintf(stderr, "srt_socket: %s\n", srt_getlasterror_str());
+		abort();
+	}
+	sockaddr_in6 addr;
+	memset(&addr, 0, sizeof(addr));
+	addr.sin6_family = AF_INET6;
+	addr.sin6_port = htons(global_flags.srt_port);
+
+	int err = srt_bind(sock, (sockaddr *)&addr, sizeof(addr));
+	if (err != 0) {
+		fprintf(stderr, "srt_bind: %s\n", srt_getlasterror_str());
+		abort();
+	}
+	err = srt_listen(sock, MAX_VIDEO_CARDS);
+	if (err != 0) {
+		fprintf(stderr, "srt_listen: %s\n", srt_getlasterror_str());
+		abort();
+	}
+
+	srt_thread = thread([this, sock] {
+		sockaddr_in6 addr;
+		for ( ;; ) {
+			int sa_len = sizeof(addr);
+			int clientsock = srt_accept(sock, (sockaddr *)&addr, &sa_len);
+			if (should_quit) {
+				if (clientsock != -1) {
+					srt_close(clientsock);
+				}
+				break;
+			}
+			if (clientsock == -1) {
+				// Transient accept failure; do not hand an invalid
+				// socket to the mixer thread as if it were a new stream.
+				fprintf(stderr, "srt_accept: %s\n", srt_getlasterror_str());
+				continue;
+			}
+			lock_guard<mutex> lock(hotplug_mutex);
+			hotplugged_srt_cards.push_back(clientsock);
+		}
+		srt_close(sock);
+	});
+}
+#endif
+
mutex RefCountedGLsync::fence_lock;
+// Whether this card slot should be treated as an FFmpeg (file/URL) input
+// in the UI — e.g. offered the "change source filename/URL" menu.
bool card_is_ffmpeg(unsigned card_index) const {
	assert(card_index < num_cards + num_video_inputs);
+	if (card_index < num_cards) {
+		// SRT inputs are more like regular inputs than FFmpeg inputs,
+		// so show them as such. (This allows the user to right-click
+		// to select a different input.)
+		return false;
+	}
	return cards[card_index].type == CardType::FFMPEG_INPUT;
}
FFMPEG_INPUT,
CEF_INPUT,
};
- void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output);
+ void configure_card(unsigned card_index, bmusb::CaptureInterface *capture, CardType card_type, DeckLinkOutput *output, bool override_ffmpeg_to_live = false);
void set_output_card_internal(int card_index); // Should only be called from the mixer thread.
void bm_frame(unsigned card_index, uint16_t timecode,
bmusb::FrameAllocator::Frame video_frame, size_t video_offset, bmusb::VideoFormat video_format,
void render_one_frame(int64_t duration);
void audio_thread_func();
void release_display_frame(DisplayFrame *frame);
+#ifdef HAVE_SRT
+ void start_srt();
+#endif
double pts() { return double(pts_int) / TIMEBASE; }
void trim_queue(CaptureCard *card, size_t safe_queue_length);
std::pair<std::string, std::string> get_channels_json();
// Protected by its own mutex.
std::mutex hotplug_mutex;
std::vector<libusb_device *> hotplugged_cards;
+#ifdef HAVE_SRT
+ std::vector<int> hotplugged_srt_cards;
+#endif
class OutputChannel {
public:
std::thread mixer_thread;
std::thread audio_thread;
+#ifdef HAVE_SRT
+ std::thread srt_thread;
+#endif
std::atomic<bool> should_quit{false};
std::atomic<bool> should_cut{false};
if (lua_isnumber(L, 2)) {
block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR));
block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR_WITH_DEINTERLACE));
+#ifdef HAVE_SRT
+ if (global_flags.srt_port >= 0) {
+ block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_YCBCR_PLANAR));
+ }
+#endif
#ifdef HAVE_CEF
} else if (luaL_testudata(L, 2, "HTMLInput")) {
block->alternatives.emplace_back(new EffectBlueprint(LIVE_INPUT_BGRA));
InputStateInfo info(input_state);
for (Block *block : blocks) {
if (block->is_input && block->signal_type_to_connect == Block::CONNECT_SIGNAL) {
- EffectType chosen_type = current_type(block);
- assert(chosen_type == LIVE_INPUT_YCBCR || chosen_type == LIVE_INPUT_YCBCR_WITH_DEINTERLACE);
int card_index = theme->map_signal_to_card(block->signal_to_connect);
if (info.last_interlaced[card_index]) {
+ assert(info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitYCbCr ||
+ info.last_pixel_format[card_index] == bmusb::PixelFormat_10BitYCbCr);
block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR_WITH_DEINTERLACE);
+ } else if (info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitYCbCrPlanar) {
+ block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR_PLANAR);
+ } else if (info.last_pixel_format[card_index] == bmusb::PixelFormat_8BitBGRA) {
+ block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_BGRA);
} else {
block->currently_chosen_alternative = find_index_of(block, LIVE_INPUT_YCBCR);
}
+ if (block->currently_chosen_alternative == -1) {
+ fprintf(stderr, "ERROR: Input connected to a video card pixel format that it was not ready for.\n");
+ abort();
+ }
}
}
last_is_connected[signal_num] = userdata->last_is_connected;
last_frame_rate_nom[signal_num] = userdata->last_frame_rate_nom;
last_frame_rate_den[signal_num] = userdata->last_frame_rate_den;
+ last_pixel_format[signal_num] = userdata->pixel_format;
has_last_subtitle[signal_num] = userdata->has_last_subtitle;
last_subtitle[signal_num] = userdata->last_subtitle;
}
unsigned last_width[MAX_VIDEO_CARDS], last_height[MAX_VIDEO_CARDS];
bool last_interlaced[MAX_VIDEO_CARDS], last_has_signal[MAX_VIDEO_CARDS], last_is_connected[MAX_VIDEO_CARDS];
unsigned last_frame_rate_nom[MAX_VIDEO_CARDS], last_frame_rate_den[MAX_VIDEO_CARDS];
+ bmusb::PixelFormat last_pixel_format[MAX_VIDEO_CARDS];
bool has_last_subtitle[MAX_VIDEO_CARDS];
std::string last_subtitle[MAX_VIDEO_CARDS];
};
return AVFormatContextWithCloser(format_ctx);
}
+// Opens a media stream whose bytes come from a custom read callback
+// (used for e.g. SRT sockets) instead of a filename. read_packet is
+// invoked with “opaque” to fill buffers of up to 4 kB; “fmt” may pin the
+// container format (e.g. mpegts), since probing a pipe-like source is
+// unreliable. Returns a smart pointer holding nullptr on failure; unlike
+// the original, allocation failures from avformat_alloc_context(),
+// av_malloc() and avio_alloc_context() are now checked instead of being
+// dereferenced or leaked.
+AVFormatContextWithCloser avformat_open_input_unique(
+	int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
+	void *opaque, AVInputFormat *fmt, AVDictionary **options,
+	const AVIOInterruptCB &interrupt_cb)
+{
+	AVFormatContext *format_ctx = avformat_alloc_context();
+	if (format_ctx == nullptr) {
+		return AVFormatContextWithCloser(nullptr);
+	}
+	format_ctx->interrupt_callback = interrupt_cb;
+	constexpr size_t buf_size = 4096;
+	unsigned char *buf = (unsigned char *)av_malloc(buf_size);
+	if (buf == nullptr) {
+		avformat_free_context(format_ctx);
+		return AVFormatContextWithCloser(nullptr);
+	}
+	format_ctx->pb = avio_alloc_context(buf, buf_size, /*write_flag=*/false, opaque,
+		read_packet, /*write_packet=*/nullptr, /*seek=*/nullptr);
+	if (format_ctx->pb == nullptr) {
+		av_free(buf);
+		avformat_free_context(format_ctx);
+		return AVFormatContextWithCloser(nullptr);
+	}
+	// Note: avformat_open_input() frees the context itself on failure.
+	if (avformat_open_input(&format_ctx, "", fmt, options) != 0) {
+		format_ctx = nullptr;
+	}
+	return AVFormatContextWithCloser(format_ctx);
+}
+
// AVCodecContext
void avcodec_free_context_unique::operator() (AVCodecContext *codec_ctx) const
AVDictionary **options,
const AVIOInterruptCB &interrupt_cb);
+AVFormatContextWithCloser avformat_open_input_unique(
+ int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
+ void *opaque, AVInputFormat *fmt, AVDictionary **options,
+ const AVIOInterruptCB &interrupt_cb);
+
// AVCodecContext
struct avcodec_free_context_unique {