#include <assert.h>
#include <errno.h>
+#include <epoxy/egl.h>
#include <limits>
#include <map>
#include <memory>
#include <string>
#include <sys/stat.h>
#include <thread>
+#include <QSurface>
extern "C" {
#include <libavcodec/avcodec.h>
#include <movit/colorspace_conversion_effect.h>
#include "bmusb/bmusb.h"
+#include "shared/context.h"
#include "shared/ffmpeg_raii.h"
#include "ffmpeg_util.h"
#include "flags.h"
} // extern "C"
+// Constructor for playing back a video file.
+//
+// The new `surface` parameter is used by the producer thread to create an
+// OpenGL context (see the eglBindAPI/make_current code in the thread body),
+// in case create_frame() needs to (re)allocate frames with an active context.
+// It may be nullptr (e.g. Kaeru with MallocFrameAllocator, which never needs
+// a context). Ownership of the surface is taken over; it is deleted in the
+// destructor.
-FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height)
- : filename(filename), width(width), height(height), video_timebase{1, 1}
+FFmpegCapture::FFmpegCapture(const string &filename, unsigned width, unsigned height, QSurface *surface)
+ : filename(filename), width(width), height(height), video_timebase{1, 1}, surface(surface)
{
	description = "Video: " + filename;
}
#ifdef HAVE_SRT
-FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id)
+FFmpegCapture::FFmpegCapture(int srt_sock, const string &stream_id, QSurface *surface)
: srt_sock(srt_sock),
width(0), // Don't resize; SRT streams typically have stable resolution, and should behave much like regular cards in general.
height(0),
pixel_format(bmusb::PixelFormat_8BitYCbCrPlanar),
- video_timebase{1, 1}
+ video_timebase{1, 1},
+ surface(surface)
{
if (stream_id.empty()) {
description = "SRT stream";
srt_close(srt_sock);
}
#endif
+ delete surface;
}
void FFmpegCapture::configure_card()
snprintf(thread_name, sizeof(thread_name), "FFmpeg_C_%d", card_index);
pthread_setname_np(pthread_self(), thread_name);
+ // We need a context in case create_frame() needs to reallocate something.
+ // (If none is given, we are probably in Kaeru, which uses MallocFrameAllocator
+ // anyway, which doesn't reallocate currently and definitely doesn't need
+ // an active OpenGL context to do so.)
+ QOpenGLContext *context = nullptr;
+ if (surface != nullptr) {
+ context = create_context(this->surface);
+ eglBindAPI(EGL_OPENGL_API);
+ if (!make_current(context, this->surface)) {
+ printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
+ eglGetError());
+ abort();
+ }
+ }
+
while (!producer_thread_should_quit.should_quit()) {
string filename_copy;
{
dequeue_cleanup_callback();
has_dequeue_callbacks = false;
}
+
+ delete_context(context);
}
void FFmpegCapture::send_disconnected_frame()
fprintf(stderr, "%s: Cannot open video decoder\n", pathname.c_str());
return false;
}
- unique_ptr<AVCodecContext, decltype(avcodec_close)*> video_codec_ctx_cleanup(
- video_codec_ctx.get(), avcodec_close);
// Used in decode_ycbcr_format().
is_mjpeg = video_codecpar->codec_id == AV_CODEC_ID_MJPEG;
return false;
}
}
- unique_ptr<AVCodecContext, decltype(avcodec_close)*> audio_codec_ctx_cleanup(
- audio_codec_ctx.get(), avcodec_close);
internal_rewind();
int out_samples = swr_convert(resampler, &data, num_samples_room,
const_cast<const uint8_t **>(audio_avframe->data), audio_avframe->nb_samples);
if (out_samples < 0) {
- fprintf(stderr, "Audio conversion failed.\n");
- abort();
- }
+ fprintf(stderr, "Audio conversion failed.\n");
+ abort();
+ }
audio_frame->len += out_samples * bytes_per_sample;
}
{
*error = false;
- UniqueFrame video_frame(video_frame_allocator->alloc_frame());
+ UniqueFrame video_frame(video_frame_allocator->create_frame(frame->width, frame->height, frame->width));
if (video_frame->data == nullptr) {
return video_frame;
}
// FIXME: Currently, if the video is too high-res for one of the allocated
// frames, we simply refuse to scale it here to avoid crashes. It would be better
// if we could somehow signal getting larger frames, especially as 4K is a thing now.
- if (video_frame->len > FRAME_SIZE) {
- fprintf(stderr, "%s: Decoded frame would be larger than supported FRAME_SIZE (%zu > %u), not decoding.\n", pathname.c_str(), video_frame->len, FRAME_SIZE);
+ if (video_frame->len > video_frame->size) {
+ fprintf(stderr, "%s: Decoded frame would be larger than supported frame size (%zu > %zu), not decoding.\n", pathname.c_str(), video_frame->len, video_frame->size);
*error = true;
return video_frame;
}