#include <libavcodec/avcodec.h>
#include <libavformat/avio.h>
#include <libavutil/error.h>
-#include <libdrm/drm_fourcc.h>
+#include <drm_fourcc.h>
} // namespace
#define CHECK_VASTATUS(va_status, func) \
if (va_status != VA_STATUS_SUCCESS) { \
fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
- exit(1); \
+ abort(); \
}
#undef BUFFER_OFFSET
} else if (global_flags.x264_video_to_http) {
fprintf(stderr, "Disabling zerocopy H.264 encoding due to --http-x264-video.\n");
use_zerocopy = false;
+ } else if (global_flags.av1_video_to_http) {
+ fprintf(stderr, "Disabling zerocopy H.264 encoding due to --http-av1-video.\n");
+ use_zerocopy = false;
+ } else if (!global_flags.v4l_output_device.empty()) {
+ fprintf(stderr, "Disabling zerocopy H.264 encoding due to --v4l-output.\n");
+ use_zerocopy = false;
} else {
use_zerocopy = true;
}
global_flags.use_zerocopy = use_zerocopy;
}
-VADisplayWithCleanup::~VADisplayWithCleanup()
+static unique_ptr<VADisplayWithCleanup> try_open_va_h264(const string &va_display, VAProfile *h264_profile, string *error)
{
- if (va_dpy != nullptr) {
- vaTerminate(va_dpy);
- }
- if (x11_display != nullptr) {
- XCloseDisplay(x11_display);
- }
- if (drm_fd != -1) {
- close(drm_fd);
- }
-}
-
-unique_ptr<VADisplayWithCleanup> va_open_display(const string &va_display)
-{
- if (va_display.empty() || va_display[0] != '/') { // An X display.
- Display *x11_display = XOpenDisplay(va_display.empty() ? nullptr : va_display.c_str());
- if (x11_display == nullptr) {
- fprintf(stderr, "error: can't connect to X server!\n");
- return nullptr;
- }
-
- unique_ptr<VADisplayWithCleanup> ret(new VADisplayWithCleanup);
- ret->x11_display = x11_display;
- ret->can_use_zerocopy = true;
- ret->va_dpy = vaGetDisplay(x11_display);
- if (ret->va_dpy == nullptr) {
- return nullptr;
- }
- return ret;
- } else { // A DRM node on the filesystem (e.g. /dev/dri/renderD128).
- int drm_fd = open(va_display.c_str(), O_RDWR);
- if (drm_fd == -1) {
- perror(va_display.c_str());
- return NULL;
- }
- unique_ptr<VADisplayWithCleanup> ret(new VADisplayWithCleanup);
- ret->drm_fd = drm_fd;
- ret->can_use_zerocopy = false;
- ret->va_dpy = vaGetDisplayDRM(drm_fd);
- if (ret->va_dpy == nullptr) {
- return nullptr;
- }
- return ret;
- }
-}
-
-unique_ptr<VADisplayWithCleanup> try_open_va(const string &va_display, VAProfile *h264_profile, string *error)
-{
- unique_ptr<VADisplayWithCleanup> va_dpy = va_open_display(va_display);
- if (va_dpy == nullptr) {
- if (error) *error = "Opening VA display failed";
- return nullptr;
- }
- int major_ver, minor_ver;
- VAStatus va_status = vaInitialize(va_dpy->va_dpy, &major_ver, &minor_ver);
- if (va_status != VA_STATUS_SUCCESS) {
- char buf[256];
- snprintf(buf, sizeof(buf), "vaInitialize() failed with status %d\n", va_status);
- if (error != nullptr) *error = buf;
- return nullptr;
- }
-
- int num_entrypoints = vaMaxNumEntrypoints(va_dpy->va_dpy);
- unique_ptr<VAEntrypoint[]> entrypoints(new VAEntrypoint[num_entrypoints]);
- if (entrypoints == nullptr) {
- if (error != nullptr) *error = "Failed to allocate memory for VA entry points";
- return nullptr;
- }
-
- // Try the profiles from highest to lowest until we find one that can be encoded.
- constexpr VAProfile profile_list[] = { VAProfileH264High, VAProfileH264Main, VAProfileH264ConstrainedBaseline };
- for (unsigned i = 0; i < sizeof(profile_list) / sizeof(profile_list[0]); ++i) {
- vaQueryConfigEntrypoints(va_dpy->va_dpy, profile_list[i], entrypoints.get(), &num_entrypoints);
- for (int slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
- if (entrypoints[slice_entrypoint] != VAEntrypointEncSlice) {
- continue;
- }
-
- // We found a usable encoder, so return it.
- if (h264_profile != nullptr) {
- *h264_profile = profile_list[i];
- }
- return va_dpy;
- }
- }
-
- if (error != nullptr) *error = "Can't find VAEntrypointEncSlice for H264 profiles";
- return nullptr;
+ return try_open_va(va_display, { VAProfileH264High, VAProfileH264Main, VAProfileH264ConstrainedBaseline },
+ VAEntrypointEncSlice, /*desired_configs=*/{}, h264_profile, error);
}
int QuickSyncEncoderImpl::init_va(const string &va_display)
{
string error;
- va_dpy = try_open_va(va_display, &h264_profile, &error);
+ va_dpy = try_open_va_h264(va_display, &h264_profile, &error);
if (va_dpy == nullptr) {
fprintf(stderr, "error: %s\n", error.c_str());
- exit(1);
+ abort();
}
if (!va_dpy->can_use_zerocopy) {
use_zerocopy = false;
/* check the interested configattrib */
if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
printf("Not find desired YUV420 RT format\n");
- exit(1);
+ abort();
} else {
config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
if (!(attrib[VAConfigAttribRateControl].value & VA_RC_CQP)) {
fprintf(stderr, "ERROR: VA-API encoder does not support CQP mode.\n");
- exit(1);
+ abort();
}
config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
gl_surfaces[i].y_tex = resource_pool->create_2d_texture(GL_R8, 1, 1);
gl_surfaces[i].cbcr_tex = resource_pool->create_2d_texture(GL_RG8, 1, 1);
} else {
- size_t bytes_per_pixel = (global_flags.x264_bit_depth > 8) ? 2 : 1;
+ size_t bytes_per_pixel = (global_flags.bit_depth > 8) ? 2 : 1;
// Generate a PBO to read into. It doesn't necessarily fit 1:1 with the VA-API
// buffers, due to potentially differing pitch.
pic_param.CurrPic.frame_idx = current_ref_frame_num;
CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
// Insert the new frame at the start of the reference queue.
reference_frames.push_front(ReferenceFrame{ CurrentCurrPic, current_display_frame });
file_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
}
if (!global_flags.uncompressed_video_to_http &&
- !global_flags.x264_video_to_http) {
+ !global_flags.x264_video_to_http &&
+ !global_flags.av1_video_to_http) {
stream_mux->add_packet(pkt, task.pts + global_delay(), task.dts + global_delay());
}
}
// this is weird. but it seems to put a new frame onto the queue
void QuickSyncEncoderImpl::storage_task_enqueue(storage_task task)
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
storage_task_queue.push(move(task));
storage_task_queue_changed.notify_all();
}
// Unlock the frame, and all its references.
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
release_gl_surface(display_order);
for (size_t frame_num : ref_display_frame_numbers) {
void QuickSyncEncoderImpl::release_encode()
{
for (unsigned i = 0; i < SURFACE_NUM; i++) {
- vaDestroyBuffer(va_dpy->va_dpy, gl_surfaces[i].coded_buf);
- vaDestroySurfaces(va_dpy->va_dpy, &gl_surfaces[i].src_surface, 1);
- vaDestroySurfaces(va_dpy->va_dpy, &gl_surfaces[i].ref_surface, 1);
+ VAStatus va_status = vaDestroyBuffer(va_dpy->va_dpy, gl_surfaces[i].coded_buf);
+ CHECK_VASTATUS(va_status, "vaDestroyBuffer");
+ va_status = vaDestroySurfaces(va_dpy->va_dpy, &gl_surfaces[i].src_surface, 1);
+ CHECK_VASTATUS(va_status, "vaDestroySurfaces");
+ va_status = vaDestroySurfaces(va_dpy->va_dpy, &gl_surfaces[i].ref_surface, 1);
+ CHECK_VASTATUS(va_status, "vaDestroySurfaces");
}
- vaDestroyContext(va_dpy->va_dpy, context_id);
- vaDestroyConfig(va_dpy->va_dpy, config_id);
+ VAStatus va_status = vaDestroyContext(va_dpy->va_dpy, context_id);
+ CHECK_VASTATUS(va_status, "vaDestroyContext");
+ va_status = vaDestroyConfig(va_dpy->va_dpy, config_id);
+ CHECK_VASTATUS(va_status, "vaDestroyConfig");
}
void QuickSyncEncoderImpl::release_gl_resources()
has_released_gl_resources = true;
}
-QuickSyncEncoderImpl::QuickSyncEncoderImpl(const std::string &filename, ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, X264Encoder *x264_encoder, DiskSpaceEstimator *disk_space_estimator)
- : current_storage_frame(0), resource_pool(resource_pool), surface(surface), x264_encoder(x264_encoder), frame_width(width), frame_height(height), disk_space_estimator(disk_space_estimator)
+QuickSyncEncoderImpl::QuickSyncEncoderImpl(const std::string &filename, ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, const AVOutputFormat *oformat, VideoCodecInterface *http_encoder, VideoCodecInterface *disk_encoder, DiskSpaceEstimator *disk_space_estimator)
+ : current_storage_frame(0), resource_pool(resource_pool), surface(surface), http_encoder(http_encoder), disk_encoder(disk_encoder), frame_width(width), frame_height(height), disk_space_estimator(disk_space_estimator)
{
file_audio_encoder.reset(new AudioEncoder(AUDIO_OUTPUT_CODEC_NAME, DEFAULT_AUDIO_OUTPUT_BIT_RATE, oformat));
open_output_file(filename);
//print_input();
if (global_flags.x264_video_to_http || global_flags.x264_video_to_disk) {
- assert(x264_encoder != nullptr);
+ assert(http_encoder != nullptr);
+ assert(disk_encoder != nullptr);
+ } else if (global_flags.av1_video_to_http) {
+ assert(http_encoder != nullptr);
} else {
- assert(x264_encoder == nullptr);
+ assert(http_encoder == nullptr);
+ assert(disk_encoder == nullptr);
}
enable_zerocopy_if_possible();
memset(&slice_param, 0, sizeof(slice_param));
}
+ if (!global_flags.v4l_output_device.empty()) {
+ v4l_output.reset(new V4LOutput(global_flags.v4l_output_device.c_str(), width, height));
+ }
+
call_once(quick_sync_metrics_inited, [](){
mixer_latency_histogram.init("mixer");
qs_latency_histogram.init("quick_sync");
if (!make_current(context, this->surface)) {
printf("display=%p surface=%p context=%p curr=%p err=%d\n", eglGetCurrentDisplay(), this->surface, context, eglGetCurrentContext(),
eglGetError());
- exit(1);
+ abort();
}
encode_thread_func();
delete_context(context);
assert(!is_shutdown);
if (!use_zerocopy) {
- GLenum type = global_flags.x264_bit_depth > 8 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_BYTE;
+ GLenum type = global_flags.bit_depth > 8 ? GL_UNSIGNED_SHORT : GL_UNSIGNED_BYTE;
GLSurface *surf;
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
surf = surface_for_frame[current_storage_frame];
assert(surf != nullptr);
}
check_error();
{
- unique_lock<mutex> lock(frame_queue_mutex);
+ lock_guard<mutex> lock(frame_queue_mutex);
current_video_frame.fence = fence;
pending_video_frames.push(move(current_video_frame));
++current_storage_frame;
}
{
- unique_lock<mutex> lock(frame_queue_mutex);
+ lock_guard<mutex> lock(frame_queue_mutex);
encode_thread_should_quit = true;
frame_queue_nonempty.notify_all();
}
encode_thread.join();
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
storage_thread_should_quit = true;
frame_queue_nonempty.notify_all();
storage_task_queue_changed.notify_all();
{
AVFormatContext *avctx = avformat_alloc_context();
avctx->oformat = av_guess_format(NULL, filename.c_str(), NULL);
- assert(filename.size() < sizeof(avctx->filename) - 1);
- strcpy(avctx->filename, filename.c_str());
+ avctx->url = strdup(filename.c_str());
string url = "file:" + filename;
int ret = avio_open2(&avctx->pb, url.c_str(), AVIO_FLAG_WRITE, &avctx->interrupt_callback, NULL);
if (ret < 0) {
char tmp[AV_ERROR_MAX_STRING_SIZE];
fprintf(stderr, "%s: avio_open2() failed: %s\n", filename.c_str(), av_make_error_string(tmp, sizeof(tmp), ret));
- exit(1);
+ abort();
}
string video_extradata; // FIXME: See other comment about global headers.
if (global_flags.x264_video_to_disk) {
- video_extradata = x264_encoder->get_global_headers();
+ video_extradata = disk_encoder->get_global_headers();
}
current_file_mux_metrics.reset();
{
lock_guard<mutex> lock(file_audio_encoder_mutex);
AVCodecParametersWithDeleter audio_codecpar = file_audio_encoder->get_codec_parameters();
- file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, video_extradata, audio_codecpar.get(), get_color_space(global_flags.ycbcr_rec709_coefficients), Mux::WITH_AUDIO, TIMEBASE,
+ file_mux.reset(new Mux(avctx, frame_width, frame_height, Mux::CODEC_H264, video_extradata, audio_codecpar.get(), get_color_space(global_flags.ycbcr_rec709_coefficients), TIMEBASE,
std::bind(&DiskSpaceEstimator::report_append, disk_space_estimator, filename, _1),
Mux::WRITE_BACKGROUND,
{ &current_file_mux_metrics, &total_mux_metrics }));
metric_current_file_start_time_seconds = get_timestamp_for_metrics();
if (global_flags.x264_video_to_disk) {
- x264_encoder->add_mux(file_mux.get());
+ disk_encoder->add_mux(file_mux.get());
}
}
pass_frame(frame, display_frame_num, frame.pts, frame.duration);
if (global_flags.x264_video_to_disk) {
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
release_gl_surface(display_frame_num);
continue;
}
if (frame_type == FRAME_IDR) {
// Release any reference frames from the previous GOP.
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
for (const ReferenceFrame &frame : reference_frames) {
release_gl_surface(frame.display_number);
}
stream_mux->add_packet(pkt, pts, pts);
}
-namespace {
-
void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height)
{
if (src_width == dst_pitch) {
}
}
-} // namespace
-
void QuickSyncEncoderImpl::pass_frame(QuickSyncEncoderImpl::PendingFrame frame, int display_frame_num, int64_t pts, int64_t duration)
{
// Wait for the GPU to be done with the frame.
GLSurface *surf;
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
surf = surface_for_frame[display_frame_num];
assert(surf != nullptr);
}
uint8_t *data = reinterpret_cast<uint8_t *>(surf->y_ptr);
if (global_flags.uncompressed_video_to_http) {
add_packet_for_uncompressed_frame(pts, duration, data);
- } else if (global_flags.x264_video_to_http || global_flags.x264_video_to_disk) {
- x264_encoder->add_frame(pts, duration, frame.ycbcr_coefficients, data, received_ts);
+ } else if (http_encoder != nullptr) {
+ http_encoder->add_frame(pts, duration, frame.ycbcr_coefficients, data, received_ts);
+ } if (disk_encoder != nullptr && disk_encoder != http_encoder) {
+ disk_encoder->add_frame(pts, duration, frame.ycbcr_coefficients, data, received_ts);
+ }
+
+ if (v4l_output != nullptr) {
+ v4l_output->send_frame(data);
}
}
GLSurface *surf;
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
surf = surface_for_frame[display_frame_num];
assert(surf != nullptr);
}
// Lock the references for this frame; otherwise, they could be
// rendered to before this frame is done encoding.
{
- unique_lock<mutex> lock(storage_task_queue_mutex);
+ lock_guard<mutex> lock(storage_task_queue_mutex);
for (const ReferenceFrame &frame : reference_frames) {
assert(surface_for_frame.count(frame.display_number));
++surface_for_frame[frame.display_number]->refcount;
}
// Proxy object.
-QuickSyncEncoder::QuickSyncEncoder(const std::string &filename, ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, AVOutputFormat *oformat, X264Encoder *x264_encoder, DiskSpaceEstimator *disk_space_estimator)
- : impl(new QuickSyncEncoderImpl(filename, resource_pool, surface, va_display, width, height, oformat, x264_encoder, disk_space_estimator)) {}
+QuickSyncEncoder::QuickSyncEncoder(const std::string &filename, ResourcePool *resource_pool, QSurface *surface, const string &va_display, int width, int height, const AVOutputFormat *oformat, VideoCodecInterface *http_encoder, VideoCodecInterface *disk_encoder, DiskSpaceEstimator *disk_space_estimator)
+ : impl(new QuickSyncEncoderImpl(filename, resource_pool, surface, va_display, width, height, oformat, http_encoder, disk_encoder, disk_space_estimator)) {}
// Must be defined here because unique_ptr<> destructor needs to know the impl.
QuickSyncEncoder::~QuickSyncEncoder() {}
}
// First try the default (ie., whatever $DISPLAY is set to).
- unique_ptr<VADisplayWithCleanup> va_dpy = try_open_va("", nullptr, nullptr);
+ unique_ptr<VADisplayWithCleanup> va_dpy = try_open_va_h264("", nullptr, nullptr);
if (va_dpy != nullptr) {
if (need_env_reset) {
unsetenv("LIBVA_MESSAGING_LEVEL");
} else {
for (size_t i = 0; i < g.gl_pathc; ++i) {
string path = g.gl_pathv[i];
- va_dpy = try_open_va(path, nullptr, nullptr);
+ va_dpy = try_open_va_h264(path, nullptr, nullptr);
if (va_dpy != nullptr) {
fprintf(stderr, "Autodetected %s as a suitable replacement; using it.\n",
path.c_str());
fprintf(stderr, "to expose Quick Sync. Alternatively, you can use --record-x264-video\n");
fprintf(stderr, "to use software instead of hardware H.264 encoding, at the expense\n");
fprintf(stderr, "of increased CPU usage and possibly bit rate.\n");
- exit(1);
+ abort();
}