X-Git-Url: https://git.sesse.net/?p=nageru;a=blobdiff_plain;f=nageru%2Fmjpeg_encoder.cpp;h=9ae018f81059584621bb383b3f4d6b30c01df0f4;hp=07e302c4e93148c2b9a83edfc1034e5465f1e8bc;hb=a839022c035b3d9387feabc02843c166ac78b469;hpb=f08eaef2d7c9f6815f7010a221302daccb57fda1

diff --git a/nageru/mjpeg_encoder.cpp b/nageru/mjpeg_encoder.cpp
index 07e302c..9ae018f 100644
--- a/nageru/mjpeg_encoder.cpp
+++ b/nageru/mjpeg_encoder.cpp
@@ -30,12 +30,6 @@ using namespace std;
 
 extern void memcpy_with_pitch(uint8_t *dst, const uint8_t *src, size_t src_width, size_t dst_pitch, size_t height);
 
-#define CHECK_VASTATUS(va_status, func) \
-	if (va_status != VA_STATUS_SUCCESS) { \
-		fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
-		exit(1); \
-	}
-
 // From libjpeg (although it's of course identical between implementations).
 static const int jpeg_natural_order[DCTSIZE2] = {
 	 0,  1,  8, 16,  9,  2,  3, 10,
@@ -294,6 +288,37 @@ void MJPEGEncoder::upload_frame(int64_t pts, unsigned card_index, RefCountedFram
 	any_frames_to_be_encoded.notify_all();
 }
 
+void MJPEGEncoder::finish_frame(RefCountedFrame frame)
+{
+	PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)frame->userdata;
+
+	if (userdata->data_copy_current_src == PBOFrameAllocator::Userdata::FROM_VA_API) {
+		VAResources resources __attribute__((unused)) = move(userdata->va_resources);
+		ReleaseVAResources release = move(userdata->va_resources_release);
+		VAImage image = move(userdata->va_image);
+
+		VAStatus va_status = vaUnmapBuffer(va_dpy->va_dpy, image.buf);
+		CHECK_VASTATUS(va_status, "vaUnmapBuffer");
+		va_status = vaDestroyImage(va_dpy->va_dpy, image.image_id);
+		CHECK_VASTATUS(va_status, "vaDestroyImage");
+	}
+}
+
+int MJPEGEncoder::get_mjpeg_stream_for_card(unsigned card_index)
+{
+	// Only bother doing MJPEG encoding if there are any connected clients
+	// that want the stream.
+	if (httpd->get_num_connected_multicam_clients() == 0) {
+		return -1;
+	}
+
+	auto it = global_flags.card_to_mjpeg_stream_export.find(card_index);
+	if (it == global_flags.card_to_mjpeg_stream_export.end()) {
+		return -1;
+	}
+	return it->second;
+}
+
 void MJPEGEncoder::encoder_thread_func()
 {
 	pthread_setname_np(pthread_self(), "MJPEG_Encode");
@@ -597,11 +622,20 @@ MJPEGEncoder::VAData MJPEGEncoder::get_va_data_for_resolution(unsigned width, un
 
 void MJPEGEncoder::encode_jpeg_va(QueuedFrame &&qf)
 {
+	PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)qf.frame->userdata;
 	unsigned width = qf.video_format.width;
 	unsigned height = qf.video_format.height;
 
-	VAResources resources = get_va_resources(width, height);
-	ReleaseVAResources release(this, resources);
+	VAResources resources;
+	ReleaseVAResources release;
+	if (userdata->data_copy_current_src == PBOFrameAllocator::Userdata::FROM_VA_API) {
+		resources = move(userdata->va_resources);
+		release = move(userdata->va_resources_release);
+	} else {
+		assert(userdata->data_copy_current_src == PBOFrameAllocator::Userdata::FROM_MALLOC);
+		resources = get_va_resources(width, height);
+		release = ReleaseVAResources(this, resources);
+	}
 
 	VAData va_data = get_va_data_for_resolution(width, height);
 	va_data.pic_param.coded_buf = resources.data_buffer;
@@ -627,20 +661,27 @@ void MJPEGEncoder::encode_jpeg_va(QueuedFrame &&qf)
 	VABufferDestroyer destroy_slice_param(va_dpy->va_dpy, slice_param_buffer);
 
 	VAImage image;
-	va_status = vaDeriveImage(va_dpy->va_dpy, resources.surface, &image);
-	CHECK_VASTATUS(va_status, "vaDeriveImage");
+	if (userdata->data_copy_current_src == PBOFrameAllocator::Userdata::FROM_VA_API) {
+		// The pixel data is already uploaded by the caller.
+		image = move(userdata->va_image);
+	} else {
+		assert(userdata->data_copy_current_src == PBOFrameAllocator::Userdata::FROM_MALLOC);
 
-	// Upload the pixel data.
-	uint8_t *surface_p = nullptr;
-	vaMapBuffer(va_dpy->va_dpy, image.buf, (void **)&surface_p);
+		// Upload the pixel data.
+		va_status = vaDeriveImage(va_dpy->va_dpy, resources.surface, &image);
+		CHECK_VASTATUS(va_status, "vaDeriveImage");
 
-	size_t field_start_line = qf.video_format.extra_lines_top;	// No interlacing support.
-	size_t field_start = qf.cbcr_offset * 2 + qf.video_format.width * field_start_line * 2;
+		uint8_t *surface_p = nullptr;
+		vaMapBuffer(va_dpy->va_dpy, image.buf, (void **)&surface_p);
 
-	{
-		const uint8_t *src = qf.frame->data_copy + field_start;
-		uint8_t *dst = (unsigned char *)surface_p + image.offsets[0];
-		memcpy_with_pitch(dst, src, qf.video_format.width * 2, image.pitches[0], qf.video_format.height);
+		size_t field_start_line = qf.video_format.extra_lines_top;	// No interlacing support.
+		size_t field_start = qf.cbcr_offset * 2 + qf.video_format.width * field_start_line * 2;
+
+		{
+			const uint8_t *src = qf.frame->data_copy + field_start;
+			uint8_t *dst = (unsigned char *)surface_p + image.offsets[0];
+			memcpy_with_pitch(dst, src, qf.video_format.width * 2, image.pitches[0], qf.video_format.height);
+		}
 	}
 
 	va_status = vaUnmapBuffer(va_dpy->va_dpy, image.buf);
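Note: the hunk at @@ -30,12 +30,6 @@ deletes the file-local CHECK_VASTATUS definition, yet the newly added finish_frame() (and the rewritten encode_jpeg_va()) still invoke the macro, so it presumably now comes from a shared header introduced or included elsewhere in this commit. For reference, a minimal sketch of such a header with the same semantics as the definition removed above; the header name and location are assumptions, not something this diff confirms:

	// check_va_status.h (hypothetical name/location) -- abort on any VA-API
	// error, matching the macro that used to live in mjpeg_encoder.cpp.
	#include <stdio.h>
	#include <stdlib.h>
	#include <va/va.h>

	#define CHECK_VASTATUS(va_status, func) \
		if (va_status != VA_STATUS_SUCCESS) { \
			fprintf(stderr, "%s:%d (%s) failed with %d\n", __func__, __LINE__, func, va_status); \
			exit(1); \
		}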