#include "flags.h"
#include "shared/httpd.h"
#include "shared/memcpy_interleaved.h"
+#include "shared/metrics.h"
#include "pbo_frame_allocator.h"
#include "shared/timebase.h"
#include "va_display_with_cleanup.h"
// Tail of the MJPEGEncoder constructor (the opening is outside this view).
// Spawns the VA-API receiver thread, then registers one counter per way a
// frame can leave the encoder, all under the shared "mjpeg_frames" metric
// name and distinguished by {status, reason} labels.
va_receiver_thread = thread(&MJPEGEncoder::va_receiver_thread_func, this);
}
+ global_metrics.add("mjpeg_frames", {{ "status", "dropped" }, { "reason", "zero_size" }}, &metric_mjpeg_frames_zero_size_dropped);
+ global_metrics.add("mjpeg_frames", {{ "status", "dropped" }, { "reason", "interlaced" }}, &metric_mjpeg_frames_interlaced_dropped);
+ global_metrics.add("mjpeg_frames", {{ "status", "dropped" }, { "reason", "unsupported_pixel_format" }}, &metric_mjpeg_frames_unsupported_pixel_format_dropped);
+ global_metrics.add("mjpeg_frames", {{ "status", "dropped" }, { "reason", "oversized" }}, &metric_mjpeg_frames_oversized_dropped);
+ global_metrics.add("mjpeg_frames", {{ "status", "dropped" }, { "reason", "overrun" }}, &metric_mjpeg_overrun_dropped);
// NOTE(review): the "submitted" counter is stored in a variable named
// metric_mjpeg_overrun_submitted, but it counts every submitted frame
// (see the enqueue path below), not only overrun-related ones — presumably
// a historical name; consider renaming to metric_mjpeg_frames_submitted.
+ global_metrics.add("mjpeg_frames", {{ "status", "submitted" }}, &metric_mjpeg_overrun_submitted);
+
running = true;
}
// Destructor: frees the custom AVIOContext buffer and deregisters every
// metric registered in the constructor. The remove() calls must use the
// exact same label sets as the matching add() calls above, or the metric
// will stay registered after this encoder is gone.
MJPEGEncoder::~MJPEGEncoder()
{
// avctx->pb->buffer was allocated by us (not by avformat), so it has to be
// freed explicitly before avctx itself goes away.
av_free(avctx->pb->buffer);
+
+ global_metrics.remove("mjpeg_frames", {{ "status", "dropped" }, { "reason", "zero_size" }});
+ global_metrics.remove("mjpeg_frames", {{ "status", "dropped" }, { "reason", "interlaced" }});
+ global_metrics.remove("mjpeg_frames", {{ "status", "dropped" }, { "reason", "unsupported_pixel_format" }});
+ global_metrics.remove("mjpeg_frames", {{ "status", "dropped" }, { "reason", "oversized" }});
+ global_metrics.remove("mjpeg_frames", {{ "status", "dropped" }, { "reason", "overrun" }});
+ global_metrics.remove("mjpeg_frames", {{ "status", "submitted" }});
}
// stop(): shuts down the worker threads. Fragment — the function's braces
// and any trailing lines are elided by the surrounding diff context.
void MJPEGEncoder::stop()
running = false;
should_quit = true;
// Wake both condition variables so neither worker stays blocked in wait();
// the added any_frames_encoding notification unblocks the VA receiver side.
any_frames_to_be_encoded.notify_all();
+ any_frames_encoding.notify_all();
encoder_thread.join();
// The receiver thread only exists when a VA-API display was opened.
if (va_dpy != nullptr) {
va_receiver_thread.join();
// Fragment of the frame-submission path (the enclosing signature is outside
// this view; presumably it takes frame, video_format, pts, card_index,
// y_offset, cbcr_offset — TODO confirm against the full file). Each early
// return now bumps a dedicated "dropped" counter so the metrics registered
// in the constructor account for every frame.
{
PBOFrameAllocator::Userdata *userdata = (PBOFrameAllocator::Userdata *)frame->userdata;
// Degenerate frame: nothing to encode.
if (video_format.width == 0 || video_format.height == 0) {
+ ++metric_mjpeg_frames_zero_size_dropped;
return;
}
// Interlaced input is not supported by this encoder.
if (video_format.interlaced) {
fprintf(stderr, "Card %u: Ignoring JPEG encoding for interlaced frame\n", card_index);
+ ++metric_mjpeg_frames_interlaced_dropped;
return;
}
// Only interleaved 8-bit Y'CbCr frames are accepted.
if (userdata->pixel_format != PixelFormat_8BitYCbCr ||
!frame->interleaved) {
fprintf(stderr, "Card %u: Ignoring JPEG encoding for unsupported pixel format\n", card_index);
+ ++metric_mjpeg_frames_unsupported_pixel_format_dropped;
return;
}
// 4096x4096 cap — presumably the encoder's maximum surface size; TODO
// confirm whether this matches the VA-API/JPEG limits used elsewhere.
if (video_format.width > 4096 || video_format.height > 4096) {
fprintf(stderr, "Card %u: Ignoring JPEG encoding for oversized frame\n", card_index);
+ ++metric_mjpeg_frames_oversized_dropped;
return;
}
// Queue management below touches shared state; guard with the mutex.
lock_guard<mutex> lock(mu);
// Backpressure: if more than 50 frames are queued or in flight, drop
// rather than let the backlog grow without bound.
if (frames_to_be_encoded.size() + frames_encoding.size() > 50) {
fprintf(stderr, "WARNING: MJPEG encoding doesn't keep up, discarding frame.\n");
+ ++metric_mjpeg_overrun_dropped;
return;
}
// Frame accepted: count it as submitted (counter name is historical —
// see the registration in the constructor) and hand it to the encoder.
+ ++metric_mjpeg_overrun_submitted;
frames_to_be_encoded.push(QueuedFrame{ pts, card_index, frame, video_format, y_offset, cbcr_offset });
any_frames_to_be_encoded.notify_all();
}
// Fragment of the muxing path (enclosing function outside this view).
// Fills in the AVPacket for one encoded JPEG and writes it to the output.
pkt.size = jpeg.size();
// One output stream per capture card; every JPEG is a keyframe.
pkt.stream_index = card_index;
pkt.flags = AV_PKT_FLAG_KEY;
// Fix: the incoming pts is in 1/TIMEBASE units, but av_write_frame()
// expects timestamps in the stream's own time base — rescale instead of
// passing the raw value through.
- pkt.pts = pkt.dts = pts;
+ AVRational time_base = avctx->streams[pkt.stream_index]->time_base;
+ pkt.pts = pkt.dts = av_rescale_q(pts, AVRational{ 1, TIMEBASE }, time_base);
if (av_write_frame(avctx.get(), &pkt) < 0) {
fprintf(stderr, "av_write_frame() failed\n");