#include "jpeg_frame_view.h"

#include <cstring>

#include "defs.h"
#include "flags.h"
#include "jpeg_destroyer.h"
#include "jpeglib_error_wrapper.h"
#include "shared/metrics.h"
#include "shared/post_to_main_thread.h"
#include "video_stream.h"
#include "ycbcr_converter.h"
shared_ptr<Frame> frame;
};
+// There can be multiple JPEGFrameView instances, so make all the metrics static.
// File-scope metric mirrors, registered with global_metrics on first
// JPEGFrameView construction (guarded by jpeg_metrics_inited). They are
// atomic<int64_t> so concurrent increments from the decode path are safe
// without additional locking.
+once_flag jpeg_metrics_inited;
+atomic<int64_t> metric_jpeg_cache_used_bytes{0}; // Same value as cache_bytes_used.
// Constant after initialization; exported as a gauge so dashboards can show
// used vs. limit.
+atomic<int64_t> metric_jpeg_cache_limit_bytes{size_t(CACHE_SIZE_MB) * 1024 * 1024};
// Cache outcomes: "given up" = miss where the caller asked not to decode.
+atomic<int64_t> metric_jpeg_cache_given_up_frames{0};
+atomic<int64_t> metric_jpeg_cache_hit_frames{0};
+atomic<int64_t> metric_jpeg_cache_miss_frames{0};
// Per-decoder success/failure counters (software = libjpeg, vaapi = hardware).
+atomic<int64_t> metric_jpeg_software_decode_frames{0};
+atomic<int64_t> metric_jpeg_software_fail_frames{0};
+atomic<int64_t> metric_jpeg_vaapi_decode_frames{0};
+atomic<int64_t> metric_jpeg_vaapi_fail_frames{0};
+
} // namespace
// Out-of-line definition of the static decoder-thread handle declared in the
// class (presumably in jpeg_frame_view.h — confirm).
thread JPEGFrameView::jpeg_decoder_thread;
// Defined in another translation unit.
extern QGLWidget *global_share_widget;
extern atomic<bool> should_quit;
// Decode one JPEG image into a planar Y'CbCr Frame. The parameter now holds
// the raw JPEG bytes themselves (the removed code took a filename and read it
// via stdio; the new code feeds the buffer through jpeg_mem_src). VA-API
// hardware decoding is tried first when usable; on failure it falls back to
// libjpeg software decoding, and any software failure returns the shared
// black frame instead of exit(1)-ing. Success/failure metrics are updated on
// every path.
// NOTE(review): this excerpt is a diff fragment with elided context — some
// declarations used below (luma_width_blocks, chroma_width_blocks, v_mcu_size,
// mcu_height_blocks, and the plane allocations for frame->y/cb/cr) are not
// visible here.
-shared_ptr<Frame> decode_jpeg(const string &filename)
+shared_ptr<Frame> decode_jpeg(const string &jpeg)
{
shared_ptr<Frame> frame;
// Fast path: hardware (VA-API) decode.
if (vaapi_jpeg_decoding_usable) {
- frame = decode_jpeg_vaapi(filename);
+ frame = decode_jpeg_vaapi(jpeg);
if (frame != nullptr) {
+ ++metric_jpeg_vaapi_decode_frames;
return frame;
}
fprintf(stderr, "VA-API hardware decoding failed; falling back to software.\n");
+ ++metric_jpeg_vaapi_fail_frames;
}
frame.reset(new Frame);
jpeg_decompress_struct dinfo;
// Replaces the plain jpeg_std_error setup: libjpeg calls are wrapped in
// error_mgr.run(), which presumably turns libjpeg's error_exit longjmp into
// a false return — confirm against jpeglib_error_wrapper.h.
- jpeg_error_mgr jerr;
- dinfo.err = jpeg_std_error(&jerr);
- jpeg_create_decompress(&dinfo);
+ JPEGWrapErrorManager error_mgr(&dinfo);
+ if (!error_mgr.run([&dinfo]{ jpeg_create_decompress(&dinfo); })) {
+ return get_black_frame();
+ }
// Presumably destroys dinfo on scope exit (see jpeg_destroyer.h), so every
// early return below still cleans up.
JPEGDestroyer destroy_dinfo(&dinfo);
// Parse the header straight from the in-memory buffer (no more fopen/stdio).
- FILE *fp = fopen(filename.c_str(), "rb");
- if (fp == nullptr) {
- perror(filename.c_str());
- exit(1);
+ if (!error_mgr.run([&dinfo, &jpeg]{
+ jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
+ jpeg_read_header(&dinfo, true);
+ })) {
+ return get_black_frame();
}
- jpeg_stdio_src(&dinfo, fp);
-
- jpeg_read_header(&dinfo, true);
// Only 3-component (color) JPEGs are supported.
// NOTE(review): the format string has seven %d specifiers but only six
// arguments are visible below — a dinfo.num_components argument appears to
// have been dropped from this excerpt; confirm against the full file.
if (dinfo.num_components != 3) {
fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
- exit(1);
+ return get_black_frame();
}
// Sampling-factor validation (condition and body partially elided in this
// excerpt).
if (dinfo.comp_info[0].h_samp_factor != dinfo.max_h_samp_factor ||
dinfo.comp_info[0].v_samp_factor != dinfo.max_v_samp_factor || // Y' must not be subsampled.
}
// Ask libjpeg for raw, non-color-converted planar output.
dinfo.raw_data_out = true;
- jpeg_start_decompress(&dinfo);
+ if (!error_mgr.run([&dinfo]{
+ jpeg_start_decompress(&dinfo);
+ })) {
+ return get_black_frame();
+ }
frame->width = dinfo.output_width;
frame->height = dinfo.output_height;
// Pitches in whole DCT blocks (block-count declarations elided here).
frame->pitch_y = luma_width_blocks * DCTSIZE;
frame->pitch_chroma = chroma_width_blocks * DCTSIZE;
// Read the image one MCU row at a time directly into the Frame's planes.
// The whole loop plus jpeg_finish_decompress now runs under error_mgr.run(),
// so a decode error mid-stream also falls back to the black frame instead of
// aborting.
- JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
- JSAMPARRAY data[3] = { yptr, cbptr, crptr };
- for (unsigned y = 0; y < mcu_height_blocks; ++y) {
- // NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
- for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
- yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
- cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
- crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+ if (!error_mgr.run([&dinfo, &frame, v_mcu_size, mcu_height_blocks] {
+ JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
+ JSAMPARRAY data[3] = { yptr, cbptr, crptr };
+ for (unsigned y = 0; y < mcu_height_blocks; ++y) {
+ // NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
+ for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
+ yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
+ cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+ crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+ }
+
+ jpeg_read_raw_data(&dinfo, data, v_mcu_size);
}
- jpeg_read_raw_data(&dinfo, data, v_mcu_size);
+ (void)jpeg_finish_decompress(&dinfo);
+ })) {
+ return get_black_frame();
}
- (void)jpeg_finish_decompress(&dinfo);
- fclose(fp);
-
+ ++metric_jpeg_software_decode_frames;
return frame;
}
// --- Fragment: cache eviction and cache lookup. The enclosing function
// --- headers are elided from this excerpt, so the comments below are
// --- best-effort readings of the visible code; confirm against the full file.
// Evict entries not used since the LRU cutoff, keeping the byte count (and
// its exported metric mirror) in sync.
for (auto it = cache.begin(); it != cache.end(); ) {
if (it->second.last_used <= lru_cutoff_point) {
cache_bytes_used -= frame_size(*it->second.frame);
+ metric_jpeg_cache_used_bytes = cache_bytes_used;
it = cache.erase(it);
} else {
++it;
// Lookup path: a hit refreshes the entry's LRU stamp and returns the
// cached frame.
unique_lock<mutex> lock(cache_mu);
auto it = cache.find(frame_spec);
if (it != cache.end()) {
+ ++metric_jpeg_cache_hit_frames;
it->second.last_used = event_counter++;
return it->second.frame;
}
}
// Miss: either give up (caller asked for cache-only behavior)...
if (cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE) {
+ ++metric_jpeg_cache_given_up_frames;
return nullptr;
}
+ ++metric_jpeg_cache_miss_frames;
+
// ...or decode the frame (outside the lock, note the re-lock below) and
// insert it, updating the byte accounting and its metric mirror.
*did_decode = true;
shared_ptr<Frame> frame = decode_jpeg(frame_reader->read_frame(frame_spec));
unique_lock<mutex> lock(cache_mu);
cache_bytes_used += frame_size(*frame);
+ metric_jpeg_cache_used_bytes = cache_bytes_used;
cache[frame_spec] = LRUFrame{ frame, event_counter++ };
// Over-limit handling follows (body elided in this excerpt).
if (cache_bytes_used > size_t(CACHE_SIZE_MB) * 1024 * 1024) {
JPEGFrameView::JPEGFrameView(QWidget *parent)
	: QGLWidget(parent, global_share_widget)
{
	// Register the JPEG cache/decode metrics exactly once, no matter how many
	// JPEGFrameView widgets get constructed.
	call_once(jpeg_metrics_inited, [] {
		auto &m = global_metrics;
		// Cache occupancy vs. configured limit, exported as gauges.
		m.add("jpeg_cache_used_bytes", &metric_jpeg_cache_used_bytes, Metrics::TYPE_GAUGE);
		m.add("jpeg_cache_limit_bytes", &metric_jpeg_cache_limit_bytes, Metrics::TYPE_GAUGE);
		// Cache outcomes, labeled by action.
		m.add("jpeg_cache_frames", {{ "action", "given_up" }}, &metric_jpeg_cache_given_up_frames);
		m.add("jpeg_cache_frames", {{ "action", "hit" }}, &metric_jpeg_cache_hit_frames);
		m.add("jpeg_cache_frames", {{ "action", "miss" }}, &metric_jpeg_cache_miss_frames);
		// Decode outcomes, labeled by decoder and result.
		m.add("jpeg_decode_frames", {{ "decoder", "software" }, { "result", "decode" }}, &metric_jpeg_software_decode_frames);
		m.add("jpeg_decode_frames", {{ "decoder", "software" }, { "result", "fail" }}, &metric_jpeg_software_fail_frames);
		m.add("jpeg_decode_frames", {{ "decoder", "vaapi" }, { "result", "decode" }}, &metric_jpeg_vaapi_decode_frames);
		m.add("jpeg_decode_frames", {{ "decoder", "vaapi" }, { "result", "fail" }}, &metric_jpeg_vaapi_fail_frames);
	});
}
void JPEGFrameView::setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk secondary_frame, float fade_alpha)
// Don't refresh immediately; we might not have an OpenGL context here.
overlay_input_needs_refresh = true;
}
+
+shared_ptr<Frame> get_black_frame()
+{
+ static shared_ptr<Frame> black_frame;
+ static once_flag flag;
+ call_once(flag, [] {
+ black_frame.reset(new Frame);
+ black_frame->y.reset(new uint8_t[global_flags.width * global_flags.height]);
+ black_frame->cb.reset(new uint8_t[(global_flags.width / 2) * (global_flags.height / 2)]);
+ black_frame->cr.reset(new uint8_t[(global_flags.width / 2) * (global_flags.height / 2)]);
+ black_frame->width = global_flags.width;
+ black_frame->height = global_flags.height;
+ black_frame->chroma_subsampling_x = 2;
+ black_frame->chroma_subsampling_y = 2;
+ black_frame->pitch_y = global_flags.width;
+ black_frame->pitch_chroma = global_flags.width / 2;
+ });
+ ++metric_jpeg_software_fail_frames;
+ return black_frame;
+}