#include "flags.h"
#include "jpeg_destroyer.h"
#include "jpeglib_error_wrapper.h"
+#include "pbo_pool.h"
+#include "shared/context.h"
#include "shared/metrics.h"
#include "shared/post_to_main_thread.h"
#include "video_stream.h"
atomic<int64_t> metric_jpeg_software_fail_frames{ 0 };
atomic<int64_t> metric_jpeg_vaapi_decode_frames{ 0 };
atomic<int64_t> metric_jpeg_vaapi_fail_frames{ 0 };
+atomic<int64_t> metric_jpeg_prepared_frames{ 0 };
+atomic<int64_t> metric_jpeg_displayed_frames{ 0 };
Summary metric_jpeg_decode_time_seconds;
} // namespace
}
JPEGDestroyer destroy_dinfo(&dinfo);
+ jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
+
if (!error_mgr.run([&dinfo, &jpeg] {
jpeg_mem_src(&dinfo, reinterpret_cast<const unsigned char *>(jpeg.data()), jpeg.size());
jpeg_read_header(&dinfo, true);
return get_black_frame();
}
- jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
-
if (dinfo.num_components != 3) {
fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
dinfo.num_components,
unsigned luma_width_blocks = mcu_width_blocks * dinfo.comp_info[0].h_samp_factor;
unsigned chroma_width_blocks = mcu_width_blocks * dinfo.comp_info[1].h_samp_factor;
- unsigned luma_height_blocks = mcu_height_blocks * dinfo.comp_info[0].v_samp_factor;
- unsigned chroma_height_blocks = mcu_height_blocks * dinfo.comp_info[1].v_samp_factor;
- // TODO: Decode into a PBO.
- frame->y.reset(new uint8_t[luma_width_blocks * luma_height_blocks * DCTSIZE2]);
- frame->cb.reset(new uint8_t[chroma_width_blocks * chroma_height_blocks * DCTSIZE2]);
- frame->cr.reset(new uint8_t[chroma_width_blocks * chroma_height_blocks * DCTSIZE2]);
- frame->pitch_y = luma_width_blocks * DCTSIZE;
- frame->pitch_chroma = chroma_width_blocks * DCTSIZE;
+ // Decode straight into a PBO from the shared pool instead of malloc'ed planes.
+ PBO pbo = global_pbo_pool->alloc_pbo();
+ // Planes are laid out back-to-back in the PBO: Y, then Cb, then Cr.
+ // NOTE(review): these offsets assume tightly packed planes, but the decode
+ // below writes rows using MCU-padded pitches (pitch_y/pitch_chroma), so for
+ // widths not divisible by the MCU size the luma rows could spill past
+ // cb_offset — TODO confirm (see the FIXME about non-divisible resolutions).
+ const size_t chroma_width = dinfo.image_width / frame->chroma_subsampling_x;
+ const size_t chroma_height = dinfo.image_height / frame->chroma_subsampling_y;
+ size_t cb_offset = dinfo.image_width * dinfo.image_height;
+ size_t cr_offset = cb_offset + chroma_width * chroma_height;
+ uint8_t *y_pix = pbo.ptr;
+ uint8_t *cb_pix = pbo.ptr + cb_offset;
+ uint8_t *cr_pix = pbo.ptr + cr_offset;
+ // Pitches are rounded up to whole DCT blocks (DCTSIZE pixels each).
+ unsigned pitch_y = luma_width_blocks * DCTSIZE;
+ unsigned pitch_chroma = chroma_width_blocks * DCTSIZE;
if (dinfo.marker_list != nullptr &&
dinfo.marker_list->marker == JPEG_APP0 + 1 &&
dinfo.marker_list->data_length);
}
- if (!error_mgr.run([&dinfo, &frame, v_mcu_size, mcu_height_blocks] {
+ if (!error_mgr.run([&dinfo, &y_pix, &cb_pix, &cr_pix, pitch_y, pitch_chroma, v_mcu_size, mcu_height_blocks] {
JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
JSAMPARRAY data[3] = { yptr, cbptr, crptr };
for (unsigned y = 0; y < mcu_height_blocks; ++y) {
// NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
- yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
- cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
- crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
+ yptr[yy] = y_pix + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * pitch_y;
+ cbptr[yy] = cb_pix + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * pitch_chroma;
+ crptr[yy] = cr_pix + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * pitch_chroma;
}
jpeg_read_raw_data(&dinfo, data, v_mcu_size);
return get_black_frame();
}
+ // FIXME: what about resolutions that are not divisible by the block factor?
+ // Create the per-plane textures, sourcing their pixels from the PBO we just
+ // decoded into (it stays bound as GL_PIXEL_UNPACK_BUFFER during the calls).
+ glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo.pbo);
+ frame->y = create_texture_2d(frame->width, frame->height, GL_R8, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(0));
+ frame->cb = create_texture_2d(chroma_width, chroma_height, GL_R8, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(cb_offset));
+ frame->cr = create_texture_2d(chroma_width, chroma_height, GL_R8, GL_RED, GL_UNSIGNED_BYTE, BUFFER_OFFSET(cr_offset));
+ glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
+
+ // Flush our CPU writes to the mapped buffer, then drop a fence so that the
+ // consumers (UI thread, interpolation) can wait for the upload on the GPU
+ // instead of us blocking here.
+ glFlushMappedNamedBufferRange(pbo.pbo, 0, dinfo.image_width * dinfo.image_height + chroma_width * chroma_height * 2);
+ glMemoryBarrier(GL_PIXEL_BUFFER_BARRIER_BIT);
+ pbo.upload_done = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
+ // Make sure the fence is actually submitted before anyone tries to wait on it.
+ glFlush();
+ frame->uploaded_ui_thread = pbo.upload_done;
+ frame->uploaded_interpolation = pbo.upload_done;
+ global_pbo_pool->release_pbo(move(pbo));
+
++metric_jpeg_software_decode_frames;
steady_clock::time_point stop = steady_clock::now();
metric_jpeg_decode_time_seconds.count_event(duration<double>(stop - start).count());
size_t num_decoded = 0, num_dropped = 0;
pthread_setname_np(pthread_self(), "JPEGDecoder");
+ // The decoder thread now uploads frames through OpenGL (PBOs/textures),
+ // so it needs its own GL context made current on this thread.
+ QSurface *surface = create_surface();
+ QOpenGLContext *context = create_context(surface);
+ bool ok = make_current(context, surface);
+ if (!ok) {
+ fprintf(stderr, "Video stream couldn't get an OpenGL context\n");
+ abort();
+ }
while (!should_quit.load()) {
PendingDecode decode;
CacheMissBehavior cache_miss_behavior = DECODE_IF_NOT_IN_CACHE;
global_metrics.add("jpeg_decode_frames", { { "decoder", "software" }, { "result", "fail" } }, &metric_jpeg_software_fail_frames);
global_metrics.add("jpeg_decode_frames", { { "decoder", "vaapi" }, { "result", "decode" } }, &metric_jpeg_vaapi_decode_frames);
global_metrics.add("jpeg_decode_frames", { { "decoder", "vaapi" }, { "result", "fail" } }, &metric_jpeg_vaapi_fail_frames);
+ global_metrics.add("jpeg_frames", { { "action", "prepared" } }, &metric_jpeg_prepared_frames);
+ global_metrics.add("jpeg_frames", { { "action", "displayed" } }, &metric_jpeg_displayed_frames);
vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
metric_jpeg_decode_time_seconds.init(quantiles, 60.0);
global_metrics.add("jpeg_decode_time_seconds", &metric_jpeg_decode_time_seconds);
void JPEGFrameView::initializeGL()
{
+ // Set up the global PBO pool used by the JPEG decoders; initializeGL()
+ // runs with the widget's GL context current, which the pool setup needs.
+ init_pbo_pool();
+
glDisable(GL_BLEND);
glDisable(GL_DEPTH_TEST);
check_error();
return;
}
+ // Count each prepared frame as displayed at most once (the first paint).
+ if (!displayed_this_frame) {
+ ++metric_jpeg_displayed_frames;
+ displayed_this_frame = true;
+ }
+ // Have the GPU wait for the decoder thread's upload fences before sampling
+ // the textures. Only needed once per frame, so drop the sync afterwards.
+ if (current_frame->uploaded_ui_thread != nullptr) {
+ glWaitSync(current_frame->uploaded_ui_thread.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+ current_frame->uploaded_ui_thread.reset();
+ }
+ if (current_secondary_frame != nullptr && current_secondary_frame->uploaded_ui_thread != nullptr) {
+ glWaitSync(current_secondary_frame->uploaded_ui_thread.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+ current_secondary_frame->uploaded_ui_thread.reset();
+ }
+
check_error();
current_chain->render_to_screen();
} else {
current_chain = ycbcr_converter->prepare_chain_for_conversion(frame);
}
+ // A new frame is ready for display; the paint path sets displayed_this_frame
+ // (and bumps the displayed counter) the first time it actually gets drawn.
+ ++metric_jpeg_prepared_frames;
+ displayed_this_frame = false;
update();
});
}
static shared_ptr<Frame> black_frame;
static once_flag flag;
call_once(flag, [] {
- black_frame.reset(new Frame);
- black_frame->y.reset(new uint8_t[global_flags.width * global_flags.height]);
- black_frame->cb.reset(new uint8_t[(global_flags.width / 2) * (global_flags.height / 2)]);
- black_frame->cr.reset(new uint8_t[(global_flags.width / 2) * (global_flags.height / 2)]);
- black_frame->width = global_flags.width;
- black_frame->height = global_flags.height;
- black_frame->chroma_subsampling_x = 2;
- black_frame->chroma_subsampling_y = 2;
- black_frame->pitch_y = global_flags.width;
- black_frame->pitch_chroma = global_flags.width / 2;
+ // Not really black, but whatever. :-)
+ uint8_t black[] = { 0, 0, 0, 255 };
+ RefCountedTexture black_tex = create_texture_2d(1, 1, GL_RGBA8, GL_RGBA, GL_UNSIGNED_BYTE, black);
+
+ // Allocate the Frame before filling it in: the static shared_ptr starts out
+ // empty, so the assignments below would otherwise dereference a null pointer.
+ black_frame.reset(new Frame);
+ // All three planes alias the same 1x1 texture (copy twice, move the last).
+ black_frame->y = black_tex;
+ black_frame->cb = black_tex;
+ black_frame->cr = move(black_tex);
+ black_frame->width = 1;
+ black_frame->height = 1;
+ black_frame->chroma_subsampling_x = 1;
+ black_frame->chroma_subsampling_y = 1;
});
++metric_jpeg_software_fail_frames;
return black_frame;