X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;ds=sidebyside;f=jpeg_frame_view.cpp;h=ef1cded69d9dc98fcf4d26864c6ac3eef76bd667;hb=58b5adcca3af1abbf4c69b00853bee037bb7fec7;hp=95f7453b7c6512caa85969e37bf41841eb8aabd7;hpb=c401472016861f0af52050c31416c33a453ff499;p=nageru

diff --git a/jpeg_frame_view.cpp b/jpeg_frame_view.cpp
index 95f7453..ef1cded 100644
--- a/jpeg_frame_view.cpp
+++ b/jpeg_frame_view.cpp
@@ -2,6 +2,7 @@
 #include
 #include
+#include
 #include
 #include
@@ -10,25 +11,33 @@
 #include
 #include
+#include
+#include
+
 #include
 #include
 #include
 
 #include "defs.h"
 #include "post_to_main_thread.h"
+#include "vaapi_jpeg_decoder.h"
+#include "video_stream.h"
 
 using namespace movit;
 using namespace std;
 
-string filename_for_frame(unsigned stream_idx, int64_t pts);
-
-struct JPEGID {
-	unsigned stream_idx;
-	int64_t pts;
+// Just an arbitrary order for std::map.
+struct JPEGIDLexicalOrder
+{
+	bool operator() (const JPEGID &a, const JPEGID &b) const
+	{
+		if (a.stream_idx != b.stream_idx)
+			return a.stream_idx < b.stream_idx;
+		if (a.pts != b.pts)
+			return a.pts < b.pts;
+		return a.interpolated < b.interpolated;
+	}
 };
-bool operator< (const JPEGID &a, const JPEGID &b) {
-	return make_pair(a.stream_idx, a.pts) < make_pair(b.stream_idx, b.pts);
-}
 
 struct LRUFrame {
 	shared_ptr<Frame> frame;
@@ -36,16 +45,24 @@ struct LRUFrame {
 };
 
 mutex cache_mu;
-map<JPEGID, LRUFrame> cache;  // Under cache_mu.
-condition_variable any_pending_decodes;
+map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache;  // Under cache_mu.
+condition_variable any_pending_decodes, cache_updated;
 deque<pair<JPEGID, JPEGFrameView *>> pending_decodes;  // Under cache_mu.
 atomic<size_t> event_counter{0};
 extern QGLWidget *global_share_widget;
 
-// TODO: Decode using VA-API if available.
 shared_ptr<Frame> decode_jpeg(const string &filename)
 {
-	shared_ptr<Frame> frame(new Frame);
+	shared_ptr<Frame> frame;
+	if (vaapi_jpeg_decoding_usable) {
+		frame = decode_jpeg_vaapi(filename);
+		if (frame != nullptr) {
+			return frame;
+		}
+		fprintf(stderr, "VA-API hardware decoding failed; falling back to software.\n");
+	}
+
+	frame.reset(new Frame);
 
 	jpeg_decompress_struct dinfo;
 	jpeg_error_mgr jerr;
@@ -147,6 +164,35 @@ void prune_cache()
 	}
 }
 
+shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode)
+{
+	*did_decode = false;
+	{
+		unique_lock<mutex> lock(cache_mu);
+		auto it = cache.find(id);
+		if (it != cache.end()) {
+			it->second.last_used = event_counter++;
+			return it->second.frame;
+		}
+	}
+
+	if (cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE) {
+		return nullptr;
+	}
+
+	assert(!id.interpolated);
+	*did_decode = true;
+	shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(id.stream_idx, id.pts));
+
+	unique_lock<mutex> lock(cache_mu);
+	cache[id] = LRUFrame{ frame, event_counter++ };
+
+	if (cache.size() > CACHE_SIZE) {
+		prune_cache();
+	}
+	return frame;
+}
+
 void jpeg_decoder_thread()
 {
 	size_t num_decoded = 0, num_dropped = 0;
@@ -155,9 +201,9 @@ void jpeg_decoder_thread()
 	for ( ;; ) {
 		JPEGID id;
 		JPEGFrameView *dest;
-		shared_ptr<Frame> frame;
+		CacheMissBehavior cache_miss_behavior = DECODE_IF_NOT_IN_CACHE;
 		{
-			unique_lock<mutex> lock(cache_mu);
+			unique_lock<mutex> lock(cache_mu);  // TODO: Perhaps under another lock?
 			any_pending_decodes.wait(lock, [] {
 				return !pending_decodes.empty();
 			});
@@ -165,17 +211,6 @@
 			dest = pending_decodes.front().second;
 			pending_decodes.pop_front();
 
-			auto it = cache.find(id);
-			if (it != cache.end()) {
-				frame = it->second.frame;
-				it->second.last_used = event_counter++;
-			}
-		}
-
-		if (frame == nullptr) {
-			// Not found in the cache, so we need to do a decode or drop the request.
-			// Prune the queue if there are too many pending for this destination.
-			// TODO: Could we get starvation here?
 			size_t num_pending = 0;
 			for (const pair<JPEGID, JPEGFrameView *> &decode : pending_decodes) {
 				if (decode.second == dest) {
@@ -183,18 +218,43 @@
 				}
 			}
 			if (num_pending > 3) {
-				++num_dropped;
-				continue;
+				cache_miss_behavior = RETURN_NULLPTR_IF_NOT_IN_CACHE;
 			}
+		}
 
-			frame = decode_jpeg(filename_for_frame(id.stream_idx, id.pts));
-
+		bool found_in_cache;
+		shared_ptr<Frame> frame;
+		if (id.interpolated) {
+			// Interpolated frames are never decoded by us;
+			// they are put directly into the cache by VideoStream.
 			unique_lock<mutex> lock(cache_mu);
-			cache[id] = LRUFrame{ frame, event_counter++ };
+			cache_updated.wait(lock, [id] {
+				return cache.count(id) != 0;
+			});
+			found_in_cache = true;  // Don't count it as a decode.
 
-			if (cache.size() > CACHE_SIZE) {
-				prune_cache();
+			auto it = cache.find(id);
+			assert(it != cache.end());
+
+			it->second.last_used = event_counter++;
+			frame = it->second.frame;
+			if (frame == nullptr) {
+				// We inserted a nullptr as a signal that the frame was never
+				// interpolated and that we should stop waiting.
+				// But don't let it linger in the cache anymore.
+				cache.erase(it);
 			}
+		} else {
+			frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);
+		}
+
+		if (frame == nullptr) {
+			assert(id.interpolated || cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE);
+			++num_dropped;
+			continue;
+		}
+
+		if (!found_in_cache) {
 			++num_decoded;
 			if (num_decoded % 1000 == 0) {
 				fprintf(stderr, "Decoded %zu images, dropped %zu (%.2f%% dropped)\n",
@@ -202,6 +262,7 @@
 			}
 		}
 
+		// TODO: Could we get jitter between non-interpolated and interpolated frames here?
 		dest->setDecodedFrame(frame);
 	}
 }
@@ -210,34 +271,46 @@ JPEGFrameView::JPEGFrameView(QWidget *parent)
 	: QGLWidget(parent, global_share_widget) {
 }
 
-void JPEGFrameView::update_frame()
+void JPEGFrameView::setFrame(unsigned stream_idx, int64_t pts, bool interpolated)
 {
+	current_stream_idx = stream_idx;
+
 	unique_lock<mutex> lock(cache_mu);
-	pending_decodes.emplace_back(JPEGID{ stream_idx, pts }, this);
+	pending_decodes.emplace_back(JPEGID{ stream_idx, pts, interpolated }, this);
 	any_pending_decodes.notify_all();
 }
 
+void JPEGFrameView::insert_interpolated_frame(unsigned stream_idx, int64_t pts, shared_ptr<Frame> frame)
+{
+	JPEGID id{ stream_idx, pts, true };
+
+	// We rely on the frame not being evicted from the cache before
+	// jpeg_decoder_thread() sees it and can display it (otherwise,
+	// that thread would hang). With a default cache of 1000 elements,
+	// that sounds like a reasonable assumption.
+	unique_lock<mutex> lock(cache_mu);
+	cache[id] = LRUFrame{ std::move(frame), event_counter++ };
+	cache_updated.notify_all();
+}
+
 ResourcePool *resource_pool = nullptr;
 
 void JPEGFrameView::initializeGL()
 {
 	glDisable(GL_BLEND);
 	glDisable(GL_DEPTH_TEST);
-	glDepthMask(GL_FALSE);
 	check_error();
 
 	static once_flag once;
 	call_once(once, [] {
-		CHECK(init_movit(MOVIT_SHADER_DIR, MOVIT_DEBUG_OFF));
 		resource_pool = new ResourcePool;
-		std::thread(&jpeg_decoder_thread).detach();
 	});
 
-	chain.reset(new EffectChain(1280, 720, resource_pool));
-	ImageFormat image_format;
-	image_format.color_space = COLORSPACE_sRGB;
-	image_format.gamma_curve = GAMMA_sRGB;
+	ImageFormat inout_format;
+	inout_format.color_space = COLORSPACE_sRGB;
+	inout_format.gamma_curve = GAMMA_sRGB;
+
 	ycbcr_format.luma_coefficients = YCBCR_REC_709;
 	ycbcr_format.full_range = false;
 	ycbcr_format.num_levels = 256;
@@ -247,19 +320,26 @@ void JPEGFrameView::initializeGL()
 	ycbcr_format.cb_y_position = 0.5f;  // Irrelevant.
 	ycbcr_format.cr_x_position = 0.0f;
 	ycbcr_format.cr_y_position = 0.5f;
-	ycbcr_input = (movit::YCbCrInput *)chain->add_input(new YCbCrInput(image_format, ycbcr_format, 1280, 720));
-	ImageFormat inout_format;
-	inout_format.color_space = COLORSPACE_sRGB;
-	inout_format.gamma_curve = GAMMA_sRGB;
-
-	check_error();
-	chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
-	check_error();
-	chain->set_dither_bits(8);
-	check_error();
-	chain->finalize();
-	check_error();
+	// Planar Y'CbCr decoding chain.
+	planar_chain.reset(new EffectChain(1280, 720, resource_pool));
+	ycbcr_planar_input = (movit::YCbCrInput *)planar_chain->add_input(new YCbCrInput(inout_format, ycbcr_format, 1280, 720, YCBCR_INPUT_PLANAR));
+	planar_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
+	planar_chain->set_dither_bits(8);
+	planar_chain->finalize();
+
+	// Semiplanar Y'CbCr decoding chain (for images coming from VA-API).
+	semiplanar_chain.reset(new EffectChain(1280, 720, resource_pool));
+	ycbcr_semiplanar_input = (movit::YCbCrInput *)semiplanar_chain->add_input(new YCbCrInput(inout_format, ycbcr_format, 1280, 720, YCBCR_INPUT_SPLIT_Y_AND_CBCR));
+	semiplanar_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
+	semiplanar_chain->set_dither_bits(8);
+	semiplanar_chain->finalize();
+
+	overlay_chain.reset(new EffectChain(overlay_base_width, overlay_base_height, resource_pool));
+	overlay_input = (movit::FlatInput *)overlay_chain->add_input(new FlatInput(inout_format, FORMAT_GRAYSCALE, GL_UNSIGNED_BYTE, overlay_base_width, overlay_base_height));
+
+	overlay_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
+	overlay_chain->finalize();
 }
 
 void JPEGFrameView::resizeGL(int width, int height)
@@ -267,32 +347,99 @@
 	check_error();
 	glViewport(0, 0, width, height);
 	check_error();
+
+	// Save these, as width() and height() will lie with DPI scaling.
+	gl_width = width;
+	gl_height = height;
 }
 
 void JPEGFrameView::paintGL()
 {
-	//glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
-	//glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+	glViewport(0, 0, gl_width, gl_height);
+	if (current_frame == nullptr) {
+		glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+		glClear(GL_COLOR_BUFFER_BIT);
+		return;
+	}
 
 	check_error();
-	chain->render_to_screen();
+	if (current_frame->is_semiplanar) {
+		semiplanar_chain->render_to_screen();
+	} else {
+		planar_chain->render_to_screen();
+	}
+
+	if (overlay_image != nullptr) {
+		if (overlay_input_needs_refresh) {
+			overlay_input->set_width(overlay_width);
+			overlay_input->set_height(overlay_height);
+			overlay_input->set_pixel_data(overlay_image->bits());
+		}
+		glViewport(gl_width - overlay_width, 0, overlay_width, overlay_height);
+		overlay_chain->render_to_screen();
+	}
 }
 
 void JPEGFrameView::setDecodedFrame(std::shared_ptr<Frame> frame)
 {
 	post_to_main_thread([this, frame] {
 		current_frame = frame;
-		ycbcr_input->set_width(frame->width);
-		ycbcr_input->set_height(frame->height);
-		ycbcr_input->set_pixel_data(0, frame->y.get());
-		ycbcr_input->set_pixel_data(1, frame->cb.get());
-		ycbcr_input->set_pixel_data(2, frame->cr.get());
-		ycbcr_input->set_pitch(0, frame->pitch_y);
-		ycbcr_input->set_pitch(1, frame->pitch_chroma);
-		ycbcr_input->set_pitch(2, frame->pitch_chroma);
 		ycbcr_format.chroma_subsampling_x = frame->chroma_subsampling_x;
 		ycbcr_format.chroma_subsampling_y = frame->chroma_subsampling_y;
-		ycbcr_input->change_ycbcr_format(ycbcr_format);
+
+		if (frame->is_semiplanar) {
+			ycbcr_semiplanar_input->change_ycbcr_format(ycbcr_format);
+			ycbcr_semiplanar_input->set_width(frame->width);
+			ycbcr_semiplanar_input->set_height(frame->height);
+			ycbcr_semiplanar_input->set_pixel_data(0, frame->y.get());
+			ycbcr_semiplanar_input->set_pixel_data(1, frame->cbcr.get());
+			ycbcr_semiplanar_input->set_pitch(0, frame->pitch_y);
+			ycbcr_semiplanar_input->set_pitch(1, frame->pitch_chroma);
+		} else {
+			ycbcr_planar_input->change_ycbcr_format(ycbcr_format);
+			ycbcr_planar_input->set_width(frame->width);
+			ycbcr_planar_input->set_height(frame->height);
+			ycbcr_planar_input->set_pixel_data(0, frame->y.get());
+			ycbcr_planar_input->set_pixel_data(1, frame->cb.get());
+			ycbcr_planar_input->set_pixel_data(2, frame->cr.get());
+			ycbcr_planar_input->set_pitch(0, frame->pitch_y);
+			ycbcr_planar_input->set_pitch(1, frame->pitch_chroma);
+			ycbcr_planar_input->set_pitch(2, frame->pitch_chroma);
+		}
 		update();
 	});
 }
+
+void JPEGFrameView::mousePressEvent(QMouseEvent *event)
+{
+	if (event->type() == QEvent::MouseButtonPress && event->button() == Qt::LeftButton) {
+		emit clicked();
+	}
+}
+
+void JPEGFrameView::set_overlay(const string &text)
+{
+	if (text.empty()) {
+		overlay_image.reset();
+		return;
+	}
+
+	float dpr = QGuiApplication::primaryScreen()->devicePixelRatio();
+	overlay_width = lrint(overlay_base_width * dpr);
+	overlay_height = lrint(overlay_base_height * dpr);
+
+	overlay_image.reset(new QImage(overlay_width, overlay_height, QImage::Format_Grayscale8));
+	overlay_image->setDevicePixelRatio(dpr);
+	overlay_image->fill(0);
+	QPainter painter(overlay_image.get());
+
+	painter.setPen(Qt::white);
+	QFont font = painter.font();
+	font.setPointSize(12);
+	painter.setFont(font);
+
+	painter.drawText(QRectF(0, 0, overlay_base_width, overlay_base_height), Qt::AlignCenter, QString::fromStdString(text));
+
+	// Don't refresh immediately; we might not have an OpenGL context here.
+	overlay_input_needs_refresh = true;
+}
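
Editor's note: below is a small standalone sketch (not part of the patch) illustrating the ordering that the new JPEGIDLexicalOrder comparator imposes on the cache keys: entries group by stream, then sort by pts, with the interpolated flag breaking ties. The JPEGID definition here is an assumption reconstructed from the fields the comparator touches; in the tree it is declared in a header, not in this file.

// Illustrative only; JPEGID's layout is assumed from the fields used in the patch.
#include <cstdint>
#include <cstdio>
#include <map>

struct JPEGID {
	unsigned stream_idx;
	int64_t pts;
	bool interpolated;
};

// Comparator as introduced by the patch ("just an arbitrary order for std::map").
struct JPEGIDLexicalOrder {
	bool operator() (const JPEGID &a, const JPEGID &b) const
	{
		if (a.stream_idx != b.stream_idx)
			return a.stream_idx < b.stream_idx;
		if (a.pts != b.pts)
			return a.pts < b.pts;
		return a.interpolated < b.interpolated;
	}
};

int main()
{
	std::map<JPEGID, const char *, JPEGIDLexicalOrder> cache;
	cache[JPEGID{ 1, 40, false }] = "stream 1, pts 40, decoded";
	cache[JPEGID{ 0, 20, true }] = "stream 0, pts 20, interpolated";
	cache[JPEGID{ 0, 20, false }] = "stream 0, pts 20, decoded";

	// Iterates in key order: (0,20,false), (0,20,true), (1,40,false).
	for (const auto &entry : cache) {
		printf("stream %u pts %ld interpolated=%d: %s\n",
		       entry.first.stream_idx, (long)entry.first.pts,
		       (int)entry.first.interpolated, entry.second);
	}
	return 0;
}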