#include <utility>
#include <QMouseEvent>
+#include <QScreen>
#include <movit/resource_pool.h>
#include <movit/init.h>
#include "defs.h"
#include "post_to_main_thread.h"
+#include "vaapi_jpeg_decoder.h"
#include "video_stream.h"
using namespace movit;
size_t last_used;
};
// NOTE(review): this chunk is a unified-diff fragment; leading '+'/'-' mark
// patch additions/removals and are not C++ tokens.
// Thread object for the decode worker, so it can be joined at shutdown
// (see JPEGFrameView::shutdown()) instead of being detached.
+thread JPEGFrameView::jpeg_decoder_thread;
// Protects 'cache' and 'pending_decodes'; also the mutex both condition
// variables below wait on.
mutex cache_mu;
map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache; // Under cache_mu.
// any_pending_decodes: signaled when a decode request is queued (and by
// shutdown()).  cache_updated: signaled when a frame is published into
// 'cache'; its wait predicate also checks should_quit.
condition_variable any_pending_decodes, cache_updated;
deque<pair<JPEGID, JPEGFrameView *>> pending_decodes; // Under cache_mu.
// Monotonic counter; presumably tags decode/display events — TODO confirm
// against the elided parts of this file.
atomic<size_t> event_counter{0};
extern QGLWidget *global_share_widget;
// Set elsewhere (outside this fragment); checked by the worker's loop
// condition and wait predicates so it can exit cleanly.
+extern atomic<bool> should_quit;
-// TODO: Decode using VA-API if available.
// Decodes the JPEG at 'filename' into a shared Frame.  Tries VA-API
// hardware decoding first when usable; on failure, logs to stderr and falls
// back to the software (libjpeg) path.
// NOTE(review): diff fragment — the libjpeg decompression body between the
// 'jerr' declaration and the final return is elided here.
shared_ptr<Frame> decode_jpeg(const string &filename)
{
- shared_ptr<Frame> frame(new Frame);
+ shared_ptr<Frame> frame;
+ if (vaapi_jpeg_decoding_usable) {
+ frame = decode_jpeg_vaapi(filename);
+ if (frame != nullptr) {
+ return frame;
+ }
+ fprintf(stderr, "VA-API hardware decoding failed; falling back to software.\n");
+ }
+
+ frame.reset(new Frame);
// Software path: standard libjpeg decompression state.
jpeg_decompress_struct dinfo;
jpeg_error_mgr jerr;
return frame;
}
// Decode worker loop: pulls (JPEGID, JPEGFrameView*) requests off
// 'pending_decodes', decodes them (or waits for VideoStream to publish the
// frame into 'cache'), and exits once should_quit is set.
// Renamed from jpeg_decoder_thread() so that name can now denote the
// std::thread object (JPEGFrameView::jpeg_decoder_thread) running it.
// NOTE(review): diff fragment — the cache-lookup/decode middle of the loop
// and some scope closings are elided.
-void jpeg_decoder_thread()
+void jpeg_decoder_thread_func()
{
size_t num_decoded = 0, num_dropped = 0;
pthread_setname_np(pthread_self(), "JPEGDecoder");
// Loop until shutdown is requested instead of forever.
- for ( ;; ) {
+ while (!should_quit.load()) {
JPEGID id;
JPEGFrameView *dest;
CacheMissBehavior cache_miss_behavior = DECODE_IF_NOT_IN_CACHE;
{
unique_lock<mutex> lock(cache_mu); // TODO: Perhaps under another lock?
// Sleep until work is queued; the should_quit term lets shutdown()'s
// notify_all() wake us out of an empty queue.
any_pending_decodes.wait(lock, [] {
- return !pending_decodes.empty();
+ return !pending_decodes.empty() || should_quit.load();
});
+ if (should_quit.load()) break;
id = pending_decodes.front().first;
dest = pending_decodes.front().second;
pending_decodes.pop_front();
// put directly into the cache from VideoStream.
unique_lock<mutex> lock(cache_mu);
// Wait for the frame to appear in the cache, again bailing out on
// shutdown so join() cannot hang here.
cache_updated.wait(lock, [id] {
- return cache.count(id) != 0;
+ return cache.count(id) != 0 || should_quit.load();
});
+ if (should_quit.load()) break;
found_in_cache = true; // Don't count it as a decode.
auto it = cache.find(id);
}
}
+void JPEGFrameView::shutdown()
+{
+ any_pending_decodes.notify_all();
+ jpeg_decoder_thread.join();
+}
+
// Constructor: passes global_share_widget as the share widget so every
// JPEGFrameView shares one GL context (QGLWidget sharing mechanism), letting
// them share the Movit resource pool and textures.  No other setup here;
// GL state is created lazily in initializeGL().
JPEGFrameView::JPEGFrameView(QWidget *parent)
: QGLWidget(parent, global_share_widget) {
}
// NOTE(review): diff fragment — this is the interior of
// JPEGFrameView::initializeGL(); the signature and some GL setup lines are
// elided.  The tail (from the bare glViewport on) appears to belong to
// resizeGL(int width, int height) — the function boundary is elided too.
static once_flag once;
call_once(once, [] {
resource_pool = new ResourcePool;
// Keep the thread object so shutdown() can join it (it used to be detached).
- std::thread(&jpeg_decoder_thread).detach();
+ jpeg_decoder_thread = std::thread(jpeg_decoder_thread_func);
});
- chain.reset(new EffectChain(1280, 720, resource_pool));
- ImageFormat image_format;
- image_format.color_space = COLORSPACE_sRGB;
- image_format.gamma_curve = GAMMA_sRGB;
// One sRGB image format shared by all chain inputs/outputs below.
+ ImageFormat inout_format;
+ inout_format.color_space = COLORSPACE_sRGB;
+ inout_format.gamma_curve = GAMMA_sRGB;
+
// Rec. 709, limited-range Y'CbCr input parameters (256 quantization levels).
ycbcr_format.luma_coefficients = YCBCR_REC_709;
ycbcr_format.full_range = false;
ycbcr_format.num_levels = 256;
ycbcr_format.cb_y_position = 0.5f; // Irrelevant.
ycbcr_format.cr_x_position = 0.0f;
ycbcr_format.cr_y_position = 0.5f;
- ycbcr_input = (movit::YCbCrInput *)chain->add_input(new YCbCrInput(image_format, ycbcr_format, 1280, 720));
- ImageFormat inout_format;
- inout_format.color_space = COLORSPACE_sRGB;
- inout_format.gamma_curve = GAMMA_sRGB;
// The single 'chain' is replaced by two chains, one per pixel layout; which
// one renders is chosen per-frame in paintGL().
+ // Planar Y'CbCr decoding chain.
+ planar_chain.reset(new EffectChain(1280, 720, resource_pool));
+ ycbcr_planar_input = (movit::YCbCrInput *)planar_chain->add_input(new YCbCrInput(inout_format, ycbcr_format, 1280, 720, YCBCR_INPUT_PLANAR));
+ planar_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
+ planar_chain->set_dither_bits(8);
+ planar_chain->finalize();
- check_error();
- chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
- check_error();
- chain->set_dither_bits(8);
- check_error();
- chain->finalize();
- check_error();
+ // Semiplanar Y'CbCr decoding chain (for images coming from VA-API).
+ semiplanar_chain.reset(new EffectChain(1280, 720, resource_pool));
+ ycbcr_semiplanar_input = (movit::YCbCrInput *)semiplanar_chain->add_input(new YCbCrInput(inout_format, ycbcr_format, 1280, 720, YCBCR_INPUT_SPLIT_Y_AND_CBCR));
+ semiplanar_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
+ semiplanar_chain->set_dither_bits(8);
+ semiplanar_chain->finalize();
// Overlay (grayscale text) chain, built at base (logical-pixel) size; the
// DPI-scaled size is applied when the bitmap is uploaded.
- overlay_chain.reset(new EffectChain(overlay_width, overlay_height, resource_pool));
- overlay_input = (movit::FlatInput *)overlay_chain->add_input(new FlatInput(image_format, FORMAT_GRAYSCALE, GL_UNSIGNED_BYTE, overlay_width, overlay_height));
+ overlay_chain.reset(new EffectChain(overlay_base_width, overlay_base_height, resource_pool));
+ overlay_input = (movit::FlatInput *)overlay_chain->add_input(new FlatInput(inout_format, FORMAT_GRAYSCALE, GL_UNSIGNED_BYTE, overlay_base_width, overlay_base_height));
overlay_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
overlay_chain->finalize();
check_error();
glViewport(0, 0, width, height);
check_error();
+
+ // Save these, as width() and height() will lie with DPI scaling.
+ gl_width = width;
+ gl_height = height;
}
void JPEGFrameView::paintGL()
{
- glViewport(0, 0, width(), height());
+ glViewport(0, 0, gl_width, gl_height);
if (current_frame == nullptr) {
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
}
check_error();
- chain->render_to_screen();
+ if (current_frame->is_semiplanar) {
+ semiplanar_chain->render_to_screen();
+ } else {
+ planar_chain->render_to_screen();
+ }
if (overlay_image != nullptr) {
if (overlay_input_needs_refresh) {
+ overlay_input->set_width(overlay_width);
+ overlay_input->set_height(overlay_height);
overlay_input->set_pixel_data(overlay_image->bits());
}
- glViewport(width() - overlay_width, 0, overlay_width, overlay_height);
+ glViewport(gl_width - overlay_width, 0, overlay_width, overlay_height);
overlay_chain->render_to_screen();
}
}
// NOTE(review): diff fragment — interior of the member function that installs
// a freshly decoded frame on this view; the signature and the head of what is
// presumably a post_to_main_thread() lambda (closed by the '});' below) are
// elided.  Runs where GL/chain state may be touched safely.
current_frame = frame;
// Propagate the frame's actual chroma subsampling into the shared input format.
ycbcr_format.chroma_subsampling_x = frame->chroma_subsampling_x;
ycbcr_format.chroma_subsampling_y = frame->chroma_subsampling_y;
- ycbcr_input->change_ycbcr_format(ycbcr_format);
- ycbcr_input->set_width(frame->width);
- ycbcr_input->set_height(frame->height);
- ycbcr_input->set_pixel_data(0, frame->y.get());
- ycbcr_input->set_pixel_data(1, frame->cb.get());
- ycbcr_input->set_pixel_data(2, frame->cr.get());
- ycbcr_input->set_pitch(0, frame->pitch_y);
- ycbcr_input->set_pitch(1, frame->pitch_chroma);
- ycbcr_input->set_pitch(2, frame->pitch_chroma);
+
// Feed the input matching the frame's layout: semiplanar frames (VA-API)
// carry Y plus one interleaved CbCr plane; planar frames carry Y, Cb, Cr.
+ if (frame->is_semiplanar) {
+ ycbcr_semiplanar_input->change_ycbcr_format(ycbcr_format);
+ ycbcr_semiplanar_input->set_width(frame->width);
+ ycbcr_semiplanar_input->set_height(frame->height);
+ ycbcr_semiplanar_input->set_pixel_data(0, frame->y.get());
+ ycbcr_semiplanar_input->set_pixel_data(1, frame->cbcr.get());
+ ycbcr_semiplanar_input->set_pitch(0, frame->pitch_y);
+ ycbcr_semiplanar_input->set_pitch(1, frame->pitch_chroma);
+ } else {
+ ycbcr_planar_input->change_ycbcr_format(ycbcr_format);
+ ycbcr_planar_input->set_width(frame->width);
+ ycbcr_planar_input->set_height(frame->height);
+ ycbcr_planar_input->set_pixel_data(0, frame->y.get());
+ ycbcr_planar_input->set_pixel_data(1, frame->cb.get());
+ ycbcr_planar_input->set_pixel_data(2, frame->cr.get());
+ ycbcr_planar_input->set_pitch(0, frame->pitch_y);
+ ycbcr_planar_input->set_pitch(1, frame->pitch_chroma);
+ ycbcr_planar_input->set_pitch(2, frame->pitch_chroma);
+ }
// Schedule a Qt repaint (paintGL) with the newly installed frame.
update();
});
}
// NOTE(review): diff fragment — interior of the overlay-text setter; the
// enclosing signature and preceding lines are elided, and the function
// continues past the end of this chunk.
return;
}
// Render the text at device resolution: scale the bitmap by the primary
// screen's device pixel ratio so it stays sharp on HiDPI displays, while
// QPainter keeps drawing in logical (base) coordinates via
// setDevicePixelRatio().  Assumes the widget is on the primary screen —
// TODO(review): confirm for multi-monitor setups with differing DPI.
+ float dpr = QGuiApplication::primaryScreen()->devicePixelRatio();
+ overlay_width = lrint(overlay_base_width * dpr);
+ overlay_height = lrint(overlay_base_height * dpr);
+
overlay_image.reset(new QImage(overlay_width, overlay_height, QImage::Format_Grayscale8));
+ overlay_image->setDevicePixelRatio(dpr);
overlay_image->fill(0);
QPainter painter(overlay_image.get());
font.setPointSize(12);
painter.setFont(font);
// Text is laid out in base (logical) coordinates; Qt scales by the image's
// device pixel ratio when rasterizing.
- painter.drawText(QRectF(0, 0, overlay_width, overlay_height), Qt::AlignCenter, QString::fromStdString(text));
+ painter.drawText(QRectF(0, 0, overlay_base_width, overlay_base_height), Qt::AlignCenter, QString::fromStdString(text));
// Don't refresh immediately; we might not have an OpenGL context here.
overlay_input_needs_refresh = true;