1 #include "jpeg_frame_view.h"
4 #include "jpeg_destroyer.h"
5 #include "post_to_main_thread.h"
6 #include "video_stream.h"
7 #include "ycbcr_converter.h"
12 #include <condition_variable>
15 #include <movit/init.h>
16 #include <movit/resource_pool.h>
17 #include <movit/util.h>
24 // Must come after the Qt stuff.
25 #include "vaapi_jpeg_decoder.h"
27 using namespace movit;

// Strict-weak ordering for JPEGID so it can be used as a std::map key.
// Compares stream_idx first, then falls through to the interpolated flag.
// NOTE(review): a pts comparison between the stream_idx check and the final
// return appears to have been elided from this dump — confirm against the
// original source that pts is compared here.
32 // Just an arbitrary order for std::map.
33 struct JPEGIDLexicalOrder
35 bool operator() (const JPEGID &a, const JPEGID &b) const
37 if (a.stream_idx != b.stream_idx)
38 return a.stream_idx < b.stream_idx;
41 return a.interpolated < b.interpolated;
// Approximate memory footprint of a decoded Frame, used for cache
// accounting: one full-resolution luma plane plus two chroma planes,
// each reduced by the chroma subsampling factors.
45 inline size_t frame_size(const Frame &frame)
47 size_t y_size = frame.width * frame.height;
48 size_t cbcr_size = y_size / frame.chroma_subsampling_x / frame.chroma_subsampling_y;
49 return y_size + cbcr_size * 2;
// Cache entry member: the decoded frame held by the LRU cache.
// NOTE(review): the enclosing struct declaration (presumably LRUFrame, which
// is also given a last_used member — see its use elsewhere in this file)
// appears to have been elided from this dump.
53 shared_ptr<Frame> frame;

// A decode request queued from the UI thread to the JPEG decoder thread.
57 struct PendingDecode {
58 JPEGID primary, secondary;
59 float fade_alpha; // Irrelevant if secondary.stream_idx == -1.
60 JPEGFrameView *destination;
// Shared decoder state. cache, cache_bytes_used and pending_decodes are all
// guarded by cache_mu (declared elsewhere in this file, not visible here).
65 thread JPEGFrameView::jpeg_decoder_thread;
67 map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache; // Under cache_mu.
68 size_t cache_bytes_used = 0; // Under cache_mu.
// Signaled whenever pending_decodes gains work or should_quit is set.
69 condition_variable any_pending_decodes;
70 deque<PendingDecode> pending_decodes; // Under cache_mu.
// Monotonically increasing stamp used as the LRU "last used" timestamp.
71 atomic<size_t> event_counter{0};
72 extern QGLWidget *global_share_widget;
73 extern atomic<bool> should_quit;
// Decode a JPEG file into a planar Y'CbCr Frame. Tries VA-API hardware
// decoding first (when usable); on failure, falls back to libjpeg software
// decoding in raw (non-RGB-converted) planar mode.
// NOTE(review): several error-return statements (after perror, after the
// component/subsampling checks) appear to have been elided from this dump.
75 shared_ptr<Frame> decode_jpeg(const string &filename)
77 shared_ptr<Frame> frame;
78 if (vaapi_jpeg_decoding_usable) {
79 frame = decode_jpeg_vaapi(filename);
80 if (frame != nullptr) {
83 fprintf(stderr, "VA-API hardware decoding failed; falling back to software.\n");

// Software path: set up a libjpeg decompressor with RAII cleanup.
86 frame.reset(new Frame);
88 jpeg_decompress_struct dinfo;
90 dinfo.err = jpeg_std_error(&jerr);
91 jpeg_create_decompress(&dinfo);
92 JPEGDestroyer destroy_dinfo(&dinfo);
94 FILE *fp = fopen(filename.c_str(), "rb");
96 perror(filename.c_str());
99 jpeg_stdio_src(&dinfo, fp);
101 jpeg_read_header(&dinfo, true);

// Only 3-component (color) JPEGs are supported.
103 if (dinfo.num_components != 3) {
104 fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
105 dinfo.num_components,
106 dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
107 dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
108 dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);

// Validate the subsampling scheme: Y' must be full resolution, Cb and Cr
// must match each other, and the factors must divide evenly.
111 if (dinfo.comp_info[0].h_samp_factor != dinfo.max_h_samp_factor ||
112 dinfo.comp_info[0].v_samp_factor != dinfo.max_v_samp_factor || // Y' must not be subsampled.
113 dinfo.comp_info[1].h_samp_factor != dinfo.comp_info[2].h_samp_factor ||
114 dinfo.comp_info[1].v_samp_factor != dinfo.comp_info[2].v_samp_factor || // Cb and Cr must be identically subsampled.
115 (dinfo.max_h_samp_factor % dinfo.comp_info[1].h_samp_factor) != 0 ||
116 (dinfo.max_v_samp_factor % dinfo.comp_info[1].v_samp_factor) != 0) { // No 2:3 subsampling or other weirdness.
117 fprintf(stderr, "Unsupported subsampling scheme. (Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
118 dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
119 dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
120 dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);

// Ask libjpeg for raw (planar Y'CbCr) output, skipping RGB conversion.
123 dinfo.raw_data_out = true;
125 jpeg_start_decompress(&dinfo);
127 frame->width = dinfo.output_width;
128 frame->height = dinfo.output_height;
129 frame->chroma_subsampling_x = dinfo.max_h_samp_factor / dinfo.comp_info[1].h_samp_factor;
130 frame->chroma_subsampling_y = dinfo.max_v_samp_factor / dinfo.comp_info[1].v_samp_factor;

// Plane sizes are rounded up to whole MCUs, since jpeg_read_raw_data()
// always delivers full MCU rows.
132 unsigned h_mcu_size = DCTSIZE * dinfo.max_h_samp_factor;
133 unsigned v_mcu_size = DCTSIZE * dinfo.max_v_samp_factor;
134 unsigned mcu_width_blocks = (dinfo.output_width + h_mcu_size - 1) / h_mcu_size;
135 unsigned mcu_height_blocks = (dinfo.output_height + v_mcu_size - 1) / v_mcu_size;
137 unsigned luma_width_blocks = mcu_width_blocks * dinfo.comp_info[0].h_samp_factor;
138 unsigned chroma_width_blocks = mcu_width_blocks * dinfo.comp_info[1].h_samp_factor;
139 unsigned luma_height_blocks = mcu_height_blocks * dinfo.comp_info[0].v_samp_factor;
140 unsigned chroma_height_blocks = mcu_height_blocks * dinfo.comp_info[1].v_samp_factor;
142 // TODO: Decode into a PBO.
143 frame->y.reset(new uint8_t[luma_width_blocks * luma_height_blocks * DCTSIZE2]);
144 frame->cb.reset(new uint8_t[chroma_width_blocks * chroma_height_blocks * DCTSIZE2]);
145 frame->cr.reset(new uint8_t[chroma_width_blocks * chroma_height_blocks * DCTSIZE2]);
146 frame->pitch_y = luma_width_blocks * DCTSIZE;
147 frame->pitch_chroma = chroma_width_blocks * DCTSIZE;

// Decode one MCU row at a time, pointing libjpeg's row pointers directly
// into the destination planes (no intermediate copy).
149 JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
150 JSAMPARRAY data[3] = { yptr, cbptr, crptr };
151 for (unsigned y = 0; y < mcu_height_blocks; ++y) {
152 // NOTE: The last elements of cbptr/crptr will be unused for vertically subsampled chroma.
153 for (unsigned yy = 0; yy < v_mcu_size; ++yy) {
154 yptr[yy] = frame->y.get() + (y * DCTSIZE * dinfo.max_v_samp_factor + yy) * frame->pitch_y;
155 cbptr[yy] = frame->cb.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
156 crptr[yy] = frame->cr.get() + (y * DCTSIZE * dinfo.comp_info[1].v_samp_factor + yy) * frame->pitch_chroma;
159 jpeg_read_raw_data(&dinfo, data, v_mcu_size);
162 (void)jpeg_finish_decompress(&dinfo);
// Evict least-recently-used entries until cache usage drops below 90% of
// the CACHE_SIZE_MB budget.
// NOTE(review): the function signature line appears to have been elided
// from this dump; only the body is visible.
170 // Assumes cache_mu is held.
171 int64_t bytes_still_to_remove = cache_bytes_used - (size_t(CACHE_SIZE_MB) * 1024 * 1024) * 9 / 10;
172 if (bytes_still_to_remove <= 0) return;

// Collect (last_used, size) for every entry so we can find the LRU cutoff.
174 vector<pair<size_t, size_t>> lru_timestamps_and_size;
175 for (const auto &key_and_value : cache) {
176 lru_timestamps_and_size.emplace_back(
177 key_and_value.second.last_used,
178 frame_size(*key_and_value.second.frame));
180 sort(lru_timestamps_and_size.begin(), lru_timestamps_and_size.end());

// Walk oldest-first until enough bytes are accounted for; everything with a
// last_used stamp at or below the cutoff will be evicted.
182 // Remove the oldest ones until we are below 90% of the cache used.
183 size_t lru_cutoff_point = 0;
184 for (const pair<size_t, size_t> &it : lru_timestamps_and_size) {
185 lru_cutoff_point = it.first;
186 bytes_still_to_remove -= it.second;
187 if (bytes_still_to_remove <= 0) break;

// Second pass: actually erase the entries and update the byte accounting.
190 for (auto it = cache.begin(); it != cache.end(); ) {
191 if (it->second.last_used <= lru_cutoff_point) {
192 cache_bytes_used -= frame_size(*it->second.frame);
193 it = cache.erase(it);
// Return the decoded frame for <id>, consulting the LRU cache first.
// On a hit, bumps the entry's last_used stamp. On a miss, either returns
// nullptr (RETURN_NULLPTR_IF_NOT_IN_CACHE) or decodes the file, inserts it
// into the cache, and prunes if the cache is over budget.
// NOTE(review): the line setting *did_decode appears to have been elided
// from this dump — presumably it is set to true on the decode path.
200 shared_ptr<Frame> decode_jpeg_with_cache(JPEGID id, CacheMissBehavior cache_miss_behavior, bool *did_decode)
204 unique_lock<mutex> lock(cache_mu);
205 auto it = cache.find(id);
206 if (it != cache.end()) {
207 it->second.last_used = event_counter++;
208 return it->second.frame;
212 if (cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE) {

// Interpolated frames are produced by VideoStream, never decoded here.
216 assert(!id.interpolated);
218 shared_ptr<Frame> frame = decode_jpeg(filename_for_frame(id.stream_idx, id.pts));

// Re-take the lock (dropped around the decode) before mutating the cache.
220 unique_lock<mutex> lock(cache_mu);
221 cache_bytes_used += frame_size(*frame);
222 cache[id] = LRUFrame{ frame, event_counter++ };
224 if (cache_bytes_used > size_t(CACHE_SIZE_MB) * 1024 * 1024) {
// Worker loop for the JPEG decoder thread: waits for pending decode
// requests, decodes the primary (and optional secondary fade) frame, and
// hands the result to the destination view. If a view has fallen more than
// three frames behind, switches to cache-only mode so stale frames are
// dropped instead of decoded.
230 void jpeg_decoder_thread_func()
232 size_t num_decoded = 0, num_dropped = 0;
234 pthread_setname_np(pthread_self(), "JPEGDecoder");
235 while (!should_quit.load()) {
236 PendingDecode decode;
237 CacheMissBehavior cache_miss_behavior = DECODE_IF_NOT_IN_CACHE;
239 unique_lock<mutex> lock(cache_mu); // TODO: Perhaps under another lock?
240 any_pending_decodes.wait(lock, [] {
241 return !pending_decodes.empty() || should_quit.load();
243 if (should_quit.load())
245 decode = pending_decodes.front();
246 pending_decodes.pop_front();

// Count how many more requests are queued for the same view; if we are
// far behind, only serve from cache (drop frames that would need a decode).
248 size_t num_pending = 0;
249 for (const PendingDecode &other_decode : pending_decodes) {
250 if (other_decode.destination == decode.destination) {
254 if (num_pending > 3) {
255 cache_miss_behavior = RETURN_NULLPTR_IF_NOT_IN_CACHE;

259 shared_ptr<Frame> primary_frame, secondary_frame;

// Fetch the primary frame (subframe 0) and, for fades, the secondary
// frame (subframe 1).
261 for (int subframe_idx = 0; subframe_idx < 2; ++subframe_idx) {
262 const JPEGID &id = (subframe_idx == 0 ? decode.primary : decode.secondary);
263 if (id.stream_idx == (unsigned)-1) {
264 // No secondary frame.

269 shared_ptr<Frame> frame;
270 if (id.interpolated) {
271 // Interpolated frames are never decoded by us,
272 // put directly into the cache from VideoStream.
273 unique_lock<mutex> lock(cache_mu);
274 auto it = cache.find(id);
275 if (it != cache.end()) {
276 it->second.last_used = event_counter++;
277 frame = it->second.frame;
279 // This can only really happen if it disappeared out of the
280 // LRU really, really fast. Which shouldn't happen.
281 fprintf(stderr, "WARNING: Interpolated JPEG was supposed to be in the cache, but was not\n");
283 found_in_cache = true; // Don't count it as a decode.
285 frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);

// nullptr here means a deliberate drop (cache-only mode) or a missing
// interpolated frame; either way, skip this request.
288 if (frame == nullptr) {
289 assert(id.interpolated || cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE);

294 if (!found_in_cache) {
296 if (num_decoded % 1000 == 0) {
297 fprintf(stderr, "Decoded %zu images, dropped %zu (%.2f%% dropped)\n",
298 num_decoded, num_dropped, (100.0 * num_dropped) / (num_decoded + num_dropped));

301 if (subframe_idx == 0) {
302 primary_frame = move(frame);
304 secondary_frame = move(frame);

// Deliver the decoded frame(s) to the view (posts to the main thread).
312 // TODO: Could we get jitter between non-interpolated and interpolated frames here?
313 decode.destination->setDecodedFrame(primary_frame, secondary_frame, decode.fade_alpha);
// Wake the decoder thread and wait for it to exit.
// NOTE(review): the line storing true into should_quit appears to have been
// elided from this dump — without it the notified thread would not exit;
// confirm against the original source.
317 void JPEGFrameView::shutdown()
319 any_pending_decodes.notify_all();
320 jpeg_decoder_thread.join();
// Construct the view as a QGLWidget sharing a GL context with
// global_share_widget, so textures/chains can be shared across views.
323 JPEGFrameView::JPEGFrameView(QWidget *parent)
324 : QGLWidget(parent, global_share_widget)
// Queue a frame (optionally faded against a secondary stream) for async
// decode and display in this view. Called from the UI thread; the actual
// decode happens on the JPEG decoder thread.
328 void JPEGFrameView::setFrame(unsigned stream_idx, int64_t pts, bool interpolated, int secondary_stream_idx, int64_t secondary_pts, float fade_alpha)
330 current_stream_idx = stream_idx; // TODO: Does this interact with fades?
332 unique_lock<mutex> lock(cache_mu);
333 PendingDecode decode;
334 if (interpolated && secondary_stream_idx != -1) {
335 // The frame will already be faded for us, so ask for only one; we shouldn't fade it against anything.
336 decode.primary = create_jpegid_for_interpolated_fade(stream_idx, pts, secondary_stream_idx, secondary_pts);
337 decode.secondary = JPEGID{ (unsigned)-1, -1, /*interpolated=*/false };
339 decode.primary = JPEGID{ stream_idx, pts, interpolated };
340 decode.secondary = JPEGID{ (unsigned)secondary_stream_idx, secondary_pts, /*interpolated=*/false };
342 decode.fade_alpha = fade_alpha;
343 decode.destination = this;
344 pending_decodes.push_back(decode);
345 any_pending_decodes.notify_all();
// Insert a frame produced by VideoStream (interpolation) directly into the
// cache, so the decoder thread can find it instead of trying to decode it.
348 void JPEGFrameView::insert_interpolated_frame(JPEGID id, shared_ptr<Frame> frame)
350 // We rely on the frame not being evicted from the cache before
351 // jpeg_decoder_thread() sees it and can display it (otherwise,
352 // that thread would hang). With a default cache of 1000 elements,
353 // that would sound like a reasonable assumption.
354 unique_lock<mutex> lock(cache_mu);
355 cache_bytes_used += frame_size(*frame);
356 cache[id] = LRUFrame{ std::move(frame), event_counter++ };
357 if (cache_bytes_used > size_t(CACHE_SIZE_MB) * 1024 * 1024) {
// Movit resource pool shared by all JPEGFrameView instances; created once
// on first initializeGL() (see the once_flag below).
362 ResourcePool *resource_pool = nullptr;
// One-time GL setup for this view: creates the shared resource pool and
// starts the decoder thread (first view only, via once_flag), then builds
// the Y'CbCr converter and the grayscale text-overlay chain.
364 void JPEGFrameView::initializeGL()
367 glDisable(GL_DEPTH_TEST);

// Process-wide init, run exactly once across all views.
370 static once_flag once;
372 resource_pool = new ResourcePool;
373 jpeg_decoder_thread = std::thread(jpeg_decoder_thread_func);

376 ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_RGBA, resource_pool));

378 ImageFormat inout_format;
379 inout_format.color_space = COLORSPACE_sRGB;
380 inout_format.gamma_curve = GAMMA_sRGB;

// Overlay chain renders the (grayscale) text overlay on top of the video.
382 overlay_chain.reset(new EffectChain(overlay_base_width, overlay_base_height, resource_pool));
383 overlay_input = (movit::FlatInput *)overlay_chain->add_input(new FlatInput(inout_format, FORMAT_GRAYSCALE, GL_UNSIGNED_BYTE, overlay_base_width, overlay_base_height));
385 overlay_chain->add_output(inout_format, OUTPUT_ALPHA_FORMAT_POSTMULTIPLIED);
386 overlay_chain->finalize();
// Track the real framebuffer size; width()/height() are in logical (DPI-
// scaled) units, so the raw GL values are saved for use in paintGL().
389 void JPEGFrameView::resizeGL(int width, int height)
392 glViewport(0, 0, width, height);
395 // Save these, as width() and height() will lie with DPI scaling.
// Render the current frame (or black if none), then the text overlay in
// the bottom-right corner if one is set.
400 void JPEGFrameView::paintGL()
402 glViewport(0, 0, gl_width, gl_height);
403 if (current_frame == nullptr) {
404 glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
405 glClear(GL_COLOR_BUFFER_BIT);

410 current_chain->render_to_screen();

412 if (overlay_image != nullptr) {
// Re-upload the overlay texture lazily; set_overlay() cannot do it since
// it may run without a current GL context.
413 if (overlay_input_needs_refresh) {
414 overlay_input->set_width(overlay_width);
415 overlay_input->set_height(overlay_height);
416 overlay_input->set_pixel_data(overlay_image->bits());
418 glViewport(gl_width - overlay_width, 0, overlay_width, overlay_height);
419 overlay_chain->render_to_screen();
// Called from the decoder thread with the finished frame(s). Posts to the
// main thread (capturing the shared_ptrs by value keeps the frames alive),
// where the appropriate movit chain is prepared for the next paintGL().
427 void JPEGFrameView::setDecodedFrame(shared_ptr<Frame> frame, shared_ptr<Frame> secondary_frame, float fade_alpha)
429 post_to_main_thread([this, frame, secondary_frame, fade_alpha] {
430 current_frame = frame;
431 current_secondary_frame = secondary_frame;

// With a secondary frame present, build a fade chain; otherwise a plain
// Y'CbCr -> RGBA conversion chain.
433 if (secondary_frame != nullptr) {
434 current_chain = ycbcr_converter->prepare_chain_for_fade(frame, secondary_frame, fade_alpha);
436 current_chain = ycbcr_converter->prepare_chain_for_conversion(frame);
// React to left-button presses on the view.
// NOTE(review): the handler body (presumably a clicked() signal emission)
// appears to have been elided from this dump.
442 void JPEGFrameView::mousePressEvent(QMouseEvent *event)
444 if (event->type() == QEvent::MouseButtonPress && event->button() == Qt::LeftButton) {
// Render <text> into a grayscale overlay image, scaled for the primary
// screen's device pixel ratio. An empty text clears the overlay. The GL
// texture upload is deferred to paintGL() (see overlay_input_needs_refresh),
// since there may be no current GL context here.
449 void JPEGFrameView::set_overlay(const string &text)
452 overlay_image.reset();

// Physical pixel dimensions for the overlay backing store.
456 float dpr = QGuiApplication::primaryScreen()->devicePixelRatio();
457 overlay_width = lrint(overlay_base_width * dpr);
458 overlay_height = lrint(overlay_base_height * dpr);

460 overlay_image.reset(new QImage(overlay_width, overlay_height, QImage::Format_Grayscale8));
461 overlay_image->setDevicePixelRatio(dpr);
462 overlay_image->fill(0);
463 QPainter painter(overlay_image.get());

465 painter.setPen(Qt::white);
466 QFont font = painter.font();
467 font.setPointSize(12);
468 painter.setFont(font);

// Draw in logical (base) coordinates; Qt applies the DPR scaling.
470 painter.drawText(QRectF(0, 0, overlay_base_width, overlay_base_height), Qt::AlignCenter, QString::fromStdString(text));

472 // Don't refresh immediately; we might not have an OpenGL context here.
473 overlay_input_needs_refresh = true;