Stop refreshing the Futatabi channel overlay every frame.
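As context for the change: as far as the diff shows, the overlay's pixel data is now pushed to its input only when a dirty flag is set, and the flag is cleared right after the upload in paintGL(). A minimal sketch of that pattern follows; only the name overlay_input_needs_refresh is taken from the diff, everything else (the struct, upload_overlay_pixels(), draw_overlay()) is illustrative and assumes a single rendering thread.

	struct OverlayViewSketch {
		// Set whenever the overlay image is replaced; cleared after the upload.
		bool overlay_input_needs_refresh = true;

		void set_overlay(/* new overlay image */)
		{
			// ...store the new image...
			overlay_input_needs_refresh = true;
		}

		void paintGL()
		{
			if (overlay_input_needs_refresh) {
				upload_overlay_pixels();  // Expensive: push pixels to the input texture.
				overlay_input_needs_refresh = false;
			}
			draw_overlay();  // Cheap: render from the already-uploaded texture.
		}

		void upload_overlay_pixels() {}
		void draw_overlay() {}
	};

The full change, as it appears in the repository's blobdiff, follows.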
[nageru] / futatabi / jpeg_frame_view.cpp
index c9b8090544b8313f7cf769812580fa4e11e0fd39..adc376455ec4eb06c7f8e5550b38037fa1aebeb8 100644 (file)
@@ -59,19 +59,6 @@ struct LRUFrame {
        size_t last_used;
 };
 
-struct PendingDecode {
-       JPEGFrameView *destination;
-
-       // For actual decodes (only if frame below is nullptr).
-       FrameOnDisk primary, secondary;
-       float fade_alpha;  // Irrelevant if secondary.stream_idx == -1.
-
-       // Already-decoded frames are also sent through PendingDecode,
-       // so that they get drawn in the right order. If frame is nullptr,
-       // it's a real decode.
-       shared_ptr<Frame> frame;
-};
-
 // There can be multiple JPEGFrameView instances, so make all the metrics static.
 once_flag jpeg_metrics_inited;
 atomic<int64_t> metric_jpeg_cache_used_bytes{ 0 };  // Same value as cache_bytes_used.
@@ -86,12 +73,9 @@ atomic<int64_t> metric_jpeg_vaapi_fail_frames{ 0 };
 
 }  // namespace
 
-thread JPEGFrameView::jpeg_decoder_thread;
 mutex cache_mu;
 map<FrameOnDisk, LRUFrame, FrameOnDiskLexicalOrder> cache;  // Under cache_mu.
 size_t cache_bytes_used = 0;  // Under cache_mu.
-condition_variable any_pending_decodes;
-deque<PendingDecode> pending_decodes;  // Under cache_mu.
 atomic<size_t> event_counter{ 0 };
 extern QGLWidget *global_share_widget;
 extern atomic<bool> should_quit;
@@ -125,6 +109,8 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
                return get_black_frame();
        }
 
+       jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF);
+
        if (dinfo.num_components != 3) {
                fprintf(stderr, "Not a color JPEG. (%d components, Y=%dx%d, Cb=%dx%d, Cr=%dx%d)\n",
                        dinfo.num_components,
@@ -143,7 +129,7 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
                        dinfo.comp_info[0].h_samp_factor, dinfo.comp_info[0].v_samp_factor,
                        dinfo.comp_info[1].h_samp_factor, dinfo.comp_info[1].v_samp_factor,
                        dinfo.comp_info[2].h_samp_factor, dinfo.comp_info[2].v_samp_factor);
-               exit(1);
+               abort();
        }
        dinfo.raw_data_out = true;
 
@@ -175,6 +161,14 @@ shared_ptr<Frame> decode_jpeg(const string &jpeg)
        frame->pitch_y = luma_width_blocks * DCTSIZE;
        frame->pitch_chroma = chroma_width_blocks * DCTSIZE;
 
+       if (dinfo.marker_list != nullptr &&
+           dinfo.marker_list->marker == JPEG_APP0 + 1 &&
+           dinfo.marker_list->data_length >= 4 &&
+           memcmp(dinfo.marker_list->data, "Exif", 4) == 0) {
+               frame->exif_data.assign(reinterpret_cast<char *>(dinfo.marker_list->data),
+                       dinfo.marker_list->data_length);
+       }
+
        if (!error_mgr.run([&dinfo, &frame, v_mcu_size, mcu_height_blocks] {
                    JSAMPROW yptr[v_mcu_size], cbptr[v_mcu_size], crptr[v_mcu_size];
                    JSAMPARRAY data[3] = { yptr, cbptr, crptr };
@@ -254,7 +248,7 @@ shared_ptr<Frame> decode_jpeg_with_cache(FrameOnDisk frame_spec, CacheMissBehavi
        ++metric_jpeg_cache_miss_frames;
 
        *did_decode = true;
-       shared_ptr<Frame> frame = decode_jpeg(frame_reader->read_frame(frame_spec));
+       shared_ptr<Frame> frame = decode_jpeg(frame_reader->read_frame(frame_spec, /*read_video=*/true, /*read_audio=*/false).video);
 
        lock_guard<mutex> lock(cache_mu);
        cache_bytes_used += frame_size(*frame);
@@ -277,7 +271,7 @@ void JPEGFrameView::jpeg_decoder_thread_func()
                CacheMissBehavior cache_miss_behavior = DECODE_IF_NOT_IN_CACHE;
                {
                        unique_lock<mutex> lock(cache_mu);  // TODO: Perhaps under another lock?
-                       any_pending_decodes.wait(lock, [] {
+                       any_pending_decodes.wait(lock, [this] {
                                return !pending_decodes.empty() || should_quit.load();
                        });
                        if (should_quit.load())
@@ -285,20 +279,14 @@ void JPEGFrameView::jpeg_decoder_thread_func()
                        decode = pending_decodes.front();
                        pending_decodes.pop_front();
 
-                       size_t num_pending = 0;
-                       for (const PendingDecode &other_decode : pending_decodes) {
-                               if (other_decode.destination == decode.destination) {
-                                       ++num_pending;
-                               }
-                       }
-                       if (num_pending > 3) {
+                       if (pending_decodes.size() > 3) {
                                cache_miss_behavior = RETURN_NULLPTR_IF_NOT_IN_CACHE;
                        }
                }
 
                if (decode.frame != nullptr) {
                        // Already decoded, so just show it.
-                       decode.destination->setDecodedFrame(decode.frame, nullptr, 1.0f);
+                       setDecodedFrame(decode.frame, nullptr, 1.0f);
                        continue;
                }
 
@@ -312,7 +300,7 @@ void JPEGFrameView::jpeg_decoder_thread_func()
                        }
 
                        bool found_in_cache;
-                       shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, cache_miss_behavior, &decode.destination->frame_reader, &found_in_cache);
+                       shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, cache_miss_behavior, &frame_reader, &found_in_cache);
 
                        if (frame == nullptr) {
                                assert(cache_miss_behavior == RETURN_NULLPTR_IF_NOT_IN_CACHE);
@@ -339,11 +327,11 @@ void JPEGFrameView::jpeg_decoder_thread_func()
                }
 
                // TODO: Could we get jitter between non-interpolated and interpolated frames here?
-               decode.destination->setDecodedFrame(primary_frame, secondary_frame, decode.fade_alpha);
+               setDecodedFrame(primary_frame, secondary_frame, decode.fade_alpha);
        }
 }
 
-void JPEGFrameView::shutdown()
+JPEGFrameView::~JPEGFrameView()
 {
        any_pending_decodes.notify_all();
        jpeg_decoder_thread.join();
@@ -374,7 +362,6 @@ void JPEGFrameView::setFrame(unsigned stream_idx, FrameOnDisk frame, FrameOnDisk
        decode.primary = frame;
        decode.secondary = secondary_frame;
        decode.fade_alpha = fade_alpha;
-       decode.destination = this;
        pending_decodes.push_back(decode);
        any_pending_decodes.notify_all();
 }
@@ -384,24 +371,18 @@ void JPEGFrameView::setFrame(shared_ptr<Frame> frame)
        lock_guard<mutex> lock(cache_mu);
        PendingDecode decode;
        decode.frame = std::move(frame);
-       decode.destination = this;
        pending_decodes.push_back(decode);
        any_pending_decodes.notify_all();
 }
 
-ResourcePool *resource_pool = nullptr;
-
 void JPEGFrameView::initializeGL()
 {
        glDisable(GL_BLEND);
        glDisable(GL_DEPTH_TEST);
        check_error();
 
-       static once_flag once;
-       call_once(once, [] {
-               resource_pool = new ResourcePool;
-               jpeg_decoder_thread = std::thread(jpeg_decoder_thread_func);
-       });
+       resource_pool = new ResourcePool;
+       jpeg_decoder_thread = std::thread(&JPEGFrameView::jpeg_decoder_thread_func, this);
 
        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_RGBA, resource_pool));
 
@@ -444,6 +425,7 @@ void JPEGFrameView::paintGL()
                        overlay_input->set_width(overlay_width);
                        overlay_input->set_height(overlay_height);
                        overlay_input->set_pixel_data(overlay_image->bits());
+                       overlay_input_needs_refresh = false;
                }
                glViewport(gl_width - overlay_width, 0, overlay_width, overlay_height);
                overlay_chain->render_to_screen();
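One part of the diff above that may need context is the Exif handling: libjpeg only retains application markers that were requested with jpeg_save_markers() before jpeg_read_header() is called, after which they are available in dinfo.marker_list. The diff inspects just the head of that list; the sketch below walks every saved APP1 marker instead. It is an illustrative helper (extract_exif is not part of the project), using only documented libjpeg fields (marker, data_length, data, next).

	#include <cstdio>
	#include <cstring>
	#include <string>
	#include <jpeglib.h>

	// Scan the markers saved by jpeg_save_markers(&dinfo, JPEG_APP0 + 1, 0xFFFF)
	// for an APP1 segment whose payload starts with "Exif".
	std::string extract_exif(const jpeg_decompress_struct *dinfo)
	{
		for (jpeg_saved_marker_ptr m = dinfo->marker_list; m != nullptr; m = m->next) {
			if (m->marker == JPEG_APP0 + 1 && m->data_length >= 4 &&
			    memcmp(m->data, "Exif", 4) == 0) {
				return std::string(reinterpret_cast<const char *>(m->data), m->data_length);
			}
		}
		return "";  // No Exif marker present.
	}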