Do not try to show a frame until we've computed it (do it in a callback instead).
author    Steinar H. Gunderson <sgunderson@bigfoot.com>
          Thu, 25 Oct 2018 17:39:14 +0000 (19:39 +0200)
committer Steinar H. Gunderson <sgunderson@bigfoot.com>
          Thu, 25 Oct 2018 17:39:14 +0000 (19:39 +0200)
Gets rid of the ugly waiting for the cache, and sets the stage for
some further cleanups coming up.
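
For reference, a minimal self-contained sketch of the pattern the diff below moves to: the caller hands the "show this frame" action to the scheduler as a std::function, and the encode thread invokes it only after the frame has been computed and sent out to the stream. Scheduler and schedule_frame below are simplified stand-ins, not the actual classes in this commit.

    // Illustrative sketch only -- simplified stand-ins for the real code.
    #include <condition_variable>
    #include <cstdint>
    #include <deque>
    #include <functional>
    #include <mutex>

    struct QueuedFrame {
            int64_t output_pts = 0;
            std::function<void()> display_func;  // Run after the frame has gone out.
    };

    class Scheduler {
    public:
            // The caller passes what should happen once the frame is actually ready.
            void schedule_frame(int64_t output_pts, std::function<void()> &&display_func)
            {
                    QueuedFrame qf;
                    qf.output_pts = output_pts;
                    qf.display_func = std::move(display_func);

                    std::unique_lock<std::mutex> lock(queue_lock);
                    frame_queue.push_back(std::move(qf));
                    queue_nonempty.notify_all();
            }

            // Runs in its own thread; computes/streams a frame, then fires the callback.
            // (The real encode thread also checks a quit flag; omitted here.)
            void encode_thread_func()
            {
                    for ( ;; ) {
                            QueuedFrame qf;
                            {
                                    std::unique_lock<std::mutex> lock(queue_lock);
                                    queue_nonempty.wait(lock, [this] { return !frame_queue.empty(); });
                                    qf = std::move(frame_queue.front());
                                    frame_queue.pop_front();
                            }

                            // ... compute the frame and send it to the output stream here ...

                            if (qf.display_func != nullptr) {
                                    qf.display_func();  // Only now is it safe to show the frame.
                            }
                    }
            }

    private:
            std::deque<QueuedFrame> frame_queue;  // Under queue_lock.
            std::mutex queue_lock;
            std::condition_variable queue_nonempty;
    };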

jpeg_frame_view.cpp
player.cpp
video_stream.cpp
video_stream.h

diff --git a/jpeg_frame_view.cpp b/jpeg_frame_view.cpp
index 7870f1c090dd2cb2cfec6031c420689d2316c1be..82e34e42f9ed83528753d16d53452e23e5e444f9 100644
--- a/jpeg_frame_view.cpp
+++ b/jpeg_frame_view.cpp
@@ -54,7 +54,7 @@ struct PendingDecode {
 thread JPEGFrameView::jpeg_decoder_thread;
 mutex cache_mu;
 map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache;  // Under cache_mu.
-condition_variable any_pending_decodes, cache_updated;
+condition_variable any_pending_decodes;
 deque<PendingDecode> pending_decodes;  // Under cache_mu.
 atomic<size_t> event_counter{0};
 extern QGLWidget *global_share_widget;
@@ -246,24 +246,16 @@ void jpeg_decoder_thread_func()
                                // Interpolated frames are never decoded by us,
                                // put directly into the cache from VideoStream.
                                unique_lock<mutex> lock(cache_mu);
-                               cache_updated.wait(lock, [id] {
-                                       return cache.count(id) != 0 || should_quit.load();
-                               });
-                               if (should_quit.load())
-                                       break;
-                               found_in_cache = true;  // Don't count it as a decode.
-
                                auto it = cache.find(id);
-                               assert(it != cache.end());
-
-                               it->second.last_used = event_counter++;
-                               frame = it->second.frame;
-                               if (frame == nullptr) {
-                                       // We inserted a nullptr as signal that the frame was never
-                                       // interpolated and that we should stop waiting.
-                                       // But don't let it linger in the cache anymore.
-                                       cache.erase(it);
+                               if (it != cache.end()) {
+                                       it->second.last_used = event_counter++;
+                                       frame = it->second.frame;
+                               } else {
+                                       // This can only really happen if it disappeared out of the
+                                       // LRU really, really fast. Which shouldn't happen.
+                                       fprintf(stderr, "WARNING: Interpolated JPEG was supposed to be in the cache, but was not\n");
                                }
+                               found_in_cache = true;  // Don't count it as a decode.
                        } else {
                                frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);
                        }
@@ -336,7 +328,6 @@ void JPEGFrameView::insert_interpolated_frame(JPEGID id, shared_ptr<Frame> frame
        // that would sound like a reasonable assumption.
        unique_lock<mutex> lock(cache_mu);
        cache[id] = LRUFrame{ std::move(frame), event_counter++ };
-       cache_updated.notify_all();
 }
 
 ResourcePool *resource_pool = nullptr;
diff --git a/player.cpp b/player.cpp
index fa4ea47d8615f72fcf48c7f863865f6760eba4f4..a6a8056b53290b9746abac5beec0f45d38f8135d 100644
--- a/player.cpp
+++ b/player.cpp
@@ -73,7 +73,7 @@ wait_for_clip:
 
                if (!clip_ready) {
                        if (video_stream != nullptr) {
-                               video_stream->schedule_refresh_frame(pts);
+                               video_stream->schedule_refresh_frame(pts, /*display_func=*/nullptr);
                        }
                        continue;
                }
@@ -197,12 +197,16 @@ got_clip:
                        }
 
                        if (in_pts_lower == in_pts_upper) {
-                               destination->setFrame(primary_stream_idx, in_pts_lower, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
-                               if (video_stream != nullptr) {
+                               auto display_func = [this, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha]{
+                                       destination->setFrame(primary_stream_idx, in_pts_lower, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
+                               };
+                               if (video_stream == nullptr) {
+                                       display_func();
+                               } else {
                                        if (secondary_stream_idx == -1) {
-                                               video_stream->schedule_original_frame(pts, primary_stream_idx, in_pts_lower);
+                                               video_stream->schedule_original_frame(pts, display_func, primary_stream_idx, in_pts_lower);
                                        } else {
-                                               video_stream->schedule_faded_frame(pts, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
+                                               video_stream->schedule_faded_frame(pts, display_func, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
                                        }
                                }
                                continue;
@@ -215,12 +219,16 @@ got_clip:
                        for (int64_t snap_pts : { in_pts_lower, in_pts_upper }) {
                                double snap_pts_as_frameno = (snap_pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
                                if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
-                                       destination->setFrame(primary_stream_idx, snap_pts, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
-                                       if (video_stream != nullptr) {
+                                       auto display_func = [this, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha]{
+                                               destination->setFrame(primary_stream_idx, snap_pts, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
+                                       };
+                                       if (video_stream == nullptr) {
+                                               display_func();
+                                       } else {
                                                if (secondary_stream_idx == -1) {
-                                                       video_stream->schedule_original_frame(pts, primary_stream_idx, snap_pts);
+                                                       video_stream->schedule_original_frame(pts, display_func, primary_stream_idx, snap_pts);
                                                } else {
-                                                       video_stream->schedule_faded_frame(pts, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+                                                       video_stream->schedule_faded_frame(pts, display_func, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
                                                }
                                        }
                                        in_pts_origin += snap_pts - in_pts;
@@ -247,8 +255,10 @@ got_clip:
                        } else {
                                // Calculate the interpolated frame. When it's done, the destination
                                // will be unblocked.
-                               destination->setFrame(primary_stream_idx, pts, /*interpolated=*/true, secondary_stream_idx, secondary_pts, fade_alpha);
-                               video_stream->schedule_interpolated_frame(pts, primary_stream_idx, in_pts_lower, in_pts_upper, alpha, secondary_stream_idx, secondary_pts, fade_alpha);
+                               auto display_func = [this, primary_stream_idx, pts, secondary_stream_idx, secondary_pts, fade_alpha]{
+                                       destination->setFrame(primary_stream_idx, pts, /*interpolated=*/true, secondary_stream_idx, secondary_pts, fade_alpha);
+                               };
+                               video_stream->schedule_interpolated_frame(pts, display_func, primary_stream_idx, in_pts_lower, in_pts_upper, alpha, secondary_stream_idx, secondary_pts, fade_alpha);
                        }
                }
 
diff --git a/video_stream.cpp b/video_stream.cpp
index f4ad33564b71ce4ced49064cec7678315ba0c6a7..91b5df0c4ee26ee3166a79683fc37b22e77570e8 100644
--- a/video_stream.cpp
+++ b/video_stream.cpp
@@ -281,7 +281,7 @@ void VideoStream::stop()
        encode_thread.join();
 }
 
-void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
+void VideoStream::schedule_original_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts)
 {
        fprintf(stderr, "output_pts=%ld  original      input_pts=%ld\n", output_pts, input_pts);
 
@@ -290,13 +290,14 @@ void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_id
        qf.output_pts = output_pts;
        qf.stream_idx = stream_idx;
        qf.input_first_pts = input_pts;
+       qf.display_func = move(display_func);
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
        queue_nonempty.notify_all();
 }
 
-void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+void VideoStream::schedule_faded_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
 {
        fprintf(stderr, "output_pts=%ld  faded         input_pts=%ld,%ld  fade_alpha=%.2f\n", output_pts, input_pts, secondary_input_pts, fade_alpha);
 
@@ -337,6 +338,7 @@ void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx,
        qf.stream_idx = stream_idx;
        qf.resources = resources;
        qf.input_first_pts = input_pts;
+       qf.display_func = move(display_func);
 
        qf.secondary_stream_idx = secondary_stream_idx;
        qf.secondary_input_pts = secondary_input_pts;
@@ -367,7 +369,7 @@ void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx,
        queue_nonempty.notify_all();
 }
 
-void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+void VideoStream::schedule_interpolated_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
 {
        if (secondary_stream_idx != -1) {
                fprintf(stderr, "output_pts=%ld  interpolated  input_pts1=%ld input_pts2=%ld alpha=%.3f  secondary_pts=%ld  fade_alpha=%.2f\n", output_pts, input_first_pts, input_second_pts, alpha, secondary_input_pts, fade_alpha);
@@ -388,7 +390,6 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
                unique_lock<mutex> lock(queue_lock);
                if (interpolate_resources.empty()) {
                        fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
-                       JPEGFrameView::insert_interpolated_frame(id, nullptr);
                        return;
                }
                resources = interpolate_resources.front();
@@ -401,6 +402,7 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        qf.stream_idx = stream_idx;
        qf.resources = resources;
        qf.id = id;
+       qf.display_func = move(display_func);
 
        check_error();
 
@@ -481,11 +483,12 @@ void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned strea
        queue_nonempty.notify_all();
 }
 
-void VideoStream::schedule_refresh_frame(int64_t output_pts)
+void VideoStream::schedule_refresh_frame(int64_t output_pts, function<void()> &&display_func)
 {
        QueuedFrame qf;
        qf.type = QueuedFrame::REFRESH;
        qf.output_pts = output_pts;
+       qf.display_func = move(display_func);
 
        unique_lock<mutex> lock(queue_lock);
        frame_queue.push_back(qf);
@@ -611,6 +614,9 @@ void VideoStream::encode_thread_func()
                } else {
                        assert(false);
                }
+               if (qf.display_func != nullptr) {
+                       qf.display_func();
+               }
        }
 }
 
diff --git a/video_stream.h b/video_stream.h
index 2dae6e06f01c7a626556090fa9050daf7e982727..c1b808e9baf069e0b6d65258fb6fe063ea1e60f2 100644
--- a/video_stream.h
+++ b/video_stream.h
@@ -13,6 +13,7 @@ extern "C" {
 
 #include <condition_variable>
 #include <deque>
+#include <functional>
 #include <movit/effect_chain.h>
 #include <movit/mix_effect.h>
 #include <movit/ycbcr_input.h>
@@ -35,10 +36,12 @@ public:
        void start();
        void stop();
 
-       void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
-       void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
-       void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
-       void schedule_refresh_frame(int64_t output_pts);
+       // “display_func” is called after the frame has been calculated (if needed)
+       // and has gone out to the stream.
+       void schedule_original_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
+       void schedule_faded_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
+       void schedule_interpolated_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f);  // -1 = no secondary frame.
+       void schedule_refresh_frame(int64_t output_pts, std::function<void()> &&display_func);
 
 private:
        void encode_thread_func();
@@ -83,6 +86,8 @@ private:
                RefCountedGLsync fence;  // Set when the interpolated image is read back to the CPU.
                GLuint flow_tex, output_tex, cbcr_tex;  // Released in the receiving thread; not really used for anything else.
                JPEGID id;
+
+               std::function<void()> display_func;  // Called when the image is done decoding.
        };
        std::deque<QueuedFrame> frame_queue;  // Under <queue_lock>.
        std::mutex queue_lock;
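
As a usage note on the new API above: the caller wraps whatever should be shown in a lambda and either runs it immediately (when there is no video stream to wait for) or hands it to VideoStream, as the player.cpp hunks do. A hedged sketch, with "destination", "video_stream" and the pts variables standing in for the player's real state:

    // Illustrative caller-side sketch; mirrors the player.cpp hunks above.
    auto display_func = [this, primary_stream_idx, in_pts_lower,
                         secondary_stream_idx, secondary_pts, fade_alpha] {
            destination->setFrame(primary_stream_idx, in_pts_lower, /*interpolated=*/false,
                                  secondary_stream_idx, secondary_pts, fade_alpha);
    };
    if (video_stream == nullptr) {
            display_func();  // Nothing to wait for; show the frame right away.
    } else {
            // Shown only once VideoStream has computed and sent out the frame.
            video_stream->schedule_original_frame(pts, display_func,
                                                  primary_stream_idx, in_pts_lower);
    }

    // When there is nothing to show (e.g. a refresh while waiting for a clip),
    // a nullptr callback is passed instead:
    video_stream->schedule_refresh_frame(pts, /*display_func=*/nullptr);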