thread JPEGFrameView::jpeg_decoder_thread;
mutex cache_mu;
map<JPEGID, LRUFrame, JPEGIDLexicalOrder> cache; // Under cache_mu.
-condition_variable any_pending_decodes, cache_updated;
+condition_variable any_pending_decodes;
deque<PendingDecode> pending_decodes; // Under cache_mu.
atomic<size_t> event_counter{0};
extern QGLWidget *global_share_widget;
// Interpolated frames are never decoded by us,
// but are instead put directly into the cache from VideoStream.
unique_lock<mutex> lock(cache_mu);
- cache_updated.wait(lock, [id] {
- return cache.count(id) != 0 || should_quit.load();
- });
- if (should_quit.load())
- break;
- found_in_cache = true; // Don't count it as a decode.
-
auto it = cache.find(id);
- assert(it != cache.end());
-
- it->second.last_used = event_counter++;
- frame = it->second.frame;
- if (frame == nullptr) {
- // We inserted a nullptr as signal that the frame was never
- // interpolated and that we should stop waiting.
- // But don't let it linger in the cache anymore.
- cache.erase(it);
+ if (it != cache.end()) {
+ it->second.last_used = event_counter++;
+ frame = it->second.frame;
+ } else {
+ // This can only really happen if it disappeared out of the
+ // LRU really, really fast. Which shouldn't happen.
+ fprintf(stderr, "WARNING: Interpolated JPEG was supposed to be in the cache, but was not\n");
}
+ found_in_cache = true; // Don't count it as a decode.
} else {
frame = decode_jpeg_with_cache(id, cache_miss_behavior, &found_in_cache);
}
// that would sound like a reasonable assumption.
unique_lock<mutex> lock(cache_mu);
cache[id] = LRUFrame{ std::move(frame), event_counter++ };
- cache_updated.notify_all();
}
ResourcePool *resource_pool = nullptr;
if (!clip_ready) {
if (video_stream != nullptr) {
- video_stream->schedule_refresh_frame(pts);
+ video_stream->schedule_refresh_frame(pts, /*display_func=*/nullptr);
}
continue;
}
}
if (in_pts_lower == in_pts_upper) {
- destination->setFrame(primary_stream_idx, in_pts_lower, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
- if (video_stream != nullptr) {
+ auto display_func = [this, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha]{
+ destination->setFrame(primary_stream_idx, in_pts_lower, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
+ };
+ if (video_stream == nullptr) {
+ display_func();
+ } else {
if (secondary_stream_idx == -1) {
- video_stream->schedule_original_frame(pts, primary_stream_idx, in_pts_lower);
+ video_stream->schedule_original_frame(pts, display_func, primary_stream_idx, in_pts_lower);
} else {
- video_stream->schedule_faded_frame(pts, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
+ video_stream->schedule_faded_frame(pts, display_func, primary_stream_idx, in_pts_lower, secondary_stream_idx, secondary_pts, fade_alpha);
}
}
continue;
for (int64_t snap_pts : { in_pts_lower, in_pts_upper }) {
double snap_pts_as_frameno = (snap_pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
- destination->setFrame(primary_stream_idx, snap_pts, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
- if (video_stream != nullptr) {
+ auto display_func = [this, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha]{
+ destination->setFrame(primary_stream_idx, snap_pts, /*interpolated=*/false, secondary_stream_idx, secondary_pts, fade_alpha);
+ };
+ if (video_stream == nullptr) {
+ display_func();
+ } else {
if (secondary_stream_idx == -1) {
- video_stream->schedule_original_frame(pts, primary_stream_idx, snap_pts);
+ video_stream->schedule_original_frame(pts, display_func, primary_stream_idx, snap_pts);
} else {
- video_stream->schedule_faded_frame(pts, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
+ video_stream->schedule_faded_frame(pts, display_func, primary_stream_idx, snap_pts, secondary_stream_idx, secondary_pts, fade_alpha);
}
}
in_pts_origin += snap_pts - in_pts;
} else {
// Calculate the interpolated frame. When it's done, the destination
// will be unblocked.
- destination->setFrame(primary_stream_idx, pts, /*interpolated=*/true, secondary_stream_idx, secondary_pts, fade_alpha);
- video_stream->schedule_interpolated_frame(pts, primary_stream_idx, in_pts_lower, in_pts_upper, alpha, secondary_stream_idx, secondary_pts, fade_alpha);
+ auto display_func = [this, primary_stream_idx, pts, secondary_stream_idx, secondary_pts, fade_alpha]{
+ destination->setFrame(primary_stream_idx, pts, /*interpolated=*/true, secondary_stream_idx, secondary_pts, fade_alpha);
+ };
+ video_stream->schedule_interpolated_frame(pts, display_func, primary_stream_idx, in_pts_lower, in_pts_upper, alpha, secondary_stream_idx, secondary_pts, fade_alpha);
}
}
encode_thread.join();
}
-void VideoStream::schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts)
+void VideoStream::schedule_original_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts)
{
fprintf(stderr, "output_pts=%ld original input_pts=%ld\n", output_pts, input_pts);
qf.output_pts = output_pts;
qf.stream_idx = stream_idx;
qf.input_first_pts = input_pts;
+ qf.display_func = move(display_func);
unique_lock<mutex> lock(queue_lock);
frame_queue.push_back(qf);
queue_nonempty.notify_all();
}
-void VideoStream::schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+void VideoStream::schedule_faded_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
{
fprintf(stderr, "output_pts=%ld faded input_pts=%ld,%ld fade_alpha=%.2f\n", output_pts, input_pts, secondary_input_pts, fade_alpha);
qf.stream_idx = stream_idx;
qf.resources = resources;
qf.input_first_pts = input_pts;
+ qf.display_func = move(display_func);
qf.secondary_stream_idx = secondary_stream_idx;
qf.secondary_input_pts = secondary_input_pts;
queue_nonempty.notify_all();
}
-void VideoStream::schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
+void VideoStream::schedule_interpolated_frame(int64_t output_pts, function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha)
{
if (secondary_stream_idx != -1) {
fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f secondary_pts=%ld fade_alpha=%.2f\n", output_pts, input_first_pts, input_second_pts, alpha, secondary_input_pts, fade_alpha);
unique_lock<mutex> lock(queue_lock);
if (interpolate_resources.empty()) {
fprintf(stderr, "WARNING: Too many interpolated frames already in transit; dropping one.\n");
- JPEGFrameView::insert_interpolated_frame(id, nullptr);
return;
}
resources = interpolate_resources.front();
qf.stream_idx = stream_idx;
qf.resources = resources;
qf.id = id;
+ qf.display_func = move(display_func);
check_error();
queue_nonempty.notify_all();
}
-void VideoStream::schedule_refresh_frame(int64_t output_pts)
+void VideoStream::schedule_refresh_frame(int64_t output_pts, function<void()> &&display_func)
{
QueuedFrame qf;
qf.type = QueuedFrame::REFRESH;
qf.output_pts = output_pts;
+ qf.display_func = move(display_func);
unique_lock<mutex> lock(queue_lock);
frame_queue.push_back(qf);
} else {
assert(false);
}
+ if (qf.display_func != nullptr) {
+ qf.display_func();
+ }
}
}
#include <condition_variable>
#include <deque>
+#include <functional>
#include <movit/effect_chain.h>
#include <movit/mix_effect.h>
#include <movit/ycbcr_input.h>
void start();
void stop();
- void schedule_original_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts);
- void schedule_faded_frame(int64_t output_pts, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
- void schedule_interpolated_frame(int64_t output_pts, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f); // -1 = no secondary frame.
- void schedule_refresh_frame(int64_t output_pts);
+ // “display_func” is called after the frame has been calculated (if needed)
+ // and has gone out to the stream.
+ void schedule_original_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts);
+ void schedule_faded_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_pts, int secondary_stream_idx, int64_t secondary_input_pts, float fade_alpha);
+ void schedule_interpolated_frame(int64_t output_pts, std::function<void()> &&display_func, unsigned stream_idx, int64_t input_first_pts, int64_t input_second_pts, float alpha, int secondary_stream_idx = -1, int64_t secondary_inputs_pts = -1, float fade_alpha = 0.0f); // -1 = no secondary frame.
+ void schedule_refresh_frame(int64_t output_pts, std::function<void()> &&display_func);
private:
void encode_thread_func();
RefCountedGLsync fence; // Set when the interpolated image is read back to the CPU.
GLuint flow_tex, output_tex, cbcr_tex; // Released in the receiving thread; not really used for anything else.
JPEGID id;
+
+	std::function<void()> display_func;  // Called after the frame has been calculated (if needed) and sent out to the stream.
};
std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
std::mutex queue_lock;