X-Git-Url: https://git.sesse.net/?p=nageru;a=blobdiff_plain;f=futatabi%2Fplayer.cpp;h=94438a0c9713dff414bbff2f3594f6666ff38b11;hp=5ea34ea64936a3b4c9fc71b671d6a4b2538e2ffb;hb=2cb648106d32b9968f2026536fbead096308c7d1;hpb=6206d17dcc22cc22d8aaf1d7396546d1191b4360

diff --git a/futatabi/player.cpp b/futatabi/player.cpp
index 5ea34ea..94438a0 100644
--- a/futatabi/player.cpp
+++ b/futatabi/player.cpp
@@ -1,12 +1,14 @@
 #include "player.h"
 
 #include "clip_list.h"
-#include "shared/context.h"
 #include "defs.h"
-#include "shared/ffmpeg_raii.h"
+#include "flags.h"
 #include "frame_on_disk.h"
-#include "shared/httpd.h"
 #include "jpeg_frame_view.h"
+#include "shared/context.h"
+#include "shared/ffmpeg_raii.h"
+#include "shared/httpd.h"
+#include "shared/metrics.h"
 #include "shared/mux.h"
 #include "shared/timebase.h"
 #include "video_stream.h"
@@ -25,7 +27,7 @@ using namespace std::chrono;
 
 extern HTTPD *global_httpd;
 
-void Player::thread_func(bool also_output_to_stream)
+void Player::thread_func(AVFormatContext *file_avctx)
 {
 	pthread_setname_np(pthread_self(), "Player");
 
@@ -33,65 +35,146 @@ void Player::thread_func(bool also_output_to_stream)
 	QOpenGLContext *context = create_context(surface);
 	if (!make_current(context, surface)) {
 		printf("oops\n");
-		exit(1);
+		abort();
 	}
 	check_error();
 
 	// Create the VideoStream object, now that we have an OpenGL context.
-	if (also_output_to_stream) {
-		video_stream.reset(new VideoStream);
+	if (stream_output != NO_STREAM_OUTPUT) {
+		video_stream.reset(new VideoStream(file_avctx));
 		video_stream->start();
 	}
 	check_error();
 
-	constexpr double output_framerate = 60000.0 / 1001.0;  // FIXME: make configurable
-	int64_t pts = 0;
-	Clip next_clip;
-	size_t next_clip_idx = size_t(-1);
-	bool got_next_clip = false;
-	double next_clip_fade_time = -1.0;
+	while (!should_quit) {
+		play_playlist_once();
+	}
+}
 
-	for ( ;; ) {
-wait_for_clip:
-		bool clip_ready;
-		steady_clock::time_point before_sleep = steady_clock::now();
+namespace {
 
-		// Wait until we're supposed to play something.
-		{
-			unique_lock<mutex> lock(queue_state_mu);
-			clip_ready = new_clip_changed.wait_for(lock, milliseconds(100), [this] {
-				return new_clip_ready && current_clip.pts_in != -1;
-			});
+double calc_progress(const Clip &clip, int64_t pts)
+{
+	return double(pts - clip.pts_in) / (clip.pts_out - clip.pts_in);
+}
+
+void do_splice(const vector<ClipWithID> &new_list, size_t playing_index1, ssize_t playing_index2, vector<ClipWithID> *old_list)
+{
+	assert(playing_index2 == -1 || size_t(playing_index2) == playing_index1 + 1);
+
+	// First see if we can do the simple thing; find an element in the new
+	// list that we are already playing, which will serve as our splice point.
+	int splice_start_new_list = -1;
+	for (size_t clip_idx = 0; clip_idx < new_list.size(); ++clip_idx) {
+		if (new_list[clip_idx].id == (*old_list)[playing_index1].id) {
+			splice_start_new_list = clip_idx + 1;
+		} else if (playing_index2 != -1 && new_list[clip_idx].id == (*old_list)[playing_index2].id) {
+			splice_start_new_list = clip_idx + 1;
+		}
+	}
+	if (splice_start_new_list == -1) {
+		// OK, so the playing items are no longer in the new list. Most likely,
+		// that means we deleted some range that included them. But the ones
+		// before should stay put -- and we don't want to play them. So find
+		// the ones that we've already played, and ignore them. Hopefully,
+		// they're contiguous; the last one that's not seen will be our cut point.
+		//
+		// Keeping track of the playlist range explicitly in the UI would remove
+		// the need for these heuristics, but it would probably also mean we'd
+		// have to lock the playing clip, which sounds annoying.
+		unordered_map<uint64_t, size_t> played_ids;
+		for (size_t clip_idx = 0; clip_idx < playing_index1; ++clip_idx) {
+			played_ids.emplace((*old_list)[clip_idx].id, clip_idx);
+		}
+		for (size_t clip_idx = 0; clip_idx < new_list.size(); ++clip_idx) {
+			if (played_ids.count(new_list[clip_idx].id)) {
+				splice_start_new_list = clip_idx + 1;
+			}
+		}
+
+		if (splice_start_new_list == -1) {
+			// OK, we didn't find any matches; the lists are totally distinct.
+			// So probably the entire thing was deleted; leave it alone.
+			return;
+		}
+	}
+
+	size_t splice_start_old_list = ((playing_index2 == -1) ? playing_index1 : playing_index2) + 1;
+	old_list->erase(old_list->begin() + splice_start_old_list, old_list->end());
+	old_list->insert(old_list->end(), new_list.begin() + splice_start_new_list, new_list.end());
+}
+
+}  // namespace
+
+void Player::play_playlist_once()
+{
+	vector<ClipWithID> clip_list;
+	bool clip_ready;
+	steady_clock::time_point before_sleep = steady_clock::now();
+	string pause_status;
+	float master_speed = start_master_speed;
+
+	// Wait until we're supposed to play something.
+	{
+		unique_lock<mutex> lock(queue_state_mu);
+		playing = false;
+		clip_ready = new_clip_changed.wait_for(lock, milliseconds(100), [this] {
+			return should_quit || new_clip_ready;
+		});
+		if (should_quit) {
+			return;
+		}
+		if (clip_ready) {
 			new_clip_ready = false;
 			playing = true;
+			clip_list = move(queued_clip_list);
+			queued_clip_list.clear();
+			assert(!clip_list.empty());
+			assert(!splice_ready);  // This corner case should have been handled in splice_play().
+		} else {
+			pause_status = this->pause_status;
 		}
+	}
 
-		steady_clock::duration time_slept = steady_clock::now() - before_sleep;
-		pts += duration_cast<duration<int64_t, std::ratio<1, TIMEBASE>>>(time_slept).count();
-
-		if (!clip_ready) {
-			if (video_stream != nullptr) {
-				video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder());
-			}
-			continue;
+	steady_clock::duration time_slept = steady_clock::now() - before_sleep;
+	int64_t slept_pts = duration_cast<duration<int64_t, std::ratio<1, TIMEBASE>>>(time_slept).count();
+	if (slept_pts > 0) {
+		if (video_stream != nullptr) {
+			// Add silence for the time we're waiting.
+			video_stream->schedule_silence(steady_clock::now(), pts, slept_pts, QueueSpotHolder());
 		}
+		pts += slept_pts;
+	}
 
-		Clip clip;
-		size_t clip_idx;
-		unsigned stream_idx;
-		{
-			lock_guard<mutex> lock(mu);
-			clip = current_clip;
-			clip_idx = current_clip_idx;
-			stream_idx = current_stream_idx;
+	if (!clip_ready) {
+		if (video_stream != nullptr) {
+			++metric_refresh_frame;
+			string subtitle = "Futatabi " NAGERU_VERSION ";PAUSED;0.000;" + pause_status;
+			video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder(),
+				subtitle);
 		}
-		steady_clock::time_point origin = steady_clock::now();  // TODO: Add a 100 ms buffer for ramp-up?
-		int64_t in_pts_origin = clip.pts_in;
-got_clip:
+		return;
+	}
+
+	should_skip_to_next = false;  // To make sure we don't have a lingering click from before play.
+	steady_clock::time_point origin = steady_clock::now();  // TODO: Add a 100 ms buffer for ramp-up?
+	int64_t in_pts_origin = clip_list[0].clip.pts_in;
+	for (size_t clip_idx = 0; clip_idx < clip_list.size(); ++clip_idx) {
+		const Clip *clip = &clip_list[clip_idx].clip;
+		const Clip *next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr;
 		int64_t out_pts_origin = pts;
+		double next_clip_fade_time = -1.0;
+		if (next_clip != nullptr) {
+			double duration_this_clip = double(clip->pts_out - in_pts_origin) / TIMEBASE / clip->speed;
+			double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed;
+			next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds);
+		}
+
+		int stream_idx = clip->stream_idx;
+
 		// Start playing exactly at a frame.
 		// TODO: Snap secondary (fade-to) clips in the same fashion
 		// so that we don't get jank here).
@@ -99,51 +182,73 @@ got_clip:
 			lock_guard<mutex> lock(frame_mu);
 
 			// Find the first frame such that frame.pts <= in_pts.
-			auto it = lower_bound(frames[stream_idx].begin(),
-				frames[stream_idx].end(),
-				in_pts_origin,
-				[](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
+			auto it = find_last_frame_before(frames[stream_idx], in_pts_origin);
 			if (it != frames[stream_idx].end()) {
 				in_pts_origin = it->pts;
 			}
 		}
 
-		// TODO: Lock to a rational multiple of the frame rate if possible.
-		double speed = 0.5;
-
-		int64_t in_pts_start_next_clip = -1;
 		steady_clock::time_point next_frame_start;
-		for (int frameno = 0; ; ++frameno) {  // Ends when the clip ends.
-			double out_pts = out_pts_origin + TIMEBASE * frameno / output_framerate;
+		for (int64_t frameno = 0; !should_quit; ++frameno) {  // Ends when the clip ends.
+			double out_pts = out_pts_origin + TIMEBASE * frameno / global_flags.output_framerate;
 			next_frame_start = origin + microseconds(lrint((out_pts - out_pts_origin) * 1e6 / TIMEBASE));
-			int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * speed / output_framerate);
+			int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * clip->speed * master_speed / global_flags.output_framerate);
 			pts = lrint(out_pts);
 
-			if (in_pts >= clip.pts_out) {
-				break;
+			float new_master_speed = change_master_speed.exchange(0.0f / 0.0f);
+			if (!std::isnan(new_master_speed)) {
+				master_speed = new_master_speed;
+				in_pts_origin = in_pts - TIMEBASE * frameno * clip->speed * master_speed / global_flags.output_framerate;
+				out_pts_origin = out_pts - TIMEBASE * frameno / global_flags.output_framerate;
 			}
 
-			steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
-			if (time_behind >= milliseconds(200)) {
-				fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
-					lrint(1e3 * duration<double>(time_behind).count()));
-				continue;
+			if (should_skip_to_next.exchange(false)) {  // Test and clear.
+				Clip *clip = &clip_list[clip_idx].clip;  // Get a non-const pointer.
+				clip->pts_out = std::min(clip->pts_out, llrint(in_pts + clip->fade_time_seconds * clip->speed * TIMEBASE));
 			}
 
-			double time_left_this_clip = double(clip.pts_out - in_pts) / TIMEBASE / speed;
-			if (!got_next_clip && next_clip_callback != nullptr && time_left_this_clip <= clip.fade_time_seconds) {
-				// Find the next clip so that we can begin a fade.
-				tie(next_clip, next_clip_idx) = next_clip_callback();
-				if (next_clip.pts_in != -1) {
-					got_next_clip = true;
+			if (in_pts >= clip->pts_out) {
+				break;
+			}
 
-					double duration_next_clip = (next_clip.pts_out - next_clip.pts_in) / TIMEBASE / speed;
-					next_clip_fade_time = std::min(time_left_this_clip, duration_next_clip);
-					in_pts_start_next_clip = next_clip.pts_in + lrint(next_clip_fade_time * TIMEBASE * speed);
+			// Only play audio if we're within 0.1% of normal speed. We could do
+			// stretching or pitch shift later if it becomes needed.
+			bool play_audio = clip->speed * master_speed >= 0.999 && clip->speed * master_speed <= 1.001;
+
+			{
+				lock_guard<mutex> lock(queue_state_mu);
+				if (splice_ready) {
+					if (next_clip == nullptr) {
+						do_splice(to_splice_clip_list, clip_idx, -1, &clip_list);
+					} else {
+						do_splice(to_splice_clip_list, clip_idx, clip_idx + 1, &clip_list);
+					}
+					to_splice_clip_list.clear();
+					splice_ready = false;
+
+					// Refresh the clip pointer, since the clip list may have been reallocated.
+					clip = &clip_list[clip_idx].clip;
+
+					// Recompute next_clip and any needed fade times, since the next clip may have changed
+					// (or we may have gone from no new clip to having one, or the other way).
+					next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr;
+					if (next_clip != nullptr) {
+						double duration_this_clip = double(clip->pts_out - in_pts) / TIMEBASE / clip->speed;
+						double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed;
+						next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds);
+					}
 				}
 			}
 
+			steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
+			if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
+				fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
+					lrint(1e3 * duration<double>(time_behind).count()));
+				++metric_dropped_unconditional_frame;
+				continue;
+			}
+
 			// pts not affected by the swapping below.
 			int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;
@@ -151,9 +256,11 @@ got_clip:
 			FrameOnDisk secondary_frame;
 			int secondary_stream_idx = -1;
 			float fade_alpha = 0.0f;
-			if (got_next_clip && time_left_this_clip <= next_clip_fade_time) {
-				secondary_stream_idx = next_clip.stream_idx;
-				int64_t in_pts_secondary = lrint(next_clip.pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * speed);
+			double time_left_this_clip = double(clip->pts_out - in_pts) / TIMEBASE / clip->speed;
+			if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
+				// We're in a fade to the next clip.
+				secondary_stream_idx = next_clip->stream_idx;
+				int64_t in_pts_secondary = lrint(next_clip->pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * clip->speed);
 				in_pts_secondary_for_progress = in_pts_secondary;
 				fade_alpha = 1.0f - time_left_this_clip / next_clip_fade_time;
@@ -167,23 +274,27 @@ got_clip:
 				FrameOnDisk frame_lower, frame_upper;
 				bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper);
+
 				if (ok) {
 					secondary_frame = frame_lower;
+				} else {
+					secondary_stream_idx = -1;
 				}
 			}
 
+			// NOTE: None of this will take into account any snapping done below.
+			double clip_progress = calc_progress(*clip, in_pts_for_progress);
+			map<uint64_t, double> progress{ { clip_list[clip_idx].id, clip_progress } };
+			TimeRemaining time_remaining;
+			if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
+				double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress);
+				progress[clip_list[clip_idx + 1].id] = next_clip_progress;
+				time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress);
+			} else {
+				time_remaining = compute_time_left(clip_list, clip_idx, clip_progress);
+			}
 			if (progress_callback != nullptr) {
-				// NOTE: None of this will take into account any snapping done below.
-				double played_this_clip = double(in_pts_for_progress - clip.pts_in) / TIMEBASE / speed;
-				double total_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / speed;
-				map<size_t, double> progress{{ clip_idx, played_this_clip / total_length }};
-
-				if (got_next_clip && time_left_this_clip <= next_clip_fade_time) {
-					double played_next_clip = double(in_pts_secondary_for_progress - next_clip.pts_in) / TIMEBASE / speed;
-					double total_next_length = double(next_clip.pts_out - next_clip.pts_in) / TIMEBASE / speed;
-					progress[next_clip_idx] = played_next_clip / total_next_length;
-				}
-				progress_callback(progress);
+				progress_callback(progress, time_remaining);
 			}
 
 			FrameOnDisk frame_lower, frame_upper;
@@ -192,13 +303,17 @@ got_clip:
 				break;
 			}
 
+			// Wait until we should, or (given buffering) can, output the frame.
 			{
 				unique_lock<mutex> lock(queue_state_mu);
 				if (video_stream == nullptr) {
 					// No queue, just wait until the right time and then show the frame.
-					new_clip_changed.wait_until(lock, next_frame_start, [this]{
-						return new_clip_ready || override_stream_idx != -1;
+					new_clip_changed.wait_until(lock, next_frame_start, [this] {
+						return should_quit || new_clip_ready || override_stream_idx != -1;
 					});
+					if (should_quit) {
+						return;
+					}
 				} else {
 					// If the queue is full (which is really the state we'd like to be in),
 					// wait until there's room for one more frame (ie., one was output from
@@ -206,22 +321,25 @@ got_clip:
 					//
 					// In this case, we don't sleep until next_frame_start; the displaying is
 					// done by the queue.
-					new_clip_changed.wait(lock, [this]{
+					new_clip_changed.wait(lock, [this] {
 						if (num_queued_frames < max_queued_frames) {
 							return true;
 						}
-						return new_clip_ready || override_stream_idx != -1;
+						return should_quit || new_clip_ready || override_stream_idx != -1;
 					});
 				}
+				if (should_quit) {
+					return;
+				}
 				if (new_clip_ready) {
 					if (video_stream != nullptr) {
 						lock.unlock();  // Urg.
 						video_stream->clear_queue();
 						lock.lock();
 					}
-					got_next_clip = false;
-					goto wait_for_clip;
+					return;
 				}
+
+				// Honor if we got an override request for the camera.
 				if (override_stream_idx != -1) {
 					stream_idx = override_stream_idx;
 					override_stream_idx = -1;
@@ -229,54 +347,28 @@ got_clip:
 				}
 			}
 
-			if (frame_lower.pts == frame_upper.pts) {
-				auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
-					destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
-				};
-				if (video_stream == nullptr) {
-					display_func();
-				} else {
-					if (secondary_stream_idx == -1) {
-						video_stream->schedule_original_frame(
-							next_frame_start, pts, display_func, QueueSpotHolder(this),
-							frame_lower);
-					} else {
-						assert(secondary_frame.pts != -1);
-						video_stream->schedule_faded_frame(next_frame_start, pts, display_func,
-							QueueSpotHolder(this), frame_lower,
-							secondary_frame, fade_alpha);
-					}
-				}
-				continue;
+			string subtitle;
+			{
+				stringstream ss;
+				ss.imbue(locale("C"));
+				ss.precision(3);
+				ss << "Futatabi " NAGERU_VERSION ";PLAYING;";
+				ss << fixed << (time_remaining.num_infinite * 86400.0 + time_remaining.t);
+				ss << ";" << format_duration(time_remaining) << " left";
+				subtitle = ss.str();
 			}
 
 			// Snap to input frame: If we can do so with less than 1% jitter
 			// (ie., move less than 1% of an _output_ frame), do so.
 			// TODO: Snap secondary (fade-to) clips in the same fashion.
+			double pts_snap_tolerance = 0.01 * double(TIMEBASE) * clip->speed / global_flags.output_framerate;
 			bool snapped = false;
-			for (int64_t snap_pts : { frame_lower.pts, frame_upper.pts }) {
-				double snap_pts_as_frameno = (snap_pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
-				if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
-					FrameOnDisk snap_frame = frame_lower;
-					snap_frame.pts = snap_pts;
-					auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
-						destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
-					};
-					if (video_stream == nullptr) {
-						display_func();
-					} else {
-						if (secondary_stream_idx == -1) {
-							video_stream->schedule_original_frame(
-								next_frame_start, pts, display_func,
-								QueueSpotHolder(this), snap_frame);
-						} else {
-							assert(secondary_frame.pts != -1);
-							video_stream->schedule_faded_frame(
-								next_frame_start, pts, display_func, QueueSpotHolder(this),
-								snap_frame, secondary_frame, fade_alpha);
-						}
-					}
-					in_pts_origin += snap_pts - in_pts;
+			for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
+				if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) {
+					display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx,
+						secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true,
+						subtitle, play_audio);
+					in_pts_origin += snap_frame.pts - in_pts;
 					snapped = true;
 					break;
 				}
@@ -285,81 +377,137 @@ got_clip:
 				continue;
 			}
 
-			if (time_behind >= milliseconds(100)) {
+			// If there's nothing to interpolate between, or if interpolation is turned off,
+			// or we're a preview, then just display the frame.
+			if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0 || video_stream == nullptr) {
+				display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx,
+					secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false,
+					subtitle, play_audio);
+				continue;
+			}
+
+			// The snapping above makes us lock to the input framerate, even in the presence
+			// of pts drift, for most typical cases where it's needed, like converting 60 → 2x60
+			// or 60 → 2x59.94. However, there are some corner cases like 25 → 2x59.94, where we'd
+			// get a snap very rarely (in the given case, once every 24 output frames), and by
+			// that time, we'd have drifted out. We could have solved this by changing the overall
+			// speed ever so slightly, but it requires that we know the actual frame rate (which
+			// is difficult in the presence of jitter and missed frames), or at least do some kind
+			// of matching/clustering. Instead, we take the opportunity to lock to in-between rational
+			// points if we can. E.g., if we are converting 60 → 2x60, we would not only snap to
+			// an original frame every other frame; we would also snap to exactly alpha=0.5 every
+			// in-between frame. Of course, we will still need to interpolate, but we get a lot
+			// closer when we actually get close to an original frame. In other words: Snap more
+			// often, but snap less each time. Unless the input and output frame rates are completely
+			// decorrelated with no common factor, of course (e.g. 12.345 → 34.567, which we should
+			// really never see in practice).
+			for (double fraction : { 1.0 / 2.0, 1.0 / 3.0, 2.0 / 3.0, 1.0 / 4.0, 3.0 / 4.0,
+			                         1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) {
+				double subsnap_pts = frame_lower.pts + fraction * (frame_upper.pts - frame_lower.pts);
+				if (fabs(subsnap_pts - in_pts) < pts_snap_tolerance) {
+					in_pts_origin += lrint(subsnap_pts) - in_pts;
+					in_pts = lrint(subsnap_pts);
+					break;
+				}
+			}
+
+			if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
 				fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
-					lrint(1e3 * duration<double>(time_behind).count()));
+					lrint(1e3 * duration<double>(time_behind).count()));
+				++metric_dropped_interpolated_frame;
 				continue;
 			}
 
 			double alpha = double(in_pts - frame_lower.pts) / (frame_upper.pts - frame_lower.pts);
-
-			if (video_stream == nullptr) {
-				// Previews don't do any interpolation.
-				assert(secondary_stream_idx == -1);
-				destination->setFrame(primary_stream_idx, frame_lower);
-			} else {
-				auto display_func = [this](shared_ptr<Frame> frame) {
+			auto display_func = [this](shared_ptr<Frame> frame) {
+				if (destination != nullptr) {
 					destination->setFrame(frame);
-				};
-				video_stream->schedule_interpolated_frame(
-					next_frame_start, pts, display_func, QueueSpotHolder(this),
-					frame_lower, frame_upper, alpha,
-					secondary_frame, fade_alpha);
+				}
+			};
+			if (secondary_stream_idx == -1) {
+				++metric_interpolated_frame;
+			} else {
+				++metric_interpolated_faded_frame;
 			}
+			video_stream->schedule_interpolated_frame(
+				next_frame_start, pts, display_func, QueueSpotHolder(this),
+				frame_lower, frame_upper, alpha,
+				secondary_frame, fade_alpha, subtitle, play_audio);
+			last_pts_played = in_pts;  // Not really needed; only previews use last_pts_played.
 		}
 
 		// The clip ended.
-
-		// Last-ditch effort to get the next clip (if e.g. the fade time was zero seconds).
-		if (!got_next_clip && next_clip_callback != nullptr) {
-			tie(next_clip, next_clip_idx) = next_clip_callback();
-			if (next_clip.pts_in != -1) {
-				got_next_clip = true;
-				in_pts_start_next_clip = next_clip.pts_in;
-			}
+		if (should_quit) {
+			return;
 		}
 
-		// Switch to next clip if we got it.
-		if (got_next_clip) {
-			clip = next_clip;
-			clip_idx = next_clip_idx;
-			stream_idx = next_clip.stream_idx;  // Override is used for previews only, and next_clip is used for live ony.
-			if (done_callback != nullptr) {
-				done_callback();
-			}
-			got_next_clip = false;
-
-			// Start the next clip from the point where the fade went out.
+		if (next_clip != nullptr) {
 			origin = next_frame_start;
-			in_pts_origin = in_pts_start_next_clip;
-			goto got_clip;
+			in_pts_origin = next_clip->pts_in + lrint(next_clip_fade_time * TIMEBASE * clip->speed);
 		}
+	}
 
-		{
-			unique_lock<mutex> lock(queue_state_mu);
-			playing = false;
+	if (done_callback != nullptr) {
+		done_callback();
+	}
+}
+
+void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped, const std::string &subtitle, bool play_audio)
+{
+	auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha] {
+		if (destination != nullptr) {
+			destination->setFrame(primary_stream_idx, primary_frame, secondary_frame, fade_alpha);
 		}
-		if (done_callback != nullptr) {
-			done_callback();
+	};
+	if (video_stream == nullptr) {
+		display_func();
+	} else {
+		if (secondary_stream_idx == -1) {
+			// NOTE: We could be increasing unused metrics for previews, but that's harmless.
+			if (snapped) {
+				++metric_original_snapped_frame;
+			} else {
+				++metric_original_frame;
+			}
+			video_stream->schedule_original_frame(
+				frame_start, pts, display_func, QueueSpotHolder(this),
+				primary_frame, subtitle, play_audio);
+		} else {
+			assert(secondary_frame.pts != -1);
+			// NOTE: We could be increasing unused metrics for previews, but that's harmless.
+			if (snapped) {
+				++metric_faded_snapped_frame;
+			} else {
+				++metric_faded_frame;
+			}
+			video_stream->schedule_faded_frame(frame_start, pts, display_func,
+				QueueSpotHolder(this), primary_frame,
				secondary_frame, fade_alpha, subtitle);
 		}
 	}
+	last_pts_played = primary_frame.pts;
 }
 
 // Find the frame immediately before and after this point.
+// If we have an exact match, return it immediately.
 bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper)
 {
 	lock_guard<mutex> lock(frame_mu);
 
 	// Find the first frame such that frame.pts >= pts.
-	auto it = lower_bound(frames[stream_idx].begin(),
-		frames[stream_idx].end(),
-		pts,
-		[](const FrameOnDisk &frame, int64_t pts) { return frame.pts < pts; });
+	auto it = find_last_frame_before(frames[stream_idx], pts);
 	if (it == frames[stream_idx].end()) {
 		return false;
 	}
 	*frame_upper = *it;
 
+	// If we have an exact match, return it immediately.
+	if (frame_upper->pts == pts) {
+		*frame_lower = *it;
+		return true;
+	}
+
 	// Find the last frame such that in_pts <= frame.pts (if any).
 	if (it == frames[stream_idx].begin()) {
 		*frame_lower = *it;
@@ -371,66 +519,90 @@ bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *f
 	return true;
 }
 
-Player::Player(JPEGFrameView *destination, bool also_output_to_stream)
-	: destination(destination)
+Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
+	: destination(destination), stream_output(stream_output)
 {
-	thread(&Player::thread_func, this, also_output_to_stream).detach();
+	player_thread = thread(&Player::thread_func, this, file_avctx);
+
+	if (stream_output == HTTPD_STREAM_OUTPUT) {
+		global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_original_frame);
+		global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_faded_frame);
+		global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "snapped" } }, &metric_original_snapped_frame);
+		global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "snapped" } }, &metric_faded_snapped_frame);
+		global_metrics.add("http_output_frames", { { "type", "interpolated" } }, &metric_interpolated_frame);
+		global_metrics.add("http_output_frames", { { "type", "interpolated_faded" } }, &metric_interpolated_faded_frame);
+		global_metrics.add("http_output_frames", { { "type", "refresh" } }, &metric_refresh_frame);
+		global_metrics.add("http_dropped_frames", { { "type", "interpolated" } }, &metric_dropped_interpolated_frame);
+		global_metrics.add("http_dropped_frames", { { "type", "unconditional" } }, &metric_dropped_unconditional_frame);
+	}
 }
 
-void Player::play_clip(const Clip &clip, size_t clip_idx, unsigned stream_idx)
+Player::~Player()
 {
-	{
-		lock_guard<mutex> lock(mu);
-		current_clip = clip;
-		current_stream_idx = stream_idx;
-		current_clip_idx = clip_idx;
+	should_quit = true;
+	new_clip_changed.notify_all();
+	player_thread.join();
+
+	if (video_stream != nullptr) {
+		video_stream->stop();
 	}
+}
 
-	{
-		lock_guard<mutex> lock(queue_state_mu);
-		new_clip_ready = true;
-		override_stream_idx = -1;
-		new_clip_changed.notify_all();
+void Player::play(const vector<ClipWithID> &clips)
+{
+	lock_guard<mutex> lock(queue_state_mu);
+	new_clip_ready = true;
+	queued_clip_list = clips;
+	splice_ready = false;
+	override_stream_idx = -1;
+	new_clip_changed.notify_all();
+}
+
+void Player::splice_play(const vector<ClipWithID> &clips)
+{
+	lock_guard<mutex> lock(queue_state_mu);
+	if (new_clip_ready) {
+		queued_clip_list = clips;
+		assert(!splice_ready);
+		return;
 	}
+
+	splice_ready = true;
+	to_splice_clip_list = clips;  // Overwrite any queued but not executed splice.
 }
 
 void Player::override_angle(unsigned stream_idx)
 {
+	int64_t last_pts;
+
+	// Corner case: If a new clip is waiting to be played, change its stream and then we're done.
 	{
-		unique_lock<mutex> lock(queue_state_mu);
+		lock_guard<mutex> lock(queue_state_mu);
 		if (new_clip_ready) {
-			lock_guard<mutex> lock2(mu);
-			current_stream_idx = stream_idx;
+			assert(queued_clip_list.size() == 1);
+			queued_clip_list[0].clip.stream_idx = stream_idx;
 			return;
 		}
-	}
 
-	// If we are playing a clip, set override_stream_idx, and the player thread will
-	// pick it up and change its internal index.
-	{
-		unique_lock<mutex> lock(queue_state_mu);
+		// If we are playing a clip, set override_stream_idx, and the player thread will
+		// pick it up and change its internal index.
 		if (playing) {
 			override_stream_idx = stream_idx;
 			new_clip_changed.notify_all();
+			return;
 		}
-	}
 
-	// OK, so we're standing still, presumably at the end of a clip.
-	// Look at the current pts_out (if it exists), and show the closest
-	// thing we've got.
-	int64_t pts_out;
-	{
-		lock_guard<mutex> lock(mu);
-		if (current_clip.pts_out < 0) {
+		// OK, so we're standing still, presumably at the end of a clip.
+		// Look at the last frame played (if it exists), and show the closest
+		// thing we've got.
+		if (last_pts_played < 0) {
 			return;
 		}
-		pts_out = current_clip.pts_out;
+		last_pts = last_pts_played;
 	}
 
 	lock_guard<mutex> lock(frame_mu);
-	auto it = upper_bound(frames[stream_idx].begin(), frames[stream_idx].end(), pts_out,
-		[](int64_t pts, const FrameOnDisk &frame) { return pts < frame.pts; });
+	auto it = find_first_frame_at_or_after(frames[stream_idx], last_pts);
 	if (it == frames[stream_idx].end()) {
 		return;
 	}
@@ -439,14 +611,64 @@ void Player::override_angle(unsigned stream_idx)
 
 void Player::take_queue_spot()
 {
-	unique_lock<mutex> lock(queue_state_mu);
+	lock_guard<mutex> lock(queue_state_mu);
 	++num_queued_frames;
 }
 
 void Player::release_queue_spot()
 {
-	unique_lock<mutex> lock(queue_state_mu);
+	lock_guard<mutex> lock(queue_state_mu);
 	assert(num_queued_frames > 0);
 	--num_queued_frames;
 	new_clip_changed.notify_all();
 }
+
+TimeRemaining compute_time_left(const vector<ClipWithID> &clips, size_t currently_playing_idx, double progress_currently_playing)
+{
+	// Look at the last clip and then start counting from there.
+	TimeRemaining remaining { 0, 0.0 };
+	double last_fade_time_seconds = 0.0;
+	for (size_t row = currently_playing_idx; row < clips.size(); ++row) {
+		const Clip &clip = clips[row].clip;
+		double clip_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / clip.speed;
+		if (clip_length >= 86400.0 || clip.pts_out == -1) {  // More than one day.
+			++remaining.num_infinite;
+		} else {
+			if (row == currently_playing_idx) {
+				// A clip we're playing: Subtract the part we've already played.
+				remaining.t = clip_length * (1.0 - progress_currently_playing);
+			} else {
+				// A clip we haven't played yet: Subtract the part that's overlapping
+				// with a previous clip (due to fade).
+				remaining.t += max(clip_length - last_fade_time_seconds, 0.0);
+			}
+		}
+		last_fade_time_seconds = min(clip_length, clip.fade_time_seconds);
+	}
+	return remaining;
+}
+
+string format_duration(TimeRemaining t)
+{
+	int t_ms = lrint(t.t * 1e3);
+
+	int ms = t_ms % 1000;
+	t_ms /= 1000;
+	int s = t_ms % 60;
+	t_ms /= 60;
+	int m = t_ms;
+
+	char buf[256];
+	if (t.num_infinite > 1 && t.t > 0.0) {
+		snprintf(buf, sizeof(buf), "%zu clips + %d:%02d.%03d", t.num_infinite, m, s, ms);
+	} else if (t.num_infinite > 1) {
+		snprintf(buf, sizeof(buf), "%zu clips", t.num_infinite);
+	} else if (t.num_infinite == 1 && t.t > 0.0) {
+		snprintf(buf, sizeof(buf), "%zu clip + %d:%02d.%03d", t.num_infinite, m, s, ms);
+	} else if (t.num_infinite == 1) {
+		snprintf(buf, sizeof(buf), "%zu clip", t.num_infinite);
+	} else {
+		snprintf(buf, sizeof(buf), "%d:%02d.%03d", m, s, ms);
+	}
+	return buf;
+}
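
Editor's illustration (not part of the diff): a minimal sketch of how the new compute_time_left() and format_duration() helpers combine. It assumes the Clip/ClipWithID definitions from clip_list.h, TIMEBASE from shared/timebase.h, and that both helpers are declared in player.h; the clip values are invented for the example, and it would have to be built inside the Futatabi tree.

#include "clip_list.h"
#include "player.h"
#include "shared/timebase.h"

#include <stdio.h>
#include <vector>

int main()
{
	// Two queued clips: 10 s and 8 s of input, both played at 0.5x speed
	// (20 s and 16 s of output), with a 0.5 s crossfade between them.
	Clip a, b;
	a.pts_in = 0;
	a.pts_out = 10 * TIMEBASE;
	b.pts_in = 0;
	b.pts_out = 8 * TIMEBASE;
	a.speed = b.speed = 0.5;
	a.fade_time_seconds = b.fade_time_seconds = 0.5;

	ClipWithID ca, cb;
	ca.clip = a;
	ca.id = 1;
	cb.clip = b;
	cb.id = 2;
	std::vector<ClipWithID> clips{ ca, cb };

	// 25% into the first clip: 15 s of it left, plus 16 - 0.5 s of the next.
	TimeRemaining left = compute_time_left(clips, /*currently_playing_idx=*/0,
	                                       /*progress_currently_playing=*/0.25);
	printf("%s\n", format_duration(left).c_str());  // Prints "0:30.500".
	return 0;
}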