X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=futatabi%2Fplayer.cpp;h=b905be0f82a1b35dfcf8d0a3bb20c0825bf72afb;hb=refs%2Fheads%2Fmaster;hp=6f329ef506e7ae3df18865d685ef92682b538ae7;hpb=72f2abe1fe64ad62dfd6c7fa7b3435a5d4aa111b;p=nageru diff --git a/futatabi/player.cpp b/futatabi/player.cpp index 6f329ef..b905be0 100644 --- a/futatabi/player.cpp +++ b/futatabi/player.cpp @@ -35,7 +35,7 @@ void Player::thread_func(AVFormatContext *file_avctx) QOpenGLContext *context = create_context(surface); if (!make_current(context, surface)) { printf("oops\n"); - exit(1); + abort(); } check_error(); @@ -106,6 +106,236 @@ void do_splice(const vector &new_list, size_t playing_index1, ssize_ old_list->insert(old_list->end(), new_list.begin() + splice_start_new_list, new_list.end()); } +// Keeps track of the various timelines (wall clock time, output pts, +// position in the clip we are playing). Generally we keep an origin +// and assume we increase linearly from there; the intention is to +// avoid getting compounded accuracy errors, although with double, +// that is perhaps overkill. (Whenever we break the linear assumption, +// we need to reset said origin.) +class TimelineTracker +{ +public: + struct Instant { + steady_clock::time_point wallclock_time; + int64_t in_pts; + int64_t out_pts; + int64_t frameno; + }; + + TimelineTracker(double master_speed, int64_t out_pts_origin) + : master_speed(master_speed), last_out_pts(out_pts_origin) { + origin.out_pts = out_pts_origin; + master_speed_ease_target = master_speed; // Keeps GCC happy. + } + + void new_clip(steady_clock::time_point wallclock_origin, const Clip *clip, int64_t start_pts_offset) + { + this->clip = clip; + origin.wallclock_time = wallclock_origin; + origin.in_pts = clip->pts_in + start_pts_offset; + origin.out_pts = last_out_pts; + origin.frameno = 0; + } + + // Returns the current time for said frame. + Instant advance_to_frame(int64_t frameno); + + int64_t get_in_pts_origin() const { return origin.in_pts; } + bool playing_at_normal_speed() const { + if (in_easing) return false; + + const double effective_speed = clip->speed * master_speed; + return effective_speed >= 0.999 && effective_speed <= 1.001; + } + + void snap_by(int64_t offset) { + if (in_easing) { + // Easing will normally aim for a snap at the very end, + // so don't disturb it by jittering during the ease. + return; + } + origin.in_pts += offset; + } + + void change_master_speed(double new_master_speed, Instant now); + + float in_master_speed(float speed) const { + return (!in_easing && fabs(master_speed - speed) < 1e-6); + } + + // Instead of changing the speed instantly, change it over the course of + // about 200 ms. This is a simple linear ramp; I tried various forms of + // Bézier curves for more elegant/dramatic changing, but it seemed linear + // looked just as good in practical video. + void start_easing(double new_master_speed, int64_t length_out_pts, Instant now); + + int64_t find_easing_length(double master_speed_target, int64_t length_out_pts, const vector &frames, Instant now); + +private: + // Find out how far we are into the easing curve (0..1). + // We use this to adjust the input pts. + double find_ease_t(double out_pts) const; + double easing_out_pts_adjustment(double out_pts) const; + + double master_speed; + const Clip *clip = nullptr; + Instant origin; + int64_t last_out_pts; + + // If easing between new and old master speeds. 
+ bool in_easing = false; + int64_t ease_started_pts = 0; + double master_speed_ease_target; + int64_t ease_length_out_pts = 0; +}; + +TimelineTracker::Instant TimelineTracker::advance_to_frame(int64_t frameno) +{ + Instant ret; + double in_pts_double = origin.in_pts + TIMEBASE * clip->speed * (frameno - origin.frameno) * master_speed / global_flags.output_framerate; + double out_pts_double = origin.out_pts + TIMEBASE * (frameno - origin.frameno) / global_flags.output_framerate; + + if (in_easing) { + double in_pts_adjustment = easing_out_pts_adjustment(out_pts_double) * clip->speed; + in_pts_double += in_pts_adjustment; + } + + ret.in_pts = lrint(in_pts_double); + ret.out_pts = lrint(out_pts_double); + ret.wallclock_time = origin.wallclock_time + microseconds(lrint((out_pts_double - origin.out_pts) * 1e6 / TIMEBASE)); + ret.frameno = frameno; + + last_out_pts = ret.out_pts; + + if (in_easing && ret.out_pts >= ease_started_pts + ease_length_out_pts) { + // We have ended easing. Add what we need for the entire easing period, + // then _actually_ change the speed as we go back into normal mode. + origin.out_pts += easing_out_pts_adjustment(out_pts_double); + change_master_speed(master_speed_ease_target, ret); + in_easing = false; + } + + return ret; +} + +void TimelineTracker::change_master_speed(double new_master_speed, Instant now) +{ + master_speed = new_master_speed; + + // Reset the origins, since the calculations depend on linear interpolation + // based on the master speed. + origin = now; +} + +void TimelineTracker::start_easing(double new_master_speed, int64_t length_out_pts, Instant now) +{ + if (in_easing) { + // Apply whatever we managed to complete of the previous easing. + origin.out_pts += easing_out_pts_adjustment(now.out_pts); + double reached_speed = master_speed + (master_speed_ease_target - master_speed) * find_ease_t(now.out_pts); + change_master_speed(reached_speed, now); + } + in_easing = true; + ease_started_pts = now.out_pts; + master_speed_ease_target = new_master_speed; + ease_length_out_pts = length_out_pts; +} + +double TimelineTracker::find_ease_t(double out_pts) const +{ + return (out_pts - ease_started_pts) / double(ease_length_out_pts); +} + +double TimelineTracker::easing_out_pts_adjustment(double out_pts) const +{ + double t = find_ease_t(out_pts); + double area_factor = (master_speed_ease_target - master_speed) * ease_length_out_pts; + double val = 0.5 * min(t, 1.0) * min(t, 1.0) * area_factor; + if (t > 1.0) { + val += area_factor * (t - 1.0); + } + return val; +} + +int64_t TimelineTracker::find_easing_length(double master_speed_target, int64_t desired_length_out_pts, const vector &frames, Instant now) +{ + // Find out what frame we would have hit (approximately) with the given ease length. + double in_pts_length = 0.5 * (master_speed_target + master_speed) * desired_length_out_pts * clip->speed; + const int input_frame_num = distance( + frames.begin(), + find_first_frame_at_or_after(frames, lrint(now.in_pts + in_pts_length))); + + // Round length_out_pts to the nearest amount of whole frames. + const double frame_length = TIMEBASE / global_flags.output_framerate; + const int length_out_frames = lrint(desired_length_out_pts / frame_length); + + // Time the easing so that we aim at 200 ms (or whatever length_out_pts + // was), but adjust it so that we hit exactly on a frame. 
Unless we are
+	// somehow unlucky and run in the middle of a bad fade, this should
+	// lock us nicely into a cadence where we hit original frames (of course
+	// assuming the new speed is a reasonable ratio).
+	//
+	// Assume for a moment that we are easing into a slowdown, and that
+	// we're slightly too late to hit the frame we want to. This means that
+	// we can shorten the ease a bit; this chops off some of the total integrated
+	// velocity, so we arrive at the frame a bit sooner. Solve for the time
+	// we want to shorten the ease by (let's call it x, where the original
+	// length of the ease is called len) such that we hit exactly the in
+	// pts at the right time:
+	//
+	//   0.5 * (mst + ms) * (len - x) * cs + mst * x * cs = desired_len_in_pts
+	//
+	// gives
+	//
+	//   x = (2 * desired_len_in_pts / cs - (mst + ms) * len) / (mst - ms)
+	//
+	// Conveniently, this holds even if we are too early; a negative x
+	// (surprisingly!) gives a lengthening such that we don't hit the desired
+	// frame, but hit one slightly later. (x larger than len means that
+	// it's impossible to hit the desired frame, even if we dropped the ease
+	// altogether and just changed speeds instantly.) We also have sign invariance,
+	// so that these properties hold even if we are speeding up, not slowing
+	// down. Together, these two properties mean that we can cast a fairly
+	// wide net, trying various input and output frames and seeing which ones
+	// can be matched up with a minimal change to easing time. (This lets us
+	// e.g. end the ease close to the midpoint between two endpoint frames
+	// even if we don't know the frame rate, or deal fairly robustly with
+	// dropped input frames.) Many of these will give us the same answer,
+	// but that's fine, because the ease length is the only output.
+	int64_t best_length_out_pts = TIMEBASE * 10;  // Infinite.
+	for (int output_frame_offset = -2; output_frame_offset <= 2; ++output_frame_offset) {
+		int64_t aim_length_out_pts = lrint((length_out_frames + output_frame_offset) * frame_length);
+		if (aim_length_out_pts < 0) {
+			continue;
+		}
+
+		for (int input_frame_offset = -2; input_frame_offset <= 2; ++input_frame_offset) {
+			if (input_frame_num + input_frame_offset < 0 ||
+			    input_frame_num + input_frame_offset >= int(frames.size())) {
+				continue;
+			}
+			const int64_t in_pts = frames[input_frame_num + input_frame_offset].pts;
+			double shorten_by_out_pts = (2.0 * (in_pts - now.in_pts) / clip->speed - (master_speed_target + master_speed) * aim_length_out_pts) / (master_speed_target - master_speed);
+			int64_t length_out_pts = lrint(aim_length_out_pts - shorten_by_out_pts);
+
+			if (length_out_pts >= 0 &&
+			    abs(length_out_pts - desired_length_out_pts) < abs(best_length_out_pts - desired_length_out_pts)) {
+				best_length_out_pts = length_out_pts;
+			}
+		}
+	}
+
+	// If we need more than two seconds of easing, we give up --
+	// this can happen if we're e.g. going from 101% to 100%.
+	// If so, it would be better to let other mechanisms, such as the switch
+	// to the next clip, deal with getting us back into sync.
+	if (best_length_out_pts > TIMEBASE * 2) {
+		return desired_length_out_pts;
+	} else {
+		return best_length_out_pts;
+	}
+}
+
 }  // namespace

 void Player::play_playlist_once()
@@ -113,6 +343,7 @@ void Player::play_playlist_once()
 	vector<ClipWithID> clip_list;
 	bool clip_ready;
 	steady_clock::time_point before_sleep = steady_clock::now();
+	string pause_status;

 	// Wait until we're supposed to play something.
{ @@ -131,30 +362,42 @@ void Player::play_playlist_once() queued_clip_list.clear(); assert(!clip_list.empty()); assert(!splice_ready); // This corner case should have been handled in splice_play(). + } else { + pause_status = this->pause_status; } } steady_clock::duration time_slept = steady_clock::now() - before_sleep; - pts += duration_cast>(time_slept).count(); + int64_t slept_pts = duration_cast>(time_slept).count(); + if (slept_pts > 0) { + if (video_stream != nullptr) { + // Add silence for the time we're waiting. + video_stream->schedule_silence(steady_clock::now(), pts, slept_pts, QueueSpotHolder()); + } + pts += slept_pts; + } if (!clip_ready) { if (video_stream != nullptr) { ++metric_refresh_frame; - video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder()); + string subtitle = "Futatabi " NAGERU_VERSION ";PAUSED;0.000;" + pause_status; + video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder(), + subtitle); } return; } + should_skip_to_next = false; // To make sure we don't have a lingering click from before play. steady_clock::time_point origin = steady_clock::now(); // TODO: Add a 100 ms buffer for ramp-up? - int64_t in_pts_origin = clip_list[0].clip.pts_in; + TimelineTracker timeline(start_master_speed, pts); + timeline.new_clip(origin, &clip_list[0].clip, /*pts_offset=*/0); for (size_t clip_idx = 0; clip_idx < clip_list.size(); ++clip_idx) { const Clip *clip = &clip_list[clip_idx].clip; const Clip *next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr; - int64_t out_pts_origin = pts; double next_clip_fade_time = -1.0; if (next_clip != nullptr) { - double duration_this_clip = double(clip->pts_out - in_pts_origin) / TIMEBASE / clip->speed; + double duration_this_clip = double(clip->pts_out - timeline.get_in_pts_origin()) / TIMEBASE / clip->speed; double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed; next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds); } @@ -168,28 +411,42 @@ void Player::play_playlist_once() lock_guard lock(frame_mu); // Find the first frame such that frame.pts <= in_pts. - auto it = find_last_frame_before(frames[stream_idx], in_pts_origin); + auto it = find_last_frame_before(frames[stream_idx], timeline.get_in_pts_origin()); if (it != frames[stream_idx].end()) { - in_pts_origin = it->pts; + timeline.snap_by(it->pts - timeline.get_in_pts_origin()); } } steady_clock::time_point next_frame_start; - for (int frameno = 0; !should_quit; ++frameno) { // Ends when the clip ends. - double out_pts = out_pts_origin + TIMEBASE * frameno / global_flags.output_framerate; - next_frame_start = - origin + microseconds(lrint((out_pts - out_pts_origin) * 1e6 / TIMEBASE)); - int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * clip->speed / global_flags.output_framerate); - pts = lrint(out_pts); + for (int64_t frameno = 0; !should_quit; ++frameno) { // Ends when the clip ends. + TimelineTracker::Instant instant = timeline.advance_to_frame(frameno); + int64_t in_pts = instant.in_pts; + pts = instant.out_pts; + next_frame_start = instant.wallclock_time; + + float new_master_speed = change_master_speed.exchange(0.0f / 0.0f); + if (!std::isnan(new_master_speed) && !timeline.in_master_speed(new_master_speed)) { + int64_t ease_length_out_pts = TIMEBASE / 5; // 200 ms. 
+ int64_t recommended_pts_length = timeline.find_easing_length(new_master_speed, ease_length_out_pts, frames[clip->stream_idx], instant); + timeline.start_easing(new_master_speed, recommended_pts_length, instant); + } + + if (should_skip_to_next.exchange(false)) { // Test and clear. + Clip *clip = &clip_list[clip_idx].clip; // Get a non-const pointer. + clip->pts_out = std::min(clip->pts_out, llrint(in_pts + clip->fade_time_seconds * clip->speed * TIMEBASE)); + } if (in_pts >= clip->pts_out) { break; } + // Only play audio if we're within 0.1% of normal speed. We could do + // stretching or pitch shift later if it becomes needed. + const bool play_audio = timeline.playing_at_normal_speed(); + { lock_guard lock(queue_state_mu); if (splice_ready) { - fprintf(stderr, "splicing\n"); if (next_clip == nullptr) { do_splice(to_splice_clip_list, clip_idx, -1, &clip_list); } else { @@ -205,7 +462,7 @@ void Player::play_playlist_once() // (or we may have gone from no new clip to having one, or the other way). next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr; if (next_clip != nullptr) { - double duration_this_clip = double(clip->pts_out - in_pts) / TIMEBASE / clip->speed; + double duration_this_clip = double(clip->pts_out - timeline.get_in_pts_origin()) / TIMEBASE / clip->speed; double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed; next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds); } @@ -213,6 +470,7 @@ void Player::play_playlist_once() } steady_clock::duration time_behind = steady_clock::now() - next_frame_start; + metric_player_ahead_seconds.count_event(-duration(time_behind).count()); if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) { fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n", lrint(1e3 * duration(time_behind).count())); @@ -245,23 +503,26 @@ void Player::play_playlist_once() FrameOnDisk frame_lower, frame_upper; bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper); + if (ok) { secondary_frame = frame_lower; + } else { + secondary_stream_idx = -1; } } + // NOTE: None of this will take into account any snapping done below. + double clip_progress = calc_progress(*clip, in_pts_for_progress); + map progress{ { clip_list[clip_idx].id, clip_progress } }; + TimeRemaining time_remaining; + if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) { + double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress); + progress[clip_list[clip_idx + 1].id] = next_clip_progress; + time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress); + } else { + time_remaining = compute_time_left(clip_list, clip_idx, clip_progress); + } if (progress_callback != nullptr) { - // NOTE: None of this will take into account any snapping done below. 
- double clip_progress = calc_progress(*clip, in_pts_for_progress); - map progress{ { clip_list[clip_idx].id, clip_progress } }; - double time_remaining; - if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) { - double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress); - progress[clip_list[clip_idx + 1].id] = next_clip_progress; - time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress); - } else { - time_remaining = compute_time_left(clip_list, clip_idx, clip_progress); - } progress_callback(progress, time_remaining); } @@ -315,24 +576,28 @@ void Player::play_playlist_once() } } - // If there's nothing to interpolate between, or if interpolation is turned off, - // or we're a preview, then just display the frame. - if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0 || video_stream == nullptr) { - display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx, - secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false); - continue; + string subtitle; + { + stringstream ss; + ss.imbue(locale("C")); + ss.precision(3); + ss << "Futatabi " NAGERU_VERSION ";PLAYING;"; + ss << fixed << (time_remaining.num_infinite * 86400.0 + time_remaining.t); + ss << ";" << format_duration(time_remaining) << " left"; + subtitle = ss.str(); } // Snap to input frame: If we can do so with less than 1% jitter // (ie., move less than 1% of an _output_ frame), do so. // TODO: Snap secondary (fade-to) clips in the same fashion. - double pts_snap_tolerance = 0.01 * double(TIMEBASE) / global_flags.output_framerate; + double pts_snap_tolerance = 0.01 * double(TIMEBASE) * clip->speed / global_flags.output_framerate; bool snapped = false; for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) { if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) { display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx, - secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true); - in_pts_origin += snap_frame.pts - in_pts; + secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true, + subtitle, play_audio); + timeline.snap_by(snap_frame.pts - in_pts); snapped = true; break; } @@ -341,6 +606,15 @@ void Player::play_playlist_once() continue; } + // If there's nothing to interpolate between, or if interpolation is turned off, + // or we're a preview, then just display the frame. + if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0 || video_stream == nullptr) { + display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx, + secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false, + subtitle, play_audio); + continue; + } + // The snapping above makes us lock to the input framerate, even in the presence // of pts drift, for most typical cases where it's needed, like converting 60 → 2x60 // or 60 → 2x59.94. 
However, there are some corner cases like 25 → 2x59.94, where we'd @@ -360,7 +634,7 @@ void Player::play_playlist_once() 1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) { double subsnap_pts = frame_lower.pts + fraction * (frame_upper.pts - frame_lower.pts); if (fabs(subsnap_pts - in_pts) < pts_snap_tolerance) { - in_pts_origin += lrint(subsnap_pts) - in_pts; + timeline.snap_by(lrint(subsnap_pts) - in_pts); in_pts = lrint(subsnap_pts); break; } @@ -387,7 +661,7 @@ void Player::play_playlist_once() video_stream->schedule_interpolated_frame( next_frame_start, pts, display_func, QueueSpotHolder(this), frame_lower, frame_upper, alpha, - secondary_frame, fade_alpha); + secondary_frame, fade_alpha, subtitle, play_audio); last_pts_played = in_pts; // Not really needed; only previews use last_pts_played. } @@ -398,8 +672,7 @@ void Player::play_playlist_once() // Start the next clip from the point where the fade went out. if (next_clip != nullptr) { - origin = next_frame_start; - in_pts_origin = next_clip->pts_in + lrint(next_clip_fade_time * TIMEBASE * clip->speed); + timeline.new_clip(next_frame_start, next_clip, /*pts_start_offset=*/lrint(next_clip_fade_time * TIMEBASE * clip->speed)); } } @@ -408,7 +681,7 @@ void Player::play_playlist_once() } } -void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped) +void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped, const std::string &subtitle, bool play_audio) { auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha] { if (destination != nullptr) { @@ -427,7 +700,7 @@ void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &pri } video_stream->schedule_original_frame( frame_start, pts, display_func, QueueSpotHolder(this), - primary_frame); + primary_frame, subtitle, play_audio); } else { assert(secondary_frame.pts != -1); // NOTE: We could be increasing unused metrics for previews, but that's harmless. @@ -438,13 +711,14 @@ void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &pri } video_stream->schedule_faded_frame(frame_start, pts, display_func, QueueSpotHolder(this), primary_frame, - secondary_frame, fade_alpha); + secondary_frame, fade_alpha, subtitle); } } last_pts_played = primary_frame.pts; } // Find the frame immediately before and after this point. +// If we have an exact match, return it immediately. bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper) { lock_guard lock(frame_mu); @@ -456,6 +730,12 @@ bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *f } *frame_upper = *it; + // If we have an exact match, return it immediately. + if (frame_upper->pts == pts) { + *frame_lower = *it; + return true; + } + // Find the last frame such that in_pts <= frame.pts (if any). 
if (it == frames[stream_idx].begin()) { *frame_lower = *it; @@ -482,17 +762,22 @@ Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, A global_metrics.add("http_output_frames", { { "type", "refresh" } }, &metric_refresh_frame); global_metrics.add("http_dropped_frames", { { "type", "interpolated" } }, &metric_dropped_interpolated_frame); global_metrics.add("http_dropped_frames", { { "type", "unconditional" } }, &metric_dropped_unconditional_frame); + + vector quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 }; + metric_player_ahead_seconds.init(quantiles, 60.0); + global_metrics.add("player_ahead_seconds", &metric_player_ahead_seconds); } } Player::~Player() { should_quit = true; + new_clip_changed.notify_all(); + player_thread.join(); + if (video_stream != nullptr) { video_stream->stop(); } - new_clip_changed.notify_all(); - player_thread.join(); } void Player::play(const vector &clips) @@ -507,7 +792,7 @@ void Player::play(const vector &clips) void Player::splice_play(const vector &clips) { - lock_guard lock(queue_state_mu); + lock_guard lock(queue_state_mu); if (new_clip_ready) { queued_clip_list = clips; assert(!splice_ready); @@ -570,23 +855,52 @@ void Player::release_queue_spot() new_clip_changed.notify_all(); } -double compute_time_left(const vector &clips, size_t currently_playing_idx, double progress_currently_playing) +TimeRemaining compute_time_left(const vector &clips, size_t currently_playing_idx, double progress_currently_playing) { // Look at the last clip and then start counting from there. - double remaining = 0.0; + TimeRemaining remaining { 0, 0.0 }; double last_fade_time_seconds = 0.0; for (size_t row = currently_playing_idx; row < clips.size(); ++row) { const Clip &clip = clips[row].clip; double clip_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / clip.speed; - if (row == currently_playing_idx) { - // A clip we're playing: Subtract the part we've already played. - remaining = clip_length * (1.0 - progress_currently_playing); + if (clip_length >= 86400.0 || clip.pts_out == -1) { // More than one day. + ++remaining.num_infinite; } else { - // A clip we haven't played yet: Subtract the part that's overlapping - // with a previous clip (due to fade). - remaining += max(clip_length - last_fade_time_seconds, 0.0); + if (row == currently_playing_idx) { + // A clip we're playing: Subtract the part we've already played. + remaining.t = clip_length * (1.0 - progress_currently_playing); + } else { + // A clip we haven't played yet: Subtract the part that's overlapping + // with a previous clip (due to fade). + remaining.t += max(clip_length - last_fade_time_seconds, 0.0); + } } last_fade_time_seconds = min(clip_length, clip.fade_time_seconds); } return remaining; } + +string format_duration(TimeRemaining t) +{ + int t_ms = lrint(t.t * 1e3); + + int ms = t_ms % 1000; + t_ms /= 1000; + int s = t_ms % 60; + t_ms /= 60; + int m = t_ms; + + char buf[256]; + if (t.num_infinite > 1 && t.t > 0.0) { + snprintf(buf, sizeof(buf), "%zu clips + %d:%02d.%03d", t.num_infinite, m, s, ms); + } else if (t.num_infinite > 1) { + snprintf(buf, sizeof(buf), "%zu clips", t.num_infinite); + } else if (t.num_infinite == 1 && t.t > 0.0) { + snprintf(buf, sizeof(buf), "%zu clip + %d:%02d.%03d", t.num_infinite, m, s, ms); + } else if (t.num_infinite == 1) { + snprintf(buf, sizeof(buf), "%zu clip", t.num_infinite); + } else { + snprintf(buf, sizeof(buf), "%d:%02d.%03d", m, s, ms); + } + return buf; +}
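
Beyond the diff itself, a few standalone sketches may help illustrate the math the patch relies on. They are illustrative only, not part of the patch; the constants, helper names and struct names in them are invented for the examples.

The patch's TimelineTracker maps output frame numbers to in/out pts with a purely linear formula measured from a stored origin, and resets that origin whenever the linear assumption breaks (a speed change, a new clip). This sketch models that mapping with an arbitrary timebase and frame rate, and shows the origin reset keeping the timeline continuous across a master-speed change:

// Minimal model of TimelineTracker::advance_to_frame() (illustrative only;
// kTimebase and kOutputFramerate are arbitrary stand-ins).
#include <cstdint>
#include <cstdio>

namespace {

constexpr double kTimebase = 64000.0;
constexpr double kOutputFramerate = 50.0;

struct Origin {
	int64_t frameno = 0;
	double in_pts = 0.0;
	double out_pts = 0.0;
};

struct Instant {
	int64_t frameno;
	double in_pts, out_pts;
};

// Same linear mapping as in the patch: out pts advances at the output frame
// rate, in pts advances scaled by clip speed times master speed.
Instant advance_to_frame(const Origin &origin, double clip_speed, double master_speed, int64_t frameno)
{
	Instant ret;
	ret.frameno = frameno;
	ret.out_pts = origin.out_pts + kTimebase * (frameno - origin.frameno) / kOutputFramerate;
	ret.in_pts = origin.in_pts + kTimebase * clip_speed * master_speed * (frameno - origin.frameno) / kOutputFramerate;
	return ret;
}

}  // namespace

int main()
{
	Origin origin;
	const double clip_speed = 0.5;

	// 100 output frames at 100% master speed.
	Instant now = advance_to_frame(origin, clip_speed, /*master_speed=*/1.0, 100);
	printf("frame %3d: in_pts=%8.0f out_pts=%8.0f\n", int(now.frameno), now.in_pts, now.out_pts);

	// Change master speed to 50%: reset the origin to the current instant so
	// the linear formula measures from the point of change (no jump, and no
	// compounded rounding error from re-deriving the whole history).
	origin = Origin{ now.frameno, now.in_pts, now.out_pts };

	now = advance_to_frame(origin, clip_speed, /*master_speed=*/0.5, 200);
	printf("frame %3d: in_pts=%8.0f out_pts=%8.0f\n", int(now.frameno), now.in_pts, now.out_pts);
}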
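
The easing replaces an instant speed change with a linear ramp over a fixed amount of output pts, and easing_out_pts_adjustment() is the analytic integral of that ramp: 0.5 * t^2 * (new - old) * length while t <= 1, then growing linearly at the target speed. This standalone cross-check (speeds and lengths are arbitrary example values) compares that closed form against a brute-force numeric integration of the same ramp:

// Cross-check of the closed-form easing adjustment (illustrative only).
#include <algorithm>
#include <cstdio>

// Extra in-pts gained relative to staying at old_speed, after advancing
// out_pts output pts into an ease of length ease_length_out_pts (same shape
// as TimelineTracker::easing_out_pts_adjustment(), before the clip-speed scale).
static double closed_form(double out_pts, double old_speed, double new_speed, double ease_length_out_pts)
{
	double t = out_pts / ease_length_out_pts;
	double area_factor = (new_speed - old_speed) * ease_length_out_pts;
	double val = 0.5 * std::min(t, 1.0) * std::min(t, 1.0) * area_factor;
	if (t > 1.0) {
		val += area_factor * (t - 1.0);
	}
	return val;
}

// The same quantity by brute force: integrate speed(x) - old_speed over x,
// where speed ramps linearly from old_speed to new_speed during the ease
// and stays at new_speed afterwards.
static double numeric(double out_pts, double old_speed, double new_speed, double ease_length_out_pts)
{
	const int steps = 100000;
	const double dx = out_pts / steps;
	double sum = 0.0;
	for (int i = 0; i < steps; ++i) {
		double x = (i + 0.5) * dx;
		double t = std::min(x / ease_length_out_pts, 1.0);
		sum += (new_speed - old_speed) * t * dx;  // speed(x) - old_speed.
	}
	return sum;
}

int main()
{
	const double old_speed = 1.0, new_speed = 0.5;
	const double ease_length = 12800.0;  // Stand-in for "200 ms of output pts".
	const double samples[] = { 3200.0, 6400.0, 12800.0, 20000.0 };
	for (double out_pts : samples) {
		printf("out_pts=%7.0f  closed=%9.2f  numeric=%9.2f\n",
		       out_pts, closed_form(out_pts, old_speed, new_speed, ease_length),
		       numeric(out_pts, old_speed, new_speed, ease_length));
	}
}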
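
find_easing_length() picks its candidate ease lengths from the closed-form solve quoted in its comment. A quick standalone plausibility check (example numbers only) that the solved x really makes the shortened ease consume exactly the desired amount of input pts by the time the originally planned ease would have ended:

// Spot-check of the "shorten the ease by x" solve (illustrative only).
#include <cstdio>

int main()
{
	const double ms = 1.0;    // Current master speed.
	const double mst = 0.5;   // Target master speed.
	const double cs = 2.0;    // Clip speed.
	const double len = 12800.0;                 // Planned ease length, in output pts.
	const double desired_len_in_pts = 18000.0;  // Input distance to the frame we want to hit.

	// x from the comment in find_easing_length():
	//   0.5 * (mst + ms) * (len - x) * cs + mst * x * cs = desired_len_in_pts
	const double x = (2.0 * desired_len_in_pts / cs - (mst + ms) * len) / (mst - ms);

	// Plug back in: input consumed during the shortened ease, plus input
	// consumed at the target speed for the remaining x output pts.
	const double consumed = 0.5 * (mst + ms) * (len - x) * cs + mst * x * cs;

	printf("x = %.2f output pts (negative would lengthen the ease)\n", x);
	printf("input consumed = %.2f, desired = %.2f\n", consumed, desired_len_in_pts);
}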