#include "player.h"

#include "clip_list.h"
#include "defs.h"
#include "flags.h"
#include "frame_on_disk.h"
#include "jpeg_frame_view.h"
#include "shared/context.h"
#include "shared/ffmpeg_raii.h"
#include "shared/httpd.h"
#include "shared/metrics.h"
#include "shared/mux.h"
#include "shared/timebase.h"
#include "video_stream.h"

#include <algorithm>
#include <assert.h>
#include <chrono>
#include <condition_variable>
#include <locale>
#include <map>
#include <math.h>
#include <memory>
#include <movit/util.h>
#include <mutex>
#include <sstream>
#include <stdio.h>
#include <string>
#include <thread>
#include <unordered_map>
#include <vector>

using namespace std;
using namespace std::chrono;

extern HTTPD *global_httpd;

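// Main loop of the player thread: set up an OpenGL context and the VideoStream
// (if we are doing stream output at all), then keep playing the current
// playlist until told to quit.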
void Player::thread_func(AVFormatContext *file_avctx)
{
        pthread_setname_np(pthread_self(), "Player");

        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        if (!make_current(context, surface)) {
                printf("oops\n");
                exit(1);
        }

        check_error();

        // Create the VideoStream object, now that we have an OpenGL context.
        if (stream_output != NO_STREAM_OUTPUT) {
                video_stream.reset(new VideoStream(file_avctx));
                video_stream->start();
        }

        check_error();

        while (!should_quit) {
                play_playlist_once();
        }
}

namespace {

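// How far into the clip the given pts is, as a fraction between 0 and 1
// (used for progress reporting).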
double calc_progress(const Clip &clip, int64_t pts)
{
        return double(pts - clip.pts_in) / (clip.pts_out - clip.pts_in);
}

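// Replace the part of *old_list that has not started playing yet with the
// corresponding tail of new_list, using the currently playing clip(s)
// (playing_index1, plus playing_index2 if a fade is in progress) to locate
// the splice point. See the comments below for what happens when those clips
// are no longer present in the new list.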
void do_splice(const vector<ClipWithID> &new_list, size_t playing_index1, ssize_t playing_index2, vector<ClipWithID> *old_list)
{
        assert(playing_index2 == -1 || size_t(playing_index2) == playing_index1 + 1);

        // First see if we can do the simple thing; find an element in the new
        // list that we are already playing, which will serve as our splice point.
        int splice_start_new_list = -1;
        for (size_t clip_idx = 0; clip_idx < new_list.size(); ++clip_idx) {
                if (new_list[clip_idx].id == (*old_list)[playing_index1].id) {
                        splice_start_new_list = clip_idx + 1;
                } else if (playing_index2 != -1 && new_list[clip_idx].id == (*old_list)[playing_index2].id) {
                        splice_start_new_list = clip_idx + 1;
                }
        }
        if (splice_start_new_list == -1) {
                // OK, so the playing items are no longer in the new list. Most likely,
                // that means we deleted some range that included them. But the ones
                // before should stay put -- and we don't want to play them. So find
                // the ones that we've already played, and ignore them. Hopefully,
                // they're contiguous; the last one that's not seen will be our cut point.
                //
                // Keeping track of the playlist range explicitly in the UI would remove
                // the need for these heuristics, but it would probably also mean we'd
                // have to lock the playing clip, which sounds annoying.
                unordered_map<uint64_t, size_t> played_ids;
                for (size_t clip_idx = 0; clip_idx < playing_index1; ++clip_idx) {
                        played_ids.emplace((*old_list)[clip_idx].id, clip_idx);
                }
                for (size_t clip_idx = 0; clip_idx < new_list.size(); ++clip_idx) {
                        if (played_ids.count(new_list[clip_idx].id)) {
                                splice_start_new_list = clip_idx + 1;
                        }
                }

                if (splice_start_new_list == -1) {
                        // OK, we didn't find any matches; the lists are totally distinct.
                        // So probably the entire thing was deleted; leave it alone.
                        return;
                }
        }

        size_t splice_start_old_list = ((playing_index2 == -1) ? playing_index1 : playing_index2) + 1;
        old_list->erase(old_list->begin() + splice_start_old_list, old_list->end());
        old_list->insert(old_list->end(), new_list.begin() + splice_start_new_list, new_list.end());
}

}  // namespace

void Player::play_playlist_once()
{
        vector<ClipWithID> clip_list;
        bool clip_ready;
        steady_clock::time_point before_sleep = steady_clock::now();
        string pause_status;
        float master_speed = 1.0f;

        // Wait until we're supposed to play something.
        {
                unique_lock<mutex> lock(queue_state_mu);
                playing = false;
                clip_ready = new_clip_changed.wait_for(lock, milliseconds(100), [this] {
                        return should_quit || new_clip_ready;
                });
                if (should_quit) {
                        return;
                }
                if (clip_ready) {
                        new_clip_ready = false;
                        playing = true;
                        clip_list = move(queued_clip_list);
                        queued_clip_list.clear();
                        assert(!clip_list.empty());
                        assert(!splice_ready);  // This corner case should have been handled in splice_play().
                } else {
                        pause_status = this->pause_status;
                }
        }

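        // Credit the time we spent sleeping to the output pts, so that the output
        // timestamps keep tracking wall-clock time even while nothing is playing.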
        steady_clock::duration time_slept = steady_clock::now() - before_sleep;
        pts += duration_cast<duration<size_t, TimebaseRatio>>(time_slept).count();

        if (!clip_ready) {
                if (video_stream != nullptr) {
                        ++metric_refresh_frame;
                        string subtitle = "Futatabi " NAGERU_VERSION ";PAUSED;0.000;" + pause_status;
                        video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder(),
                                subtitle);
                }
                return;
        }

        should_skip_to_next = false;  // To make sure we don't have a lingering click from before play.
        steady_clock::time_point origin = steady_clock::now();  // TODO: Add a 100 ms buffer for ramp-up?
        int64_t in_pts_origin = clip_list[0].clip.pts_in;
        for (size_t clip_idx = 0; clip_idx < clip_list.size(); ++clip_idx) {
                const Clip *clip = &clip_list[clip_idx].clip;
                const Clip *next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr;
                int64_t out_pts_origin = pts;

                double next_clip_fade_time = -1.0;
                if (next_clip != nullptr) {
                        double duration_this_clip = double(clip->pts_out - in_pts_origin) / TIMEBASE / clip->speed;
                        double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed;
                        next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds);
                }

                int stream_idx = clip->stream_idx;

                // Start playing exactly at a frame.
                // TODO: Snap secondary (fade-to) clips in the same fashion
                // so that we don't get jank here.
                {
                        lock_guard<mutex> lock(frame_mu);

                        // Find the first frame such that frame.pts >= in_pts.
                        auto it = find_last_frame_before(frames[stream_idx], in_pts_origin);
                        if (it != frames[stream_idx].end()) {
                                in_pts_origin = it->pts;
                        }
                }

                steady_clock::time_point next_frame_start;
                for (int64_t frameno = 0; !should_quit; ++frameno) {  // Ends when the clip ends.
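                        // out_pts advances at the nominal output frame rate, counted from
                        // out_pts_origin; in_pts advances from in_pts_origin at the clip
                        // speed times the global master speed.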
                        double out_pts = out_pts_origin + TIMEBASE * frameno / global_flags.output_framerate;
                        next_frame_start =
                                origin + microseconds(lrint((out_pts - out_pts_origin) * 1e6 / TIMEBASE));
                        int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * clip->speed * master_speed / global_flags.output_framerate);
                        pts = lrint(out_pts);

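                        // change_master_speed uses NaN as a "no change requested" sentinel.
                        // When a new speed arrives, re-anchor both origins at the current
                        // position, so playback continues seamlessly and only the rate
                        // changes going forward.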
                        float new_master_speed = change_master_speed.exchange(0.0f / 0.0f);
                        if (!std::isnan(new_master_speed)) {
                                master_speed = new_master_speed;
                                in_pts_origin = in_pts - TIMEBASE * frameno * clip->speed * master_speed / global_flags.output_framerate;
                                out_pts_origin = out_pts - TIMEBASE * frameno / global_flags.output_framerate;
                        }

                        if (should_skip_to_next.exchange(false)) {  // Test and clear.
                                Clip *clip = &clip_list[clip_idx].clip;  // Get a non-const pointer.
                                fprintf(stderr, "pts_out moving to first of %ld and %ld (currently at %f)\n", clip->pts_out, lrint(in_pts + clip->fade_time_seconds * clip->speed * TIMEBASE), out_pts);
                                clip->pts_out = std::min(clip->pts_out, lrint(in_pts + clip->fade_time_seconds * clip->speed * TIMEBASE));
                        }

                        if (in_pts >= clip->pts_out) {
                                break;
                        }

                        {
                                lock_guard<mutex> lock(queue_state_mu);
                                if (splice_ready) {
                                        if (next_clip == nullptr) {
                                                do_splice(to_splice_clip_list, clip_idx, -1, &clip_list);
                                        } else {
                                                do_splice(to_splice_clip_list, clip_idx, clip_idx + 1, &clip_list);
                                        }
                                        to_splice_clip_list.clear();
                                        splice_ready = false;

                                        // Refresh the clip pointer, since the clip list may have been reallocated.
                                        clip = &clip_list[clip_idx].clip;

                                        // Recompute next_clip and any needed fade times, since the next clip may have changed
                                        // (or we may have gone from no next clip to having one, or the other way).
                                        next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1].clip : nullptr;
                                        if (next_clip != nullptr) {
                                                double duration_this_clip = double(clip->pts_out - in_pts) / TIMEBASE / clip->speed;
                                                double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip->speed;
                                                next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip->fade_time_seconds);
                                        }
                                }
                        }

                        steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_unconditional_frame;
                                continue;
                        }

                        // pts not affected by the swapping below.
                        int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;

                        int primary_stream_idx = stream_idx;
                        FrameOnDisk secondary_frame;
                        int secondary_stream_idx = -1;
                        float fade_alpha = 0.0f;
                        double time_left_this_clip = double(clip->pts_out - in_pts) / TIMEBASE / clip->speed;
                        if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
                                // We're in a fade to the next clip.
                                secondary_stream_idx = next_clip->stream_idx;
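                                // Convert how far we are into the fade into a position (pts)
                                // within the next clip; the fade progresses at the current
                                // clip's speed.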
                                int64_t in_pts_secondary = lrint(next_clip->pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * clip->speed);
                                in_pts_secondary_for_progress = in_pts_secondary;
                                fade_alpha = 1.0f - time_left_this_clip / next_clip_fade_time;

                                // If more than half-way through the fade, interpolate the next clip
                                // instead of the current one, since it's more visible.
                                if (fade_alpha >= 0.5f) {
                                        swap(primary_stream_idx, secondary_stream_idx);
                                        swap(in_pts, in_pts_secondary);
                                        fade_alpha = 1.0f - fade_alpha;
                                }

                                FrameOnDisk frame_lower, frame_upper;
                                bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper);
                                if (ok) {
                                        secondary_frame = frame_lower;
                                }
                        }

                        // NOTE: None of this will take into account any snapping done below.
                        double clip_progress = calc_progress(*clip, in_pts_for_progress);
                        map<uint64_t, double> progress{ { clip_list[clip_idx].id, clip_progress } };
                        double time_remaining;
                        if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
                                double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress);
                                progress[clip_list[clip_idx + 1].id] = next_clip_progress;
                                time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress);
                        } else {
                                time_remaining = compute_time_left(clip_list, clip_idx, clip_progress);
                        }
                        if (progress_callback != nullptr) {
                                progress_callback(progress, time_remaining);
                        }

                        FrameOnDisk frame_lower, frame_upper;
                        bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &frame_lower, &frame_upper);
                        if (!ok) {
                                break;
                        }

                        // Wait until we should, or (given buffering) can, output the frame.
                        {
                                unique_lock<mutex> lock(queue_state_mu);
                                if (video_stream == nullptr) {
                                        // No queue, just wait until the right time and then show the frame.
                                        new_clip_changed.wait_until(lock, next_frame_start, [this] {
                                                return should_quit || new_clip_ready || override_stream_idx != -1;
                                        });
                                        if (should_quit) {
                                                return;
                                        }
                                } else {
                                        // If the queue is full (which is really the state we'd like to be in),
                                        // wait until there's room for one more frame (ie., one was output from
                                        // VideoStream), or until there's a new clip we're supposed to play.
                                        //
                                        // In this case, we don't sleep until next_frame_start; the displaying is
                                        // done by the queue.
                                        new_clip_changed.wait(lock, [this] {
                                                if (num_queued_frames < max_queued_frames) {
                                                        return true;
                                                }
                                                return should_quit || new_clip_ready || override_stream_idx != -1;
                                        });
                                }
                                if (should_quit) {
                                        return;
                                }
                                if (new_clip_ready) {
                                        if (video_stream != nullptr) {
                                                lock.unlock();  // Urg.
                                                video_stream->clear_queue();
                                                lock.lock();
                                        }
                                        return;
                                }
                                // Honor it if we got an override request for the camera.
                                if (override_stream_idx != -1) {
                                        stream_idx = override_stream_idx;
                                        override_stream_idx = -1;
                                        continue;
                                }
                        }

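                        // Build the subtitle, which doubles as a machine-readable status line
                        // (version;state;seconds left;human-readable text).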
                        string subtitle;
                        {
                                stringstream ss;
                                ss.imbue(locale("C"));
                                ss.precision(3);
                                ss << "Futatabi " NAGERU_VERSION ";PLAYING;";
                                ss << fixed << time_remaining;
                                ss << ";" << format_duration(time_remaining) << " left";
                                subtitle = ss.str();
                        }

                        // If there's nothing to interpolate between, or if interpolation is turned off,
                        // or we're a preview, then just display the frame.
                        if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0 || video_stream == nullptr) {
                                display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx,
                                                     secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false,
                                                     subtitle);
                                continue;
                        }

                        // Snap to input frame: If we can do so with less than 1% jitter
                        // (ie., move less than 1% of an _output_ frame), do so.
                        // TODO: Snap secondary (fade-to) clips in the same fashion.
                        double pts_snap_tolerance = 0.01 * double(TIMEBASE) * clip->speed / global_flags.output_framerate;
                        bool snapped = false;
                        for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
                                if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) {
                                        display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx,
                                                             secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true,
                                                             subtitle);
                                        in_pts_origin += snap_frame.pts - in_pts;
                                        snapped = true;
                                        break;
                                }
                        }
                        if (snapped) {
                                continue;
                        }

                        // The snapping above makes us lock to the input framerate, even in the presence
                        // of pts drift, for most typical cases where it's needed, like converting 60 → 2x60
                        // or 60 → 2x59.94. However, there are some corner cases like 25 → 2x59.94, where we'd
                        // get a snap very rarely (in the given case, once every 24 output frames), and by
                        // that time, we'd have drifted out. We could have solved this by changing the overall
                        // speed ever so slightly, but it requires that we know the actual frame rate (which
                        // is difficult in the presence of jitter and missed frames), or at least do some kind
                        // of matching/clustering. Instead, we take the opportunity to lock to in-between rational
                        // points if we can. E.g., if we are converting 60 → 2x60, we would not only snap to
                        // an original frame every other frame; we would also snap to exactly alpha=0.5 every
                        // in-between frame. Of course, we will still need to interpolate, but we get a lot
                        // closer when we actually get close to an original frame. In other words: Snap more
                        // often, but snap less each time. Unless the input and output frame rates are completely
                        // decorrelated with no common factor, of course (e.g. 12.345 → 34.567, which we should
                        // really never see in practice).
                        for (double fraction : { 1.0 / 2.0, 1.0 / 3.0, 2.0 / 3.0, 1.0 / 4.0, 3.0 / 4.0,
                                                 1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) {
                                double subsnap_pts = frame_lower.pts + fraction * (frame_upper.pts - frame_lower.pts);
                                if (fabs(subsnap_pts - in_pts) < pts_snap_tolerance) {
                                        in_pts_origin += lrint(subsnap_pts) - in_pts;
                                        in_pts = lrint(subsnap_pts);
                                        break;
                                }
                        }

                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_interpolated_frame;
                                continue;
                        }

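                        // Interpolation weight between the two surrounding frames:
                        // 0.0 means frame_lower, 1.0 means frame_upper.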
                        double alpha = double(in_pts - frame_lower.pts) / (frame_upper.pts - frame_lower.pts);
                        auto display_func = [this](shared_ptr<Frame> frame) {
                                if (destination != nullptr) {
                                        destination->setFrame(frame);
                                }
                        };
                        if (secondary_stream_idx == -1) {
                                ++metric_interpolated_frame;
                        } else {
                                ++metric_interpolated_faded_frame;
                        }
                        video_stream->schedule_interpolated_frame(
                                next_frame_start, pts, display_func, QueueSpotHolder(this),
                                frame_lower, frame_upper, alpha,
                                secondary_frame, fade_alpha, subtitle);
                        last_pts_played = in_pts;  // Not really needed; only previews use last_pts_played.
                }

                // The clip ended.
                if (should_quit) {
                        return;
                }

                // Start the next clip from the point where the fade went out.
                if (next_clip != nullptr) {
                        origin = next_frame_start;
                        in_pts_origin = next_clip->pts_in + lrint(next_clip_fade_time * TIMEBASE * clip->speed);
                }
        }

        if (done_callback != nullptr) {
                done_callback();
        }
}

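// Display (or, when streaming, schedule) a single stored frame without
// interpolation, optionally faded against a secondary frame.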
void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped, const std::string &subtitle)
{
        auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha] {
                if (destination != nullptr) {
                        destination->setFrame(primary_stream_idx, primary_frame, secondary_frame, fade_alpha);
                }
        };
        if (video_stream == nullptr) {
                display_func();
        } else {
                if (secondary_stream_idx == -1) {
                        // NOTE: We could be increasing unused metrics for previews, but that's harmless.
                        if (snapped) {
                                ++metric_original_snapped_frame;
                        } else {
                                ++metric_original_frame;
                        }
                        video_stream->schedule_original_frame(
                                frame_start, pts, display_func, QueueSpotHolder(this),
                                primary_frame, subtitle);
                } else {
                        assert(secondary_frame.pts != -1);
                        // NOTE: We could be increasing unused metrics for previews, but that's harmless.
                        if (snapped) {
                                ++metric_faded_snapped_frame;
                        } else {
                                ++metric_faded_frame;
                        }
                        video_stream->schedule_faded_frame(frame_start, pts, display_func,
                                                           QueueSpotHolder(this), primary_frame,
                                                           secondary_frame, fade_alpha, subtitle);
                }
        }
        last_pts_played = primary_frame.pts;
}

// Find the frames immediately before and after this point.
bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper)
{
        lock_guard<mutex> lock(frame_mu);

        // Find the first frame such that frame.pts >= pts.
        auto it = find_last_frame_before(frames[stream_idx], pts);
        if (it == frames[stream_idx].end()) {
                return false;
        }
        *frame_upper = *it;

        // Find the last frame such that frame.pts <= pts (if any).
        if (it == frames[stream_idx].begin()) {
                *frame_lower = *it;
        } else {
                *frame_lower = *(it - 1);
        }
        assert(pts >= frame_lower->pts);
        assert(pts <= frame_upper->pts);
        return true;
}

Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
        : destination(destination), stream_output(stream_output)
{
        player_thread = thread(&Player::thread_func, this, file_avctx);

        if (stream_output == HTTPD_STREAM_OUTPUT) {
                global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_original_frame);
                global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "edge_frame_or_no_interpolation" } }, &metric_faded_frame);
                global_metrics.add("http_output_frames", { { "type", "original" }, { "reason", "snapped" } }, &metric_original_snapped_frame);
                global_metrics.add("http_output_frames", { { "type", "faded" }, { "reason", "snapped" } }, &metric_faded_snapped_frame);
                global_metrics.add("http_output_frames", { { "type", "interpolated" } }, &metric_interpolated_frame);
                global_metrics.add("http_output_frames", { { "type", "interpolated_faded" } }, &metric_interpolated_faded_frame);
                global_metrics.add("http_output_frames", { { "type", "refresh" } }, &metric_refresh_frame);
                global_metrics.add("http_dropped_frames", { { "type", "interpolated" } }, &metric_dropped_interpolated_frame);
                global_metrics.add("http_dropped_frames", { { "type", "unconditional" } }, &metric_dropped_unconditional_frame);
        }
}

Player::~Player()
{
        should_quit = true;
        new_clip_changed.notify_all();
        player_thread.join();

        if (video_stream != nullptr) {
                video_stream->stop();
        }
}

void Player::play(const vector<ClipWithID> &clips)
{
        lock_guard<mutex> lock(queue_state_mu);
        new_clip_ready = true;
        queued_clip_list = clips;
        splice_ready = false;
        override_stream_idx = -1;
        new_clip_changed.notify_all();
}

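// Update the playlist while it is playing: the clip(s) currently playing keep
// playing, and the not-yet-played tail is replaced by splicing in the new list
// (see do_splice()). If a queued list has not started playing yet, it is
// simply replaced instead.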
void Player::splice_play(const vector<ClipWithID> &clips)
{
        lock_guard<mutex> lock(queue_state_mu);
        if (new_clip_ready) {
                queued_clip_list = clips;
                assert(!splice_ready);
                return;
        }

        splice_ready = true;
        to_splice_clip_list = clips;  // Overwrite any queued but not executed splice.
}

void Player::override_angle(unsigned stream_idx)
{
        int64_t last_pts;

        // Corner case: If a new clip is waiting to be played, change its stream and then we're done.
        {
                lock_guard<mutex> lock(queue_state_mu);
                if (new_clip_ready) {
                        assert(queued_clip_list.size() == 1);
                        queued_clip_list[0].clip.stream_idx = stream_idx;
                        return;
                }

                // If we are playing a clip, set override_stream_idx, and the player thread will
                // pick it up and change its internal index.
                if (playing) {
                        override_stream_idx = stream_idx;
                        new_clip_changed.notify_all();
                        return;
                }

                // OK, so we're standing still, presumably at the end of a clip.
                // Look at the last frame played (if it exists), and show the closest
                // thing we've got.
                if (last_pts_played < 0) {
                        return;
                }
                last_pts = last_pts_played;
        }

        lock_guard<mutex> lock(frame_mu);
        auto it = find_first_frame_at_or_after(frames[stream_idx], last_pts);
        if (it == frames[stream_idx].end()) {
                return;
        }
        destination->setFrame(stream_idx, *it);
}

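// Flow control for the output queue: each frame scheduled on the VideoStream
// holds a queue spot (through QueueSpotHolder), and play_playlist_once() waits
// for a free spot before scheduling the next frame.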
void Player::take_queue_spot()
{
        lock_guard<mutex> lock(queue_state_mu);
        ++num_queued_frames;
}

void Player::release_queue_spot()
{
        lock_guard<mutex> lock(queue_state_mu);
        assert(num_queued_frames > 0);
        --num_queued_frames;
        new_clip_changed.notify_all();
}

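// Estimate how many seconds of playback remain, given that we are
// progress_currently_playing (0..1) of the way into the clip at
// currently_playing_idx. Fades make consecutive clips overlap, so the
// overlapping part is only counted once. For example (hypothetical numbers):
// two 10-second clips with a 2-second fade between them add up to 18 seconds
// when starting from the top.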
double compute_time_left(const vector<ClipWithID> &clips, size_t currently_playing_idx, double progress_currently_playing)
{
        // Count the remaining clips, starting from the one currently playing.
        double remaining = 0.0;
        double last_fade_time_seconds = 0.0;
        for (size_t row = currently_playing_idx; row < clips.size(); ++row) {
                const Clip &clip = clips[row].clip;
                double clip_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / clip.speed;
                if (row == currently_playing_idx) {
                        // The clip we're playing: Subtract the part we've already played.
                        remaining = clip_length * (1.0 - progress_currently_playing);
                } else {
                        // A clip we haven't played yet: Subtract the part that's overlapping
                        // with the previous clip (due to fade).
                        remaining += max(clip_length - last_fade_time_seconds, 0.0);
                }
                last_fade_time_seconds = min(clip_length, clip.fade_time_seconds);
        }
        return remaining;
}

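// Format a duration in seconds as M:SS.mmm (e.g., 83.5 becomes "1:23.500").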
string format_duration(double t)
{
        int t_ms = lrint(t * 1e3);

        int ms = t_ms % 1000;
        t_ms /= 1000;
        int s = t_ms % 60;
        t_ms /= 60;
        int m = t_ms;

        char buf[256];
        snprintf(buf, sizeof(buf), "%d:%02d.%03d", m, s, ms);
        return buf;
}