Factor out some common logic from Player::thread_func().
futatabi/player.cpp
#include "player.h"

#include "clip_list.h"
#include "shared/context.h"
#include "defs.h"
#include "shared/ffmpeg_raii.h"
#include "flags.h"
#include "frame_on_disk.h"
#include "shared/httpd.h"
#include "jpeg_frame_view.h"
#include "shared/metrics.h"
#include "shared/mux.h"
#include "shared/timebase.h"
#include "video_stream.h"

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <movit/util.h>
#include <mutex>
#include <stdio.h>
#include <thread>
#include <vector>

using namespace std;
using namespace std::chrono;

extern HTTPD *global_httpd;

void Player::thread_func(AVFormatContext *file_avctx)
{
        pthread_setname_np(pthread_self(), "Player");

        QSurface *surface = create_surface();
        QOpenGLContext *context = create_context(surface);
        if (!make_current(context, surface)) {
                printf("oops\n");
                exit(1);
        }

        check_error();

        // Create the VideoStream object, now that we have an OpenGL context.
        if (stream_output != NO_STREAM_OUTPUT) {
                video_stream.reset(new VideoStream(file_avctx));
                video_stream->start();
        }

        check_error();

        while (!should_quit) {
                play_playlist_once();
        }
}

void Player::play_playlist_once()
{
        vector<Clip> clip_list;
        bool clip_ready;
        steady_clock::time_point before_sleep = steady_clock::now();

        // Wait until we're supposed to play something.
        {
                unique_lock<mutex> lock(queue_state_mu);
                playing = false;
                clip_ready = new_clip_changed.wait_for(lock, milliseconds(100), [this] {
                        return should_quit || new_clip_ready;
                });
                if (should_quit) {
                        return;
                }
                if (clip_ready) {
                        new_clip_ready = false;
                        playing = true;
                        clip_list = move(queued_clip_list);
                        queued_clip_list.clear();
                        assert(!clip_list.empty());
                }
        }

        steady_clock::duration time_slept = steady_clock::now() - before_sleep;
        pts += duration_cast<duration<size_t, TimebaseRatio>>(time_slept).count();
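        // (The cast above converts the wall-clock time we slept into pts ticks,
        // assuming TimebaseRatio from shared/timebase.h is the 1/TIMEBASE period;
        // e.g., sleeping the full 100 ms advances pts by TIMEBASE / 10.)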

        if (!clip_ready) {
                if (video_stream != nullptr) {
                        ++metric_refresh_frame;
                        video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder());
                }
                return;
        }
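        // (If nothing is queued for playback, we still ask VideoStream for a
        // refresh frame, presumably so the output stream keeps ticking instead
        // of stalling while we sit idle.)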

        steady_clock::time_point origin = steady_clock::now();  // TODO: Add a 100 ms buffer for ramp-up?
        int64_t in_pts_origin = clip_list[0].pts_in;
        for (size_t clip_idx = 0; clip_idx < clip_list.size(); ++clip_idx) {
                const Clip &clip = clip_list[clip_idx];
                const Clip *next_clip = (clip_idx + 1 < clip_list.size()) ? &clip_list[clip_idx + 1] : nullptr;
                int64_t out_pts_origin = pts;

                double next_clip_fade_time = -1.0;
                if (next_clip != nullptr) {
                        double duration_this_clip = double(clip.pts_out - in_pts_origin) / TIMEBASE / clip.speed;
                        double duration_next_clip = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / clip.speed;
                        next_clip_fade_time = min(min(duration_this_clip, duration_next_clip), clip.fade_time_seconds);
                }
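                // (The fade can never last longer than the material on either side:
                // e.g., a 4.0 s clip fading into a 10.0 s clip with
                // fade_time_seconds = 0.5 gives a 0.5 s fade, but if the next clip
                // were only 0.2 s long, the fade would be clamped to 0.2 s.)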

                int stream_idx = clip.stream_idx;

                // Start playing exactly at a frame.
                // TODO: Snap secondary (fade-to) clips in the same fashion,
                // so that we don't get jank here.
                {
                        lock_guard<mutex> lock(frame_mu);

                        // Find the first frame such that frame.pts >= in_pts.
                        auto it = find_last_frame_before(frames[stream_idx], in_pts_origin);
                        if (it != frames[stream_idx].end()) {
                                in_pts_origin = it->pts;
                        }
                }

                steady_clock::time_point next_frame_start;
                for (int frameno = 0; !should_quit; ++frameno) {  // Ends when the clip ends.
                        double out_pts = out_pts_origin + TIMEBASE * frameno / global_flags.output_framerate;
                        next_frame_start =
                                origin + microseconds(lrint((out_pts - out_pts_origin) * 1e6 / TIMEBASE));
                        int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * clip.speed / global_flags.output_framerate);
                        pts = lrint(out_pts);
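                        // (Per output frame, out_pts advances by TIMEBASE / output_framerate
                        // and in_pts by clip.speed times that; e.g., at clip.speed = 0.5 and
                        // 60 fps output, the input is traversed at half real-time, so the
                        // clip takes twice its original duration to play out.)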

                        if (in_pts >= clip.pts_out) {
                                break;
                        }

                        steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_unconditional_frame;
                                continue;
                        }

                        // Copies of the pts values for progress reporting;
                        // they are not affected by the swapping below.
                        int64_t in_pts_for_progress = in_pts, in_pts_secondary_for_progress = -1;

                        int primary_stream_idx = stream_idx;
                        FrameOnDisk secondary_frame;
                        int secondary_stream_idx = -1;
                        float fade_alpha = 0.0f;
                        double time_left_this_clip = double(clip.pts_out - in_pts) / TIMEBASE / clip.speed;
                        if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
                                // We're in a fade to the next clip.
                                secondary_stream_idx = next_clip->stream_idx;
                                int64_t in_pts_secondary = lrint(next_clip->pts_in + (next_clip_fade_time - time_left_this_clip) * TIMEBASE * clip.speed);
                                in_pts_secondary_for_progress = in_pts_secondary;
                                fade_alpha = 1.0f - time_left_this_clip / next_clip_fade_time;

                                // If more than half-way through the fade, interpolate the next clip
                                // instead of the current one, since it's more visible.
                                if (fade_alpha >= 0.5f) {
                                        swap(primary_stream_idx, secondary_stream_idx);
                                        swap(in_pts, in_pts_secondary);
                                        fade_alpha = 1.0f - fade_alpha;
                                }

                                FrameOnDisk frame_lower, frame_upper;
                                bool ok = find_surrounding_frames(in_pts_secondary, secondary_stream_idx, &frame_lower, &frame_upper);
                                if (ok) {
                                        secondary_frame = frame_lower;
                                }
                        }
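                        // (E.g., with a 0.5 s fade and 0.1 s left of this clip, fade_alpha
                        // is 0.8; since that is past the halfway point, the incoming clip
                        // becomes the primary, interpolated stream and the outgoing frame
                        // is faded on top of it with alpha 0.2.)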

                        if (progress_callback != nullptr) {
                                // NOTE: None of this will take into account any snapping done below.
                                double played_this_clip = double(in_pts_for_progress - clip.pts_in) / TIMEBASE / clip.speed;
                                double total_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / clip.speed;
                                map<size_t, double> progress{{ clip_idx, played_this_clip / total_length }};

                                if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
                                        double played_next_clip = double(in_pts_secondary_for_progress - next_clip->pts_in) / TIMEBASE / next_clip->speed;
                                        double total_next_length = double(next_clip->pts_out - next_clip->pts_in) / TIMEBASE / next_clip->speed;
                                        progress[clip_idx + 1] = played_next_clip / total_next_length;
                                }
                                progress_callback(progress);
                        }

                        FrameOnDisk frame_lower, frame_upper;
                        bool ok = find_surrounding_frames(in_pts, primary_stream_idx, &frame_lower, &frame_upper);
                        if (!ok) {
                                break;
                        }

                        // Wait until we should, or (given buffering) can, output the frame.
                        {
                                unique_lock<mutex> lock(queue_state_mu);
                                if (video_stream == nullptr) {
                                        // No queue, just wait until the right time and then show the frame.
                                        new_clip_changed.wait_until(lock, next_frame_start, [this]{
                                                return should_quit || new_clip_ready || override_stream_idx != -1;
                                        });
                                        if (should_quit) {
                                                return;
                                        }
                                } else {
                                        // If the queue is full (which is really the state we'd like to be in),
                                        // wait until there's room for one more frame (i.e., one was output from
                                        // VideoStream), or until there's a new clip we're supposed to play.
                                        //
                                        // In this case, we don't sleep until next_frame_start; the displaying is
                                        // done by the queue.
                                        new_clip_changed.wait(lock, [this]{
                                                if (num_queued_frames < max_queued_frames) {
                                                        return true;
                                                }
                                                return should_quit || new_clip_ready || override_stream_idx != -1;
                                        });
                                }
                                if (should_quit) {
                                        return;
                                }
                                if (new_clip_ready) {
                                        if (video_stream != nullptr) {
                                                lock.unlock();  // Urg.
                                                video_stream->clear_queue();
                                                lock.lock();
                                        }
                                        return;
                                }
                                // Honor if we got an override request for the camera.
                                if (override_stream_idx != -1) {
                                        stream_idx = override_stream_idx;
                                        override_stream_idx = -1;
                                        continue;
                                }
                        }

                        if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0) {
                                display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx,
                                                     secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false);
                                continue;
                        }

                        // Snap to input frame: If we can do so with less than 1% jitter
                        // (i.e., move less than 1% of an _output_ frame), do so.
                        // TODO: Snap secondary (fade-to) clips in the same fashion.
                        double pts_snap_tolerance = 0.01 * double(TIMEBASE) / global_flags.output_framerate;
                        bool snapped = false;
                        for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
                                if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) {
                                        display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx,
                                                             secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true);
                                        in_pts_origin += snap_frame.pts - in_pts;
                                        snapped = true;
                                        break;
                                }
                        }
                        if (snapped) {
                                continue;
                        }
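                        // (At 60 fps output, pts_snap_tolerance is 1% of an output frame,
                        // i.e. we allow shifting the frame by at most roughly 0.17 ms in
                        // order to land exactly on an input frame.)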

                        // The snapping above makes us lock to the input framerate, even in the presence
                        // of pts drift, for most typical cases where it's needed, like converting 60 → 2x60
                        // or 60 → 2x59.94. However, there are some corner cases like 25 → 2x59.94, where we'd
                        // get a snap very rarely (in the given case, once every 24 output frames), and by
                        // that time, we'd have drifted out. We could have solved this by changing the overall
                        // speed ever so slightly, but it requires that we know the actual frame rate (which
                        // is difficult in the presence of jitter and missed frames), or at least do some kind
                        // of matching/clustering. Instead, we take the opportunity to lock to in-between rational
                        // points if we can. E.g., if we are converting 60 → 2x60, we would not only snap to
                        // an original frame every other frame; we would also snap to exactly alpha=0.5 every
                        // in-between frame. Of course, we will still need to interpolate, but we get a lot
                        // closer when we actually get close to an original frame. In other words: Snap more
                        // often, but snap less each time. Unless the input and output frame rates are completely
                        // decorrelated with no common factor, of course (e.g. 12.345 → 34.567, which we should
                        // really never see in practice).
                        for (double fraction : { 1.0 / 2.0, 1.0 / 3.0, 2.0 / 3.0, 1.0 / 4.0, 3.0 / 4.0,
                                                 1.0 / 5.0, 2.0 / 5.0, 3.0 / 5.0, 4.0 / 5.0 }) {
                                double subsnap_pts = frame_lower.pts + fraction * (frame_upper.pts - frame_lower.pts);
                                if (fabs(subsnap_pts - in_pts) < pts_snap_tolerance) {
                                        in_pts_origin += lrint(subsnap_pts) - in_pts;
                                        in_pts = lrint(subsnap_pts);
                                        break;
                                }
                        }
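                        // (Concretely: converting 30 → 60 at speed 1.0, every other output
                        // frame falls exactly halfway between two input frames, so the 1/2
                        // fraction above snaps it to alpha = 0.5 and keeps in_pts_origin
                        // from accumulating drift.)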

                        if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
                                fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
                                        lrint(1e3 * duration<double>(time_behind).count()));
                                ++metric_dropped_interpolated_frame;
                                continue;
                        }
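                        // (Interpolated frames are presumably the most expensive to produce,
                        // so they are dropped already at 100 ms behind, whereas the
                        // unconditional drop further up only kicks in at 200 ms.)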

                        double alpha = double(in_pts - frame_lower.pts) / (frame_upper.pts - frame_lower.pts);

                        if (video_stream == nullptr) {
                                // Previews don't do any interpolation.
                                assert(secondary_stream_idx == -1);
                                if (destination != nullptr) {
                                        destination->setFrame(primary_stream_idx, frame_lower);
                                }
                                last_pts_played = frame_lower.pts;
                        } else {
                                auto display_func = [this](shared_ptr<Frame> frame) {
                                        if (destination != nullptr) {
                                                destination->setFrame(frame);
                                        }
                                };
                                if (secondary_stream_idx == -1) {
                                        ++metric_interpolated_frame;
                                } else {
                                        ++metric_interpolated_faded_frame;
                                }
                                video_stream->schedule_interpolated_frame(
                                        next_frame_start, pts, display_func, QueueSpotHolder(this),
                                        frame_lower, frame_upper, alpha,
                                        secondary_frame, fade_alpha);
                                last_pts_played = in_pts;  // Not really needed; only previews use last_pts_played.
                        }
                }

                // The clip ended.
                if (should_quit) {
                        return;
                }
                if (done_callback != nullptr) {
                        done_callback();
                }

                // Start the next clip from the point where the fade went out.
                if (next_clip != nullptr) {
                        origin = next_frame_start;
                        in_pts_origin = next_clip->pts_in + lrint(next_clip_fade_time * TIMEBASE * clip.speed);
                }
        }

        if (done_callback != nullptr) {
                done_callback();
        }
}

void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped)
{
        auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha]{
                if (destination != nullptr) {
                        destination->setFrame(primary_stream_idx, primary_frame, secondary_frame, fade_alpha);
                }
        };
        if (video_stream == nullptr) {
                display_func();
        } else {
                if (secondary_stream_idx == -1) {
                        if (snapped) {
                                ++metric_original_snapped_frame;
                        } else {
                                ++metric_original_frame;
                        }
                        video_stream->schedule_original_frame(
                                frame_start, pts, display_func, QueueSpotHolder(this),
                                primary_frame);
                } else {
                        assert(secondary_frame.pts != -1);
                        if (snapped) {
                                ++metric_faded_snapped_frame;
                        } else {
                                ++metric_faded_frame;
                        }
                        video_stream->schedule_faded_frame(frame_start, pts, display_func,
                                QueueSpotHolder(this), primary_frame,
                                secondary_frame, fade_alpha);
                }
        }
        last_pts_played = primary_frame.pts;
}

// Find the frame immediately before and after this point.
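// For example, with frames at pts 800, 1000 and 1200, a query for pts 1100 sets
// frame_lower.pts == 1000 and frame_upper.pts == 1200; a query later than the
// last frame's pts returns false.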
bool Player::find_surrounding_frames(int64_t pts, int stream_idx, FrameOnDisk *frame_lower, FrameOnDisk *frame_upper)
{
        lock_guard<mutex> lock(frame_mu);

        // Find the first frame such that frame.pts >= pts.
        auto it = find_last_frame_before(frames[stream_idx], pts);
        if (it == frames[stream_idx].end()) {
                return false;
        }
        *frame_upper = *it;

        // Find the last frame such that frame.pts <= pts (if any).
        if (it == frames[stream_idx].begin()) {
                *frame_lower = *it;
        } else {
                *frame_lower = *(it - 1);
        }
        assert(pts >= frame_lower->pts);
        assert(pts <= frame_upper->pts);
        return true;
}

Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
        : destination(destination), stream_output(stream_output)
{
        player_thread = thread(&Player::thread_func, this, file_avctx);

        if (stream_output == HTTPD_STREAM_OUTPUT) {
                global_metrics.add("http_output_frames", {{ "type", "original" }, { "reason", "edge_frame_or_no_interpolation" }}, &metric_original_frame);
                global_metrics.add("http_output_frames", {{ "type", "faded" }, { "reason", "edge_frame_or_no_interpolation" }}, &metric_faded_frame);
                global_metrics.add("http_output_frames", {{ "type", "original" }, { "reason", "snapped" }}, &metric_original_snapped_frame);
                global_metrics.add("http_output_frames", {{ "type", "faded" }, { "reason", "snapped" }}, &metric_faded_snapped_frame);
                global_metrics.add("http_output_frames", {{ "type", "interpolated" }}, &metric_interpolated_frame);
                global_metrics.add("http_output_frames", {{ "type", "interpolated_faded" }}, &metric_interpolated_faded_frame);
                global_metrics.add("http_output_frames", {{ "type", "refresh" }}, &metric_refresh_frame);
                global_metrics.add("http_dropped_frames", {{ "type", "interpolated" }}, &metric_dropped_interpolated_frame);
                global_metrics.add("http_dropped_frames", {{ "type", "unconditional" }}, &metric_dropped_unconditional_frame);
        }
}

Player::~Player()
{
        should_quit = true;
        if (video_stream != nullptr) {
                video_stream->stop();
        }
        new_clip_changed.notify_all();
        player_thread.join();
}

void Player::play(const vector<Clip> &clips)
{
        lock_guard<mutex> lock(queue_state_mu);
        new_clip_ready = true;
        queued_clip_list = clips;
        override_stream_idx = -1;
        new_clip_changed.notify_all();
}

void Player::override_angle(unsigned stream_idx)
{
        int64_t last_pts;

        // Corner case: If a new clip is waiting to be played, change its stream and then we're done.
        {
                lock_guard<mutex> lock(queue_state_mu);
                if (new_clip_ready) {
                        assert(queued_clip_list.size() == 1);
                        queued_clip_list[0].stream_idx = stream_idx;
                        return;
                }

                // If we are playing a clip, set override_stream_idx, and the player thread will
                // pick it up and change its internal index.
                if (playing) {
                        override_stream_idx = stream_idx;
                        new_clip_changed.notify_all();
                        return;
                }

                // OK, so we're standing still, presumably at the end of a clip.
                // Look at the last frame played (if it exists), and show the closest
                // thing we've got.
                if (last_pts_played < 0) {
                        return;
                }
                last_pts = last_pts_played;
        }

        lock_guard<mutex> lock(frame_mu);
        auto it = find_first_frame_at_or_after(frames[stream_idx], last_pts);
        if (it == frames[stream_idx].end()) {
                return;
        }
        destination->setFrame(stream_idx, *it);
}

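// take_queue_spot() and release_queue_spot() presumably back the QueueSpotHolder
// objects handed to VideoStream above: each outstanding spot counts as one queued
// frame, and play_playlist_once() blocks once num_queued_frames reaches
// max_queued_frames.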
void Player::take_queue_spot()
{
        lock_guard<mutex> lock(queue_state_mu);
        ++num_queued_frames;
}

void Player::release_queue_spot()
{
        lock_guard<mutex> lock(queue_state_mu);
        assert(num_queued_frames > 0);
        --num_queued_frames;
        new_clip_changed.notify_all();
}

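// Worked example: three 10-second clips with 1.0 s fades, with the middle clip
// 40% played, leave 10 * 0.6 = 6 s of that clip plus max(10 - 1, 0) = 9 s of the
// last one, i.e. 15 s remaining in total.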
double compute_time_left(const vector<Clip> &clips, const map<size_t, double> &progress)
{
        // Look at the last clip and then start counting from there.
        assert(!progress.empty());
        auto last_it = progress.end();
        --last_it;
        double remaining = 0.0;
        double last_fade_time_seconds = 0.0;
        for (size_t row = last_it->first; row < clips.size(); ++row) {
                const Clip &clip = clips[row];
                double clip_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / clip.speed;
                if (row == last_it->first) {
                        // A clip we're playing: Subtract the part we've already played.
                        remaining = clip_length * (1.0 - last_it->second);
                } else {
                        // A clip we haven't played yet: Subtract the part that's overlapping
                        // with a previous clip (due to fade).
                        remaining += max(clip_length - last_fade_time_seconds, 0.0);
                }
                last_fade_time_seconds = min(clip_length, clip.fade_time_seconds);
        }
        return remaining;
}