extern HTTPD *global_httpd;
-void Player::thread_func(bool also_output_to_stream)
+void Player::thread_func(Player::StreamOutput stream_output, AVFormatContext *file_avctx)
{
pthread_setname_np(pthread_self(), "Player");
check_error();
// Create the VideoStream object, now that we have an OpenGL context.
- if (also_output_to_stream) {
- video_stream.reset(new VideoStream);
+ if (stream_output != NO_STREAM_OUTPUT) {
+ video_stream.reset(new VideoStream(file_avctx));
video_stream->start();
}
check_error();
- constexpr double output_framerate = 60000.0 / 1001.0; // FIXME: make configurable
int64_t pts = 0;
Clip next_clip;
size_t next_clip_idx = size_t(-1);
int64_t in_pts_start_next_clip = -1;
steady_clock::time_point next_frame_start;
for (int frameno = 0; !should_quit; ++frameno) { // Ends when the clip ends.
- double out_pts = out_pts_origin + TIMEBASE * frameno / output_framerate;
+ double out_pts = out_pts_origin + TIMEBASE * frameno / global_flags.output_framerate;
next_frame_start =
origin + microseconds(lrint((out_pts - out_pts_origin) * 1e6 / TIMEBASE));
- int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * speed / output_framerate);
+ int64_t in_pts = lrint(in_pts_origin + TIMEBASE * frameno * speed / global_flags.output_framerate);
pts = lrint(out_pts);
if (in_pts >= clip.pts_out) {
}
steady_clock::duration time_behind = steady_clock::now() - next_frame_start;
- if (time_behind >= milliseconds(200)) {
+ if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(200)) {
fprintf(stderr, "WARNING: %ld ms behind, dropping a frame (no matter the type).\n",
lrint(1e3 * duration<double>(time_behind).count()));
continue;
if (next_clip.pts_in != -1) {
got_next_clip = true;
- double duration_next_clip = (next_clip.pts_out - next_clip.pts_in) / TIMEBASE / speed;
+ double duration_next_clip = double(next_clip.pts_out - next_clip.pts_in) / TIMEBASE / speed;
next_clip_fade_time = std::min(time_left_this_clip, duration_next_clip);
in_pts_start_next_clip = next_clip.pts_in + lrint(next_clip_fade_time * TIMEBASE * speed);
}
if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0) {
auto display_func = [this, primary_stream_idx, frame_lower, secondary_frame, fade_alpha]{
- destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, frame_lower, secondary_frame, fade_alpha);
+ }
};
if (video_stream == nullptr) {
display_func();
// TODO: Snap secondary (fade-to) clips in the same fashion.
bool snapped = false;
for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
- double snap_pts_as_frameno = (snap_frame.pts - in_pts_origin) * output_framerate / TIMEBASE / speed;
+ double snap_pts_as_frameno = (snap_frame.pts - in_pts_origin) * global_flags.output_framerate / TIMEBASE / speed;
if (fabs(snap_pts_as_frameno - frameno) < 0.01) {
auto display_func = [this, primary_stream_idx, snap_frame, secondary_frame, fade_alpha]{
- destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, snap_frame, secondary_frame, fade_alpha);
+ }
};
if (video_stream == nullptr) {
display_func();
continue;
}
- if (time_behind >= milliseconds(100)) {
+ if (stream_output != FILE_STREAM_OUTPUT && time_behind >= milliseconds(100)) {
fprintf(stderr, "WARNING: %ld ms behind, dropping an interpolated frame.\n",
lrint(1e3 * duration<double>(time_behind).count()));
continue;
if (video_stream == nullptr) {
// Previews don't do any interpolation.
assert(secondary_stream_idx == -1);
- destination->setFrame(primary_stream_idx, frame_lower);
+ if (destination != nullptr) {
+ destination->setFrame(primary_stream_idx, frame_lower);
+ }
} else {
auto display_func = [this](shared_ptr<Frame> frame) {
- destination->setFrame(frame);
+ if (destination != nullptr) {
+ destination->setFrame(frame);
+ }
};
video_stream->schedule_interpolated_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
return true;
}
-Player::Player(JPEGFrameView *destination, bool also_output_to_stream)
+Player::Player(JPEGFrameView *destination, Player::StreamOutput stream_output, AVFormatContext *file_avctx)
: destination(destination)
{
- player_thread = thread(&Player::thread_func, this, also_output_to_stream);
+ player_thread = thread(&Player::thread_func, this, stream_output, file_avctx);
}
Player::~Player()
--num_queued_frames;
new_clip_changed.notify_all();
}
+
+double compute_time_left(const vector<Clip> &clips, const map<size_t, double> &progress)
+{
+ // Look at the last clip and then start counting from there.
+ assert(!progress.empty());
+ auto last_it = progress.end();
+ --last_it;
+ double remaining = 0.0;
+ double last_fade_time_seconds = 0.0;
+ for (size_t row = last_it->first; row < clips.size(); ++row) {
+ const Clip &clip = clips[row];
+ double clip_length = double(clip.pts_out - clip.pts_in) / TIMEBASE / 0.5; // FIXME: stop hardcoding speed.
+ if (row == last_it->first) {
+ // A clip we're playing: Subtract the part we've already played.
+ remaining = clip_length * (1.0 - last_it->second);
+ } else {
+ // A clip we haven't started playing yet: Add its length, minus the
+ // part that overlaps with the previous clip (due to fade).
+ remaining += max(clip_length - last_fade_time_seconds, 0.0);
+ }
+ last_fade_time_seconds = min(clip_length, clip.fade_time_seconds);
+ }
+ return remaining;
+}