ui->stop_btn->setEnabled(false);
}
-static string format_duration(double t)
-{
- int t_ms = lrint(t * 1e3);
-
- int ms = t_ms % 1000;
- t_ms /= 1000;
- int s = t_ms % 60;
- t_ms /= 60;
- int m = t_ms;
-
- char buf[256];
- snprintf(buf, sizeof(buf), "%d:%02d.%03d", m, s, ms);
- return buf;
-}
-
void MainWindow::live_player_clip_progress(const map<uint64_t, double> &progress, double time_remaining)
{
playlist_clips->set_progress(progress);
void MainWindow::set_output_status(const string &status)
{
ui->live_label->setText(QString::fromStdString("Current output (" + status + ")"));
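+	// Also hand the status to the player, which embeds it in the stream's
+	// "PAUSED" status subtitle whenever nothing is playing.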
+ if (live_player != nullptr) {
+ live_player->set_pause_status(status);
+ }
lock_guard<mutex> lock(queue_status_mu);
queue_status = status;
vector<ClipWithID> clip_list;
bool clip_ready;
steady_clock::time_point before_sleep = steady_clock::now();
+ string pause_status;
// Wait until we're supposed to play something.
{
queued_clip_list.clear();
assert(!clip_list.empty());
assert(!splice_ready); // This corner case should have been handled in splice_play().
+ } else {
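+			// Take a copy while we still hold the lock; it is used below,
+			// after the lock has been released.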
+ pause_status = this->pause_status;
}
}
if (!clip_ready) {
if (video_stream != nullptr) {
++metric_refresh_frame;
- video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder());
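+			// Status subtitle in the same machine-readable format as the
+			// PLAYING subtitles below: "Futatabi <version>;PAUSED;<pause status>".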
+ string subtitle = "Futatabi " NAGERU_VERSION ";PAUSED;" + pause_status;
+ video_stream->schedule_refresh_frame(steady_clock::now(), pts, /*display_func=*/nullptr, QueueSpotHolder(),
+ subtitle);
}
return;
}
}
}
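+	// Compute progress and time remaining up front; time_remaining is needed
+	// for the status subtitle below even if there is no progress callback.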
+ // NOTE: None of this will take into account any snapping done below.
+ double clip_progress = calc_progress(*clip, in_pts_for_progress);
+ map<uint64_t, double> progress{ { clip_list[clip_idx].id, clip_progress } };
+ double time_remaining;
+ if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
+ double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress);
+ progress[clip_list[clip_idx + 1].id] = next_clip_progress;
+ time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress);
+ } else {
+ time_remaining = compute_time_left(clip_list, clip_idx, clip_progress);
+ }
if (progress_callback != nullptr) {
- // NOTE: None of this will take into account any snapping done below.
- double clip_progress = calc_progress(*clip, in_pts_for_progress);
- map<uint64_t, double> progress{ { clip_list[clip_idx].id, clip_progress } };
- double time_remaining;
- if (next_clip != nullptr && time_left_this_clip <= next_clip_fade_time) {
- double next_clip_progress = calc_progress(*next_clip, in_pts_secondary_for_progress);
- progress[clip_list[clip_idx + 1].id] = next_clip_progress;
- time_remaining = compute_time_left(clip_list, clip_idx + 1, next_clip_progress);
- } else {
- time_remaining = compute_time_left(clip_list, clip_idx, clip_progress);
- }
progress_callback(progress, time_remaining);
}
}
}
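+	// Build the status subtitle for this frame:
+	// "Futatabi <version>;PLAYING;<seconds left>;<M:SS.mmm> left".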
+ string subtitle;
+ {
+ stringstream ss;
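+		// Use the C locale, so that the decimal separator is always a period
+		// regardless of the system locale.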
+ ss.imbue(locale("C"));
+ ss.precision(3);
+ ss << "Futatabi " NAGERU_VERSION ";PLAYING;";
+ ss << fixed << time_remaining;
+ ss << ";" << format_duration(time_remaining) << " left";
+ subtitle = ss.str();
+ }
+
// If there's nothing to interpolate between, or if interpolation is turned off,
// or we're a preview, then just display the frame.
if (frame_lower.pts == frame_upper.pts || global_flags.interpolation_quality == 0 || video_stream == nullptr) {
display_single_frame(primary_stream_idx, frame_lower, secondary_stream_idx,
- secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false);
+ secondary_frame, fade_alpha, next_frame_start, /*snapped=*/false,
+ subtitle);
continue;
}
for (FrameOnDisk snap_frame : { frame_lower, frame_upper }) {
if (fabs(snap_frame.pts - in_pts) < pts_snap_tolerance) {
display_single_frame(primary_stream_idx, snap_frame, secondary_stream_idx,
- secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true);
+ secondary_frame, fade_alpha, next_frame_start, /*snapped=*/true,
+ subtitle);
in_pts_origin += snap_frame.pts - in_pts;
snapped = true;
break;
video_stream->schedule_interpolated_frame(
next_frame_start, pts, display_func, QueueSpotHolder(this),
frame_lower, frame_upper, alpha,
- secondary_frame, fade_alpha);
+ secondary_frame, fade_alpha, subtitle);
last_pts_played = in_pts; // Not really needed; only previews use last_pts_played.
}
}
}
-void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped)
+void Player::display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, steady_clock::time_point frame_start, bool snapped, const string &subtitle)
{
auto display_func = [this, primary_stream_idx, primary_frame, secondary_frame, fade_alpha] {
if (destination != nullptr) {
}
video_stream->schedule_original_frame(
frame_start, pts, display_func, QueueSpotHolder(this),
- primary_frame);
+ primary_frame, subtitle);
} else {
assert(secondary_frame.pts != -1);
// NOTE: We could be increasing unused metrics for previews, but that's harmless.
}
video_stream->schedule_faded_frame(frame_start, pts, display_func,
QueueSpotHolder(this), primary_frame,
- secondary_frame, fade_alpha);
+ secondary_frame, fade_alpha, subtitle);
}
}
last_pts_played = primary_frame.pts;
}
return remaining;
}
+
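+// Format a duration in seconds as M:SS.mmm; e.g., format_duration(83.5)
+// returns "1:23.500".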
+string format_duration(double t)
+{
+ int t_ms = lrint(t * 1e3);
+
+ int ms = t_ms % 1000;
+ t_ms /= 1000;
+ int s = t_ms % 60;
+ t_ms /= 60;
+ int m = t_ms;
+
+ char buf[256];
+ snprintf(buf, sizeof(buf), "%d:%02d.%03d", m, s, ms);
+ return buf;
+}
// If nothing is playing, the call will be ignored.
void splice_play(const std::vector<ClipWithID> &clips);
+ // Set the status string that will be used for the video stream's status subtitles
+ // whenever we are not playing anything.
+ void set_pause_status(const std::string &status)
+ {
+ std::lock_guard<std::mutex> lock(queue_state_mu);
+ pause_status = status;
+ }
+
// Not thread-safe to set concurrently with playing.
// Will be called back from the player thread.
using done_callback_func = std::function<void()>;
private:
void thread_func(AVFormatContext *file_avctx);
void play_playlist_once();
- void display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, std::chrono::steady_clock::time_point frame_start, bool snapped);
+ void display_single_frame(int primary_stream_idx, const FrameOnDisk &primary_frame, int secondary_stream_idx, const FrameOnDisk &secondary_frame, double fade_alpha, std::chrono::steady_clock::time_point frame_start, bool snapped, const std::string &subtitle);
void open_output_stream();
static int write_packet2_thunk(void *opaque, uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
int write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType type, int64_t time);
bool splice_ready = false; // Under queue_state_mu.
std::vector<ClipWithID> to_splice_clip_list; // Under queue_state_mu.
+ std::string pause_status = "paused"; // Under queue_state_mu.
std::unique_ptr<VideoStream> video_stream; // Can be nullptr.
return compute_time_left(clips, 0, 0.0);
}
+std::string format_duration(double t);
+
#endif // !defined(_PLAYER_H)
size_t width = global_flags.width, height = global_flags.height; // Doesn't matter for MJPEG.
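+	// The mux also gets a subtitle stream, so that the status subtitles
+	// scheduled with each frame can be sent along with the video.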
mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
- AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
+ AVCOL_SPC_BT709, COARSE_TIMEBASE, /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}, Mux::WITH_SUBTITLES));
encode_thread = thread(&VideoStream::encode_thread_func, this);
}
void VideoStream::schedule_original_frame(steady_clock::time_point local_pts,
int64_t output_pts, function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- FrameOnDisk frame)
+ FrameOnDisk frame, const string &subtitle)
{
fprintf(stderr, "output_pts=%ld original input_pts=%ld\n", output_pts, frame.pts);
qf.frame1 = frame;
qf.display_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
+ qf.subtitle = subtitle;
lock_guard<mutex> lock(queue_lock);
frame_queue.push_back(move(qf));
function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
FrameOnDisk frame1_spec, FrameOnDisk frame2_spec,
- float fade_alpha)
+ float fade_alpha, const string &subtitle)
{
fprintf(stderr, "output_pts=%ld faded input_pts=%ld,%ld fade_alpha=%.2f\n", output_pts, frame1_spec.pts, frame2_spec.pts, fade_alpha);
qf.frame1 = frame1_spec;
qf.display_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
+ qf.subtitle = subtitle;
qf.secondary_frame = frame2_spec;
int64_t output_pts, function<void(shared_ptr<Frame>)> &&display_func,
QueueSpotHolder &&queue_spot_holder,
FrameOnDisk frame1, FrameOnDisk frame2,
- float alpha, FrameOnDisk secondary_frame, float fade_alpha)
+ float alpha, FrameOnDisk secondary_frame, float fade_alpha, const string &subtitle)
{
if (secondary_frame.pts != -1) {
fprintf(stderr, "output_pts=%ld interpolated input_pts1=%ld input_pts2=%ld alpha=%.3f secondary_pts=%ld fade_alpha=%.2f\n", output_pts, frame1.pts, frame2.pts, alpha, secondary_frame.pts, fade_alpha);
qf.display_decoded_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
qf.local_pts = local_pts;
+ qf.subtitle = subtitle;
check_error();
void VideoStream::schedule_refresh_frame(steady_clock::time_point local_pts,
int64_t output_pts, function<void()> &&display_func,
- QueueSpotHolder &&queue_spot_holder)
+ QueueSpotHolder &&queue_spot_holder, const string &subtitle)
{
QueuedFrame qf;
qf.type = QueuedFrame::REFRESH;
qf.output_pts = output_pts;
qf.display_func = move(display_func);
qf.queue_spot_holder = move(queue_spot_holder);
+ qf.subtitle = subtitle;
lock_guard<mutex> lock(queue_lock);
frame_queue.push_back(move(qf));
} else {
assert(false);
}
+
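+	// If the frame carries a status subtitle, send it as a WebVTT packet with
+	// the same pts as the frame it describes. (add_packet() copies the packet,
+	// so it is fine that pkt.data points into qf.subtitle.)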
+ if (!qf.subtitle.empty()) {
+ AVPacket pkt;
+ av_init_packet(&pkt);
+ pkt.stream_index = mux->get_subtitle_stream_idx();
+ assert(pkt.stream_index != -1);
+ pkt.data = (uint8_t *)qf.subtitle.data();
+ pkt.size = qf.subtitle.size();
+		pkt.flags = AV_PKT_FLAG_KEY;  // Every subtitle cue is independently decodable.
+ pkt.duration = lrint(TIMEBASE / global_flags.output_framerate); // Doesn't really matter for Nageru.
+ mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+ }
+
if (qf.display_func != nullptr) {
qf.display_func();
}
void schedule_original_frame(std::chrono::steady_clock::time_point,
int64_t output_pts, std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
- FrameOnDisk frame);
+ FrameOnDisk frame, const std::string &subtitle);
void schedule_faded_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
QueueSpotHolder &&queue_spot_holder,
FrameOnDisk frame1, FrameOnDisk frame2,
- float fade_alpha);
+ float fade_alpha, const std::string &subtitle);
void schedule_interpolated_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void(std::shared_ptr<Frame>)> &&display_func,
QueueSpotHolder &&queue_spot_holder,
FrameOnDisk frame1, FrameOnDisk frame2,
- float alpha, FrameOnDisk secondary_frame = {}, // Empty = no secondary (fade) frame.
- float fade_alpha = 0.0f);
+ float alpha, FrameOnDisk secondary_frame, // Empty = no secondary (fade) frame.
+ float fade_alpha, const std::string &subtitle);
void schedule_refresh_frame(std::chrono::steady_clock::time_point, int64_t output_pts,
std::function<void()> &&display_func,
- QueueSpotHolder &&queue_spot_holder);
+ QueueSpotHolder &&queue_spot_holder, const std::string &subtitle);
private:
FrameReader frame_reader;
std::function<void()> display_func; // Called when the image is done decoding.
std::function<void(std::shared_ptr<Frame>)> display_decoded_func; // Same, except for INTERPOLATED and FADED_INTERPOLATED.
+ std::string subtitle; // Blank for none.
+
QueueSpotHolder queue_spot_holder;
};
std::deque<QueuedFrame> frame_queue; // Under <queue_lock>.
const AVFormatContext * const ctx;
};
-Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, function<void(int64_t)> write_callback, WriteStrategy write_strategy, const vector<MuxMetrics *> &metrics)
+Mux::Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, function<void(int64_t)> write_callback, WriteStrategy write_strategy, const vector<MuxMetrics *> &metrics, WithSubtitles with_subtitles)
: write_strategy(write_strategy), avctx(avctx), write_callback(write_callback), metrics(metrics)
{
AVStream *avstream_video = avformat_new_stream(avctx, nullptr);
streams.push_back(avstream_audio);
}
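+	// Optionally add a stream for status subtitles (as WebVTT). The metadata
+	// disposition signals that it is not meant as user-facing subtitles.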
+ if (with_subtitles == WITH_SUBTITLES) {
+ AVStream *avstream_subtitles = avformat_new_stream(avctx, nullptr);
+ if (avstream_subtitles == nullptr) {
+ fprintf(stderr, "avformat_new_stream() failed\n");
+ exit(1);
+ }
+ avstream_subtitles->time_base = AVRational{1, time_base};
+ avstream_subtitles->codecpar->codec_type = AVMEDIA_TYPE_SUBTITLE;
+ avstream_subtitles->codecpar->codec_id = AV_CODEC_ID_WEBVTT;
+ avstream_subtitles->disposition = AV_DISPOSITION_METADATA;
+ streams.push_back(avstream_subtitles);
+ subtitle_stream_idx = streams.size() - 1;
+ }
+
AVDictionary *options = NULL;
vector<pair<string, string>> opts = MUX_OPTS;
for (pair<string, string> opt : opts) {
// higher overhead.
WRITE_BACKGROUND,
};
+ enum WithSubtitles {
+ WITH_SUBTITLES,
+ WITHOUT_SUBTITLES
+ };
// Takes ownership of avctx. <write_callback> will be called every time
// a write has been made to the video stream (id 0), with the pts of
// will be added to.
//
// If audio_codecpar is nullptr, there will be no audio stream.
- Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const std::string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, std::function<void(int64_t)> write_callback, WriteStrategy write_strategy, const std::vector<MuxMetrics *> &metrics);
+ Mux(AVFormatContext *avctx, int width, int height, Codec video_codec, const std::string &video_extradata, const AVCodecParameters *audio_codecpar, AVColorSpace color_space, int time_base, std::function<void(int64_t)> write_callback, WriteStrategy write_strategy, const std::vector<MuxMetrics *> &metrics, WithSubtitles with_subtitles = WITHOUT_SUBTITLES);
~Mux();
void add_packet(const AVPacket &pkt, int64_t pts, int64_t dts, AVRational timebase = { 1, TIMEBASE }, int stream_index_override = -1);
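+	// Returns -1 if the mux was created without a subtitle stream
+	// (i.e., WITHOUT_SUBTITLES).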
+ int get_subtitle_stream_idx() const { return subtitle_stream_idx; }
// As long as the mux is plugged, it will not actually write anything to disk,
// just queue the packets. Once it is unplugged, the packets are reordered by pts
std::condition_variable packet_queue_ready;
std::vector<AVStream *> streams;
+ int subtitle_stream_idx = -1;
std::function<void(int64_t)> write_callback;
std::vector<MuxMetrics *> metrics;