X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=futatabi%2Fvideo_stream.cpp;h=ef56ee4a96711a703b900b3ab60121298803160d;hb=eaba7288c4fb39ca195c9355970293bcaf088dbc;hp=2f8f8848e37f4d962e72f339230f05aa7e453e1c;hpb=52336c086b8bc355b55e2046e3a055b1b4c70ef7;p=nageru

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 2f8f884..ef56ee4 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -129,7 +129,8 @@ vector<uint8_t> encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const
 	return move(dest.dest);
 }
 
-VideoStream::VideoStream()
+VideoStream::VideoStream(AVFormatContext *file_avctx)
+	: avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
 {
 	ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
 	ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
@@ -207,7 +208,10 @@ VideoStream::VideoStream()
 	check_error();
 
 	OperatingPoint op;
-	if (global_flags.interpolation_quality == 1) {
+	if (global_flags.interpolation_quality == 0) {
+		// Allocate something just for simplicity; we won't be using it.
+		op = operating_point1;
+	} else if (global_flags.interpolation_quality == 1) {
 		op = operating_point1;
 	} else if (global_flags.interpolation_quality == 2) {
 		op = operating_point2;
@@ -237,27 +241,25 @@ VideoStream::~VideoStream() {}
 
 void VideoStream::start()
 {
-	AVFormatContext *avctx = avformat_alloc_context();
-
-	// We use Matroska, because it's pretty much the only mux where FFmpeg
-	// allows writing chroma location to override JFIF's default center placement.
-	// (Note that at the time of writing, however, FFmpeg does not correctly
-	// _read_ this information!)
-	avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
-
-	uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
-	avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
-	avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
-	avctx->pb->ignore_boundary_point = 1;
+	if (avctx == nullptr) {
+		avctx = avformat_alloc_context();
 
-	Mux::Codec video_codec = Mux::CODEC_MJPEG;
+		// We use Matroska, because it's pretty much the only mux where FFmpeg
+		// allows writing chroma location to override JFIF's default center placement.
+		// (Note that at the time of writing, however, FFmpeg does not correctly
+		// _read_ this information!)
+		avctx->oformat = av_guess_format("matroska", nullptr, nullptr);
 
-	avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+		uint8_t *buf = (uint8_t *)av_malloc(MUX_BUFFER_SIZE);
+		avctx->pb = avio_alloc_context(buf, MUX_BUFFER_SIZE, 1, this, nullptr, nullptr, nullptr);
+		avctx->pb->write_data_type = &VideoStream::write_packet2_thunk;
+		avctx->pb->ignore_boundary_point = 1;
 
-	string video_extradata;
+		avctx->flags = AVFMT_FLAG_CUSTOM_IO;
+	}
 
 	size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
-	stream_mux.reset(new Mux(avctx, width, height, video_codec, video_extradata, /*audio_codec_parameters=*/nullptr,
+	mux.reset(new Mux(avctx, width, height, Mux::CODEC_MJPEG, /*video_extradata=*/"", /*audio_codec_parameters=*/nullptr,
 	                  AVCOL_SPC_BT709, Mux::WITHOUT_AUDIO, COARSE_TIMEBASE,
 	                  /*write_callback=*/nullptr, Mux::WRITE_FOREGROUND, {}));
 
@@ -266,6 +268,8 @@ void VideoStream::start()
 
 void VideoStream::stop()
 {
+	should_quit = true;
+	clear_queue();
 	encode_thread.join();
 }
 
@@ -549,7 +553,7 @@ void VideoStream::encode_thread_func()
 		exit(1);
 	}
 
-	for ( ;; ) {
+	while (!should_quit) {
 		QueuedFrame qf;
 		{
 			unique_lock<mutex> lock(queue_lock);
@@ -562,9 +566,14 @@
 
 			// Now sleep until the frame is supposed to start (the usual case),
 			// _or_ clear_queue() happened.
-			bool aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
-				return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
-			});
+			bool aborted;
+			if (output_fast_forward) {
+				aborted = frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+			} else {
+				aborted = queue_changed.wait_until(lock, frame_start, [this, frame_start]{
+					return frame_queue.empty() || frame_queue.front().local_pts != frame_start;
+				});
+			}
 			if (aborted) {
 				// clear_queue() happened, so don't play this frame after all.
 				continue;
@@ -581,7 +590,7 @@
 			pkt.stream_index = 0;
 			pkt.data = (uint8_t *)jpeg.data();
 			pkt.size = jpeg.size();
-			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
 
 			last_frame.assign(&jpeg[0], &jpeg[0] + jpeg.size());
 		} else if (qf.type == QueuedFrame::FADED) {
@@ -597,7 +606,7 @@
 			pkt.stream_index = 0;
 			pkt.data = (uint8_t *)jpeg.data();
 			pkt.size = jpeg.size();
-			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
 			last_frame = move(jpeg);
 		} else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
 			glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
@@ -621,7 +630,7 @@
 			pkt.stream_index = 0;
 			pkt.data = (uint8_t *)jpeg.data();
 			pkt.size = jpeg.size();
-			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
 			last_frame = move(jpeg);
 		} else if (qf.type == QueuedFrame::REFRESH) {
 			AVPacket pkt;
@@ -629,7 +638,7 @@
 			pkt.stream_index = 0;
 			pkt.data = (uint8_t *)last_frame.data();
 			pkt.size = last_frame.size();
-			stream_mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
 		} else {
 			assert(false);
 		}
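
Reviewer note (not part of the commit): a minimal sketch of how the new file_avctx path might be driven from calling code, assuming the caller allocates the output context itself with FFmpeg's avformat_alloc_output_context2()/avio_open() and hands it to the constructor. Only VideoStream(AVFormatContext *), start() and stop() are taken from the diff above; the helper name, error handling and ownership/cleanup of the context are hypothetical.

// Hypothetical calling code for the file-export path (not from the commit).
extern "C" {
#include <libavformat/avformat.h>
}

#include <memory>
#include <string>

#include "video_stream.h"

std::unique_ptr<VideoStream> start_export_stream(const std::string &filename)
{
	AVFormatContext *avctx = nullptr;

	// Ask FFmpeg for a Matroska muxer writing to the given file, mirroring
	// the container choice of the streaming path above.
	if (avformat_alloc_output_context2(&avctx, nullptr, "matroska", filename.c_str()) < 0) {
		return nullptr;
	}
	if (!(avctx->oformat->flags & AVFMT_NOFILE) &&
	    avio_open(&avctx->pb, filename.c_str(), AVIO_FLAG_WRITE) < 0) {
		avformat_free_context(avctx);
		return nullptr;
	}

	// A non-null context selects output_fast_forward mode, so the encode
	// thread skips the wait_until() pacing and muxes frames as fast as
	// they are produced.
	auto stream = std::make_unique<VideoStream>(avctx);
	stream->start();
	return stream;  // Caller later calls stop(), then closes avctx->pb and frees avctx.
}

The new should_quit flag is what makes a shutdown like this possible at all: the encode loop used to be an infinite for ( ;; ), so encode_thread.join() in stop() could never return.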