X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=futatabi%2Fvideo_stream.cpp;h=591ee7e219883e5f4eb9209cbb17e5100108ea83;hb=refs%2Fheads%2Fmaster;hp=9a120b51344a70b6b4744519ee07f328c50f7b3b;hpb=32b87c91cf51d730ff5abc8347884219918fad66;p=nageru

diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 9a120b5..c12acdf 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -3,16 +3,21 @@
 extern "C" {
 #include
 #include
+#include
 }
 
 #include "chroma_subsampler.h"
+#include "exif_parser.h"
 #include "flags.h"
 #include "flow.h"
 #include "jpeg_frame_view.h"
 #include "movit/util.h"
+#include "pbo_pool.h"
 #include "player.h"
 #include "shared/context.h"
+#include "shared/ffmpeg_raii.h"
 #include "shared/httpd.h"
+#include "shared/metrics.h"
 #include "shared/shared_defs.h"
 #include "shared/mux.h"
 #include "util.h"
@@ -22,9 +27,29 @@ extern "C" {
 #include
 #include
 
+using namespace movit;
 using namespace std;
 using namespace std::chrono;
 
+namespace {
+
+once_flag video_metrics_inited;
+Summary metric_jpeg_encode_time_seconds;
+Summary metric_fade_latency_seconds;
+Summary metric_interpolation_latency_seconds;
+Summary metric_fade_fence_wait_time_seconds;
+Summary metric_interpolation_fence_wait_time_seconds;
+
+void wait_for_upload(shared_ptr<Frame> &frame)
+{
+	if (frame->uploaded_interpolation != nullptr) {
+		glWaitSync(frame->uploaded_interpolation.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+		frame->uploaded_interpolation.reset();
+	}
+}
+
+}  // namespace
+
 extern HTTPD *global_httpd;
 
 struct VectorDestinationManager {
@@ -81,6 +106,7 @@ static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
 string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
 {
+	steady_clock::time_point start = steady_clock::now();
 	VectorDestinationManager dest;
 
 	jpeg_compress_struct cinfo;
@@ -131,12 +157,39 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
 	jpeg_finish_compress(&cinfo);
 	jpeg_destroy_compress(&cinfo);
 
+	steady_clock::time_point stop = steady_clock::now();
+	metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());
+
 	return move(dest.dest);
 }
 
+string encode_jpeg_from_pbo(void *contents, unsigned width, unsigned height, const string exif_data)
+{
+	unsigned chroma_width = width / 2;
+
+	const uint8_t *y = (const uint8_t *)contents;
+	const uint8_t *cb = (const uint8_t *)contents + width * height;
+	const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;
+	return encode_jpeg(y, cb, cr, width, height, move(exif_data));
+}
+
 VideoStream::VideoStream(AVFormatContext *file_avctx)
 	: avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
 {
+	call_once(video_metrics_inited, [] {
+		vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
+		metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
+		global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
+		metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
+		global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
+		metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
+		global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
+		metric_fade_latency_seconds.init(quantiles, 60.0);
+		global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
+		metric_interpolation_latency_seconds.init(quantiles, 60.0);
+		global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
+	});
+
 	ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
 	ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
 
@@ -301,8 +354,9 @@ void VideoStream::start()
 	audio_codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
 	audio_codecpar->codec_id = AV_CODEC_ID_PCM_S32LE;
-	audio_codecpar->channel_layout = AV_CH_LAYOUT_STEREO;
-	audio_codecpar->channels = 2;
+	audio_codecpar->ch_layout.order = AV_CHANNEL_ORDER_NATIVE;
+	audio_codecpar->ch_layout.nb_channels = 2;
+	audio_codecpar->ch_layout.u.mask = AV_CH_LAYOUT_STEREO;
 	audio_codecpar->sample_rate = OUTPUT_FREQUENCY;
 
 	size_t width = global_flags.width, height = global_flags.height;  // Doesn't matter for MJPEG.
 
@@ -398,6 +452,8 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
 	shared_ptr<Frame> frame1 = decode_jpeg_with_cache(frame1_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
 	shared_ptr<Frame> frame2 = decode_jpeg_with_cache(frame2_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+	wait_for_upload(frame1);
+	wait_for_upload(frame2);
 
 	ycbcr_semiplanar_converter->prepare_chain_for_fade(frame1, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
@@ -430,6 +486,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
 	// Set a fence we can wait for to make sure the CPU sees the read.
 	glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
 	check_error();
+	qf.fence_created = steady_clock::now();
 	qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
 	check_error();
 	qf.resources = move(resources);
@@ -484,6 +541,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 		FrameOnDisk frame_spec = frame_no == 1 ? frame2 : frame1;
 		bool did_decode;
 		shared_ptr<Frame> frame = decode_jpeg_with_cache(frame_spec, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+		wait_for_upload(frame);
 		ycbcr_converter->prepare_chain_for_conversion(frame)->render_to_fbo(resources->input_fbos[frame_no], global_flags.width, global_flags.height);
 		if (frame_no == 1) {
 			qf.exif_data = frame->exif_data;  // Use the white point from the last frame.
@@ -524,14 +582,19 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 		// Now decode the image we are fading against.
 		bool did_decode;
 		shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
+		wait_for_upload(frame2);
 
 		// Then fade against it, putting it into the fade Y' and CbCr textures.
-		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+		RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+		ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
 		// Subsample and split Cb/Cr.
 		chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
 
 		interpolate_no_split->release_texture(qf.output_tex);
+
+		// We already applied the white balance, so don't have the client redo it.
+		qf.exif_data.clear();
 	} else {
 		tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
 		check_error();
@@ -566,6 +629,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
 	// Set a fence we can wait for to make sure the CPU sees the read.
 	glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
 	check_error();
+	qf.fence_created = steady_clock::now();
 	qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
 	check_error();
 	qf.resources = move(resources);
@@ -607,31 +671,27 @@ void VideoStream::schedule_silence(steady_clock::time_point local_pts, int64_t o
 
 namespace {
 
-shared_ptr<Frame> frame_from_pbo(void *contents, size_t width, size_t height)
+RefCountedTexture clone_r8_texture(GLuint src_tex, unsigned width, unsigned height)
 {
-	size_t chroma_width = width / 2;
-
-	const uint8_t *y = (const uint8_t *)contents;
-	const uint8_t *cb = (const uint8_t *)contents + width * height;
-	const uint8_t *cr = (const uint8_t *)contents + width * height + chroma_width * height;
+	GLuint tex;
+	glCreateTextures(GL_TEXTURE_2D, 1, &tex);
+	check_error();
+	glTextureStorage2D(tex, 1, GL_R8, width, height);
+	check_error();
+	glCopyImageSubData(src_tex, GL_TEXTURE_2D, 0, 0, 0, 0,
+	                   tex, GL_TEXTURE_2D, 0, 0, 0, 0,
+	                   width, height, 1);
+	check_error();
+	glTextureParameteri(tex, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+	check_error();
+	glTextureParameteri(tex, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+	check_error();
+	glTextureParameteri(tex, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+	check_error();
+	glTextureParameteri(tex, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+	check_error();
 
-	shared_ptr<Frame> frame(new Frame);
-	frame->y.reset(new uint8_t[width * height]);
-	frame->cb.reset(new uint8_t[chroma_width * height]);
-	frame->cr.reset(new uint8_t[chroma_width * height]);
-	for (unsigned yy = 0; yy < height; ++yy) {
-		memcpy(frame->y.get() + width * yy, y + width * yy, width);
-		memcpy(frame->cb.get() + chroma_width * yy, cb + chroma_width * yy, chroma_width);
-		memcpy(frame->cr.get() + chroma_width * yy, cr + chroma_width * yy, chroma_width);
-	}
-	frame->is_semiplanar = false;
-	frame->width = width;
-	frame->height = height;
-	frame->chroma_subsampling_x = 2;
-	frame->chroma_subsampling_y = 1;
-	frame->pitch_y = width;
-	frame->pitch_chroma = chroma_width;
-	return frame;
+	return RefCountedTexture(new GLuint(tex), TextureDeleter());
 }
 
 }  // namespace
@@ -647,6 +707,8 @@ void VideoStream::encode_thread_func()
 		abort();
 	}
 
+	init_pbo_pool();
+
 	while (!should_quit) {
 		QueuedFrame qf;
 		{
@@ -682,59 +744,74 @@
 		// Hack: We mux the subtitle packet one time unit before the actual frame,
 		// so that Nageru is sure to get it first.
 		if (!qf.subtitle.empty() && with_subtitles == Mux::WITH_SUBTITLES) {
-			AVPacket pkt;
-			av_init_packet(&pkt);
-			pkt.stream_index = mux->get_subtitle_stream_idx();
-			assert(pkt.stream_index != -1);
-			pkt.data = (uint8_t *)qf.subtitle.data();
-			pkt.size = qf.subtitle.size();
-			pkt.flags = 0;
-			pkt.duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
-			mux->add_packet(pkt, qf.output_pts - 1, qf.output_pts - 1);
+			AVPacketWithDeleter pkt = av_packet_alloc_unique();
+			pkt->stream_index = mux->get_subtitle_stream_idx();
+			assert(pkt->stream_index != -1);
+			pkt->data = (uint8_t *)qf.subtitle.data();
+			pkt->size = qf.subtitle.size();
+			pkt->flags = 0;
+			pkt->duration = lrint(TIMEBASE / global_flags.output_framerate);  // Doesn't really matter for Nageru.
+			mux->add_packet(*pkt, qf.output_pts - 1, qf.output_pts - 1);
 		}
 
 		if (qf.type == QueuedFrame::ORIGINAL) {
 			// Send the JPEG frame on, unchanged.
 			string jpeg = move(*qf.encoded_jpeg);
-			AVPacket pkt;
-			av_init_packet(&pkt);
-			pkt.stream_index = 0;
-			pkt.data = (uint8_t *)jpeg.data();
-			pkt.size = jpeg.size();
-			pkt.flags = AV_PKT_FLAG_KEY;
-			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			AVPacketWithDeleter pkt = av_packet_alloc_unique();
+			pkt->stream_index = 0;
+			pkt->data = (uint8_t *)jpeg.data();
+			pkt->size = jpeg.size();
+			pkt->flags = AV_PKT_FLAG_KEY;
+			mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
 			last_frame = move(jpeg);
 
 			add_audio_or_silence(qf);
 		} else if (qf.type == QueuedFrame::FADED) {
+			steady_clock::time_point start = steady_clock::now();
 			glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
-
-			shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+			steady_clock::time_point stop = steady_clock::now();
+			metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+			metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
 			// Now JPEG encode it, and send it on to the stream.
-			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(frame->exif_data));
-
-			AVPacket pkt;
-			av_init_packet(&pkt);
-			pkt.stream_index = 0;
-			pkt.data = (uint8_t *)jpeg.data();
-			pkt.size = jpeg.size();
-			pkt.flags = AV_PKT_FLAG_KEY;
-			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, /*exif_data=*/"");
+
+			AVPacketWithDeleter pkt = av_packet_alloc_unique();
+			pkt->stream_index = 0;
+			pkt->data = (uint8_t *)jpeg.data();
+			pkt->size = jpeg.size();
+			pkt->flags = AV_PKT_FLAG_KEY;
+			mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
 			last_frame = move(jpeg);
 
 			add_audio_or_silence(qf);
 		} else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
+			steady_clock::time_point start = steady_clock::now();
 			glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+			steady_clock::time_point stop = steady_clock::now();
+			metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+			metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
 			// Send it on to display.
-			shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
 			if (qf.display_decoded_func != nullptr) {
-				qf.display_decoded_func(frame);
+				shared_ptr<Frame> frame(new Frame);
+				if (qf.type == QueuedFrame::FADED_INTERPOLATED) {
+					frame->y = clone_r8_texture(qf.resources->fade_y_output_tex, global_flags.width, global_flags.height);
+				} else {
+					frame->y = clone_r8_texture(qf.output_tex, global_flags.width, global_flags.height);
+				}
+				frame->cb = clone_r8_texture(qf.resources->cb_tex, global_flags.width / 2, global_flags.height);
+				frame->cr = clone_r8_texture(qf.resources->cr_tex, global_flags.width / 2, global_flags.height);
+				frame->width = global_flags.width;
+				frame->height = global_flags.height;
+				frame->chroma_subsampling_x = 2;
+				frame->chroma_subsampling_y = 1;
+				frame->uploaded_ui_thread = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
+				qf.display_decoded_func(move(frame));
 			}
 
 			// Now JPEG encode it, and send it on to the stream.
-			string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(qf.exif_data));
+			string jpeg = encode_jpeg_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height, move(qf.exif_data));
 			if (qf.flow_tex != 0) {
 				compute_flow->release_texture(qf.flow_tex);
 			}
@@ -743,24 +820,22 @@
 				interpolate->release_texture(qf.cbcr_tex);
 			}
 
-			AVPacket pkt;
-			av_init_packet(&pkt);
-			pkt.stream_index = 0;
-			pkt.data = (uint8_t *)jpeg.data();
-			pkt.size = jpeg.size();
-			pkt.flags = AV_PKT_FLAG_KEY;
-			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			AVPacketWithDeleter pkt = av_packet_alloc_unique();
+			pkt->stream_index = 0;
+			pkt->data = (uint8_t *)jpeg.data();
+			pkt->size = jpeg.size();
+			pkt->flags = AV_PKT_FLAG_KEY;
+			mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
 			last_frame = move(jpeg);
 
 			add_audio_or_silence(qf);
 		} else if (qf.type == QueuedFrame::REFRESH) {
-			AVPacket pkt;
-			av_init_packet(&pkt);
-			pkt.stream_index = 0;
-			pkt.data = (uint8_t *)last_frame.data();
-			pkt.size = last_frame.size();
-			pkt.flags = AV_PKT_FLAG_KEY;
-			mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+			AVPacketWithDeleter pkt = av_packet_alloc_unique();
+			pkt->stream_index = 0;
+			pkt->data = (uint8_t *)last_frame.data();
+			pkt->size = last_frame.size();
+			pkt->flags = AV_PKT_FLAG_KEY;
+			mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
 
 			add_audio_or_silence(qf);  // Definitely silence.
 		} else if (qf.type == QueuedFrame::SILENCE) {
@@ -790,11 +865,12 @@ int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType ty
 		type = AVIO_DATA_MARKER_SYNC_POINT;
 	}
 
+	HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
 	if (type == AVIO_DATA_MARKER_HEADER) {
 		stream_mux_header.append((char *)buf, buf_size);
-		global_httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
+		global_httpd->set_header(stream_id, stream_mux_header);
 	} else {
-		global_httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+		global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
 	}
 	return buf_size;
 }
@@ -807,13 +883,12 @@ void VideoStream::add_silence(int64_t pts, int64_t length_pts)
 	long num_samples = lrint(length_pts * double(OUTPUT_FREQUENCY) / double(TIMEBASE)) * 2;
 	uint8_t *zero = (uint8_t *)calloc(num_samples, sizeof(int32_t));
 
-	AVPacket pkt;
-	av_init_packet(&pkt);
-	pkt.stream_index = 1;
-	pkt.data = zero;
-	pkt.size = num_samples * sizeof(int32_t);
-	pkt.flags = AV_PKT_FLAG_KEY;
-	mux->add_packet(pkt, pts, pts);
+	AVPacketWithDeleter pkt = av_packet_alloc_unique();
+	pkt->stream_index = 1;
+	pkt->data = zero;
+	pkt->size = num_samples * sizeof(int32_t);
+	pkt->flags = AV_PKT_FLAG_KEY;
+	mux->add_packet(*pkt, pts, pts);
 
 	free(zero);
 }
@@ -824,12 +899,11 @@ void VideoStream::add_audio_or_silence(const QueuedFrame &qf)
 		int64_t frame_length = lrint(double(TIMEBASE) / global_flags.output_framerate);
 		add_silence(qf.output_pts, frame_length);
 	} else {
-		AVPacket pkt;
-		av_init_packet(&pkt);
-		pkt.stream_index = 1;
-		pkt.data = (uint8_t *)qf.audio.data();
-		pkt.size = qf.audio.size();
-		pkt.flags = AV_PKT_FLAG_KEY;
-		mux->add_packet(pkt, qf.output_pts, qf.output_pts);
+		AVPacketWithDeleter pkt = av_packet_alloc_unique();
+		pkt->stream_index = 1;
+		pkt->data = (uint8_t *)qf.audio.data();
+		pkt->size = qf.audio.size();
+		pkt->flags = AV_PKT_FLAG_KEY;
+		mux->add_packet(*pkt, qf.output_pts, qf.output_pts);
 	}
 }
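Note: AVPacketWithDeleter and av_packet_alloc_unique(), used throughout the hunks above, replace the deprecated stack-allocated AVPacket + av_init_packet() pattern. Their actual definition lives in shared/ffmpeg_raii.h (added to the includes, but not part of this diff); a minimal sketch of the idea, assuming only FFmpeg's public av_packet_alloc()/av_packet_free() pair — the real header may differ in detail:

extern "C" {
#include <libavcodec/avcodec.h>
}
#include <memory>

// Frees (and unrefs) the packet when the unique_ptr goes out of scope.
struct AVPacketDeleter {
	void operator()(AVPacket *pkt) const { av_packet_free(&pkt); }
};
using AVPacketWithDeleter = std::unique_ptr<AVPacket, AVPacketDeleter>;

// Returns a freshly allocated, default-initialized packet with automatic cleanup.
inline AVPacketWithDeleter av_packet_alloc_unique()
{
	return AVPacketWithDeleter(av_packet_alloc());
}

Since mux->add_packet() takes the packet itself rather than a pointer, the call sites above simply pass *pkt and let the deleter run when pkt leaves scope.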