Add metrics for how many frames we decoded but did not have time to display.
diff --git a/futatabi/video_stream.cpp b/futatabi/video_stream.cpp
index 9a120b51344a70b6b4744519ee07f328c50f7b3b..5a36801b5e6ebf787cc5f84874650d63db2862fb 100644
--- a/futatabi/video_stream.cpp
+++ b/futatabi/video_stream.cpp
@@ -6,6 +6,7 @@ extern "C" {
 }
 
 #include "chroma_subsampler.h"
+#include "exif_parser.h"
 #include "flags.h"
 #include "flow.h"
 #include "jpeg_frame_view.h"
@@ -13,6 +14,7 @@ extern "C" {
 #include "player.h"
 #include "shared/context.h"
 #include "shared/httpd.h"
+#include "shared/metrics.h"
 #include "shared/shared_defs.h"
 #include "shared/mux.h"
 #include "util.h"
@@ -22,9 +24,21 @@ extern "C" {
 #include <jpeglib.h>
 #include <unistd.h>
 
+using namespace movit;
 using namespace std;
 using namespace std::chrono;
 
+namespace {
+
+once_flag video_metrics_inited;
+Summary metric_jpeg_encode_time_seconds;
+Summary metric_fade_latency_seconds;
+Summary metric_interpolation_latency_seconds;
+Summary metric_fade_fence_wait_time_seconds;
+Summary metric_interpolation_fence_wait_time_seconds;
+
+}  // namespace
+
 extern HTTPD *global_httpd;
 
 struct VectorDestinationManager {
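
Note on the new declarations above: the Summary objects and their once_flag sit in an anonymous namespace at file scope, so every VideoStream instance shares one set of counters while the symbols stay internal to this translation unit. A minimal sketch of the pattern, with a hypothetical stand-in for the real Summary type from shared/metrics.h:

    #include <mutex>
    #include <vector>

    // Stand-in for the real Summary type from shared/metrics.h; only the two
    // calls used in this patch are modeled, and their behavior here is an
    // assumption, not the actual implementation.
    struct HypotheticalSummary {
        void init(const std::vector<double> &quantiles, double window) {}
        void count_event(double seconds) {}
    };

    namespace {

    // Internal linkage: one set of counters shared by all streams in this
    // translation unit, invisible to the rest of the program.
    std::once_flag metrics_inited;
    HypotheticalSummary metric_example_seconds;

    }  // namespace

    struct HypotheticalStream {
        HypotheticalStream()
        {
            // Runs exactly once even if several streams are constructed
            // concurrently; later constructors block until the first is done.
            std::call_once(metrics_inited, [] {
                metric_example_seconds.init({ 0.5, 0.9, 0.99 }, 60.0);
            });
        }
    };

    int main() { HypotheticalStream a, b; }
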
@@ -81,6 +95,7 @@ static_assert(std::is_standard_layout<VectorDestinationManager>::value, "");
 
 string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t *cr_data, unsigned width, unsigned height, const string exif_data)
 {
+       steady_clock::time_point start = steady_clock::now();
        VectorDestinationManager dest;
 
        jpeg_compress_struct cinfo;
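
The hunk above starts a steady_clock stopwatch at the top of encode_jpeg(); the matching stop and count_event() call appear at the end of the function in the next hunk. steady_clock is the right choice because it is monotonic, so the measurement cannot jump or go negative if the wall clock is adjusted meanwhile. A self-contained illustration of the same duration<double> idiom:

    #include <chrono>
    #include <cstdio>
    #include <thread>

    int main()
    {
        using namespace std::chrono;

        // steady_clock is monotonic, unlike system_clock, which can be
        // stepped by NTP or the administrator.
        steady_clock::time_point start = steady_clock::now();
        std::this_thread::sleep_for(milliseconds(10));  // Stand-in for the JPEG encode.
        steady_clock::time_point stop = steady_clock::now();

        // duration<double> converts the clock's native ticks to fractional
        // seconds, matching the _seconds suffix on the metric names.
        printf("encode took %.6f s\n", duration<double>(stop - start).count());
    }
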
@@ -131,12 +146,29 @@ string encode_jpeg(const uint8_t *y_data, const uint8_t *cb_data, const uint8_t
        jpeg_finish_compress(&cinfo);
        jpeg_destroy_compress(&cinfo);
 
+       steady_clock::time_point stop = steady_clock::now();
+       metric_jpeg_encode_time_seconds.count_event(duration<double>(stop - start).count());
+
        return move(dest.dest);
 }
 
 VideoStream::VideoStream(AVFormatContext *file_avctx)
        : avctx(file_avctx), output_fast_forward(file_avctx != nullptr)
 {
+       call_once(video_metrics_inited, [] {
+               vector<double> quantiles{ 0.01, 0.1, 0.25, 0.5, 0.75, 0.9, 0.99 };
+               metric_jpeg_encode_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("jpeg_encode_time_seconds", &metric_jpeg_encode_time_seconds);
+               metric_fade_fence_wait_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("fade_fence_wait_time_seconds", &metric_fade_fence_wait_time_seconds);
+               metric_interpolation_fence_wait_time_seconds.init(quantiles, 60.0);
+               global_metrics.add("interpolation_fence_wait_time_seconds", &metric_interpolation_fence_wait_time_seconds);
+               metric_fade_latency_seconds.init(quantiles, 60.0);
+               global_metrics.add("fade_latency_seconds", &metric_fade_latency_seconds);
+               metric_interpolation_latency_seconds.init(quantiles, 60.0);
+               global_metrics.add("interpolation_latency_seconds", &metric_interpolation_latency_seconds);
+       });
+
        ycbcr_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_DUAL_YCBCR, /*resource_pool=*/nullptr));
        ycbcr_semiplanar_converter.reset(new YCbCrConverter(YCbCrConverter::OUTPUT_TO_SEMIPLANAR, /*resource_pool=*/nullptr));
 
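
All five summaries are initialized with the same quantile vector (p1 through p99) and what is presumably a 60-second window (the meaning of the 60.0 argument is an assumption; shared/metrics.h has the authoritative definition), then registered with global_metrics under Prometheus-style _seconds names. As a toy illustration of what such a quantile summary reports, ignoring the real implementation's windowing and registration:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Toy quantile over a fixed sample set; the real Summary presumably also
    // ages out events older than the window passed to init().
    double quantile(std::vector<double> samples, double q)
    {
        std::size_t idx = std::min(samples.size() - 1,
                                   static_cast<std::size_t>(q * samples.size()));
        std::nth_element(samples.begin(), samples.begin() + idx, samples.end());
        return samples[idx];
    }

    int main()
    {
        std::vector<double> encode_times{ 0.004, 0.005, 0.005, 0.006, 0.012 };
        printf("p50 = %.3f s, p99 = %.3f s\n",
               quantile(encode_times, 0.5), quantile(encode_times, 0.99));
    }
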
@@ -430,6 +462,7 @@ void VideoStream::schedule_faded_frame(steady_clock::time_point local_pts, int64
        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
+       qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);
@@ -526,12 +559,16 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
                shared_ptr<Frame> frame2 = decode_jpeg_with_cache(secondary_frame, DECODE_IF_NOT_IN_CACHE, &frame_reader, &did_decode);
 
                // Then fade against it, putting it into the fade Y' and CbCr textures.
-               ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
+               RGBTriplet neutral_color = get_neutral_color(qf.exif_data);
+               ycbcr_semiplanar_converter->prepare_chain_for_fade_from_texture(qf.output_tex, neutral_color, global_flags.width, global_flags.height, frame2, fade_alpha)->render_to_fbo(resources->fade_fbo, global_flags.width, global_flags.height);
 
                // Subsample and split Cb/Cr.
                chroma_subsampler->subsample_chroma(resources->fade_cbcr_output_tex, global_flags.width, global_flags.height, resources->cb_tex, resources->cr_tex);
 
                interpolate_no_split->release_texture(qf.output_tex);
+
+               // We already applied the white balance, so don't have the client redo it.
+               qf.exif_data.clear();
        } else {
                tie(qf.output_tex, qf.cbcr_tex) = interpolate->exec(resources->input_tex, resources->gray_tex, flow_tex, global_flags.width, global_flags.height, alpha);
                check_error();
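
In the fade path, the white balance implied by the frame's Exif data (via get_neutral_color()) is baked into the pixels during rendering, so qf.exif_data is cleared afterwards; otherwise the client would see the Exif payload and apply the same correction a second time. For intuition, a generic von Kries-style gain sketch of what "applying the white balance" means; this illustrates the idea only and is not necessarily Futatabi's exact math:

    #include <cstdio>

    struct RGB { float r, g, b; };

    // Scale each channel so that the color the Exif data says should be
    // neutral maps to equal R=G=B (gains normalized to the green channel).
    RGB white_balance(RGB pixel, RGB neutral)
    {
        return RGB{ pixel.r * (neutral.g / neutral.r),
                    pixel.g,
                    pixel.b * (neutral.g / neutral.b) };
    }

    int main()
    {
        RGB neutral{ 0.9f, 1.0f, 1.1f };  // What the source frame reports as gray.
        RGB out = white_balance(RGB{ 0.5f, 0.5f, 0.5f }, neutral);
        printf("%.3f %.3f %.3f\n", out.r, out.g, out.b);
    }
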
@@ -566,6 +603,7 @@ void VideoStream::schedule_interpolated_frame(steady_clock::time_point local_pts
        // Set a fence we can wait for to make sure the CPU sees the read.
        glMemoryBarrier(GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT);
        check_error();
+       qf.fence_created = steady_clock::now();
        qf.fence = RefCountedGLsync(GL_SYNC_GPU_COMMANDS_COMPLETE, /*flags=*/0);
        check_error();
        qf.resources = move(resources);
@@ -707,12 +745,17 @@ void VideoStream::encode_thread_func()
 
                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::FADED) {
+                       steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+                       steady_clock::time_point stop = steady_clock::now();
+                       metric_fade_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+                       metric_fade_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
+                       assert(frame->exif_data.empty());
 
                        // Now JPEG encode it, and send it on to the stream.
-                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, move(frame->exif_data));
+                       string jpeg = encode_jpeg(frame->y.get(), frame->cb.get(), frame->cr.get(), global_flags.width, global_flags.height, /*exif_data=*/"");
 
                        AVPacket pkt;
                        av_init_packet(&pkt);
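
The two metrics recorded after glClientWaitSync() answer different questions: the fence-wait time is how long the encode thread actually blocked, while the latency is measured from qf.fence_created, i.e., the whole interval from the render thread issuing the fence until the GPU results were seen complete. (The new assert also documents the invariant from the fade path above: faded frames carry no Exif data, so encode_jpeg() now gets an explicit empty string.) A self-contained sketch of the two measurements, with a std::future standing in for the GL fence:

    #include <chrono>
    #include <cstdio>
    #include <future>
    #include <thread>

    int main()
    {
        using namespace std::chrono;

        // The render thread timestamps the fence at creation time...
        steady_clock::time_point fence_created = steady_clock::now();
        std::future<void> fence = std::async(std::launch::async, [] {
            std::this_thread::sleep_for(milliseconds(20));  // Stand-in for the GPU work.
        });

        std::this_thread::sleep_for(milliseconds(5));  // Encode thread does other things.

        // ...and the encode thread measures how long it actually blocked.
        steady_clock::time_point start = steady_clock::now();
        fence.wait();  // Stand-in for glClientWaitSync().
        steady_clock::time_point stop = steady_clock::now();

        // Wait time: how long this thread stalled. Latency: the whole time the
        // frame spent between render submission and being ready for encoding.
        printf("wait = %.3f s, latency = %.3f s\n",
               duration<double>(stop - start).count(),
               duration<double>(stop - fence_created).count());
    }
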
@@ -725,7 +768,11 @@ void VideoStream::encode_thread_func()
 
                        add_audio_or_silence(qf);
                } else if (qf.type == QueuedFrame::INTERPOLATED || qf.type == QueuedFrame::FADED_INTERPOLATED) {
+                       steady_clock::time_point start = steady_clock::now();
                        glClientWaitSync(qf.fence.get(), /*flags=*/0, GL_TIMEOUT_IGNORED);
+                       steady_clock::time_point stop = steady_clock::now();
+                       metric_interpolation_fence_wait_time_seconds.count_event(duration<double>(stop - start).count());
+                       metric_interpolation_latency_seconds.count_event(duration<double>(stop - qf.fence_created).count());
 
                        // Send it on to display.
                        shared_ptr<Frame> frame = frame_from_pbo(qf.resources->pbo_contents, global_flags.width, global_flags.height);
@@ -790,11 +837,12 @@ int VideoStream::write_packet2(uint8_t *buf, int buf_size, AVIODataMarkerType ty
                type = AVIO_DATA_MARKER_SYNC_POINT;
        }
 
+       HTTPD::StreamID stream_id{ HTTPD::MAIN_STREAM, 0 };
        if (type == AVIO_DATA_MARKER_HEADER) {
                stream_mux_header.append((char *)buf, buf_size);
-               global_httpd->set_header(HTTPD::MAIN_STREAM, stream_mux_header);
+               global_httpd->set_header(stream_id, stream_mux_header);
        } else {
-               global_httpd->add_data(HTTPD::MAIN_STREAM, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
+               global_httpd->add_data(stream_id, (char *)buf, buf_size, type == AVIO_DATA_MARKER_SYNC_POINT, time, AVRational{ AV_TIME_BASE, 1 });
        }
        return buf_size;
 }
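
The HTTPD API change in the last hunk replaces the bare stream enum with an HTTPD::StreamID. Judging only from the brace initializer { HTTPD::MAIN_STREAM, 0 }, it pairs the stream type with an index, presumably so more than one stream of a given type can coexist. A hypothetical reconstruction of the shape of that struct (the real definition lives in shared/httpd.h, and SIPHON_STREAM below is made up for the sketch):

    #include <cstdio>

    enum StreamType { MAIN_STREAM, SIPHON_STREAM };

    struct StreamID {
        StreamType type;
        int index;  // Distinguishes multiple streams of the same type.
    };

    void add_data(StreamID stream_id, const char *buf, int size)
    {
        printf("stream (%d, %d): %d bytes\n", stream_id.type, stream_id.index, size);
    }

    int main()
    {
        // The old API keyed everything on the enum alone; the struct allows,
        // e.g., several siphon streams to coexist side by side.
        add_data(StreamID{ MAIN_STREAM, 0 }, "hdr", 3);
        add_data(StreamID{ SIPHON_STREAM, 1 }, "pkt", 3);
    }
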