From: Steinar H. Gunderson
Date: Sun, 30 Aug 2020 22:25:40 +0000 (+0200)
Subject: Reset jitter history when frame rate changes.
X-Git-Tag: 2.0.2~13
X-Git-Url: https://git.sesse.net/?p=nageru;a=commitdiff_plain;h=ccc8294ad233bf3b85545c15c63100d54e485e63

Reset jitter history when frame rate changes.

This is seemingly especially important when we have input format
autodetection and PAL input rates; it gives us one 59.94 fps frame,
then a delay as the card autodetects and resyncs (might be as much as
30–50 ms; not entirely sure), and then a steady stream of 50 fps
frames. This then causes us to overestimate the jitter by a lot until
we get more than 1000 frames and can reject that very first event as
the outlier it is.
---

diff --git a/nageru/mixer.cpp b/nageru/mixer.cpp
index 2fdbf64..ce8a732 100644
--- a/nageru/mixer.cpp
+++ b/nageru/mixer.cpp
@@ -234,6 +234,16 @@ void JitterHistory::unregister_metrics(const vector<pair<string, string>> &label
 
 void JitterHistory::frame_arrived(steady_clock::time_point now, int64_t frame_duration, size_t dropped_frames)
 {
+	if (frame_duration != last_duration) {
+		// If the frame rate changed, the input clock is also going to change,
+		// so our historical data doesn't make much sense anymore.
+		// Also, format changes typically introduce blips that are not representative
+		// of the typical frame stream. (We make the assumption that format changes
+		// don't happen all the time in regular use; if they did, we should probably
+		// rather keep the history so that we take jitter they may introduce into account.)
+		clear();
+		last_duration = frame_duration;
+	}
 	if (expected_timestamp > steady_clock::time_point::min()) {
 		expected_timestamp += dropped_frames * nanoseconds(frame_duration * 1000000000 / TIMEBASE);
 		double jitter_seconds = fabs(duration<double>(expected_timestamp - now).count());
diff --git a/nageru/queue_length_policy.h b/nageru/queue_length_policy.h
index 329eb82..0b8a420 100644
--- a/nageru/queue_length_policy.h
+++ b/nageru/queue_length_policy.h
@@ -54,6 +54,7 @@ private:
 	std::deque<std::multiset<double, std::greater<double>>::iterator> history;
 
 	std::chrono::steady_clock::time_point expected_timestamp = std::chrono::steady_clock::time_point::min();
+	int64_t last_duration = 0;
 
 	// Metrics. There are no direct summaries for jitter, since we already have latency summaries.
 	std::atomic<int64_t> metric_input_underestimated_jitter_frames{0};
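
For illustration only (not part of the commit, and a simplified stand-in
for the actual JitterHistory class): the sketch below shows why a single
30-50 ms resync blip dominates a window-based max-jitter estimate until
roughly a full window of frames has aged it out, which is the situation
the reset above avoids. The window size, the observed jitter values and
the plain max-of-window estimator are assumptions made for the example.

// Simplified model, not Nageru code: keep the last N absolute timing
// deviations and estimate jitter as the maximum over that window.
#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <deque>

int main()
{
	const std::size_t window = 1000;     // assumed history length
	std::deque<double> jitter_history;   // absolute deviations, in seconds

	auto observe = [&](double jitter_seconds) {
		jitter_history.push_back(jitter_seconds);
		if (jitter_history.size() > window) {
			jitter_history.pop_front();
		}
		return *std::max_element(jitter_history.begin(), jitter_history.end());
	};

	observe(0.040);  // the one autodetect/resync blip (30-50 ms)
	double estimate = 0.0;
	for (int i = 0; i < 500; ++i) {
		estimate = observe(0.001);  // steady 50 fps frames, ~1 ms jitter
	}
	// Still dominated by the blip: prints 0.040 s, not ~0.001 s,
	// because the outlier has not yet fallen out of the window.
	printf("estimated max jitter = %.3f s\n", estimate);
}

Clearing the history on a frame-rate change (as the patch does) discards
that outlier immediately instead of waiting for it to age out.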