Include duration of pauses in the sum of squared frame durations.
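
The harmonic frame rate metric is the call duration divided by the
sum of squared frame durations, all in seconds, roughly:

  harmonic_fps = call_duration_secs / sum(frame_duration_secs^2)

as exercised by the updated HarmonicFrameRateIsReported test below.

Previously the inter-frame delay of the first frame rendered after a
pause was not added to this sum, so pauses did not affect the metric.
Now the squared duration of pauses is included as well, while freeze
detection and the per-resolution/blocky-time accounting are still
updated only when the stream is not paused.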

Bug: webrtc:10502
Change-Id: Ie905c0c9e8ca8fe07be585ce5a0d75e9eed6e865
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/130499
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27435}
diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc
index e050137..6f04588 100644
--- a/video/receive_statistics_proxy_unittest.cc
+++ b/video/receive_statistics_proxy_unittest.cc
@@ -1263,30 +1263,41 @@
 }
 
 TEST_P(ReceiveStatisticsProxyTestWithContent, HarmonicFrameRateIsReported) {
-  const int kInterFrameDelayMs = 33;
-  const int kFreezeDelayMs = 200;
-  const int kCallDurationMs =
-      kMinRequiredSamples * kInterFrameDelayMs + kFreezeDelayMs;
+  const int kFrameDurationMs = 33;
+  const int kFreezeDurationMs = 200;
+  const int kPauseDurationMs = 10000;
+  const int kCallDurationMs = kMinRequiredSamples * kFrameDurationMs +
+                              kFreezeDurationMs + kPauseDurationMs;
   webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
 
   for (int i = 0; i < kMinRequiredSamples; ++i) {
-    fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+    fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs);
     statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, content_type_);
     statistics_proxy_->OnRenderedFrame(frame);
   }
-  // Add extra freeze.
-  fake_clock_.AdvanceTimeMilliseconds(kFreezeDelayMs);
+
+  // Freezes and pauses should be included in the harmonic frame rate.
+  // Add freeze.
+  fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs);
+  statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, content_type_);
+  statistics_proxy_->OnRenderedFrame(frame);
+
+  // Add pause.
+  fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs);
+  statistics_proxy_->OnStreamInactive();
   statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, content_type_);
   statistics_proxy_->OnRenderedFrame(frame);
 
   statistics_proxy_.reset();
-  double kSumSquaredInterframeDelaysSecs =
+  double kSumSquaredFrameDurationSecs =
       (kMinRequiredSamples - 1) *
-      (kInterFrameDelayMs / 1000.0 * kInterFrameDelayMs / 1000.0);
-  kSumSquaredInterframeDelaysSecs +=
-      kFreezeDelayMs / 1000.0 * kFreezeDelayMs / 1000.0;
+      (kFrameDurationMs / 1000.0 * kFrameDurationMs / 1000.0);
+  kSumSquaredFrameDurationSecs +=
+      kFreezeDurationMs / 1000.0 * kFreezeDurationMs / 1000.0;
+  kSumSquaredFrameDurationSecs +=
+      kPauseDurationMs / 1000.0 * kPauseDurationMs / 1000.0;
   const int kExpectedHarmonicFrameRateFps =
-      std::round(kCallDurationMs / (1000 * kSumSquaredInterframeDelaysSecs));
+      std::round(kCallDurationMs / (1000 * kSumSquaredFrameDurationSecs));
   if (videocontenttypehelpers::IsScreenshare(content_type_)) {
     EXPECT_EQ(kExpectedHarmonicFrameRateFps,
               metrics::MinSample("WebRTC.Video.Screenshare.HarmonicFrameRate"));
diff --git a/video/video_quality_observer.cc b/video/video_quality_observer.cc
index 2f1bb1b..df7d52f 100644
--- a/video/video_quality_observer.cc
+++ b/video/video_quality_observer.cc
@@ -146,35 +146,42 @@
 
   auto blocky_frame_it = blocky_frames_.find(frame.timestamp());
 
-  if (!is_paused_ && num_frames_rendered_ > 0) {
+  if (num_frames_rendered_ > 0) {
     // Process inter-frame delay.
     const int64_t interframe_delay_ms = now_ms - last_frame_rendered_ms_;
     const double interframe_delays_secs = interframe_delay_ms / 1000.0;
+
+    // The sum of squared inter-frame intervals is used to calculate the
+    // harmonic frame rate metric. This metric aims to reflect the overall
+    // smoothness of video playback and includes both freezes and pauses.
     sum_squared_interframe_delays_secs_ +=
         interframe_delays_secs * interframe_delays_secs;
-    render_interframe_delays_.AddSample(interframe_delay_ms);
 
-    bool was_freeze = false;
-    if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) {
-      const absl::optional<int64_t> avg_interframe_delay =
-          render_interframe_delays_.GetAverageRoundedDown();
-      RTC_DCHECK(avg_interframe_delay);
-      was_freeze = interframe_delay_ms >=
-                   std::max(3 * *avg_interframe_delay,
-                            *avg_interframe_delay + kMinIncreaseForFreezeMs);
-    }
+    if (!is_paused_) {
+      render_interframe_delays_.AddSample(interframe_delay_ms);
 
-    if (was_freeze) {
-      freezes_durations_.Add(interframe_delay_ms);
-      smooth_playback_durations_.Add(last_frame_rendered_ms_ -
-                                     last_unfreeze_time_ms_);
-      last_unfreeze_time_ms_ = now_ms;
-    } else {
-      // Count spatial metrics if there were no freeze.
-      time_in_resolution_ms_[current_resolution_] += interframe_delay_ms;
+      bool was_freeze = false;
+      if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) {
+        const absl::optional<int64_t> avg_interframe_delay =
+            render_interframe_delays_.GetAverageRoundedDown();
+        RTC_DCHECK(avg_interframe_delay);
+        was_freeze = interframe_delay_ms >=
+                     std::max(3 * *avg_interframe_delay,
+                              *avg_interframe_delay + kMinIncreaseForFreezeMs);
+      }
 
-      if (is_last_frame_blocky_) {
-        time_in_blocky_video_ms_ += interframe_delay_ms;
+      if (was_freeze) {
+        freezes_durations_.Add(interframe_delay_ms);
+        smooth_playback_durations_.Add(last_frame_rendered_ms_ -
+                                       last_unfreeze_time_ms_);
+        last_unfreeze_time_ms_ = now_ms;
+      } else {
+        // Count spatial metrics only if there was no freeze.
+        time_in_resolution_ms_[current_resolution_] += interframe_delay_ms;
+
+        if (is_last_frame_blocky_) {
+          time_in_blocky_video_ms_ += interframe_delay_ms;
+        }
       }
     }
   }