Implement VideoQualityObserver
This class receives per-frame data from ReceiveStatisticsProxy,
calculates spatial and temporal quality metrics, and reports them to
UMA stats. The logic is kept in a separate class because it will later
be extended to calculate aggregated quality metrics.
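
A rough usage sketch (illustrative only, not part of this change; the
real call sites are in ReceiveStatisticsProxy, and ExampleUsage, the
clock and the QP value below are stand-ins):

  #include "system_wrappers/include/clock.h"
  #include "video/video_quality_observer.h"

  void ExampleUsage(webrtc::Clock* clock) {
    webrtc::VideoQualityObserver observer(
        webrtc::VideoContentType::UNSPECIFIED);
    // Forward every decoded frame: QP, resolution, decode time, codec.
    observer.OnDecodedFrame(rtc::Optional<uint8_t>(30), /*width=*/1280,
                            /*height=*/720, clock->TimeInMilliseconds(),
                            webrtc::kVideoCodecVP8);
    // Pauses are excluded from freeze/smooth-playback stats.
    observer.OnStreamInactive();
    // UMA histograms are reported when the observer is destroyed.
  }
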
Bug: webrtc:9295
Change-Id: Ie36db83e10c0e8da0b9baa392651cb9a67a54a80
Reviewed-on: https://webrtc-review.googlesource.com/78220
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23387}
diff --git a/video/BUILD.gn b/video/BUILD.gn
index bc026d2..2ef8473 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -38,6 +38,8 @@
"stream_synchronization.h",
"transport_adapter.cc",
"transport_adapter.h",
+ "video_quality_observer.cc",
+ "video_quality_observer.h",
"video_receive_stream.cc",
"video_receive_stream.h",
"video_send_stream.cc",
diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc
index 2cccd5a..2fd5d9b 100644
--- a/video/receive_statistics_proxy.cc
+++ b/video/receive_statistics_proxy.cc
@@ -106,11 +106,14 @@
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
total_byte_tracker_(100, 10u), // bucket_interval_ms, bucket_count
+ video_quality_observer_(
+ new VideoQualityObserver(VideoContentType::UNSPECIFIED)),
interframe_delay_max_moving_(kMovingMaxWindowMs),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1),
avg_rtt_ms_(0),
last_content_type_(VideoContentType::UNSPECIFIED),
+ last_codec_type_(kVideoCodecVP8),
timing_frame_info_counter_(kMovingMaxWindowMs) {
decode_thread_.DetachFromThread();
network_thread_.DetachFromThread();
@@ -147,13 +150,14 @@
<< stream_duration_sec << '\n';
}
- log_stream << "Frames decoded " << stats_.frames_decoded;
+ log_stream << "Frames decoded " << stats_.frames_decoded << '\n';
if (num_unique_frames_) {
int num_dropped_frames = *num_unique_frames_ - stats_.frames_decoded;
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DroppedFrames.Receiver",
num_dropped_frames);
- log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames;
+ log_stream << "WebRTC.Video.DroppedFrames.Receiver " << num_dropped_frames
+ << '\n';
}
if (first_report_block_time_ms_ != -1 &&
@@ -680,12 +684,30 @@
total_byte_tracker_.AddSamples(total_bytes - last_total_bytes);
}
+// Deprecated. TODO(ilnik): remove once all dependent projects are updated.
void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
VideoContentType content_type) {
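+  // Note: 0x0 means frames from this overload are counted as low resolution,
+  // so resolution-based quality stats are not meaningful for these callers.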
+ OnDecodedFrame(qp, 0, 0, content_type);
+}
+
+void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
+ int width,
+ int height,
+ VideoContentType content_type) {
rtc::CritScope lock(&crit_);
uint64_t now = clock_->TimeInMilliseconds();
+ if (videocontenttypehelpers::IsScreenshare(content_type) !=
+ videocontenttypehelpers::IsScreenshare(last_content_type_)) {
+    // Reset the quality observer if the content type switches. Resetting
+    // reports the stats for the preceding part of the call.
+ video_quality_observer_.reset(new VideoQualityObserver(content_type));
+ }
+
+ video_quality_observer_->OnDecodedFrame(qp, width, height, now,
+ last_codec_type_);
+
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[content_type];
++stats_.frames_decoded;
@@ -809,9 +831,10 @@
if (!codec_specific_info || encoded_image.qp_ == -1) {
return;
}
- if (codec_specific_info->codecType == kVideoCodecVP8) {
+ rtc::CritScope lock(&crit_);
+ last_codec_type_ = codec_specific_info->codecType;
+ if (last_codec_type_ == kVideoCodecVP8) {
qp_counters_.vp8.Add(encoded_image.qp_);
- rtc::CritScope lock(&crit_);
qp_sample_.Add(encoded_image.qp_);
}
}
@@ -822,6 +845,7 @@
rtc::CritScope lock(&crit_);
// Don't report inter-frame delay if stream was paused.
last_decoded_frame_time_ms_.reset();
+ video_quality_observer_->OnStreamInactive();
}
void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h
index 9a2185e..72fd2b2 100644
--- a/video/receive_statistics_proxy.h
+++ b/video/receive_statistics_proxy.h
@@ -12,6 +12,7 @@
#define VIDEO_RECEIVE_STATISTICS_PROXY_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
@@ -31,6 +32,7 @@
#include "video/quality_threshold.h"
#include "video/report_block_stats.h"
#include "video/stats_counter.h"
+#include "video/video_quality_observer.h"
#include "video/video_stream_decoder.h"
namespace webrtc {
@@ -50,7 +52,14 @@
VideoReceiveStream::Stats GetStats() const;
- void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
+  // Deprecated. TODO(ilnik): remove once all dependent projects are updated.
+ RTC_DEPRECATED void OnDecodedFrame(rtc::Optional<uint8_t> qp,
+ VideoContentType content_type);
+
+ void OnDecodedFrame(rtc::Optional<uint8_t> qp,
+ int width,
+ int height,
+ VideoContentType content_type);
void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
void OnRenderedFrame(const VideoFrame& frame);
void OnIncomingPayloadType(int payload_type);
@@ -163,6 +172,8 @@
rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(crit_);
rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(crit_);
rtc::SampleCounter delay_counter_ RTC_GUARDED_BY(crit_);
+ std::unique_ptr<VideoQualityObserver> video_quality_observer_
+ RTC_GUARDED_BY(crit_);
mutable rtc::MovingMaxCounter<int> interframe_delay_max_moving_
RTC_GUARDED_BY(crit_);
std::map<VideoContentType, ContentSpecificStats> content_specific_stats_
@@ -175,6 +186,7 @@
int64_t avg_rtt_ms_ RTC_GUARDED_BY(crit_);
mutable std::map<int64_t, size_t> frame_window_ RTC_GUARDED_BY(&crit_);
VideoContentType last_content_type_ RTC_GUARDED_BY(&crit_);
+ VideoCodecType last_codec_type_ RTC_GUARDED_BY(&crit_);
rtc::Optional<int64_t> first_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
rtc::Optional<int64_t> last_decoded_frame_time_ms_ RTC_GUARDED_BY(&crit_);
// Mutable because calling Max() on MovingMaxCounter is not const. Yet it is
diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc
index d604429..f61f086 100644
--- a/video/receive_statistics_proxy_unittest.cc
+++ b/video/receive_statistics_proxy_unittest.cc
@@ -28,6 +28,10 @@
const uint32_t kLocalSsrc = 123;
const uint32_t kRemoteSsrc = 456;
const int kMinRequiredSamples = 200;
+
+const int kWidth = 1280;
+const int kHeight = 720;
+
} // namespace
// TODO(sakal): ReceiveStatisticsProxy is lacking unittesting.
@@ -70,7 +74,7 @@
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
for (uint32_t i = 1; i <= 3; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::nullopt,
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
}
@@ -80,7 +84,7 @@
const int kFps = 20;
const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps;
for (int i = 0; i < kRequiredSamples; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+ statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), kWidth, kHeight,
VideoContentType::UNSPECIFIED);
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
}
@@ -93,7 +97,7 @@
const int kFps = 20;
const int kRequiredSamples = metrics::kMinRunTimeInSeconds * kFps;
for (int i = 0; i < kRequiredSamples - 1; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+ statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), kWidth, kHeight,
VideoContentType::UNSPECIFIED);
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
}
@@ -104,15 +108,15 @@
TEST_F(ReceiveStatisticsProxyTest, DecodedFpsIsReportedWithQpReset) {
const int kFps1 = 10;
for (int i = 0; i < metrics::kMinRunTimeInSeconds * kFps1; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
+ statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(), kWidth, kHeight,
VideoContentType::UNSPECIFIED);
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps1);
}
// First QP value received, resets frames decoded.
const int kFps2 = 20;
for (int i = 0; i < metrics::kMinRunTimeInSeconds * kFps2; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u),
- VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u), kWidth,
+ kHeight, VideoContentType::UNSPECIFIED);
fake_clock_.AdvanceTimeMilliseconds(1000 / kFps2);
}
statistics_proxy_.reset();
@@ -124,19 +128,22 @@
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithQpResetsFramesDecoded) {
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
for (uint32_t i = 1; i <= 3; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::nullopt,
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
}
- statistics_proxy_->OnDecodedFrame(1u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(1u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_decoded);
}
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) {
EXPECT_EQ(rtc::nullopt, statistics_proxy_->GetStats().qp_sum);
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(3u, statistics_proxy_->GetStats().qp_sum);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(130u, statistics_proxy_->GetStats().qp_sum);
}
@@ -145,10 +152,12 @@
const std::string kScreenshareString("screen");
EXPECT_EQ(kRealtimeString, videocontenttypehelpers::ToString(
statistics_proxy_->GetStats().content_type));
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::SCREENSHARE);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::SCREENSHARE);
EXPECT_EQ(kScreenshareString, videocontenttypehelpers::ToString(
statistics_proxy_->GetStats().content_type));
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(kRealtimeString, videocontenttypehelpers::ToString(
statistics_proxy_->GetStats().content_type));
}
@@ -158,21 +167,25 @@
const int64_t kInterframeDelayMs2 = 200;
const int64_t kInterframeDelayMs3 = 100;
EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms);
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(kInterframeDelayMs1,
statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(kInterframeDelayMs2,
statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
// kInterframeDelayMs3 is smaller than kInterframeDelayMs2.
EXPECT_EQ(kInterframeDelayMs2,
statistics_proxy_->GetStats().interframe_delay_max_ms);
@@ -183,22 +196,26 @@
const int64_t kInterframeDelayMs2 = 750;
const int64_t kInterframeDelayMs3 = 700;
EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms);
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs1);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(kInterframeDelayMs1,
statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs2);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
// Still first delay is the maximum
EXPECT_EQ(kInterframeDelayMs1,
statistics_proxy_->GetStats().interframe_delay_max_ms);
fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs3);
- statistics_proxy_->OnDecodedFrame(127u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(127u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
// Now the first sample is out of the window, so the second is the maximum.
EXPECT_EQ(kInterframeDelayMs2,
statistics_proxy_->GetStats().interframe_delay_max_ms);
@@ -206,16 +223,17 @@
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) {
EXPECT_EQ(rtc::nullopt, statistics_proxy_->GetStats().qp_sum);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt,
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::nullopt, statistics_proxy_->GetStats().qp_sum);
}
TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) {
EXPECT_EQ(rtc::nullopt, statistics_proxy_->GetStats().qp_sum);
- statistics_proxy_->OnDecodedFrame(3u, VideoContentType::UNSPECIFIED);
+ statistics_proxy_->OnDecodedFrame(3u, kWidth, kHeight,
+ VideoContentType::UNSPECIFIED);
EXPECT_EQ(3u, statistics_proxy_->GetStats().qp_sum);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt,
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
VideoContentType::UNSPECIFIED);
EXPECT_EQ(rtc::nullopt, statistics_proxy_->GetStats().qp_sum);
}
@@ -718,7 +736,7 @@
// Since OnRenderedFrame is never called the fps in each sample will be 0,
// i.e. bad
frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds());
- statistics_proxy_->OnDecodedFrame(rtc::nullopt,
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
VideoContentType::UNSPECIFIED);
statistics_proxy_->OnRenderedFrame(frame);
fake_clock_.AdvanceTimeMilliseconds(1000 / kDefaultFps);
@@ -833,12 +851,14 @@
const VideoContentType content_type = GetParam();
const int kInterFrameDelayMs = 33;
for (int i = 0; i < kMinRequiredSamples; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// One extra with double the interval.
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
statistics_proxy_.reset();
const int kExpectedInterFrame =
@@ -866,16 +886,19 @@
const int kLastFivePercentsSamples = kMinRequiredSamples * 5 / 100;
for (int i = 0; i <= kMinRequiredSamples - kLastFivePercentsSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
}
// Last 5% of intervals are double in size.
for (int i = 0; i < kLastFivePercentsSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(2 * kInterFrameDelayMs);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
}
// Final sample is outlier and 10 times as big.
fake_clock_.AdvanceTimeMilliseconds(10 * kInterFrameDelayMs);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
statistics_proxy_.reset();
const int kExpectedInterFrame = kInterFrameDelayMs * 2;
@@ -894,7 +917,8 @@
const VideoContentType content_type = GetParam();
const int kInterFrameDelayMs = 33;
for (int i = 0; i < kMinRequiredSamples; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
@@ -913,7 +937,8 @@
const VideoContentType content_type = GetParam();
const int kInterFrameDelayMs = 33;
for (int i = 0; i <= kMinRequiredSamples; ++i) {
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
@@ -924,9 +949,11 @@
// Insert two more frames. The interval during the pause should be disregarded
// in the stats.
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
statistics_proxy_.reset();
if (videocontenttypehelpers::IsScreenshare(content_type)) {
@@ -950,6 +977,174 @@
}
}
+TEST_P(ReceiveStatisticsProxyTest, FreezesAreReported) {
+ const VideoContentType content_type = GetParam();
+ const int kInterFrameDelayMs = 33;
+ const int kFreezeDelayMs = 200;
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // Add extra freeze.
+ fake_clock_.AdvanceTimeMilliseconds(kFreezeDelayMs);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
+
+ statistics_proxy_.reset();
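+  // Only one freeze was introduced, so the single smooth-playback interval
+  // spans the (kMinRequiredSamples - 1) regular inter-frame delays before it.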
+ const int kExpectedTimeBetweenFreezes =
+ kInterFrameDelayMs * (kMinRequiredSamples - 1);
+ if (videocontenttypehelpers::IsScreenshare(content_type)) {
+ EXPECT_EQ(
+ kFreezeDelayMs + kInterFrameDelayMs,
+ metrics::MinSample("WebRTC.Video.Screenshare.MeanFreezeDurationMs"));
+ EXPECT_EQ(kExpectedTimeBetweenFreezes,
+ metrics::MinSample(
+ "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs"));
+ } else {
+ EXPECT_EQ(kFreezeDelayMs + kInterFrameDelayMs,
+ metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs"));
+ EXPECT_EQ(kExpectedTimeBetweenFreezes,
+ metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs"));
+ }
+}
+
+TEST_P(ReceiveStatisticsProxyTest, PausesAreIgnored) {
+ const VideoContentType content_type = GetParam();
+ const int kInterFrameDelayMs = 33;
+ const int kPauseDurationMs = 10000;
+ for (int i = 0; i <= kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // Add a pause.
+ fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs);
+ statistics_proxy_->OnStreamInactive();
+
+ // Second playback interval with triple the length.
+ for (int i = 0; i <= kMinRequiredSamples * 3; ++i) {
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+
+ statistics_proxy_.reset();
+ // Average of two playback intervals.
+ const int kExpectedTimeBetweenFreezes =
+ kInterFrameDelayMs * kMinRequiredSamples * 2;
+ if (videocontenttypehelpers::IsScreenshare(content_type)) {
+ EXPECT_EQ(-1, metrics::MinSample(
+ "WebRTC.Video.Screenshare.MeanFreezeDurationMs"));
+ EXPECT_EQ(kExpectedTimeBetweenFreezes,
+ metrics::MinSample(
+ "WebRTC.Video.Screenshare.MeanTimeBetweenFreezesMs"));
+ } else {
+ EXPECT_EQ(-1, metrics::MinSample("WebRTC.Video.MeanFreezeDurationMs"));
+ EXPECT_EQ(kExpectedTimeBetweenFreezes,
+ metrics::MinSample("WebRTC.Video.MeanTimeBetweenFreezesMs"));
+ }
+}
+
+TEST_P(ReceiveStatisticsProxyTest, TimeInHdReported) {
+ const VideoContentType content_type = GetParam();
+ const int kInterFrameDelayMs = 20;
+ // HD frames.
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // SD frames.
+ for (int i = 0; i < 2 * kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth / 2, kHeight / 2,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // Extra last frame.
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth / 2, kHeight / 2,
+ content_type);
+ statistics_proxy_.reset();
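+  // HD frames cover one third of the decoded-frame span, SD frames the rest.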
+ const int kExpectedTimeInHdPercents = 33;
+ if (videocontenttypehelpers::IsScreenshare(content_type)) {
+ EXPECT_EQ(
+ kExpectedTimeInHdPercents,
+ metrics::MinSample("WebRTC.Video.Screenshare.TimeInHdPercentage"));
+ } else {
+ EXPECT_EQ(kExpectedTimeInHdPercents,
+ metrics::MinSample("WebRTC.Video.TimeInHdPercentage"));
+ }
+}
+
+TEST_P(ReceiveStatisticsProxyTest, TimeInBlockyVideoReported) {
+ const VideoContentType content_type = GetParam();
+ const int kInterFrameDelayMs = 20;
+ const int kHighQp = 80;
+ const int kLowQp = 30;
+ // High quality frames.
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(kLowQp, kWidth, kHeight, content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // Blocky frames.
+ for (int i = 0; i < 2 * kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnDecodedFrame(kHighQp, kWidth, kHeight, content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+ }
+ // Extra last frame.
+ statistics_proxy_->OnDecodedFrame(kHighQp, kWidth, kHeight, content_type);
+ statistics_proxy_.reset();
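+  // Blocky (high-QP) frames cover roughly two thirds of the decoded-frame
+  // span.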
+  const int kExpectedTimeInBlockyVideoPercents = 66;
+  if (videocontenttypehelpers::IsScreenshare(content_type)) {
+    EXPECT_EQ(kExpectedTimeInBlockyVideoPercents,
+              metrics::MinSample(
+                  "WebRTC.Video.Screenshare.TimeInBlockyVideoPercentage"));
+  } else {
+    EXPECT_EQ(kExpectedTimeInBlockyVideoPercents,
+              metrics::MinSample("WebRTC.Video.TimeInBlockyVideoPercentage"));
+ }
+}
+
+TEST_P(ReceiveStatisticsProxyTest, DownscalesReported) {
+ const VideoContentType content_type = GetParam();
+ const int kInterFrameDelayMs = 1000; // To ensure long enough call duration.
+ const int kLowQp = 30;
+
+  statistics_proxy_->OnDecodedFrame(kLowQp, kWidth, kHeight, content_type);
+  fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+
+  // Downscale.
+  statistics_proxy_->OnDecodedFrame(kLowQp, kWidth / 2, kHeight / 2,
+                                    content_type);
+  fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+
+ statistics_proxy_->OnDecodedFrame(kLowQp, kWidth / 2, kHeight / 2,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+
+ statistics_proxy_->OnDecodedFrame(kLowQp, kWidth / 2, kHeight / 2,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+
+ // Downscale.
+ statistics_proxy_->OnDecodedFrame(kLowQp, kWidth / 4, kHeight / 4,
+ content_type);
+ fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
+
+ statistics_proxy_.reset();
+  const int kExpectedDownscales = 30;  // 2 per 4 seconds = 30 per minute.
+ if (videocontenttypehelpers::IsScreenshare(content_type)) {
+ EXPECT_EQ(
+ kExpectedDownscales,
+ metrics::MinSample(
+ "WebRTC.Video.Screenshare.NumberResolutionDownswitchesPerMinute"));
+ } else {
+ EXPECT_EQ(kExpectedDownscales,
+ metrics::MinSample(
+ "WebRTC.Video.NumberResolutionDownswitchesPerMinute"));
+ }
+}
+
TEST_P(ReceiveStatisticsProxyTest, StatsAreSlicedOnSimulcastAndExperiment) {
VideoContentType content_type = GetParam();
const uint8_t experiment_id = 1;
@@ -960,13 +1155,15 @@
videocontenttypehelpers::SetSimulcastId(&content_type, 1);
for (int i = 0; i <= kMinRequiredSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs1);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
}
videocontenttypehelpers::SetSimulcastId(&content_type, 2);
for (int i = 0; i <= kMinRequiredSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs2);
- statistics_proxy_->OnDecodedFrame(rtc::nullopt, content_type);
+ statistics_proxy_->OnDecodedFrame(rtc::nullopt, kWidth, kHeight,
+ content_type);
}
statistics_proxy_.reset();
diff --git a/video/video_quality_observer.cc b/video/video_quality_observer.cc
new file mode 100644
index 0000000..bcadb64
--- /dev/null
+++ b/video/video_quality_observer.cc
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/video_quality_observer.h"
+
+#include <algorithm>
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace {
+const int kMinFrameSamplesToDetectFreeze = 5;
+const int kMinCallDurationMs = 3000;
+const int kMinRequiredSamples = 1;
+const int kMinIncreaseForFreezeMs = 150;
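+// For example, with an average inter-frame delay of 33 ms an inter-frame
+// delay only counts as a freeze once it reaches
+// max(3 * 33, 33 + kMinIncreaseForFreezeMs) = 183 ms; the 150 ms floor keeps
+// ordinary jitter on high-fps streams from being reported as freezes.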
+const int kPixelsInHighResolution = 960 * 540; // CPU-adapted HD still counts.
+const int kPixelsInMediumResolution = 640 * 360;
+const int kBlockyQpThresholdVp8 = 70;
+const int kBlockyQpThresholdVp9 = 60; // TODO(ilnik): tune this value.
+// TODO(ilnik): Add H264/HEVC thresholds.
+} // namespace
+
+VideoQualityObserver::VideoQualityObserver(VideoContentType content_type)
+ : last_frame_decoded_ms_(-1),
+ num_frames_decoded_(0),
+ first_frame_decoded_ms_(-1),
+ last_frame_pixels_(0),
+ last_frame_qp_(0),
+ last_unfreeze_time_(0),
+ time_in_resolution_ms_(3, 0),
+ current_resolution_(Resolution::Low),
+ num_resolution_downgrades_(0),
+ time_in_blocky_video_ms_(0),
+ content_type_(content_type),
+ is_paused_(false) {}
+
+VideoQualityObserver::~VideoQualityObserver() {
+ UpdateHistograms();
+}
+
+void VideoQualityObserver::UpdateHistograms() {
+ // Don't report anything on an empty video stream.
+ if (num_frames_decoded_ == 0) {
+ return;
+ }
+
+ char log_stream_buf[2 * 1024];
+ rtc::SimpleStringBuilder log_stream(log_stream_buf);
+
+ if (last_frame_decoded_ms_ > last_unfreeze_time_) {
+ smooth_playback_durations_.Add(last_frame_decoded_ms_ -
+ last_unfreeze_time_);
+ }
+
+ std::string uma_prefix = videocontenttypehelpers::IsScreenshare(content_type_)
+ ? "WebRTC.Video.Screenshare"
+ : "WebRTC.Video";
+
+ auto mean_time_between_freezes =
+ smooth_playback_durations_.Avg(kMinRequiredSamples);
+ if (mean_time_between_freezes) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanTimeBetweenFreezesMs",
+ *mean_time_between_freezes);
+ log_stream << uma_prefix << ".MeanTimeBetweenFreezesMs "
+ << *mean_time_between_freezes << "\n";
+ }
+ auto avg_freeze_length = freezes_durations_.Avg(kMinRequiredSamples);
+ if (avg_freeze_length) {
+ RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".MeanFreezeDurationMs",
+ *avg_freeze_length);
+ log_stream << uma_prefix << ".MeanFreezeDurationMs " << *avg_freeze_length
+ << "\n";
+ }
+
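+  // The percentages and rates below are computed over the span from the
+  // first to the last decoded frame, not over wall-clock call time.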
+ int64_t call_duration_ms = last_frame_decoded_ms_ - first_frame_decoded_ms_;
+
+ if (call_duration_ms >= kMinCallDurationMs) {
+ int time_spent_in_hd_percentage = static_cast<int>(
+ time_in_resolution_ms_[Resolution::High] * 100 / call_duration_ms);
+ int time_with_blocky_video_percentage =
+ static_cast<int>(time_in_blocky_video_ms_ * 100 / call_duration_ms);
+
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInHdPercentage",
+ time_spent_in_hd_percentage);
+ log_stream << uma_prefix << ".TimeInHdPercentage "
+ << time_spent_in_hd_percentage << "\n";
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(uma_prefix + ".TimeInBlockyVideoPercentage",
+ time_with_blocky_video_percentage);
+ log_stream << uma_prefix << ".TimeInBlockyVideoPercentage "
+ << time_with_blocky_video_percentage << "\n";
+ RTC_HISTOGRAM_COUNTS_SPARSE_100(
+ uma_prefix + ".NumberResolutionDownswitchesPerMinute",
+ num_resolution_downgrades_ * 60000 / call_duration_ms);
+ log_stream << uma_prefix << ".NumberResolutionDownswitchesPerMinute "
+ << num_resolution_downgrades_ * 60000 / call_duration_ms << "\n";
+ }
+ RTC_LOG(LS_INFO) << log_stream.str();
+}
+
+void VideoQualityObserver::OnDecodedFrame(rtc::Optional<uint8_t> qp,
+ int width,
+ int height,
+ int64_t now_ms,
+ VideoCodecType codec) {
+ if (num_frames_decoded_ == 0) {
+ first_frame_decoded_ms_ = now_ms;
+ last_unfreeze_time_ = now_ms;
+ }
+
+ ++num_frames_decoded_;
+
+ if (!is_paused_ && num_frames_decoded_ > 1) {
+ // Process inter-frame delay.
+ int64_t interframe_delay_ms = now_ms - last_frame_decoded_ms_;
+ interframe_delays_.Add(interframe_delay_ms);
+ rtc::Optional<int> avg_interframe_delay =
+ interframe_delays_.Avg(kMinFrameSamplesToDetectFreeze);
+ // Check if it was a freeze.
+ if (num_frames_decoded_ > kMinFrameSamplesToDetectFreeze &&
+ interframe_delay_ms >=
+ std::max(3 * *avg_interframe_delay,
+ *avg_interframe_delay + kMinIncreaseForFreezeMs)) {
+ freezes_durations_.Add(interframe_delay_ms);
+ smooth_playback_durations_.Add(last_frame_decoded_ms_ -
+ last_unfreeze_time_);
+ last_unfreeze_time_ = now_ms;
+ } else {
+ // Only count inter-frame delay as playback time if there
+ // was no freeze.
+ time_in_resolution_ms_[current_resolution_] += interframe_delay_ms;
+ rtc::Optional<int> qp_blocky_threshold;
+ // TODO(ilnik): add other codec types when we have QP for them.
+ switch (codec) {
+ case kVideoCodecVP8:
+ qp_blocky_threshold = kBlockyQpThresholdVp8;
+ break;
+ case kVideoCodecVP9:
+ qp_blocky_threshold = kBlockyQpThresholdVp9;
+ break;
+ default:
+ qp_blocky_threshold = rtc::nullopt;
+ }
+ if (qp_blocky_threshold && qp.value_or(0) > *qp_blocky_threshold) {
+ time_in_blocky_video_ms_ += interframe_delay_ms;
+ }
+ }
+ }
+
+ if (is_paused_) {
+ // If the stream was paused since the previous frame, do not count the
+ // pause toward smooth playback. Explicitly count the part before it and
+ // start the new smooth playback interval from this frame.
+ is_paused_ = false;
+ if (last_frame_decoded_ms_ > last_unfreeze_time_) {
+ smooth_playback_durations_.Add(last_frame_decoded_ms_ -
+ last_unfreeze_time_);
+ }
+ last_unfreeze_time_ = now_ms;
+ }
+
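+  // Bucket the frame by pixel count using the thresholds defined at the top
+  // of this file (>= 960x540 counts as High, >= 640x360 as Medium).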
+ int64_t pixels = width * height;
+ if (pixels >= kPixelsInHighResolution) {
+ current_resolution_ = Resolution::High;
+ } else if (pixels >= kPixelsInMediumResolution) {
+ current_resolution_ = Resolution::Medium;
+ } else {
+ current_resolution_ = Resolution::Low;
+ }
+
+ if (pixels < last_frame_pixels_) {
+ ++num_resolution_downgrades_;
+ }
+
+ last_frame_decoded_ms_ = now_ms;
+ last_frame_qp_ = qp.value_or(0);
+ last_frame_pixels_ = pixels;
+}
+
+void VideoQualityObserver::OnStreamInactive() {
+ is_paused_ = true;
+}
+} // namespace webrtc
diff --git a/video/video_quality_observer.h b/video/video_quality_observer.h
new file mode 100644
index 0000000..cbeab43
--- /dev/null
+++ b/video/video_quality_observer.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_VIDEO_QUALITY_OBSERVER_H_
+#define VIDEO_VIDEO_QUALITY_OBSERVER_H_
+
+#include <stdint.h>
+#include <vector>
+
+#include "api/optional.h"
+#include "api/video/video_content_type.h"
+#include "common_types.h" // NOLINT(build/include)
+#include "rtc_base/numerics/sample_counter.h"
+
+namespace webrtc {
+
+// Calculates spatial and temporal quality metrics and reports them to UMA
+// stats.
+class VideoQualityObserver {
+ public:
+  // The codec-specific QP thresholds used to classify frames as blocky are
+  // defined in video_quality_observer.cc.
+ explicit VideoQualityObserver(VideoContentType content_type);
+ ~VideoQualityObserver();
+
+ void OnDecodedFrame(rtc::Optional<uint8_t> qp,
+ int width,
+ int height,
+ int64_t now_ms,
+ VideoCodecType codec);
+
+ void OnStreamInactive();
+
+ private:
+ void UpdateHistograms();
+
+ enum Resolution {
+ Low = 0,
+ Medium = 1,
+ High = 2,
+ };
+
+ int64_t last_frame_decoded_ms_;
+ int64_t num_frames_decoded_;
+ int64_t first_frame_decoded_ms_;
+ int64_t last_frame_pixels_;
+ uint8_t last_frame_qp_;
+  // Decode time of the frame at which the current smooth-playback interval
+  // started (the first frame, or the first frame after a freeze or pause).
+ int64_t last_unfreeze_time_;
+ rtc::SampleCounter interframe_delays_;
+  // An inter-frame delay is counted as a freeze if it's significantly longer
+  // than the average inter-frame delay.
+ rtc::SampleCounter freezes_durations_;
+ // Time between freezes.
+ rtc::SampleCounter smooth_playback_durations_;
+  // Counters for time spent in different resolutions. The time between two
+  // consecutive frames is attributed to the bin of the first frame's
+  // resolution.
+ std::vector<int64_t> time_in_resolution_ms_;
+ // Resolution of the last decoded frame. Resolution enum is used as an index.
+ Resolution current_resolution_;
+ int num_resolution_downgrades_;
+  // Analogous to the resolution counters: time spent in high-QP (blocky)
+  // video.
+ int64_t time_in_blocky_video_ms_;
+ // Content type of the last decoded frame.
+ VideoContentType content_type_;
+ bool is_paused_;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_VIDEO_QUALITY_OBSERVER_H_
diff --git a/video/video_stream_decoder.cc b/video/video_stream_decoder.cc
index e766767c..5557c6c 100644
--- a/video/video_stream_decoder.cc
+++ b/video/video_stream_decoder.cc
@@ -77,7 +77,8 @@
int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
rtc::Optional<uint8_t> qp,
VideoContentType content_type) {
- receive_stats_callback_->OnDecodedFrame(qp, content_type);
+ receive_stats_callback_->OnDecodedFrame(qp, video_frame.width(),
+ video_frame.height(), content_type);
incoming_video_stream_->OnFrame(video_frame);
return 0;
}