Reland "Remove sent framerate and bitrate calculations from MediaOptimization."

TBR=sprang@webrtc.org

This is a reland of af721b72cc1bdc5d945629ad78fbea701b6f82b9

Original change's description:
> Remove sent framerate and bitrate calculations from MediaOptimization.
> 
> Add RateTracker for sent framerate and bitrate in SendStatisticsProxy.
> 
> Store sent frame info in map to solve potential issue where sent framerate statistics could be
> incorrect.
> 
> Bug: webrtc:8375
> Change-Id: I4a6e3956013438a711b8c2e73a8cd90c52dd1210
> Reviewed-on: https://webrtc-review.googlesource.com/7880
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Commit-Queue: Åsa Persson <asapersson@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#20225}

Bug: webrtc:8375
Change-Id: I06ea90ae8646ba11ddd8ddceb82ea82d75ae2109
Reviewed-on: https://webrtc-review.googlesource.com/11320
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20308}
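
In short, a condensed sketch of the new stats path (names and calls taken
from the diff below; not a complete implementation):

  // SendStatisticsProxy owns two rtc::RateTracker members with 100 ms buckets:
  //   media_byte_rate_tracker_(kBucketSizeMs, kBucketCount)
  //   encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount)

  // OnSendEncodedImage(): bytes are always counted; a frame is counted only
  // once per RTP timestamp, since simulcast layers share a timestamp
  // (tracked in EncodedFrameMap).
  media_byte_rate_tracker_.AddSamples(encoded_image._length);
  if (uma_container_->InsertEncodedFrame(encoded_image))
    encoded_frame_rate_tracker_.AddSamples(1);

  // GetStats(): the reported rates are derived from the trackers.
  stats_.encode_frame_rate = round(encoded_frame_rate_tracker_.ComputeRate());
  stats_.media_bitrate_bps = media_byte_rate_tracker_.ComputeRate() * 8;
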
diff --git a/modules/video_coding/media_optimization.cc b/modules/video_coding/media_optimization.cc
index 4cf2fd3..fce1be5 100644
--- a/modules/video_coding/media_optimization.cc
+++ b/modules/video_coding/media_optimization.cc
@@ -13,26 +13,10 @@
 #include <limits>
 
 #include "modules/video_coding/utility/frame_dropper.h"
-#include "rtc_base/logging.h"
 #include "system_wrappers/include/clock.h"
 
 namespace webrtc {
 namespace media_optimization {
-const int kMsPerSec = 1000;
-const int kBitsPerByte = 8;
-
-struct MediaOptimization::EncodedFrameSample {
-  EncodedFrameSample(size_t size_bytes,
-                     uint32_t timestamp,
-                     int64_t time_complete_ms)
-      : size_bytes(size_bytes),
-        timestamp(timestamp),
-        time_complete_ms(time_complete_ms) {}
-
-  size_t size_bytes;
-  uint32_t timestamp;
-  int64_t time_complete_ms;
-};
 
 MediaOptimization::MediaOptimization(Clock* clock)
     : clock_(clock),
@@ -40,9 +24,7 @@
       user_frame_rate_(0),
       frame_dropper_(new FrameDropper),
       video_target_bitrate_(0),
-      incoming_frame_rate_(0),
-      encoded_frame_samples_(),
-      avg_sent_framerate_(0) {
+      incoming_frame_rate_(0) {
   memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
 }
 
@@ -58,7 +40,6 @@
   frame_dropper_->SetRates(0, 0);
   video_target_bitrate_ = 0;
   user_frame_rate_ = 0;
-  encoded_frame_samples_.clear();
 }
 
 void MediaOptimization::SetEncodingData(int32_t max_bit_rate,
@@ -117,51 +98,14 @@
   return framerate;
 }
 
-uint32_t MediaOptimization::SentFrameRate() {
-  rtc::CritScope lock(&crit_sect_);
-  return SentFrameRateInternal();
-}
-
-uint32_t MediaOptimization::SentFrameRateInternal() {
-  PurgeOldFrameSamples(clock_->TimeInMilliseconds() - kBitrateAverageWinMs);
-  UpdateSentFramerate();
-  return avg_sent_framerate_;
-}
-
-uint32_t MediaOptimization::SentBitRate() {
-  rtc::CritScope lock(&crit_sect_);
-  PurgeOldFrameSamples(clock_->TimeInMilliseconds() - kBitrateAverageWinMs);
-  size_t sent_bytes = 0;
-  for (auto& frame_sample : encoded_frame_samples_) {
-    sent_bytes += frame_sample.size_bytes;
-  }
-  return sent_bytes * kBitsPerByte * kMsPerSec / kBitrateAverageWinMs;
-}
-
 int32_t MediaOptimization::UpdateWithEncodedData(
     const EncodedImage& encoded_image) {
   size_t encoded_length = encoded_image._length;
-  uint32_t timestamp = encoded_image._timeStamp;
   rtc::CritScope lock(&crit_sect_);
-  const int64_t now_ms = clock_->TimeInMilliseconds();
-  PurgeOldFrameSamples(now_ms - kBitrateAverageWinMs);
-  if (encoded_frame_samples_.size() > 0 &&
-      encoded_frame_samples_.back().timestamp == timestamp) {
-    // Frames having the same timestamp are generated from the same input
-    // frame. We don't want to double count them, but only increment the
-    // size_bytes.
-    encoded_frame_samples_.back().size_bytes += encoded_length;
-    encoded_frame_samples_.back().time_complete_ms = now_ms;
-  } else {
-    encoded_frame_samples_.push_back(
-        EncodedFrameSample(encoded_length, timestamp, now_ms));
-  }
-  UpdateSentFramerate();
   if (encoded_length > 0) {
     const bool delta_frame = encoded_image._frameType != kVideoFrameKey;
     frame_dropper_->Fill(encoded_length, delta_frame);
   }
-
   return VCM_OK;
 }
 
@@ -192,31 +136,6 @@
   ProcessIncomingFrameRate(now);
 }
 
-void MediaOptimization::PurgeOldFrameSamples(int64_t threshold_ms) {
-  while (!encoded_frame_samples_.empty()) {
-    if (encoded_frame_samples_.front().time_complete_ms < threshold_ms) {
-      encoded_frame_samples_.pop_front();
-    } else {
-      break;
-    }
-  }
-}
-
-void MediaOptimization::UpdateSentFramerate() {
-  if (encoded_frame_samples_.size() <= 1) {
-    avg_sent_framerate_ = encoded_frame_samples_.size();
-    return;
-  }
-  int denom = encoded_frame_samples_.back().timestamp -
-              encoded_frame_samples_.front().timestamp;
-  if (denom > 0) {
-    avg_sent_framerate_ =
-        (90000 * (encoded_frame_samples_.size() - 1) + denom / 2) / denom;
-  } else {
-    avg_sent_framerate_ = encoded_frame_samples_.size();
-  }
-}
-
 // Allowing VCM to keep track of incoming frame rate.
 void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
   int32_t num = 0;
diff --git a/modules/video_coding/media_optimization.h b/modules/video_coding/media_optimization.h
index fae4877..c72c0a5 100644
--- a/modules/video_coding/media_optimization.h
+++ b/modules/video_coding/media_optimization.h
@@ -57,22 +57,12 @@
 
   // InputFrameRate 0 = no frame rate estimate available.
   uint32_t InputFrameRate();
-  uint32_t SentFrameRate();
-  uint32_t SentBitRate();
 
  private:
   enum { kFrameCountHistorySize = 90 };
   enum { kFrameHistoryWinMs = 2000 };
-  enum { kBitrateAverageWinMs = 1000 };
-
-  struct EncodedFrameSample;
-  typedef std::list<EncodedFrameSample> FrameSampleList;
 
   void UpdateIncomingFrameRate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-  void PurgeOldFrameSamples(int64_t threshold_ms)
-      RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-  void UpdateSentFramerate() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
   void ProcessIncomingFrameRate(int64_t now)
       RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
 
@@ -88,8 +78,6 @@
 
   uint32_t InputFrameRateInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
 
-  uint32_t SentFrameRateInternal() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
   // Protect all members.
   rtc::CriticalSection crit_sect_;
 
@@ -101,9 +89,6 @@
   float incoming_frame_rate_ RTC_GUARDED_BY(crit_sect_);
   int64_t incoming_frame_times_[kFrameCountHistorySize] RTC_GUARDED_BY(
       crit_sect_);
-  std::list<EncodedFrameSample> encoded_frame_samples_
-      RTC_GUARDED_BY(crit_sect_);
-  uint32_t avg_sent_framerate_ RTC_GUARDED_BY(crit_sect_);
 };
 }  // namespace media_optimization
 }  // namespace webrtc
diff --git a/modules/video_coding/video_sender.cc b/modules/video_coding/video_sender.cc
index fe8af72..fd35245 100644
--- a/modules/video_coding/video_sender.cc
+++ b/modules/video_coding/video_sender.cc
@@ -52,14 +52,15 @@
 
 VideoSender::~VideoSender() {}
 
+// TODO(asapersson): Remove _sendStatsTimer and send_stats_callback_.
 void VideoSender::Process() {
   if (_sendStatsTimer.TimeUntilProcess() == 0) {
     // |_sendStatsTimer.Processed()| must be called. Otherwise
     // VideoSender::Process() will be called in an infinite loop.
     _sendStatsTimer.Processed();
     if (send_stats_callback_) {
-      uint32_t bitRate = _mediaOpt.SentBitRate();
-      uint32_t frameRate = _mediaOpt.SentFrameRate();
+      uint32_t bitRate = 0;
+      uint32_t frameRate = 0;
       send_stats_callback_->SendStatistics(bitRate, frameRate);
     }
   }
diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc
index 4042a51..d72b741 100644
--- a/video/send_statistics_proxy.cc
+++ b/video/send_statistics_proxy.cc
@@ -25,6 +25,10 @@
 namespace webrtc {
 namespace {
 const float kEncodeTimeWeigthFactor = 0.5f;
+const size_t kMaxEncodedFrameMapSize = 150;
+const int64_t kMaxEncodedFrameWindowMs = 800;
+const int64_t kBucketSizeMs = 100;
+const size_t kBucketCount = 10;
 
 const char kVp8ForcedFallbackEncoderFieldTrial[] =
     "WebRTC-VP8-Forced-Fallback-Encoder";
@@ -120,10 +124,11 @@
       min_first_fallback_interval_ms_(GetFallbackIntervalFromFieldTrial()),
       content_type_(content_type),
       start_ms_(clock->TimeInMilliseconds()),
-      last_sent_frame_timestamp_(0),
       encode_time_(kEncodeTimeWeigthFactor),
       quality_downscales_(-1),
       cpu_downscales_(-1),
+      media_byte_rate_tracker_(kBucketSizeMs, kBucketCount),
+      encoded_frame_rate_tracker_(kBucketSizeMs, kBucketCount),
       uma_container_(
           new UmaSamplesContainer(GetUmaPrefix(content_type_), stats_, clock)) {
 }
@@ -146,8 +151,6 @@
     Clock* const clock)
     : uma_prefix_(prefix),
       clock_(clock),
-      max_sent_width_per_timestamp_(0),
-      max_sent_height_per_timestamp_(0),
       input_frame_rate_tracker_(100, 10u),
       input_fps_counter_(clock, nullptr, true),
       sent_fps_counter_(clock, nullptr, true),
@@ -186,6 +189,44 @@
   }
 }
 
+void SendStatisticsProxy::UmaSamplesContainer::RemoveOld(int64_t now_ms) {
+  while (!encoded_frames_.empty()) {
+    auto it = encoded_frames_.begin();
+    if (now_ms - it->second.send_ms < kMaxEncodedFrameWindowMs)
+      break;
+
+    // Use max per timestamp.
+    sent_width_counter_.Add(it->second.max_width);
+    sent_height_counter_.Add(it->second.max_height);
+    encoded_frames_.erase(it);
+  }
+}
+
+bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame(
+    const EncodedImage& encoded_frame) {
+  int64_t now_ms = clock_->TimeInMilliseconds();
+  RemoveOld(now_ms);
+  if (encoded_frames_.size() > kMaxEncodedFrameMapSize) {
+    encoded_frames_.clear();
+  }
+
+  auto it = encoded_frames_.find(encoded_frame._timeStamp);
+  if (it == encoded_frames_.end()) {
+    // First frame with this timestamp.
+    encoded_frames_.insert(std::make_pair(
+        encoded_frame._timeStamp, Frame(now_ms, encoded_frame._encodedWidth,
+                                        encoded_frame._encodedHeight)));
+    sent_fps_counter_.Add(1);
+    return true;
+  }
+
+  it->second.max_width =
+      std::max(it->second.max_width, encoded_frame._encodedWidth);
+  it->second.max_height =
+      std::max(it->second.max_height, encoded_frame._encodedHeight);
+  return false;
+}
+
 void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
     const VideoSendStream::Config::Rtp& rtp_config,
     const VideoSendStream::Stats& current_stats) {
@@ -535,13 +576,6 @@
   }
 }
 
-void SendStatisticsProxy::OnEncoderStatsUpdate(uint32_t framerate,
-                                               uint32_t bitrate) {
-  rtc::CritScope lock(&crit_);
-  stats_.encode_frame_rate = framerate;
-  stats_.media_bitrate_bps = bitrate;
-}
-
 void SendStatisticsProxy::OnEncodedFrameTimeMeasured(
     int encode_time_ms,
     const CpuOveruseMetrics& metrics) {
@@ -596,6 +630,8 @@
       content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo
           ? VideoContentType::UNSPECIFIED
           : VideoContentType::SCREENSHARE;
+  stats_.encode_frame_rate = round(encoded_frame_rate_tracker_.ComputeRate());
+  stats_.media_bitrate_bps = media_byte_rate_tracker_.ComputeRate() * 8;
   return stats_;
 }
 
@@ -802,31 +838,14 @@
     }
   }
 
-  // TODO(asapersson): This is incorrect if simulcast layers are encoded on
-  // different threads and there is no guarantee that one frame of all layers
-  // are encoded before the next start.
-  if (last_sent_frame_timestamp_ > 0 &&
-      encoded_image._timeStamp != last_sent_frame_timestamp_) {
-    uma_container_->sent_fps_counter_.Add(1);
-    uma_container_->sent_width_counter_.Add(
-        uma_container_->max_sent_width_per_timestamp_);
-    uma_container_->sent_height_counter_.Add(
-        uma_container_->max_sent_height_per_timestamp_);
-    uma_container_->max_sent_width_per_timestamp_ = 0;
-    uma_container_->max_sent_height_per_timestamp_ = 0;
-  }
-  last_sent_frame_timestamp_ = encoded_image._timeStamp;
-  uma_container_->max_sent_width_per_timestamp_ =
-      std::max(uma_container_->max_sent_width_per_timestamp_,
-               static_cast<int>(encoded_image._encodedWidth));
-  uma_container_->max_sent_height_per_timestamp_ =
-      std::max(uma_container_->max_sent_height_per_timestamp_,
-               static_cast<int>(encoded_image._encodedHeight));
+  media_byte_rate_tracker_.AddSamples(encoded_image._length);
+  if (uma_container_->InsertEncodedFrame(encoded_image))
+    encoded_frame_rate_tracker_.AddSamples(1);
 }
 
 int SendStatisticsProxy::GetSendFrameRate() const {
   rtc::CritScope lock(&crit_);
-  return stats_.encode_frame_rate;
+  return round(encoded_frame_rate_tracker_.ComputeRate());
 }
 
 void SendStatisticsProxy::OnIncomingFrame(int width, int height) {
diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h
index 3516830..fb133de 100644
--- a/video/send_statistics_proxy.h
+++ b/video/send_statistics_proxy.h
@@ -68,7 +68,6 @@
       const VideoStreamEncoder::AdaptCounts& cpu_counts,
       const VideoStreamEncoder::AdaptCounts& quality_counts);
 
-  void OnEncoderStatsUpdate(uint32_t framerate, uint32_t bitrate);
   void OnSuspendChange(bool is_suspended);
   void OnInactiveSsrc(uint32_t ssrc);
 
@@ -170,6 +169,25 @@
     SampleCounter vp9;   // QP range: 0-255.
     SampleCounter h264;  // QP range: 0-51.
   };
+
+  // Map holding encoded frames (mapped by timestamp).
+  // If simulcast layers are encoded on different threads, there is no guarantee
+  // that one frame from each layer is encoded before the next frame starts.
+  struct TimestampOlderThan {
+    bool operator()(uint32_t ts1, uint32_t ts2) const {
+      return IsNewerTimestamp(ts2, ts1);
+    }
+  };
+  struct Frame {
+    Frame(int64_t send_ms, uint32_t width, uint32_t height)
+        : send_ms(send_ms), max_width(width), max_height(height) {}
+    const int64_t
+        send_ms;          // Time when first frame with this timestamp is sent.
+    uint32_t max_width;   // Max width with this timestamp.
+    uint32_t max_height;  // Max height with this timestamp.
+  };
+  typedef std::map<uint32_t, Frame, TimestampOlderThan> EncodedFrameMap;
+
   void PurgeOldStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
   VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
       RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
@@ -192,11 +210,12 @@
   VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(crit_);
   const int64_t start_ms_;
   VideoSendStream::Stats stats_ RTC_GUARDED_BY(crit_);
-  uint32_t last_sent_frame_timestamp_ RTC_GUARDED_BY(crit_);
   std::map<uint32_t, StatsUpdateTimes> update_times_ RTC_GUARDED_BY(crit_);
   rtc::ExpFilter encode_time_ RTC_GUARDED_BY(crit_);
   int quality_downscales_ RTC_GUARDED_BY(crit_);
   int cpu_downscales_ RTC_GUARDED_BY(crit_);
+  rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(crit_);
+  rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(crit_);
 
   // Contains stats used for UMA histograms. These stats will be reset if
   // content type changes between real-time video and screenshare, since these
@@ -212,10 +231,11 @@
 
     void InitializeBitrateCounters(const VideoSendStream::Stats& stats);
 
+    bool InsertEncodedFrame(const EncodedImage& encoded_frame);
+    void RemoveOld(int64_t now_ms);
+
     const std::string uma_prefix_;
     Clock* const clock_;
-    int max_sent_width_per_timestamp_;
-    int max_sent_height_per_timestamp_;
     SampleCounter input_width_counter_;
     SampleCounter input_height_counter_;
     SampleCounter sent_width_counter_;
@@ -248,6 +268,7 @@
     FallbackEncoderInfo fallback_info_;
     ReportBlockStats report_block_stats_;
     const VideoSendStream::Stats start_stats_;
+    EncodedFrameMap encoded_frames_;
 
     std::map<int, QpCounters>
         qp_counters_;  // QP counters mapped by spatial idx.
diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc
index af9f674..56184b9 100644
--- a/video/send_statistics_proxy_unittest.cc
+++ b/video/send_statistics_proxy_unittest.cc
@@ -185,17 +185,6 @@
   ExpectEqual(expected_, stats);
 }
 
-TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) {
-  int media_bitrate_bps = 500;
-  int encode_fps = 29;
-
-  statistics_proxy_->OnEncoderStatsUpdate(encode_fps, media_bitrate_bps);
-
-  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
-  EXPECT_EQ(media_bitrate_bps, stats.media_bitrate_bps);
-  EXPECT_EQ(encode_fps, stats.encode_frame_rate);
-}
-
 TEST_F(SendStatisticsProxyTest, Suspended) {
   // Verify that the value is false by default.
   EXPECT_FALSE(statistics_proxy_->GetStats().suspended);
@@ -819,13 +808,36 @@
 }
 
 TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) {
+  const int64_t kMaxEncodedFrameWindowMs = 800;
+  const int kFps = 20;
+  const int kNumFramesPerWindow = kFps * kMaxEncodedFrameWindowMs / 1000;
+  const int kMinSamples =  // Sample added when removed from EncodedFrameMap.
+      SendStatisticsProxy::kMinRequiredMetricsSamples + kNumFramesPerWindow;
   EncodedImage encoded_image;
-  encoded_image._encodedWidth = kWidth;
-  encoded_image._encodedHeight = kHeight;
-  for (int i = 0; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
-    encoded_image._timeStamp = i + 1;
+
+  // Not enough samples, stats should not be updated.
+  for (int i = 0; i < kMinSamples - 1; ++i) {
+    fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
+    ++encoded_image._timeStamp;
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }
+  SetUp();  // Reset stats proxy also causes histograms to be reported.
+  EXPECT_EQ(0, metrics::NumSamples("WebRTC.Video.SentWidthInPixels"));
+  EXPECT_EQ(0, metrics::NumSamples("WebRTC.Video.SentHeightInPixels"));
+
+  // Enough samples, max resolution per frame should be reported.
+  encoded_image._timeStamp = 0xfffffff0;  // Will wrap.
+  for (int i = 0; i < kMinSamples; ++i) {
+    fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
+    ++encoded_image._timeStamp;
+    encoded_image._encodedWidth = kWidth;
+    encoded_image._encodedHeight = kHeight;
+    statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
+    encoded_image._encodedWidth = kWidth / 2;
+    encoded_image._encodedHeight = kHeight / 2;
+    statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
+  }
+
   statistics_proxy_.reset();
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.SentWidthInPixels"));
   EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.SentWidthInPixels", kWidth));
@@ -851,9 +863,11 @@
   const int kFps = 20;
   const int kMinPeriodicSamples = 6;
   int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000 + 1;
-  for (int i = 0; i <= frames; ++i) {
+  for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
-    encoded_image._timeStamp = i + 1;
+    ++encoded_image._timeStamp;
+    statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
+    // Frame with same timestamp should not be counted.
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
   }
   statistics_proxy_.reset();
@@ -890,7 +904,7 @@
   const int kSuspendTimeMs = 10000;
   const int kMinPeriodicSamples = 6;
   int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000;
-  for (int i = 0; i <= frames; ++i) {
+  for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
     encoded_image._timeStamp = i + 1;
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
@@ -899,7 +913,7 @@
   statistics_proxy_->OnSuspendChange(true);
   fake_clock_.AdvanceTimeMilliseconds(kSuspendTimeMs);
 
-  for (int i = 0; i <= frames; ++i) {
+  for (int i = 0; i < frames; ++i) {
     fake_clock_.AdvanceTimeMilliseconds(1000 / kFps);
     encoded_image._timeStamp = i + 1;
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index e37050c..f839665 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -375,12 +375,13 @@
   RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
 };
 
-VideoStreamEncoder::VideoStreamEncoder(uint32_t number_of_cores,
-                       SendStatisticsProxy* stats_proxy,
-                       const VideoSendStream::Config::EncoderSettings& settings,
-                       rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
-                       EncodedFrameObserver* encoder_timing,
-                       std::unique_ptr<OveruseFrameDetector> overuse_detector)
+VideoStreamEncoder::VideoStreamEncoder(
+    uint32_t number_of_cores,
+    SendStatisticsProxy* stats_proxy,
+    const VideoSendStream::Config::EncoderSettings& settings,
+    rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
+    EncodedFrameObserver* encoder_timing,
+    std::unique_ptr<OveruseFrameDetector> overuse_detector)
     : shutdown_event_(true /* manual_reset */, false),
       number_of_cores_(number_of_cores),
       initial_rampup_(0),
@@ -388,7 +389,7 @@
       sink_(nullptr),
       settings_(settings),
       codec_type_(PayloadStringToCodecType(settings.payload_name)),
-      video_sender_(Clock::GetRealTimeClock(), this, this),
+      video_sender_(Clock::GetRealTimeClock(), this, nullptr),
       overuse_detector_(
           overuse_detector.get()
               ? overuse_detector.release()
@@ -868,12 +869,6 @@
   });
 }
 
-void VideoStreamEncoder::SendStatistics(uint32_t bit_rate,
-                                        uint32_t frame_rate) {
-  RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
-  stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate);
-}
-
 void VideoStreamEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {
   if (!encoder_queue_.IsCurrent()) {
     encoder_queue_.PostTask(
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index 3cc46a3..1e00397 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -50,7 +50,6 @@
 //  Call Stop() when done.
 class VideoStreamEncoder : public rtc::VideoSinkInterface<VideoFrame>,
                            public EncodedImageCallback,
-                           public VCMSendStatisticsCallback,
                            public AdaptationObserverInterface {
  public:
   // Interface for receiving encoded video frames and notifications about
@@ -161,10 +160,6 @@
   // Implements VideoSinkInterface.
   void OnFrame(const VideoFrame& video_frame) override;
 
-  // Implements VideoSendStatisticsCallback.
-  void SendStatistics(uint32_t bit_rate,
-                      uint32_t frame_rate) override;
-
   void EncodeVideoFrame(const VideoFrame& frame,
                         int64_t time_when_posted_in_ms);