Refactor LossBasedBandwidthEstimation

- Add reset functionality based on loss history.
- Move BWE ramp-up/down into SendSideBandwidthEstimation::UpdateEstimate to align with other estimators.


Bug: None
Change-Id: Ic13795c7ed1852b38baf8359c5c9f4dae6e9ea04
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/207427
Commit-Queue: Per Kjellander <perkj@webrtc.org>
Reviewed-by: Christoffer Rodbro <crodbro@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33288}
diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
index 0d36fbe..2211d26 100644
--- a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
+++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc
@@ -22,6 +22,10 @@
 namespace {
 const char kBweLossBasedControl[] = "WebRTC-Bwe-LossBasedControl";
 
+// Expecting RTCP feedback to be sent at roughly 1-second intervals, a 5-second
+// gap indicates a channel outage.
+constexpr TimeDelta kMaxRtcpFeedbackInterval = TimeDelta::Millis(5000);
+
 // Increase slower when RTT is high.
 double GetIncreaseFactor(const LossBasedControlConfig& config, TimeDelta rtt) {
   // Clamp the RTT
@@ -94,6 +98,8 @@
                                       DataRate::KilobitsPerSec(0.5)),
       loss_bandwidth_balance_decrease("balance_decr",
                                       DataRate::KilobitsPerSec(4)),
+      loss_bandwidth_balance_reset("balance_reset",
+                                   DataRate::KilobitsPerSec(0.1)),
       loss_bandwidth_balance_exponent("exponent", 0.5),
       allow_resets("resets", false),
       decrease_interval("decr_intvl", TimeDelta::Millis(300)),
@@ -103,8 +109,8 @@
        &increase_high_rtt, &decrease_factor, &loss_window, &loss_max_window,
        &acknowledged_rate_max_window, &increase_offset,
        &loss_bandwidth_balance_increase, &loss_bandwidth_balance_decrease,
-       &loss_bandwidth_balance_exponent, &allow_resets, &decrease_interval,
-       &loss_report_timeout},
+       &loss_bandwidth_balance_reset, &loss_bandwidth_balance_exponent,
+       &allow_resets, &decrease_interval, &loss_report_timeout},
       key_value_config->Lookup(kBweLossBasedControl));
 }
 LossBasedControlConfig::LossBasedControlConfig(const LossBasedControlConfig&) =
@@ -170,9 +176,14 @@
   }
 }
 
-void LossBasedBandwidthEstimation::Update(Timestamp at_time,
-                                          DataRate min_bitrate,
-                                          TimeDelta last_round_trip_time) {
+DataRate LossBasedBandwidthEstimation::Update(Timestamp at_time,
+                                              DataRate min_bitrate,
+                                              DataRate wanted_bitrate,
+                                              TimeDelta last_round_trip_time) {
+  if (loss_based_bitrate_.IsZero()) {
+    loss_based_bitrate_ = wanted_bitrate;
+  }
+
   // Only increase if loss has been low for some time.
   const double loss_estimate_for_increase = average_loss_max_;
   // Avoid multiple decreases from averaging over one loss spike.
@@ -182,8 +193,15 @@
       !has_decreased_since_last_loss_report_ &&
       (at_time - time_last_decrease_ >=
        last_round_trip_time + config_.decrease_interval);
+  // If packet loss reports are too old, don't increase the bitrate.
+  const bool loss_report_valid =
+      at_time - last_loss_packet_report_ < 1.2 * kMaxRtcpFeedbackInterval;
 
-  if (loss_estimate_for_increase < loss_increase_threshold()) {
+  if (loss_report_valid && config_.allow_resets &&
+      loss_estimate_for_increase < loss_reset_threshold()) {
+    loss_based_bitrate_ = wanted_bitrate;
+  } else if (loss_report_valid &&
+             loss_estimate_for_increase < loss_increase_threshold()) {
     // Increase bitrate by RTT-adaptive ratio.
     DataRate new_increased_bitrate =
         min_bitrate * GetIncreaseFactor(config_, last_round_trip_time) +
@@ -209,14 +227,21 @@
       loss_based_bitrate_ = new_decreased_bitrate;
     }
   }
+  return loss_based_bitrate_;
 }
 
-void LossBasedBandwidthEstimation::Reset(DataRate bitrate) {
+void LossBasedBandwidthEstimation::Initialize(DataRate bitrate) {
   loss_based_bitrate_ = bitrate;
   average_loss_ = 0;
   average_loss_max_ = 0;
 }
 
+double LossBasedBandwidthEstimation::loss_reset_threshold() const {
+  return LossFromBitrate(loss_based_bitrate_,
+                         config_.loss_bandwidth_balance_reset,
+                         config_.loss_bandwidth_balance_exponent);
+}
+
 double LossBasedBandwidthEstimation::loss_increase_threshold() const {
   return LossFromBitrate(loss_based_bitrate_,
                          config_.loss_bandwidth_balance_increase,
@@ -232,14 +257,4 @@
 DataRate LossBasedBandwidthEstimation::decreased_bitrate() const {
   return config_.decrease_factor * acknowledged_bitrate_max_;
 }
-
-void LossBasedBandwidthEstimation::MaybeReset(DataRate bitrate) {
-  if (config_.allow_resets)
-    Reset(bitrate);
-}
-
-void LossBasedBandwidthEstimation::SetInitialBitrate(DataRate bitrate) {
-  Reset(bitrate);
-}
-
 }  // namespace webrtc
diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h
index 2032c3e..20ff092 100644
--- a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h
+++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h
@@ -39,24 +39,34 @@
   FieldTrialParameter<DataRate> increase_offset;
   FieldTrialParameter<DataRate> loss_bandwidth_balance_increase;
   FieldTrialParameter<DataRate> loss_bandwidth_balance_decrease;
+  FieldTrialParameter<DataRate> loss_bandwidth_balance_reset;
   FieldTrialParameter<double> loss_bandwidth_balance_exponent;
   FieldTrialParameter<bool> allow_resets;
   FieldTrialParameter<TimeDelta> decrease_interval;
   FieldTrialParameter<TimeDelta> loss_report_timeout;
 };
 
+// Estimates an upper BWE limit based on loss.
+// It requires knowledge about lost packets and acknowledged bitrate;
+// i.e., this class requires transport feedback.
 class LossBasedBandwidthEstimation {
  public:
   explicit LossBasedBandwidthEstimation(
       const WebRtcKeyValueConfig* key_value_config);
-  void Update(Timestamp at_time,
-              DataRate min_bitrate,
-              TimeDelta last_round_trip_time);
+  // Returns the new estimate.
+  DataRate Update(Timestamp at_time,
+                  DataRate min_bitrate,
+                  DataRate wanted_bitrate,
+                  TimeDelta last_round_trip_time);
   void UpdateAcknowledgedBitrate(DataRate acknowledged_bitrate,
                                  Timestamp at_time);
-  void MaybeReset(DataRate bitrate);
-  void SetInitialBitrate(DataRate bitrate);
+  void Initialize(DataRate bitrate);
   bool Enabled() const { return config_.enabled; }
+  // Returns true if LossBasedBandwidthEstimation is enabled and has
+  // received loss statistics; i.e., this class requires transport feedback.
+  bool InUse() const {
+    return Enabled() && last_loss_packet_report_.IsFinite();
+  }
   void UpdateLossStatistics(const std::vector<PacketResult>& packet_results,
                             Timestamp at_time);
   DataRate GetEstimate() const { return loss_based_bitrate_; }
@@ -66,9 +76,11 @@
   void Reset(DataRate bitrate);
   double loss_increase_threshold() const;
   double loss_decrease_threshold() const;
+  double loss_reset_threshold() const;
+
   DataRate decreased_bitrate() const;
 
-  LossBasedControlConfig config_;
+  const LossBasedControlConfig config_;
   double average_loss_;
   double average_loss_max_;
   DataRate loss_based_bitrate_;
diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
index f459464..a2865d9 100644
--- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
+++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
@@ -288,9 +288,6 @@
   RTC_DCHECK_GT(bitrate, DataRate::Zero());
   // Reset to avoid being capped by the estimate.
   delay_based_limit_ = DataRate::PlusInfinity();
-  if (loss_based_bandwidth_estimation_.Enabled()) {
-    loss_based_bandwidth_estimation_.MaybeReset(bitrate);
-  }
   UpdateTargetBitrate(bitrate, at_time);
   // Clear last sent bitrate history so the new value can be used directly
   // and not capped.
@@ -463,7 +460,7 @@
     if (delay_based_limit_.IsFinite())
       new_bitrate = std::max(delay_based_limit_, new_bitrate);
     if (loss_based_bandwidth_estimation_.Enabled()) {
-      loss_based_bandwidth_estimation_.SetInitialBitrate(new_bitrate);
+      loss_based_bandwidth_estimation_.Initialize(new_bitrate);
     }
 
     if (new_bitrate != current_target_) {
@@ -486,10 +483,10 @@
     return;
   }
 
-  if (loss_based_bandwidth_estimation_.Enabled()) {
-    loss_based_bandwidth_estimation_.Update(
-        at_time, min_bitrate_history_.front().second, last_round_trip_time_);
-    DataRate new_bitrate = MaybeRampupOrBackoff(current_target_, at_time);
+  if (loss_based_bandwidth_estimation_.InUse()) {
+    DataRate new_bitrate = loss_based_bandwidth_estimation_.Update(
+        at_time, min_bitrate_history_.front().second, delay_based_limit_,
+        last_round_trip_time_);
     UpdateTargetBitrate(new_bitrate, at_time);
     return;
   }
@@ -586,29 +583,11 @@
   min_bitrate_history_.push_back(std::make_pair(at_time, current_target_));
 }
 
-DataRate SendSideBandwidthEstimation::MaybeRampupOrBackoff(DataRate new_bitrate,
-                                                           Timestamp at_time) {
-  // TODO(crodbro): reuse this code in UpdateEstimate instead of current
-  // inlining of very similar functionality.
-  const TimeDelta time_since_loss_packet_report =
-      at_time - last_loss_packet_report_;
-  if (time_since_loss_packet_report < 1.2 * kMaxRtcpFeedbackInterval) {
-    new_bitrate = min_bitrate_history_.front().second * 1.08;
-    new_bitrate += DataRate::BitsPerSec(1000);
-  }
-  return new_bitrate;
-}
-
 DataRate SendSideBandwidthEstimation::GetUpperLimit() const {
   DataRate upper_limit = delay_based_limit_;
   if (!receiver_limit_caps_only_)
     upper_limit = std::min(upper_limit, receiver_limit_);
   upper_limit = std::min(upper_limit, max_bitrate_configured_);
-  if (loss_based_bandwidth_estimation_.Enabled() &&
-      loss_based_bandwidth_estimation_.GetEstimate() > DataRate::Zero()) {
-    upper_limit =
-        std::min(upper_limit, loss_based_bandwidth_estimation_.GetEstimate());
-  }
   return upper_limit;
 }
 
diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
index 3fa8c4b..b97b940 100644
--- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
+++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
@@ -131,8 +131,6 @@
   // min bitrate used during last kBweIncreaseIntervalMs.
   void UpdateMinHistory(Timestamp at_time);
 
-  DataRate MaybeRampupOrBackoff(DataRate new_bitrate, Timestamp at_time);
-
   // Gets the upper limit for the target bitrate. This is the minimum of the
   // delay based limit, the receiver limit and the loss based controller limit.
   DataRate GetUpperLimit() const;