Configure and use max bitrate to limit the AIMD controller estimates.
Bug: webrtc:9275
Change-Id: I9625cd473e1cb198abe08020f5462f1bd64bf2a5
Reviewed-on: https://webrtc-review.googlesource.com/77081
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Commit-Queue: Björn Terelius <terelius@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23287}
diff --git a/modules/congestion_controller/delay_based_bwe.cc b/modules/congestion_controller/delay_based_bwe.cc
index b0024e7..95f235c 100644
--- a/modules/congestion_controller/delay_based_bwe.cc
+++ b/modules/congestion_controller/delay_based_bwe.cc
@@ -314,10 +314,11 @@
rate_control_.SetStartBitrate(start_bitrate_bps);
}
-void DelayBasedBwe::SetMinBitrate(int min_bitrate_bps) {
+void DelayBasedBwe::SetBitrateConstraints(int min_bitrate_bps,
+ int max_bitrate_bps) {
// Called from both the configuration thread and the network thread. Shouldn't
// be called from the network thread in the future.
- rate_control_.SetMinBitrate(min_bitrate_bps);
+ rate_control_.SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
}
int64_t DelayBasedBwe::GetExpectedBwePeriodMs() const {
diff --git a/modules/congestion_controller/delay_based_bwe.h b/modules/congestion_controller/delay_based_bwe.h
index dbe759e..caa4661 100644
--- a/modules/congestion_controller/delay_based_bwe.h
+++ b/modules/congestion_controller/delay_based_bwe.h
@@ -52,7 +52,7 @@
bool LatestEstimate(std::vector<uint32_t>* ssrcs,
uint32_t* bitrate_bps) const;
void SetStartBitrate(int start_bitrate_bps);
- void SetMinBitrate(int min_bitrate_bps);
+ void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
int64_t GetExpectedBwePeriodMs() const;
private:
diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/modules/congestion_controller/goog_cc/delay_based_bwe.cc
index fea5a66..f96b843 100644
--- a/modules/congestion_controller/goog_cc/delay_based_bwe.cc
+++ b/modules/congestion_controller/goog_cc/delay_based_bwe.cc
@@ -318,10 +318,11 @@
rate_control_.SetStartBitrate(start_bitrate_bps);
}
-void DelayBasedBwe::SetMinBitrate(int min_bitrate_bps) {
+void DelayBasedBwe::SetBitrateConstraints(int min_bitrate_bps,
+ int max_bitrate_bps) {
// Called from both the configuration thread and the network thread. Shouldn't
// be called from the network thread in the future.
- rate_control_.SetMinBitrate(min_bitrate_bps);
+ rate_control_.SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
}
int64_t DelayBasedBwe::GetExpectedBwePeriodMs() const {
diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.h b/modules/congestion_controller/goog_cc/delay_based_bwe.h
index 485f687..91f233e 100644
--- a/modules/congestion_controller/goog_cc/delay_based_bwe.h
+++ b/modules/congestion_controller/goog_cc/delay_based_bwe.h
@@ -53,8 +53,9 @@
void OnRttUpdate(int64_t avg_rtt_ms);
bool LatestEstimate(std::vector<uint32_t>* ssrcs,
uint32_t* bitrate_bps) const;
+
void SetStartBitrate(int start_bitrate_bps);
- void SetMinBitrate(int min_bitrate_bps);
+ void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
int64_t GetExpectedBwePeriodMs() const;
private:
diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
index b0bf6f3..5f5760e 100644
--- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
+++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
@@ -129,7 +129,9 @@
max_total_allocated_bitrate_(DataRate::Zero()),
in_cwnd_experiment_(CwndExperimentEnabled()),
accepted_queue_ms_(kDefaultAcceptedQueueMs) {
- delay_based_bwe_->SetMinBitrate(congestion_controller::GetMinBitrateBps());
+ constexpr int kMaxBitrateUnchanged = -1;
+ delay_based_bwe_->SetBitrateConstraints(
+ congestion_controller::GetMinBitrateBps(), kMaxBitrateUnchanged);
UpdateBitrateConstraints(config.constraints, config.starting_bandwidth);
OnStreamsConfig(config.stream_based_config);
if (in_cwnd_experiment_ &&
@@ -163,7 +165,7 @@
delay_based_bwe_.reset(new DelayBasedBwe(event_log_));
acknowledged_bitrate_estimator_.reset(new AcknowledgedBitrateEstimator());
delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
- delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
+ delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
probe_controller_->Reset(msg.at_time.ms());
probe_controller_->SetBitrates(min_bitrate_bps, start_bitrate_bps,
@@ -262,9 +264,10 @@
bandwidth_estimation_->SetBitrates(start_bitrate_bps, min_bitrate_bps,
max_bitrate_bps);
+
if (start_bitrate_bps > 0)
delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
- delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
+ delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
}
NetworkControlUpdate GoogCcNetworkController::OnTransportLossReport(
diff --git a/modules/congestion_controller/send_side_congestion_controller.cc b/modules/congestion_controller/send_side_congestion_controller.cc
index 4a17933..a6fd091 100644
--- a/modules/congestion_controller/send_side_congestion_controller.cc
+++ b/modules/congestion_controller/send_side_congestion_controller.cc
@@ -137,7 +137,9 @@
webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
transport_overhead_bytes_per_packet_(0),
pacer_pushback_experiment_(IsPacerPushbackExperimentEnabled()) {
- delay_based_bwe_->SetMinBitrate(min_bitrate_bps_);
+ constexpr int kMaxBitrateUnchanged = -1;
+ delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps_,
+ kMaxBitrateUnchanged);
if (in_cwnd_experiment_ &&
!ReadCwndExperimentParameter(&accepted_queue_ms_)) {
RTC_LOG(LS_WARNING) << "Failed to parse parameters for CwndExperiment "
@@ -186,7 +188,7 @@
if (start_bitrate_bps > 0)
delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
- delay_based_bwe_->SetMinBitrate(min_bitrate_bps_);
+ delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
}
MaybeTriggerOnNetworkChanged();
}
@@ -220,8 +222,8 @@
min_bitrate_bps_ = min_bitrate_bps;
delay_based_bwe_.reset(new DelayBasedBwe(event_log_, clock_));
acknowledged_bitrate_estimator_.reset(new AcknowledgedBitrateEstimator());
+ delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
delay_based_bwe_->SetStartBitrate(bitrate_bps);
- delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
}
probe_controller_->Reset();
diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.cc b/modules/remote_bitrate_estimator/aimd_rate_control.cc
index c494f50..0b8a2ec 100644
--- a/modules/remote_bitrate_estimator/aimd_rate_control.cc
+++ b/modules/remote_bitrate_estimator/aimd_rate_control.cc
@@ -28,9 +28,14 @@
namespace webrtc {
-static const int64_t kDefaultRttMs = 200;
-static const int64_t kMaxFeedbackIntervalMs = 1000;
-static const float kDefaultBackoffFactor = 0.85f;
+namespace {
+constexpr int64_t kDefaultRttMs = 200;
+constexpr int64_t kMinFeedbackIntervalMs = 200;
+constexpr int64_t kMaxFeedbackIntervalMs = 1000;
+constexpr float kDefaultBackoffFactor = 0.85f;
+constexpr int kDefaultMaxBandwidthBps = 30000000;
+constexpr int kDefaultStartBandwidthBps = 300000;
+} // namespace
const char kBweBackOffFactorExperiment[] = "WebRTC-BweBackOffFactor";
@@ -56,8 +61,8 @@
AimdRateControl::AimdRateControl()
: min_configured_bitrate_bps_(congestion_controller::GetMinBitrateBps()),
- max_configured_bitrate_bps_(30000000),
- current_bitrate_bps_(max_configured_bitrate_bps_),
+ max_configured_bitrate_bps_(kDefaultMaxBandwidthBps),
+ current_bitrate_bps_(kDefaultStartBandwidthBps),
avg_max_bitrate_kbps_(-1.0f),
var_max_bitrate_kbps_(0.4f),
rate_control_state_(kRcHold),
@@ -78,13 +83,30 @@
AimdRateControl::~AimdRateControl() {}
void AimdRateControl::SetStartBitrate(int start_bitrate_bps) {
- current_bitrate_bps_ = start_bitrate_bps;
- bitrate_is_initialized_ = true;
+ // TODO(terelius): It would make sense to not change the estimate if we have
+ // already initialized a bitrate. However, some code recreates and resets
+ // parameters multiple times and those tests break when changing this.
+ if (start_bitrate_bps > 0) {
+ // TODO(terelius): I think this should clamp the bitrate to the configured
+    // max and min, but that would be a breaking change because SetStartBitrate
+ // and SetBitrateConstraints are separate calls that can occur in either
+ // order. Consider merging the calls in the future.
+ current_bitrate_bps_ = start_bitrate_bps;
+ bitrate_is_initialized_ = true;
+ }
}
-void AimdRateControl::SetMinBitrate(int min_bitrate_bps) {
- min_configured_bitrate_bps_ = min_bitrate_bps;
- current_bitrate_bps_ = std::max<int>(min_bitrate_bps, current_bitrate_bps_);
+void AimdRateControl::SetBitrateConstraints(int min_bitrate_bps,
+ int max_bitrate_bps) {
+ if (min_bitrate_bps > 0)
+ min_configured_bitrate_bps_ = min_bitrate_bps;
+ if (max_bitrate_bps > 0) {
+ max_configured_bitrate_bps_ =
+ rtc::SafeMax(min_configured_bitrate_bps_, max_bitrate_bps);
+ }
+ current_bitrate_bps_ = rtc::SafeClamp<uint32_t>(current_bitrate_bps_,
+ min_configured_bitrate_bps_,
+ max_configured_bitrate_bps_);
}
bool AimdRateControl::ValidEstimate() const {
@@ -97,7 +119,6 @@
static const int kRtcpSize = 80;
const int64_t interval = static_cast<int64_t>(
kRtcpSize * 8.0 * 1000.0 / (0.05 * current_bitrate_bps_) + 0.5);
- const int64_t kMinFeedbackIntervalMs = 200;
return rtc::SafeClamp(interval, kMinFeedbackIntervalMs,
kMaxFeedbackIntervalMs);
}
@@ -177,10 +198,9 @@
if (!last_decrease_)
return smoothing_experiment_ ? kMinPeriodMs : kDefaultPeriodMs;
- return std::min(kMaxPeriodMs,
- std::max<int>(1000 * static_cast<int64_t>(*last_decrease_) /
- increase_rate,
- kMinPeriodMs));
+ return rtc::SafeClamp<int>(
+ 1000 * static_cast<int64_t>(*last_decrease_) / increase_rate,
+ kMinPeriodMs, kMaxPeriodMs);
}
uint32_t AimdRateControl::ChangeBitrate(uint32_t new_bitrate_bps,
@@ -280,7 +300,8 @@
// We allow a bit more lag at very low rates to not too easily get stuck if
// the encoder produces uneven outputs.
const uint32_t max_bitrate_bps =
- static_cast<uint32_t>(1.5f * incoming_bitrate_bps) + 10000;
+ rtc::SafeMin(static_cast<uint32_t>(1.5f * incoming_bitrate_bps) + 10000,
+ max_configured_bitrate_bps_);
if (new_bitrate_bps > current_bitrate_bps_ &&
new_bitrate_bps > max_bitrate_bps) {
new_bitrate_bps = std::max(current_bitrate_bps_, max_bitrate_bps);
diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.h b/modules/remote_bitrate_estimator/aimd_rate_control.h
index a9ecb67..19fdbe4 100644
--- a/modules/remote_bitrate_estimator/aimd_rate_control.h
+++ b/modules/remote_bitrate_estimator/aimd_rate_control.h
@@ -30,7 +30,7 @@
// otherwise.
bool ValidEstimate() const;
void SetStartBitrate(int start_bitrate_bps);
- void SetMinBitrate(int min_bitrate_bps);
+ void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
int64_t GetFeedbackInterval() const;
// Returns true if the bitrate estimate hasn't been changed for more than
// an RTT, or if the incoming_bitrate is less than half of the current
diff --git a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
index f6844b0..c56ed48 100644
--- a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
+++ b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
@@ -105,6 +105,50 @@
states.aimd_rate_control->GetExpectedBandwidthPeriodMs());
}
+TEST(AimdRateControlTest, BweLimitedByConfiguredMin) {
+ auto states = CreateAimdRateControlStates();
+ constexpr int kMinBitrate = 50000;
+ constexpr int kMaxBitrate = 200000;
+ constexpr int kStartBitrate = kMinBitrate + 1;
+ states.aimd_rate_control->SetBitrateConstraints(kMinBitrate, kMaxBitrate);
+ states.aimd_rate_control->SetStartBitrate(kStartBitrate);
+ states.aimd_rate_control->SetEstimate(
+ kStartBitrate, states.simulated_clock->TimeInMilliseconds());
+ while (states.simulated_clock->TimeInMilliseconds() - kClockInitialTime <
+ 20000) {
+ // Don't decrease below minimum even if the actual throughput is lower.
+ constexpr int kAckedBitrate = kMinBitrate - 1000;
+ UpdateRateControl(states, BandwidthUsage::kBwOverusing, kAckedBitrate,
+ states.simulated_clock->TimeInMilliseconds());
+ states.simulated_clock->AdvanceTimeMilliseconds(100);
+ }
+ ASSERT_TRUE(states.aimd_rate_control->ValidEstimate());
+ EXPECT_EQ(static_cast<uint32_t>(kMinBitrate),
+ states.aimd_rate_control->LatestEstimate());
+}
+
+TEST(AimdRateControlTest, BweLimitedByConfiguredMax) {
+ auto states = CreateAimdRateControlStates();
+ constexpr int kMinBitrate = 50000;
+ constexpr int kMaxBitrate = 200000;
+ constexpr int kStartBitrate = kMaxBitrate - 1;
+ states.aimd_rate_control->SetBitrateConstraints(kMinBitrate, kMaxBitrate);
+ states.aimd_rate_control->SetStartBitrate(kStartBitrate);
+ states.aimd_rate_control->SetEstimate(
+ kStartBitrate, states.simulated_clock->TimeInMilliseconds());
+ while (states.simulated_clock->TimeInMilliseconds() - kClockInitialTime <
+ 20000) {
+ // Don't increase above maximum even if the actual throughput is higher.
+ constexpr int kAckedBitrate = kMaxBitrate + 10000;
+ UpdateRateControl(states, BandwidthUsage::kBwNormal, kAckedBitrate,
+ states.simulated_clock->TimeInMilliseconds());
+ states.simulated_clock->AdvanceTimeMilliseconds(100);
+ }
+ ASSERT_TRUE(states.aimd_rate_control->ValidEstimate());
+ EXPECT_EQ(static_cast<uint32_t>(kMaxBitrate),
+ states.aimd_rate_control->LatestEstimate());
+}
+
TEST(AimdRateControlTest, BweLimitedByAckedBitrate) {
auto states = CreateAimdRateControlStates();
constexpr int kAckedBitrate = 10000;
@@ -222,6 +266,7 @@
test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial);
auto states = CreateAimdRateControlStates();
constexpr int kInitialBitrate = 50000000;
+ states.aimd_rate_control->SetBitrateConstraints(10000, 60000000);
states.aimd_rate_control->SetEstimate(
kInitialBitrate, states.simulated_clock->TimeInMilliseconds());
states.simulated_clock->AdvanceTimeMilliseconds(100);
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index fdc1523..edcefe5 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -412,6 +412,7 @@
// Called from both the configuration thread and the network thread. Shouldn't
// be called from the network thread in the future.
rtc::CritScope lock(&crit_);
- remote_rate_.SetMinBitrate(min_bitrate_bps);
+ constexpr int kMaxBitrateUnchanged = -1;
+ remote_rate_.SetBitrateConstraints(min_bitrate_bps, kMaxBitrateUnchanged);
}
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index a914a84..f59494e 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -250,7 +250,8 @@
void RemoteBitrateEstimatorSingleStream::SetMinBitrate(int min_bitrate_bps) {
rtc::CritScope cs(&crit_sect_);
- remote_rate_->SetMinBitrate(min_bitrate_bps);
+ constexpr int kMaxBitrateUnchanged = -1;
+ remote_rate_->SetBitrateConstraints(min_bitrate_bps, kMaxBitrateUnchanged);
}
} // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/test/estimators/send_side.cc b/modules/remote_bitrate_estimator/test/estimators/send_side.cc
index 6e03eee..4bafb67 100644
--- a/modules/remote_bitrate_estimator/test/estimators/send_side.cc
+++ b/modules/remote_bitrate_estimator/test/estimators/send_side.cc
@@ -44,7 +44,7 @@
bitrate_controller_->SetStartBitrate(1000 * kbps);
bitrate_controller_->SetMinMaxBitrate(1000 * kMinBitrateKbps,
1000 * kMaxBitrateKbps);
- bwe_->SetMinBitrate(1000 * kMinBitrateKbps);
+ bwe_->SetBitrateConstraints(1000 * kMinBitrateKbps, 1000 * kMaxBitrateKbps);
}
SendSideBweSender::~SendSideBweSender() {}