Remove VideoSendStream::StartPerRtpStream
Instead, always use VideoSendStream::Start.
VideoSendStream::StartPerRtpStream was used to control whether the
individual RTP stream for an RtpEncodingParameters should be able to
send RTP packets. It was not used to control the actual encoder layers.
With this change, RtpEncodingParameters.active still controls the
actual encoder layers, but it no longer controls whether RTP packets
can be sent.
The cleanup is done to simplify the code and, in the future, allow
sending probe packets on an RtpTransceiver that allows sending,
regardless of the RtpEncodingParameters.active flag.
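
For illustration, a minimal sketch of the application-side pattern this
affects, using the standard RtpSenderInterface API (the helper name is
illustrative, not part of this change). Deactivating an encoding still
stops that encoder layer, but no longer stops the VideoSendStream:

  #include "api/rtp_parameters.h"
  #include "api/rtp_sender_interface.h"
  #include "api/rtp_transceiver_interface.h"
  #include "api/scoped_refptr.h"

  // Illustrative helper: deactivate the last (highest) encoding. The
  // encoder stops producing that layer; after this change the underlying
  // VideoSendStream stays started as long as the transceiver may send.
  void DeactivateTopEncoding(
      rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
    rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
        transceiver->sender();
    webrtc::RtpParameters parameters = sender->GetParameters();
    if (!parameters.encodings.empty()) {
      parameters.encodings.back().active = false;
      sender->SetParameters(parameters);
    }
  }
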
Bug: webrtc:14928
Change-Id: I896c055ed4de76db58d76f452147c29783f77ae1
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/335042
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Per Kjellander <perkj@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#41619}
diff --git a/call/video_send_stream.h b/call/video_send_stream.h
index c305d60..2c3fe55 100644
--- a/call/video_send_stream.h
+++ b/call/video_send_stream.h
@@ -212,20 +212,8 @@
Config(const Config&);
};
- // Updates the sending state for all simulcast layers that the video send
- // stream owns. This can mean updating the activity one or for multiple
- // layers. The ordering of active layers is the order in which the
- // rtp modules are stored in the VideoSendStream.
- // Note: This starts stream activity if it is inactive and one of the layers
- // is active. This stops stream activity if it is active and all layers are
- // inactive.
- // `active_layers` should have the same size as the number of configured
- // simulcast layers or one if only one rtp stream is used.
- virtual void StartPerRtpStream(std::vector<bool> active_layers) = 0;
-
// Starts stream activity.
// When a stream is active, it can receive, process and deliver packets.
- // Prefer to use StartPerRtpStream.
virtual void Start() = 0;
// Stops stream activity.
@@ -234,11 +222,8 @@
// Accessor for determining if the stream is active. This is an inexpensive
// call that must be made on the same thread as `Start()` and `Stop()` methods
- // are called on and will return `true` iff activity has been started either
- // via `Start()` or `StartPerRtpStream()`. If activity is either
- // stopped or is in the process of being stopped as a result of a call to
- // either `Stop()` or `StartPerRtpStream()` where all layers were
- // deactivated, the return value will be `false`.
+ // are called on and will return `true` iff activity has been started
+ // via `Start()`.
virtual bool started() = 0;
// If the resource is overusing, the VideoSendStream will try to reduce
diff --git a/media/engine/fake_webrtc_call.cc b/media/engine/fake_webrtc_call.cc
index 16e7169..2536c9d 100644
--- a/media/engine/fake_webrtc_call.cc
+++ b/media/engine/fake_webrtc_call.cc
@@ -339,17 +339,6 @@
webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK());
}
-void FakeVideoSendStream::StartPerRtpStream(
- const std::vector<bool> active_layers) {
- sending_ = false;
- for (const bool active_layer : active_layers) {
- if (active_layer) {
- sending_ = true;
- break;
- }
- }
-}
-
void FakeVideoSendStream::Start() {
sending_ = true;
}
diff --git a/media/engine/fake_webrtc_call.h b/media/engine/fake_webrtc_call.h
index 3dd6bdf..d67a7ee 100644
--- a/media/engine/fake_webrtc_call.h
+++ b/media/engine/fake_webrtc_call.h
@@ -199,7 +199,6 @@
void OnFrame(const webrtc::VideoFrame& frame) override;
// webrtc::VideoSendStream implementation.
- void StartPerRtpStream(std::vector<bool> active_layers) override;
void Start() override;
void Stop() override;
bool started() override { return IsSending(); }
diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc
index a5b46d3..902b536 100644
--- a/media/engine/webrtc_video_engine.cc
+++ b/media/engine/webrtc_video_engine.cc
@@ -2013,6 +2013,7 @@
new_send_state = true;
}
}
+
rtp_parameters_ = new_parameters;
// Codecs are currently handled at the WebRtcVideoSendChannel level.
rtp_parameters_.codecs.clear();
@@ -2021,9 +2022,6 @@
ReconfigureEncoder(std::move(callback));
callback = nullptr;
}
- if (new_send_state) {
- UpdateSendState();
- }
if (new_degradation_preference) {
if (source_ && stream_) {
stream_->SetSource(source_, GetDegradationPreference());
@@ -2082,27 +2080,9 @@
RTC_DCHECK_RUN_ON(&thread_checker_);
if (sending_) {
RTC_DCHECK(stream_ != nullptr);
- size_t num_layers = rtp_parameters_.encodings.size();
- if (parameters_.encoder_config.number_of_streams == 1) {
- // SVC is used. Only one simulcast layer is present.
- num_layers = 1;
- }
- std::vector<bool> active_layers(num_layers);
- for (size_t i = 0; i < num_layers; ++i) {
- active_layers[i] = IsLayerActive(rtp_parameters_.encodings[i]);
- }
- if (parameters_.encoder_config.number_of_streams == 1 &&
- rtp_parameters_.encodings.size() > 1) {
- // SVC is used.
- // The only present simulcast layer should be active if any of the
- // configured SVC layers is active.
- active_layers[0] =
- absl::c_any_of(rtp_parameters_.encodings,
- [](const auto& encoding) { return encoding.active; });
- }
- // This updates what simulcast layers are sending, and possibly starts
- // or stops the VideoSendStream.
- stream_->StartPerRtpStream(active_layers);
+ // This allows the stream to be used, i.e. DTLS is connected and the
+ // RtpTransceiver direction allows sending.
+ stream_->Start();
} else {
if (stream_ != nullptr) {
stream_->Stop();
diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h
index e4b1b27..57de0b0 100644
--- a/media/engine/webrtc_video_engine.h
+++ b/media/engine/webrtc_video_engine.h
@@ -402,8 +402,7 @@
const VideoCodec& codec) const;
void ReconfigureEncoder(webrtc::SetParametersCallback callback);
- // Calls Start or Stop according to whether or not `sending_` is true,
- // and whether or not the encoding in `rtp_parameters_` is active.
+ // Calls Start or Stop according to whether or not `sending_` is true.
void UpdateSendState();
webrtc::DegradationPreference GetDegradationPreference() const
diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc
index 148223f..4493c9c 100644
--- a/media/engine/webrtc_video_engine_unittest.cc
+++ b/media/engine/webrtc_video_engine_unittest.cc
@@ -9078,105 +9078,6 @@
stream->GetVideoStreams()[0].target_bitrate_bps);
}
-// Test that a stream will not be sending if its encoding is made inactive
-// through SetRtpSendParameters.
-TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersOneEncodingActive) {
- FakeVideoSendStream* stream = AddSendStream();
- EXPECT_TRUE(send_channel_->SetSend(true));
- EXPECT_TRUE(stream->IsSending());
-
- // Get current parameters and change "active" to false.
- webrtc::RtpParameters parameters =
- send_channel_->GetRtpSendParameters(last_ssrc_);
- ASSERT_EQ(1u, parameters.encodings.size());
- ASSERT_TRUE(parameters.encodings[0].active);
- parameters.encodings[0].active = false;
- EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
- EXPECT_FALSE(stream->IsSending());
-
- // Now change it back to active and verify we resume sending.
- parameters.encodings[0].active = true;
- EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
- EXPECT_TRUE(stream->IsSending());
-}
-
-// Tests that when active is updated for any simulcast layer then the send
-// stream's sending state will be updated and it will be reconfigured with the
-// new appropriate active simulcast streams.
-TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) {
- // Create the stream params with multiple ssrcs for simulcast.
- const size_t kNumSimulcastStreams = 3;
- std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
- StreamParams stream_params = CreateSimStreamParams("cname", ssrcs);
- FakeVideoSendStream* fake_video_send_stream = AddSendStream(stream_params);
- uint32_t primary_ssrc = stream_params.first_ssrc();
-
- // Using the FrameForwarder, we manually send a full size
- // frame. This allows us to test that ReconfigureEncoder is called
- // appropriately.
- webrtc::test::FrameForwarder frame_forwarder;
- VideoOptions options;
- EXPECT_TRUE(
- send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder));
- send_channel_->SetSend(true);
- frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame(
- 1920, 1080, webrtc::VideoRotation::kVideoRotation_0,
- rtc::kNumMicrosecsPerSec / 30));
-
- // Check that all encodings are initially active.
- webrtc::RtpParameters parameters =
- send_channel_->GetRtpSendParameters(primary_ssrc);
- EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
- EXPECT_TRUE(parameters.encodings[0].active);
- EXPECT_TRUE(parameters.encodings[1].active);
- EXPECT_TRUE(parameters.encodings[2].active);
- EXPECT_TRUE(fake_video_send_stream->IsSending());
-
- // Only turn on only the middle stream.
- parameters.encodings[0].active = false;
- parameters.encodings[1].active = true;
- parameters.encodings[2].active = false;
- EXPECT_TRUE(
- send_channel_->SetRtpSendParameters(primary_ssrc, parameters).ok());
- // Verify that the active fields are set on the VideoChannel.
- parameters = send_channel_->GetRtpSendParameters(primary_ssrc);
- EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
- EXPECT_FALSE(parameters.encodings[0].active);
- EXPECT_TRUE(parameters.encodings[1].active);
- EXPECT_FALSE(parameters.encodings[2].active);
- // Check that the VideoSendStream is updated appropriately. This means its
- // send state was updated and it was reconfigured.
- EXPECT_TRUE(fake_video_send_stream->IsSending());
- std::vector<webrtc::VideoStream> simulcast_streams =
- fake_video_send_stream->GetVideoStreams();
- EXPECT_EQ(kNumSimulcastStreams, simulcast_streams.size());
- EXPECT_FALSE(simulcast_streams[0].active);
- EXPECT_TRUE(simulcast_streams[1].active);
- EXPECT_FALSE(simulcast_streams[2].active);
-
- // Turn off all streams.
- parameters.encodings[0].active = false;
- parameters.encodings[1].active = false;
- parameters.encodings[2].active = false;
- EXPECT_TRUE(
- send_channel_->SetRtpSendParameters(primary_ssrc, parameters).ok());
- // Verify that the active fields are set on the VideoChannel.
- parameters = send_channel_->GetRtpSendParameters(primary_ssrc);
- EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size());
- EXPECT_FALSE(parameters.encodings[0].active);
- EXPECT_FALSE(parameters.encodings[1].active);
- EXPECT_FALSE(parameters.encodings[2].active);
- // Check that the VideoSendStream is off.
- EXPECT_FALSE(fake_video_send_stream->IsSending());
- simulcast_streams = fake_video_send_stream->GetVideoStreams();
- EXPECT_EQ(kNumSimulcastStreams, simulcast_streams.size());
- EXPECT_FALSE(simulcast_streams[0].active);
- EXPECT_FALSE(simulcast_streams[1].active);
- EXPECT_FALSE(simulcast_streams[2].active);
-
- EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr));
-}
-
// Tests that when some streams are deactivated then the lowest
// stream min_bitrate would be reused for the first active stream.
TEST_F(WebRtcVideoChannelTest,
@@ -9232,47 +9133,6 @@
EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr));
}
-// Test that if a stream is reconfigured (due to a codec change or other
-// change) while its encoding is still inactive, it doesn't start sending.
-TEST_F(WebRtcVideoChannelTest,
- InactiveStreamDoesntStartSendingWhenReconfigured) {
- // Set an initial codec list, which will be modified later.
- cricket::VideoSenderParameters parameters1;
- parameters1.codecs.push_back(GetEngineCodec("VP8"));
- parameters1.codecs.push_back(GetEngineCodec("VP9"));
- EXPECT_TRUE(send_channel_->SetSenderParameters(parameters1));
-
- FakeVideoSendStream* stream = AddSendStream();
- EXPECT_TRUE(send_channel_->SetSend(true));
- EXPECT_TRUE(stream->IsSending());
-
- // Get current parameters and change "active" to false.
- webrtc::RtpParameters parameters =
- send_channel_->GetRtpSendParameters(last_ssrc_);
- ASSERT_EQ(1u, parameters.encodings.size());
- ASSERT_TRUE(parameters.encodings[0].active);
- parameters.encodings[0].active = false;
- EXPECT_EQ(1u, GetFakeSendStreams().size());
- EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
- EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
- EXPECT_FALSE(stream->IsSending());
-
- // Reorder the codec list, causing the stream to be reconfigured.
- cricket::VideoSenderParameters parameters2;
- parameters2.codecs.push_back(GetEngineCodec("VP9"));
- parameters2.codecs.push_back(GetEngineCodec("VP8"));
- EXPECT_TRUE(send_channel_->SetSenderParameters(parameters2));
- auto new_streams = GetFakeSendStreams();
- // Assert that a new underlying stream was created due to the codec change.
- // Otherwise, this test isn't testing what it set out to test.
- EXPECT_EQ(1u, GetFakeSendStreams().size());
- EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams());
-
- // Verify that we still are not sending anything, due to the inactive
- // encoding.
- EXPECT_FALSE(new_streams[0]->IsSending());
-}
-
// Test that GetRtpSendParameters returns the currently configured codecs.
TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) {
AddSendStream();
diff --git a/pc/peer_connection_encodings_integrationtest.cc b/pc/peer_connection_encodings_integrationtest.cc
index 4a93e91..06299d7 100644
--- a/pc/peer_connection_encodings_integrationtest.cc
+++ b/pc/peer_connection_encodings_integrationtest.cc
@@ -9,6 +9,7 @@
*/
#include <string>
+#include <string_view>
#include <vector>
#include "absl/strings/match.h"
@@ -41,6 +42,7 @@
#include "pc/test/simulcast_layer_util.h"
#include "rtc_base/gunit.h"
#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
#include "test/gmock.h"
#include "test/gtest.h"
@@ -269,6 +271,34 @@
return num_sending_layers == num_active_layers;
}
+ int EncodedFrames(rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ std::string_view rid) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ for (const auto* outbound_rtp : outbound_rtps) {
+ if (outbound_rtp->rid.value_or("") == rid) {
+ return outbound_rtp->frames_encoded.value_or(0);
+ }
+ }
+ return 0;
+ }
+
+ bool EncodingIsActive(
+ rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
+ std::string_view rid) {
+ rtc::scoped_refptr<const RTCStatsReport> report = GetStats(pc_wrapper);
+ std::vector<const RTCOutboundRtpStreamStats*> outbound_rtps =
+ report->GetStatsOfType<RTCOutboundRtpStreamStats>();
+ for (const auto* outbound_rtp : outbound_rtps) {
+ if (outbound_rtp->rid.value_or("") == rid) {
+ return *outbound_rtp->active;
+ }
+ }
+ RTC_CHECK(false) << "Rid not found: " << rid;
+ return false;
+ }
+
bool HasOutboundRtpWithRidAndScalabilityMode(
rtc::scoped_refptr<PeerConnectionTestWrapper> pc_wrapper,
absl::string_view rid,
@@ -1964,6 +1994,65 @@
EXPECT_THAT(*outbound_rtps[2]->scalability_mode, StrEq("L1T3"));
}
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest,
+ SimulcastEncodingStopWhenRtpEncodingChangeToInactive) {
+ rtc::scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+ if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+ return;
+ }
+ rtc::scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+ ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+ std::vector<cricket::SimulcastLayer> layers =
+ CreateLayers({"q", "h", "f"}, /*active=*/true);
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver =
+ AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+ layers);
+ std::vector<RtpCodecCapability> codecs =
+ GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_);
+ transceiver->SetCodecPreferences(codecs);
+
+ rtc::scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+ RtpParameters parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ ASSERT_EQ(parameters.encodings[0].rid, "q");
+ parameters.encodings[0].scalability_mode = "L1T3";
+ parameters.encodings[0].scale_resolution_down_by = 4;
+ ASSERT_EQ(parameters.encodings[1].rid, "h");
+ parameters.encodings[1].scalability_mode = "L1T3";
+ parameters.encodings[1].scale_resolution_down_by = 2;
+ ASSERT_EQ(parameters.encodings[2].rid, "f");
+ parameters.encodings[2].scalability_mode = "L1T3";
+ parameters.encodings[2].scale_resolution_down_by = 1;
+ sender->SetParameters(parameters);
+
+ NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+ local_pc_wrapper->WaitForConnection();
+ remote_pc_wrapper->WaitForConnection();
+
+ ASSERT_TRUE_WAIT(EncodedFrames(local_pc_wrapper, "f") > 1,
+ kLongTimeoutForRampingUp.ms());
+
+ // Switch the highest layer to inactive.
+ parameters = sender->GetParameters();
+ ASSERT_THAT(parameters.encodings, SizeIs(3));
+ parameters.encodings[2].active = false;
+ sender->SetParameters(parameters);
+ ASSERT_TRUE_WAIT(!EncodingIsActive(local_pc_wrapper, "f"),
+ kDefaultTimeout.ms());
+
+ int encoded_frames_f = EncodedFrames(local_pc_wrapper, "f");
+ int encoded_frames_h = EncodedFrames(local_pc_wrapper, "h");
+ int encoded_frames_q = EncodedFrames(local_pc_wrapper, "q");
+
+ // Wait until the encoder has encoded another 10 frames on lower layers.
+ ASSERT_TRUE_WAIT(EncodedFrames(local_pc_wrapper, "q") > encoded_frames_q + 10,
+ kDefaultTimeout.ms());
+ ASSERT_TRUE_WAIT(EncodedFrames(local_pc_wrapper, "h") > encoded_frames_h + 10,
+ kDefaultTimeout.ms());
+ EXPECT_LE(EncodedFrames(local_pc_wrapper, "f") - encoded_frames_f, 2);
+}
+
INSTANTIATE_TEST_SUITE_P(StandardPath,
PeerConnectionEncodingsIntegrationParameterizedTest,
::testing::Values("VP8",
diff --git a/test/call_test.cc b/test/call_test.cc
index 6cdd8da..f26a44a 100644
--- a/test/call_test.cc
+++ b/test/call_test.cc
@@ -657,9 +657,7 @@
void CallTest::StartVideoStreams() {
StartVideoSources();
for (size_t i = 0; i < video_send_streams_.size(); ++i) {
- std::vector<bool> active_rtp_streams(
- video_send_configs_[i].rtp.ssrcs.size(), true);
- video_send_streams_[i]->StartPerRtpStream(active_rtp_streams);
+ video_send_streams_[i]->Start();
}
for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_)
video_recv_stream->Start();
diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc
index 654aed7..c3f0da7 100644
--- a/test/scenario/video_stream.cc
+++ b/test/scenario/video_stream.cc
@@ -491,10 +491,6 @@
void SendVideoStream::UpdateActiveLayers(std::vector<bool> active_layers) {
sender_->task_queue_.PostTask([=] {
MutexLock lock(&mutex_);
- if (config_.encoder.codec ==
- VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) {
- send_stream_->StartPerRtpStream(active_layers);
- }
VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_);
RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size());
for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i)
diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc
index 23dbb71..e5ff2cd 100644
--- a/video/video_send_stream_impl.cc
+++ b/video/video_send_stream_impl.cc
@@ -362,6 +362,15 @@
encoder_selector);
}
+bool HasActiveEncodings(const VideoEncoderConfig& config) {
+ for (const VideoStream& stream : config.simulcast_layers) {
+ if (stream.active) {
+ return true;
+ }
+ }
+ return false;
+}
+
} // namespace
PacingConfig::PacingConfig(const FieldTrialsView& field_trials)
@@ -438,6 +447,7 @@
timed_out_(false),
bitrate_allocator_(bitrate_allocator),
+ has_active_encodings_(HasActiveEncodings(encoder_config)),
disable_padding_(true),
max_padding_bitrate_(0),
encoder_min_bitrate_bps_(0),
@@ -545,6 +555,13 @@
RTC_DCHECK_EQ(content_type_, config.content_type);
RTC_LOG(LS_VERBOSE) << "Encoder config: " << config.ToString()
<< " VideoSendStream config: " << config_.ToString();
+
+ has_active_encodings_ = HasActiveEncodings(config);
+ if (has_active_encodings_ && rtp_video_sender_->IsActive() && !IsRunning()) {
+ StartupVideoSendStream();
+ } else if (!has_active_encodings_ && IsRunning()) {
+ StopVideoSendStream();
+ }
video_stream_encoder_->ConfigureEncoder(
std::move(config),
config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp),
@@ -609,40 +626,25 @@
}
void VideoSendStreamImpl::Start() {
- const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true);
- StartPerRtpStream(active_layers);
-}
-
-void VideoSendStreamImpl::StartPerRtpStream(
- const std::vector<bool> active_layers) {
RTC_DCHECK_RUN_ON(&thread_checker_);
-
- rtc::StringBuilder active_layers_string;
- active_layers_string << "{";
- for (size_t i = 0; i < active_layers.size(); ++i) {
- if (active_layers[i]) {
- active_layers_string << "1";
- } else {
- active_layers_string << "0";
- }
- if (i < active_layers.size() - 1) {
- active_layers_string << ", ";
- }
- }
- active_layers_string << "}";
- RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str();
-
- bool previously_active = rtp_video_sender_->IsActive();
+ const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true);
+ // This sender is allowed to send RTP packets. Start monitoring and allocating
+ // a rate if there are also active encodings (has_active_encodings_).
rtp_video_sender_->SetActiveModules(active_layers);
- if (!rtp_video_sender_->IsActive() && previously_active) {
- StopVideoSendStream();
- } else if (rtp_video_sender_->IsActive() && !previously_active) {
+ if (!IsRunning() && has_active_encodings_) {
StartupVideoSendStream();
}
}
+bool VideoSendStreamImpl::IsRunning() const {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ return check_encoder_activity_task_.Running();
+}
+
void VideoSendStreamImpl::StartupVideoSendStream() {
RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_DCHECK(rtp_video_sender_->IsActive());
+ RTC_DCHECK(has_active_encodings_);
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
// Start monitoring encoder activity.
@@ -680,7 +682,9 @@
TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop");
rtp_video_sender_->Stop();
- StopVideoSendStream();
+ if (IsRunning()) {
+ StopVideoSendStream();
+ }
}
void VideoSendStreamImpl::StopVideoSendStream() {
@@ -805,7 +809,9 @@
double stream_bitrate_priority_sum = 0;
for (const auto& stream : streams) {
// We don't want to allocate more bitrate than needed to inactive streams.
- encoder_max_bitrate_bps_ += stream.active ? stream.max_bitrate_bps : 0;
+ if (stream.active) {
+ encoder_max_bitrate_bps_ += stream.max_bitrate_bps;
+ }
if (stream.bitrate_priority) {
RTC_DCHECK_GT(*stream.bitrate_priority, 0);
stream_bitrate_priority_sum += *stream.bitrate_priority;
@@ -833,7 +839,7 @@
rtp_video_sender_->SetEncodingData(streams[0].width, streams[0].height,
num_temporal_layers);
- if (rtp_video_sender_->IsActive()) {
+ if (IsRunning()) {
// The send stream is started already. Update the allocator with new
// bitrate limits.
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h
index 758e12c..8cff4b8 100644
--- a/video/video_send_stream_impl.h
+++ b/video/video_send_stream_impl.h
@@ -96,7 +96,6 @@
// webrtc::VideoSendStream implementation.
void Start() override;
- void StartPerRtpStream(std::vector<bool> active_layers) override;
void Stop() override;
bool started() override;
@@ -182,6 +181,9 @@
void ConfigureSsrcs();
void SignalEncoderTimedOut();
void SignalEncoderActive();
+ // A video send stream is running if VideoSendStream::Start has been invoked
+ // and there is an active encoding.
+ bool IsRunning() const;
MediaStreamAllocationConfig GetAllocationConfig() const
RTC_RUN_ON(thread_checker_);
@@ -212,6 +214,7 @@
BitrateAllocatorInterface* const bitrate_allocator_;
+ bool has_active_encodings_ RTC_GUARDED_BY(thread_checker_);
bool disable_padding_ RTC_GUARDED_BY(thread_checker_);
int max_padding_bitrate_ RTC_GUARDED_BY(thread_checker_);
int encoder_min_bitrate_bps_ RTC_GUARDED_BY(thread_checker_);
diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc
index ba492ae..e3a1618 100644
--- a/video/video_send_stream_impl_unittest.cc
+++ b/video/video_send_stream_impl_unittest.cc
@@ -54,6 +54,7 @@
#include "video/send_delay_stats.h"
#include "video/send_statistics_proxy.h"
#include "video/test/mock_video_stream_encoder.h"
+#include "video/video_stream_encoder.h"
#include "video/video_stream_encoder_interface.h"
namespace webrtc {
@@ -71,6 +72,7 @@
using ::testing::AllOf;
using ::testing::Field;
using ::testing::Invoke;
+using ::testing::Mock;
using ::testing::NiceMock;
using ::testing::Return;
@@ -172,21 +174,23 @@
}
~VideoSendStreamImplTest() {}
- std::unique_ptr<VideoSendStreamImpl> CreateVideoSendStreamImpl(
- int initial_encoder_max_bitrate,
- double initial_encoder_bitrate_priority,
- VideoEncoderConfig::ContentType content_type,
- absl::optional<std::string> codec = std::nullopt) {
- EXPECT_CALL(bitrate_allocator_, GetStartBitrate(_))
- .WillOnce(Return(123000));
-
+ VideoEncoderConfig TestVideoEncoderConfig(
+ VideoEncoderConfig::ContentType content_type =
+ VideoEncoderConfig::ContentType::kRealtimeVideo,
+ int initial_encoder_max_bitrate = kDefaultInitialBitrateBps,
+ double initial_encoder_bitrate_priority = kDefaultBitratePriority) {
VideoEncoderConfig encoder_config;
encoder_config.max_bitrate_bps = initial_encoder_max_bitrate;
encoder_config.bitrate_priority = initial_encoder_bitrate_priority;
encoder_config.content_type = content_type;
- if (codec) {
- config_.rtp.payload_name = *codec;
- }
+ encoder_config.simulcast_layers.push_back(VideoStream());
+ encoder_config.simulcast_layers.back().active = true;
+ return encoder_config;
+ }
+
+ std::unique_ptr<VideoSendStreamImpl> CreateVideoSendStreamImpl(
+ VideoEncoderConfig encoder_config) {
+ EXPECT_CALL(bitrate_allocator_, GetStartBitrate).WillOnce(Return(123000));
std::map<uint32_t, RtpState> suspended_ssrcs;
std::map<uint32_t, RtpPayloadState> suspended_payload_states;
@@ -200,7 +204,7 @@
/*num_cpu_cores=*/1, time_controller_.GetTaskQueueFactory(),
/*call_stats=*/nullptr, &transport_controller_,
/*metronome=*/nullptr, &bitrate_allocator_, &send_delay_stats_,
- /*event_log=*/nullptr, config_.Copy(), encoder_config.Copy(),
+ /*event_log=*/nullptr, config_.Copy(), std::move(encoder_config),
suspended_ssrcs, suspended_payload_states,
/*fec_controller=*/nullptr, field_trials_,
std::move(video_stream_encoder));
@@ -231,26 +235,50 @@
PacketRouter packet_router_;
};
-TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
- const bool kSuspend = false;
- config_.suspend_below_min_bitrate = kSuspend;
- EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
- .WillOnce(Invoke(
- [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) {
- EXPECT_EQ(config.min_bitrate_bps, 0u);
- EXPECT_EQ(config.max_bitrate_bps, kDefaultInitialBitrateBps);
- EXPECT_EQ(config.pad_up_bitrate_bps, 0u);
- EXPECT_EQ(config.enforce_min_bitrate, !kSuspend);
- EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority);
- }));
- vss_impl->StartPerRtpStream({true});
+TEST_F(VideoSendStreamImplTest,
+ NotRegistersAsBitrateObserverOnStartIfNoActiveEncodings) {
+ VideoEncoderConfig encoder_config = TestVideoEncoderConfig();
+ encoder_config.simulcast_layers[0].active = false;
+ auto vss_impl = CreateVideoSendStreamImpl(std::move(encoder_config));
+ EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)).Times(0);
+ EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0);
+
+ vss_impl->Start();
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+ vss_impl->Stop();
+}
+
+TEST_F(VideoSendStreamImplTest,
+ RegistersAsBitrateObserverOnStartIfHasActiveEncodings) {
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
+
+ EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _));
+ vss_impl->Start();
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1);
vss_impl->Stop();
}
+TEST_F(VideoSendStreamImplTest,
+ DeRegistersAsBitrateObserverIfNoActiveEncodings) {
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
+ EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _));
+ vss_impl->Start();
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+
+ EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1);
+ VideoEncoderConfig no_active_encodings = TestVideoEncoderConfig();
+ no_active_encodings.simulcast_layers[0].active = false;
+
+ vss_impl->ReconfigureVideoEncoder(std::move(no_active_encodings));
+
+ time_controller_.AdvanceTime(TimeDelta::Zero());
+ ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
+
+ vss_impl->Stop();
+}
+
TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) {
const bool kSuspend = false;
config_.suspend_below_min_bitrate = kSuspend;
@@ -259,11 +287,9 @@
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// QVGA + VGA configuration matching defaults in
// media/engine/simulcast.cc.
@@ -326,9 +352,8 @@
config_.rtp.ssrcs.emplace_back(2);
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->StartPerRtpStream({true});
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
+ vss_impl->Start();
// Simulcast screenshare.
VideoStream low_stream;
@@ -388,11 +413,9 @@
config_.rtp.ssrcs.emplace_back(1);
config_.rtp.ssrcs.emplace_back(2);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// 2-layer video simulcast.
VideoStream low_stream;
low_stream.width = 320;
@@ -451,31 +474,28 @@
SetPacingFactor(kAlrProbingExperimentPaceMultiplier))
.Times(1);
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->StartPerRtpStream({true});
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
+ vss_impl->Start();
vss_impl->Stop();
}
TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) {
test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString());
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0);
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
vss_impl->Stop();
}
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) {
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0);
VideoStreamEncoderInterface::EncoderSink* const sink =
static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get());
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// Populate a test instance of video bitrate allocation.
VideoBitrateAllocation alloc;
alloc.SetBitrate(0, 0, 10000);
@@ -515,9 +535,8 @@
TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) {
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->StartPerRtpStream({true});
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
+ vss_impl->Start();
// Unpause encoder, to allows allocations to be passed through.
const uint32_t kBitrateBps = 100000;
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
@@ -574,10 +593,9 @@
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) {
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// Unpause encoder, to allows allocations to be passed through.
const uint32_t kBitrateBps = 100000;
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
@@ -616,9 +634,8 @@
TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) {
auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kScreen);
- vss_impl->StartPerRtpStream({true});
+ TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen));
+ vss_impl->Start();
const uint32_t kBitrateBps = 100000;
// Unpause encoder, to allows allocations to be passed through.
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
@@ -720,15 +737,13 @@
}
TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigInactiveByDefault) {
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
EXPECT_CALL(
bitrate_allocator_,
AddObserver(
vss_impl.get(),
Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 0)));
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1);
vss_impl->Stop();
}
@@ -736,15 +751,14 @@
TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigAffectsAV1) {
test::ScopedFieldTrials override_priority_bitrate(
"WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/");
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo, "AV1");
+ config_.rtp.payload_name = "AV1";
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
EXPECT_CALL(
bitrate_allocator_,
AddObserver(
vss_impl.get(),
Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000)));
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1);
vss_impl->Stop();
}
@@ -765,16 +779,15 @@
test::ScopedFieldTrials override_priority_bitrate(
"WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/");
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo, "AV1");
+ config_.rtp.payload_name = "AV1";
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
EXPECT_CALL(
bitrate_allocator_,
AddObserver(
vss_impl.get(),
Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000)))
.Times(2);
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
encoder_queue_->PostTask([&] {
static_cast<VideoStreamEncoderInterface::EncoderSink*>(vss_impl.get())
@@ -794,10 +807,9 @@
config_.suspend_below_min_bitrate = kSuspend;
config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri,
1);
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
- vss_impl->StartPerRtpStream({true});
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
+
+ vss_impl->Start();
VideoStream qvga_stream;
qvga_stream.width = 320;
qvga_stream.height = 180;
@@ -893,9 +905,7 @@
TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) {
int padding_bitrate = 0;
- std::unique_ptr<VideoSendStreamImpl> vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
// Capture padding bitrate for testing.
EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _))
@@ -928,7 +938,7 @@
int min_transmit_bitrate_bps = 30000;
config_.rtp.ssrcs.emplace_back(1);
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// Starts without padding.
EXPECT_EQ(0, padding_bitrate);
encoder_queue_->PostTask([&] {
@@ -972,11 +982,9 @@
}
TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) {
- std::unique_ptr<VideoSendStreamImpl> vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
- VideoEncoderConfig::ContentType::kRealtimeVideo);
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig());
EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0);
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
const uint32_t kBitrateBps = 100000;
EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps())
.Times(1)
@@ -1015,13 +1023,12 @@
config_.rtp.extensions.emplace_back(
RtpExtension::kTransportSequenceNumberUri, 1);
config_.periodic_alr_bandwidth_probing = test_config.alr;
- auto vss_impl = CreateVideoSendStreamImpl(
- kDefaultInitialBitrateBps, kDefaultBitratePriority,
+ auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig(
test_config.screenshare
? VideoEncoderConfig::ContentType::kScreen
- : VideoEncoderConfig::ContentType::kRealtimeVideo);
+ : VideoEncoderConfig::ContentType::kRealtimeVideo));
- vss_impl->StartPerRtpStream({true});
+ vss_impl->Start();
// Svc
VideoStream stream;
@@ -1100,7 +1107,7 @@
->OnEncodedImage(encoded_image, &codec_specific);
});
time_controller_.AdvanceTime(TimeDelta::Zero());
- ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_);
+ Mock::VerifyAndClearExpectations(&bitrate_allocator_);
vss_impl->Stop();
}