Porting auto mute to the new ViE API
This CL also includes tests for the auto mute function, and fixes a few minor
lint warnings. Note that the auto mute function is still a work in progress.
The callback ViEEncoderObserver::VideoAutoMuted was not ported from the
old API. This is TBD; see issue 2457.
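For illustration, enabling the muter from the new API only requires setting the
two new config fields. This is a minimal sketch; the values are arbitrary, and
GetSendTestConfig is the test helper used by the new test in this CL:

  VideoSendStream::Config send_config = GetSendTestConfig(call.get());
  // Mute when the bandwidth estimate drops below 70 kbps ...
  send_config.auto_muter.threshold_bps = 70000;
  // ... and resume once the estimate exceeds 70 + 10 = 80 kbps.
  send_config.auto_muter.window_bps = 10000;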
BUG=2436
R=holmer@google.com, mflodman@webrtc.org, pbos@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/2340004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@5021 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/video_engine/internal/video_send_stream.cc b/webrtc/video_engine/internal/video_send_stream.cc
index d238b95..242f7fa 100644
--- a/webrtc/video_engine/internal/video_send_stream.cc
+++ b/webrtc/video_engine/internal/video_send_stream.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include <string>
#include <vector>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -195,6 +196,13 @@
image_process_ = ViEImageProcess::GetInterface(video_engine);
image_process_->RegisterPreEncodeCallback(channel_,
config_.pre_encode_callback);
+
+ if (config.auto_muter.threshold_bps > 0) {
+ assert(config.auto_muter.window_bps >= 0);
+ codec_->EnableAutoMuting(channel_,
+ config.auto_muter.threshold_bps,
+ config.auto_muter.window_bps);
+ }
}
VideoSendStream::~VideoSendStream() {
diff --git a/webrtc/video_engine/new_include/video_send_stream.h b/webrtc/video_engine/new_include/video_send_stream.h
index c85ed78..59da54b 100644
--- a/webrtc/video_engine/new_include/video_send_stream.h
+++ b/webrtc/video_engine/new_include/video_send_stream.h
@@ -146,6 +146,16 @@
// Set to resume a previously destroyed send stream.
SendStreamState* start_state;
+
+ // Parameters for the auto muter. If threshold_bps > 0, video will be muted
+ // when the bandwidth estimate drops below this limit, and enabled again
+ // when the estimate goes above threshold_bps + window_bps. Setting the
+ // threshold to zero disables the auto muter.
+ struct AutoMuter {
+ AutoMuter() : threshold_bps(0), window_bps(0) {}
+ int threshold_bps;
+ int window_bps;
+ } auto_muter;
};
// Gets interface used to insert captured frames. Valid as long as the
diff --git a/webrtc/video_engine/test/common/fake_encoder.cc b/webrtc/video_engine/test/common/fake_encoder.cc
index c5e58f5..1db0f37 100644
--- a/webrtc/video_engine/test/common/fake_encoder.cc
+++ b/webrtc/video_engine/test/common/fake_encoder.cc
@@ -100,9 +100,8 @@
int stream_bits = (bits_available > max_stream_bits) ? max_stream_bits :
bits_available;
int stream_bytes = (stream_bits + 7) / 8;
- EXPECT_LT(static_cast<size_t>(stream_bytes), sizeof(encoded_buffer_));
if (static_cast<size_t>(stream_bytes) > sizeof(encoded_buffer_))
- return -1;
+ stream_bytes = sizeof(encoded_buffer_);
EncodedImage encoded(
encoded_buffer_, stream_bytes, sizeof(encoded_buffer_));
diff --git a/webrtc/video_engine/test/send_stream_tests.cc b/webrtc/video_engine/test/send_stream_tests.cc
index 69923a7..68f9685 100644
--- a/webrtc/video_engine/test/send_stream_tests.cc
+++ b/webrtc/video_engine/test/send_stream_tests.cc
@@ -8,15 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/video_engine/internal/transport_adapter.h"
#include "webrtc/video_engine/new_include/call.h"
+#include "webrtc/video_engine/new_include/frame_callback.h"
#include "webrtc/video_engine/new_include/video_send_stream.h"
#include "webrtc/video_engine/test/common/direct_transport.h"
#include "webrtc/video_engine/test/common/fake_encoder.h"
@@ -178,7 +181,7 @@
static const uint8_t kTOffsetExtensionId = 13;
class DelayedEncoder : public test::FakeEncoder {
public:
- DelayedEncoder(Clock* clock) : test::FakeEncoder(clock) {}
+ explicit DelayedEncoder(Clock* clock) : test::FakeEncoder(clock) {}
virtual int32_t Encode(
const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
@@ -220,12 +223,12 @@
RunSendTest(call.get(), send_config, &observer);
}
-class LossyReceiveStatistics : public NullReceiveStatistics {
+class FakeReceiveStatistics : public NullReceiveStatistics {
public:
- LossyReceiveStatistics(uint32_t send_ssrc,
- uint32_t last_sequence_number,
- uint32_t cumulative_lost,
- uint8_t fraction_lost)
+ FakeReceiveStatistics(uint32_t send_ssrc,
+ uint32_t last_sequence_number,
+ uint32_t cumulative_lost,
+ uint8_t fraction_lost)
: lossy_stats_(new LossyStatistician(last_sequence_number,
cumulative_lost,
fraction_lost)) {
@@ -300,7 +303,7 @@
// Send lossy receive reports to trigger FEC enabling.
if (send_count_++ % 2 != 0) {
// Receive statistics reporting having lost 50% of the packets.
- LossyReceiveStatistics lossy_receive_stats(
+ FakeReceiveStatistics lossy_receive_stats(
kSendSsrc, header.sequenceNumber, send_count_ / 2, 127);
RTCPSender rtcp_sender(
0, false, Clock::GetRealTimeClock(), &lossy_receive_stats);
@@ -353,7 +356,7 @@
void VideoSendStreamTest::TestNackRetransmission(uint32_t retransmit_ssrc) {
class NackObserver : public SendTransportObserver {
public:
- NackObserver(uint32_t retransmit_ssrc)
+ explicit NackObserver(uint32_t retransmit_ssrc)
: SendTransportObserver(30 * 1000),
transport_adapter_(&transport_),
send_count_(0),
@@ -402,6 +405,7 @@
return true;
}
+
private:
internal::TransportAdapter transport_adapter_;
test::DirectTransport transport_;
@@ -477,4 +481,122 @@
RunSendTest(call.get(), send_config, &observer);
}
+// The test will go through a number of phases.
+// 1. Start sending packets.
+// 2. As soon as the RTP stream has been detected, signal a low REMB value to
+// activate the auto muter.
+// 3. Wait until |kMuteTimeFrames| have been captured without seeing any RTP
+// packets.
+// 4. Signal a high REMB and then wait for the RTP stream to start again.
+// When the stream is detected again, the test ends.
+TEST_F(VideoSendStreamTest, AutoMute) {
+ static const int kMuteTimeFrames = 60; // Mute for 2 seconds @ 30 fps.
+ static const int kMuteThresholdBps = 70000;
+ static const int kMuteWindowBps = 10000;
+ // Let the low REMB value be 10 kbps lower than the muter threshold, and the
+ // high REMB value be 5 kbps higher than the re-enabling threshold.
+ static const int kLowRembBps = kMuteThresholdBps - 10000;
+ static const int kHighRembBps = kMuteThresholdBps + kMuteWindowBps + 5000;
+
+ class RembObserver : public SendTransportObserver, public I420FrameCallback {
+ public:
+ RembObserver()
+ : SendTransportObserver(30 * 1000), // Timeout after 30 seconds.
+ transport_adapter_(&transport_),
+ clock_(Clock::GetRealTimeClock()),
+ test_state_(kBeforeMute),
+ rtp_count_(0),
+ last_sequence_number_(0),
+ mute_frame_count_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+ void SetReceiver(PacketReceiver* receiver) {
+ transport_.SetReceiver(receiver);
+ }
+
+ virtual bool SendRTCP(const uint8_t* packet, size_t length) OVERRIDE {
+ // Receive statistics reporting having lost 0% of the packets.
+ // This is needed for the send-side bitrate controller to work properly.
+ CriticalSectionScoped lock(crit_sect_.get());
+ SendRtcpFeedback(0); // REMB is only sent if value is > 0.
+ return true;
+ }
+
+ virtual bool SendRTP(const uint8_t* packet, size_t length) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ ++rtp_count_;
+ RTPHeader header;
+ EXPECT_TRUE(
+ rtp_header_parser_->Parse(packet, static_cast<int>(length), &header));
+ last_sequence_number_ = header.sequenceNumber;
+
+ if (test_state_ == kBeforeMute) {
+ // The stream has started. Try to mute it.
+ SendRtcpFeedback(kLowRembBps);
+ test_state_ = kDuringMute;
+ } else if (test_state_ == kDuringMute) {
+ mute_frame_count_ = 0;
+ } else if (test_state_ == kWaitingForPacket) {
+ send_test_complete_->Set();
+ }
+
+ return true;
+ }
+
+ // Implements I420FrameCallback.
+ void FrameCallback(I420VideoFrame* video_frame) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (test_state_ == kDuringMute && ++mute_frame_count_ > kMuteTimeFrames) {
+ SendRtcpFeedback(kHighRembBps);
+ test_state_ = kWaitingForPacket;
+ }
+ }
+
+ private:
+ enum TestState {
+ kBeforeMute,
+ kDuringMute,
+ kWaitingForPacket,
+ kAfterMute
+ };
+
+ virtual void SendRtcpFeedback(int remb_value) {
+ FakeReceiveStatistics receive_stats(
+ kSendSsrc, last_sequence_number_, rtp_count_, 0);
+ RTCPSender rtcp_sender(0, false, clock_, &receive_stats);
+ EXPECT_EQ(0, rtcp_sender.RegisterSendTransport(&transport_adapter_));
+
+ rtcp_sender.SetRTCPStatus(kRtcpNonCompound);
+ rtcp_sender.SetRemoteSSRC(kSendSsrc);
+ if (remb_value > 0) {
+ rtcp_sender.SetREMBStatus(true);
+ rtcp_sender.SetREMBData(remb_value, 0, NULL);
+ }
+ RTCPSender::FeedbackState feedback_state;
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpRr));
+ }
+
+ internal::TransportAdapter transport_adapter_;
+ test::DirectTransport transport_;
+ Clock* clock_;
+ TestState test_state_;
+ int rtp_count_;
+ int last_sequence_number_;
+ int mute_frame_count_;
+ scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ } observer;
+
+ Call::Config call_config(&observer);
+ scoped_ptr<Call> call(Call::Create(call_config));
+ observer.SetReceiver(call->Receiver());
+
+ VideoSendStream::Config send_config = GetSendTestConfig(call.get());
+ send_config.rtp.nack.rtp_history_ms = 1000;
+ send_config.auto_muter.threshold_bps = kMuteThresholdBps;
+ send_config.auto_muter.window_bps = kMuteWindowBps;
+ send_config.pre_encode_callback = &observer;
+
+ RunSendTest(call.get(), send_config, &observer);
+}
+
} // namespace webrtc