blob: f5736679be3311731ac0f1b5b85739450e1821fa [file] [log] [blame]
/*
* Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "media/engine/webrtc_video_engine.h"
#include <algorithm>
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "absl/strings/match.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/rtp_parameters.h"
#include "api/test/mock_encoder_selector.h"
#include "api/test/mock_video_bitrate_allocator.h"
#include "api/test/mock_video_bitrate_allocator_factory.h"
#include "api/test/mock_video_decoder_factory.h"
#include "api/test/mock_video_encoder_factory.h"
#include "api/test/video/function_video_decoder_factory.h"
#include "api/transport/field_trial_based_config.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video_codecs/h264_profile_level_id.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_decoder_factory_template.h"
#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_codecs/video_encoder_factory_template.h"
#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
#include "call/flexfec_receive_stream.h"
#include "media/base/fake_frame_source.h"
#include "media/base/fake_network_interface.h"
#include "media/base/fake_video_renderer.h"
#include "media/base/media_channel.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "media/base/test_utils.h"
#include "media/engine/fake_webrtc_call.h"
#include "media/engine/fake_webrtc_video_engine.h"
#include "media/engine/webrtc_voice_engine.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtp_packet.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/video_coding/svc/scalability_mode_util.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/event.h"
#include "rtc_base/experiments/min_video_bitrate_experiment.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/gunit.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/time_utils.h"
#include "test/fake_decoder.h"
#include "test/frame_forwarder.h"
#include "test/gmock.h"
#include "test/rtcp_packet_parser.h"
#include "test/scoped_key_value_config.h"
#include "test/time_controller/simulated_time_controller.h"
#include "video/config/simulcast.h"
using ::testing::_;
using ::testing::Contains;
using ::testing::Each;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::Eq;
using ::testing::Field;
using ::testing::Gt;
using ::testing::IsEmpty;
using ::testing::Lt;
using ::testing::Pair;
using ::testing::Return;
using ::testing::SizeIs;
using ::testing::StrNe;
using ::testing::Values;
using ::testing::WithArg;
using ::webrtc::BitrateConstraints;
using ::webrtc::Call;
using ::webrtc::CallConfig;
using ::webrtc::CreateEnvironment;
using ::webrtc::Environment;
using ::webrtc::kDefaultScalabilityModeStr;
using ::webrtc::RtpExtension;
using ::webrtc::RtpPacket;
using ::webrtc::RtpPacketReceived;
using ::webrtc::ScalabilityMode;
using ::webrtc::TimeDelta;
using ::webrtc::Timestamp;
using ::webrtc::test::RtcpPacketParser;
namespace {
static const uint8_t kRedRtxPayloadType = 125;
static const uint32_t kSsrc = 1234u;
static const uint32_t kSsrcs4[] = {1, 2, 3, 4};
static const int kVideoWidth = 640;
static const int kVideoHeight = 360;
static const int kFramerate = 30;
static constexpr TimeDelta kFrameDuration =
TimeDelta::Millis(1000 / kFramerate);
static const uint32_t kSsrcs1[] = {1};
static const uint32_t kSsrcs3[] = {1, 2, 3};
static const uint32_t kRtxSsrcs1[] = {4};
static const uint32_t kFlexfecSsrc = 5;
static const uint32_t kIncomingUnsignalledSsrc = 0xC0FFEE;
static const int64_t kUnsignalledReceiveStreamCooldownMs = 500;
constexpr uint32_t kRtpHeaderSize = 12;
constexpr size_t kNumSimulcastStreams = 3;
static const char kUnsupportedExtensionName[] =
"urn:ietf:params:rtp-hdrext:unsupported";
// Strip all RTCP feedback parameters from `codec` and return the codec.
cricket::VideoCodec RemoveFeedbackParams(cricket::VideoCodec&& codec) {
  cricket::FeedbackParams no_feedback;
  codec.feedback_params = no_feedback;
  return std::move(codec);
}
// Check that `codec` carries the default RTCP feedback parameters. The LNTF
// parameter must be present iff `lntf_expected`; the remaining defaults
// (NACK, NACK/PLI, REMB, transport-cc, CCM/FIR) must always be present.
void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec,
                                         bool lntf_expected) {
  EXPECT_EQ(lntf_expected,
            codec.HasFeedbackParam(cricket::FeedbackParam(
                cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty)));
  const std::pair<const char*, const char*> kAlwaysExpected[] = {
      {cricket::kRtcpFbParamNack, cricket::kParamValueEmpty},
      {cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli},
      {cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty},
      {cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty},
      {cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir},
  };
  for (const auto& expected : kAlwaysExpected) {
    EXPECT_TRUE(codec.HasFeedbackParam(
        cricket::FeedbackParam(expected.first, expected.second)));
  }
}
// Return true if any codec in `codecs` is an RTX codec whose associated
// payload type equals `payload_type`.
bool HasRtxCodec(const std::vector<cricket::VideoCodec>& codecs,
                 int payload_type) {
  return std::any_of(
      codecs.begin(), codecs.end(), [&](const cricket::VideoCodec& codec) {
        int associated_payload_type;
        return absl::EqualsIgnoreCase(codec.name.c_str(), "rtx") &&
               codec.GetParam(cricket::kCodecParamAssociatedPayloadType,
                              &associated_payload_type) &&
               associated_payload_type == payload_type;
      });
}
// Return true if any codec in `codecs` is an RTX codec, regardless of its
// associated payload type.
bool HasAnyRtxCodec(const std::vector<cricket::VideoCodec>& codecs) {
  return std::any_of(codecs.begin(), codecs.end(),
                     [](const cricket::VideoCodec& codec) {
                       return absl::EqualsIgnoreCase(codec.name.c_str(), "rtx");
                     });
}
// Return a pointer to the first key in `m` whose mapped value equals `v`,
// or nullptr when no entry maps to `v`. Iteration order (and thus "first")
// is the map's ascending key order.
const int* FindKeyByValue(const std::map<int, int>& m, int v) {
  auto match = std::find_if(
      m.begin(), m.end(),
      [v](const std::pair<const int, int>& kv) { return kv.second == v; });
  return match == m.end() ? nullptr : &match->first;
}
// True when `config` associates some RTX payload type with `payload_type`.
bool HasRtxReceiveAssociation(
    const webrtc::VideoReceiveStreamInterface::Config& config,
    int payload_type) {
  const int* rtx_type =
      FindKeyByValue(config.rtp.rtx_associated_payload_types, payload_type);
  return rtx_type != nullptr;
}
// Check that there's an Rtx payload type for each decoder in `config`.
bool VerifyRtxReceiveAssociations(
    const webrtc::VideoReceiveStreamInterface::Config& config) {
  return std::all_of(config.decoders.begin(), config.decoders.end(),
                     [&config](const auto& decoder) {
                       return HasRtxReceiveAssociation(config,
                                                       decoder.payload_type);
                     });
}
// Allocate a `width` x `height` I420 buffer and fill it with black pixels.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
    int width,
    int height) {
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(width, height);
  webrtc::I420Buffer::SetBlack(buffer.get());
  return buffer;
}
// Verify that `rtx_types` maps `config`'s primary payload type to the
// configured RTX payload type and, when ULPFEC RED RTX is configured
// (red_rtx_payload_type != -1), maps the RED payload type likewise.
void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,
                                 const std::map<int, int>& rtx_types) {
  auto primary = rtx_types.find(config.rtp.payload_type);
  EXPECT_TRUE(primary != rtx_types.end() &&
              primary->second == config.rtp.rtx.payload_type);
  if (config.rtp.ulpfec.red_rtx_payload_type != -1) {
    auto red = rtx_types.find(config.rtp.ulpfec.red_payload_type);
    EXPECT_TRUE(red != rtx_types.end() &&
                red->second == config.rtp.ulpfec.red_rtx_payload_type);
  }
}
// Build the MediaConfig used throughout these tests, with video CPU
// adaptation turned off.
cricket::MediaConfig GetMediaConfig() {
  cricket::MediaConfig config;
  config.video.enable_cpu_adaptation = false;
  return config;
}
// Values from GetMaxDefaultVideoBitrateKbps in webrtcvideoengine.cc, keyed
// by total pixel count.
int GetMaxDefaultBitrateBps(size_t width, size_t height) {
  const size_t pixels = width * height;
  if (pixels <= 320 * 240)
    return 600000;
  if (pixels <= 640 * 480)
    return 1700000;
  if (pixels <= 960 * 540)
    return 2000000;
  return 2500000;
}
// GMock video source. Lets tests set expectations on the VideoSinkWants the
// channel passes to AddOrUpdateSink (e.g. `rotation_applied`).
class MockVideoSource : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
 public:
  MOCK_METHOD(void,
              AddOrUpdateSink,
              (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink,
               const rtc::VideoSinkWants& wants),
              (override));
  MOCK_METHOD(void,
              RemoveSink,
              (rtc::VideoSinkInterface<webrtc::VideoFrame> * sink),
              (override));
};
// GMock network interface. Lets tests intercept outgoing RTP (SendPacket)
// and RTCP (SendRtcp) traffic and socket option changes from the channels.
class MockNetworkInterface : public cricket::MediaChannelNetworkInterface {
 public:
  MOCK_METHOD(bool,
              SendPacket,
              (rtc::CopyOnWriteBuffer * packet,
               const rtc::PacketOptions& options),
              (override));
  MOCK_METHOD(bool,
              SendRtcp,
              (rtc::CopyOnWriteBuffer * packet,
               const rtc::PacketOptions& options),
              (override));
  MOCK_METHOD(int,
              SetOption,
              (SocketType type, rtc::Socket::Option opt, int option),
              (override));
};
// Collect the resolutions of every active stream in `streams`, preserving
// their order.
std::vector<webrtc::Resolution> GetStreamResolutions(
    const std::vector<webrtc::VideoStream>& streams) {
  std::vector<webrtc::Resolution> resolutions;
  resolutions.reserve(streams.size());
  for (const webrtc::VideoStream& stream : streams) {
    if (!stream.active)
      continue;
    resolutions.push_back({rtc::checked_cast<int>(stream.width),
                           rtc::checked_cast<int>(stream.height)});
  }
  return resolutions;
}
// Build a single RTP packet carrying a minimal VP8 key frame with the given
// `ssrc` and `payload_type`. The payload header advertises a 1080x720
// resolution (bytes 6-9, little-endian width then height).
RtpPacketReceived BuildVp8KeyFrame(uint32_t ssrc, uint8_t payload_type) {
  RtpPacketReceived packet;
  packet.SetMarker(true);
  packet.SetPayloadType(payload_type);
  packet.SetSsrc(ssrc);
  // VP8 Keyframe + 1 byte payload
  uint8_t* buf_ptr = packet.AllocatePayload(11);
  memset(buf_ptr, 0, 11);  // Pass MSAN (don't care about bytes 1-9)
  buf_ptr[0] = 0x10;  // Partition ID 0 + beginning of partition.
  constexpr unsigned width = 1080;
  constexpr unsigned height = 720;
  buf_ptr[6] = width & 255;
  buf_ptr[7] = width >> 8;
  buf_ptr[8] = height & 255;
  buf_ptr[9] = height >> 8;
  return packet;
}
// Wrap `original_packet` in an RTX retransmission packet with `rtx_ssrc` and
// `rtx_payload_type`. The RTX payload is the two-byte original sequence
// number (OSN) followed by the original payload bytes.
RtpPacketReceived BuildRtxPacket(uint32_t rtx_ssrc,
                                 uint8_t rtx_payload_type,
                                 const RtpPacketReceived& original_packet) {
  constexpr size_t kRtxHeaderSize = 2;
  // Start from a copy so the RTP header fields (sequence number, timestamp,
  // extensions) match the original packet.
  RtpPacketReceived packet(original_packet);
  packet.SetPayloadType(rtx_payload_type);
  packet.SetSsrc(rtx_ssrc);
  uint8_t* rtx_payload =
      packet.AllocatePayload(original_packet.payload_size() + kRtxHeaderSize);
  // Add OSN (original sequence number), big-endian.
  rtx_payload[0] = packet.SequenceNumber() >> 8;
  rtx_payload[1] = packet.SequenceNumber();
  // Add original payload data.
  if (!original_packet.payload().empty()) {
    memcpy(rtx_payload + kRtxHeaderSize, original_packet.payload().data(),
           original_packet.payload().size());
  }
  return packet;
}
} // namespace
// TODO(tommi): Consider replacing these macros with custom matchers.
#define EXPECT_FRAME(c, w, h) \
EXPECT_EQ((c), renderer_.num_rendered_frames()); \
EXPECT_EQ((w), renderer_.width()); \
EXPECT_EQ((h), renderer_.height());
#define EXPECT_FRAME_ON_RENDERER(r, c, w, h) \
EXPECT_EQ((c), (r).num_rendered_frames()); \
EXPECT_EQ((w), (r).width()); \
EXPECT_EQ((h), (r).height());
namespace cricket {
// Test fixture for WebRtcVideoEngine. Wires the engine to fake encoder and
// decoder factories and drives time with a simulated time controller so the
// tests run deterministically without real codecs or wall-clock time.
class WebRtcVideoEngineTest : public ::testing::Test {
 public:
  WebRtcVideoEngineTest() : WebRtcVideoEngineTest("") {}
  // `field_trials` is a WebRTC field-trial string (e.g.
  // "WebRTC-Foo/Enabled/") applied to both the Call and the engine.
  explicit WebRtcVideoEngineTest(const std::string& field_trials)
      : field_trials_(field_trials),
        time_controller_(webrtc::Timestamp::Millis(4711)),
        env_(CreateEnvironment(&field_trials_,
                               time_controller_.CreateTaskQueueFactory(),
                               time_controller_.GetClock())),
        call_(Call::Create(CallConfig(env_))),
        // Raw pointers are kept below; ownership passes to `engine_`.
        encoder_factory_(new cricket::FakeWebRtcVideoEncoderFactory),
        decoder_factory_(new cricket::FakeWebRtcVideoDecoderFactory),
        video_bitrate_allocator_factory_(
            webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
        engine_(std::unique_ptr<cricket::FakeWebRtcVideoEncoderFactory>(
                    encoder_factory_),
                std::unique_ptr<cricket::FakeWebRtcVideoDecoderFactory>(
                    decoder_factory_),
                field_trials_) {}

 protected:
  void AssignDefaultAptRtxTypes();
  void AssignDefaultCodec();
  // Find the index of the codec in the engine with the given name. The codec
  // must be present.
  size_t GetEngineCodecIndex(const std::string& name) const;
  // Find the codec in the engine with the given name. The codec must be
  // present.
  cricket::VideoCodec GetEngineCodec(const std::string& name) const;
  // Register `name` as supported by both the fake encoder and decoder
  // factories.
  void AddSupportedVideoCodecType(
      const std::string& name,
      const std::vector<webrtc::ScalabilityMode>& scalability_modes = {});
  // Create a send/receive channel configured with every codec the fake
  // factories currently support.
  std::unique_ptr<VideoMediaSendChannelInterface>
  SetSendParamsWithAllSupportedCodecs();
  std::unique_ptr<VideoMediaReceiveChannelInterface>
  SetRecvParamsWithAllSupportedCodecs();
  std::unique_ptr<VideoMediaReceiveChannelInterface>
  SetRecvParamsWithSupportedCodecs(const std::vector<VideoCodec>& codecs);
  // Expect that header extension `uri` is (not) among the engine's default
  // enabled RTP header extensions.
  void ExpectRtpCapabilitySupport(const char* uri, bool supported) const;

  webrtc::test::ScopedKeyValueConfig field_trials_;
  webrtc::GlobalSimulatedTimeController time_controller_;
  Environment env_;
  // Used in WebRtcVideoEngineVoiceTest, but defined here so it's properly
  // initialized when the constructor is called.
  std::unique_ptr<Call> call_;
  // Non-owning; the factories are owned by `engine_` (see constructor).
  cricket::FakeWebRtcVideoEncoderFactory* encoder_factory_;
  cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_;
  std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
      video_bitrate_allocator_factory_;
  WebRtcVideoEngine engine_;
  // Set by AssignDefaultCodec()/AssignDefaultAptRtxTypes().
  absl::optional<VideoCodec> default_codec_;
  std::map<int, int> default_apt_rtx_types_;
};
// The engine's default RTX codec must carry an associated payload type that
// points at the default video codec.
TEST_F(WebRtcVideoEngineTest, DefaultRtxCodecHasAssociatedPayloadTypeSet) {
  encoder_factory_->AddSupportedVideoCodecType("VP8");
  AssignDefaultCodec();
  for (const VideoCodec& codec : engine_.send_codecs()) {
    if (codec.name != kRtxCodecName)
      continue;
    int associated_payload_type;
    EXPECT_TRUE(codec.GetParam(kCodecParamAssociatedPayloadType,
                               &associated_payload_type));
    EXPECT_EQ(default_codec_->id, associated_payload_type);
    return;
  }
  FAIL() << "No RTX codec found among default codecs.";
}
// The following tests verify which RTP header extensions the engine
// advertises by default.
TEST_F(WebRtcVideoEngineTest, SupportsTimestampOffsetHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kTimestampOffsetUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsAbsoluteSenderTimeHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kAbsSendTimeUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsTransportSequenceNumberHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kTransportSequenceNumberUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsVideoRotationHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoRotationUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsPlayoutDelayHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kPlayoutDelayUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsVideoContentTypeHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoContentTypeUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsVideoTimingHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoTimingUri, true);
}

TEST_F(WebRtcVideoEngineTest, SupportsColorSpaceHeaderExtension) {
  ExpectRtpCapabilitySupport(RtpExtension::kColorSpaceUri, true);
}

// The generic frame descriptor extension is gated behind a field trial and
// must not be advertised by default.
TEST_F(WebRtcVideoEngineTest, AdvertiseGenericDescriptor00) {
  ExpectRtpCapabilitySupport(RtpExtension::kGenericFrameDescriptorUri00, false);
}
// Fixture enabling the generic-frame-descriptor advertisement field trial.
class WebRtcVideoEngineTestWithGenericDescriptor
    : public WebRtcVideoEngineTest {
 public:
  WebRtcVideoEngineTestWithGenericDescriptor()
      : WebRtcVideoEngineTest("WebRTC-GenericDescriptorAdvertised/Enabled/") {}
};

// With the field trial enabled, the extension is advertised.
TEST_F(WebRtcVideoEngineTestWithGenericDescriptor,
       AdvertiseGenericDescriptor00) {
  ExpectRtpCapabilitySupport(RtpExtension::kGenericFrameDescriptorUri00, true);
}

// Fixture enabling the dependency-descriptor advertisement field trial.
class WebRtcVideoEngineTestWithDependencyDescriptor
    : public WebRtcVideoEngineTest {
 public:
  WebRtcVideoEngineTestWithDependencyDescriptor()
      : WebRtcVideoEngineTest(
            "WebRTC-DependencyDescriptorAdvertised/Enabled/") {}
};

TEST_F(WebRtcVideoEngineTestWithDependencyDescriptor,
       AdvertiseDependencyDescriptor) {
  ExpectRtpCapabilitySupport(RtpExtension::kDependencyDescriptorUri, true);
}

// Video-layers-allocation is not advertised without its field trial.
TEST_F(WebRtcVideoEngineTest, AdvertiseVideoLayersAllocation) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoLayersAllocationUri, false);
}

// Fixture enabling the video-layers-allocation advertisement field trial.
class WebRtcVideoEngineTestWithVideoLayersAllocation
    : public WebRtcVideoEngineTest {
 public:
  WebRtcVideoEngineTestWithVideoLayersAllocation()
      : WebRtcVideoEngineTest(
            "WebRTC-VideoLayersAllocationAdvertised/Enabled/") {}
};

TEST_F(WebRtcVideoEngineTestWithVideoLayersAllocation,
       AdvertiseVideoLayersAllocation) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoLayersAllocationUri, true);
}

// Fixture enabling the video-frame-tracking-id advertisement field trial.
class WebRtcVideoFrameTrackingId : public WebRtcVideoEngineTest {
 public:
  WebRtcVideoFrameTrackingId()
      : WebRtcVideoEngineTest(
            "WebRTC-VideoFrameTrackingIdAdvertised/Enabled/") {}
};

TEST_F(WebRtcVideoFrameTrackingId, AdvertiseVideoFrameTrackingId) {
  ExpectRtpCapabilitySupport(RtpExtension::kVideoFrameTrackingIdUri, true);
}
// When the CVO (video rotation) header extension is negotiated before the
// source is attached, the source must be told not to apply rotation itself;
// removing the extension must re-enable source-side rotation.
TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) {
  // Allocate the source first to prevent early destruction before channel's
  // dtor is called.
  ::testing::NiceMock<MockVideoSource> video_source;
  AddSupportedVideoCodecType("VP8");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
  // Add CVO extension.
  const int id = 1;
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.extensions.push_back(
      RtpExtension(RtpExtension::kVideoRotationUri, id));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false)));
  // Set capturer.
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source));
  // Verify capturer has turned off applying rotation.
  ::testing::Mock::VerifyAndClear(&video_source);
  // Verify removing header extension turns on applying rotation.
  parameters.extensions.clear();
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true)));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
}

// Same as above, but the extension is negotiated before the send stream is
// added.
TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) {
  // Allocate the source first to prevent early destruction before channel's
  // dtor is called.
  ::testing::NiceMock<MockVideoSource> video_source;
  AddSupportedVideoCodecType("VP8");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  // Add CVO extension.
  const int id = 1;
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.extensions.push_back(
      RtpExtension(RtpExtension::kVideoRotationUri, id));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
  // Set source.
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false)));
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source));
}

// The rotation hint must be updated when the CVO extension is added or
// removed after the source is already attached.
TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) {
  ::testing::NiceMock<MockVideoSource> video_source;
  AddSupportedVideoCodecType("VP8");
  AddSupportedVideoCodecType("VP9");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
  // Set capturer.
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true)));
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source));
  // Verify capturer has turned on applying rotation.
  ::testing::Mock::VerifyAndClear(&video_source);
  // Add CVO extension.
  const int id = 1;
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  parameters.extensions.push_back(
      RtpExtension(RtpExtension::kVideoRotationUri, id));
  // Also remove the first codec to trigger a codec change as well.
  parameters.codecs.erase(parameters.codecs.begin());
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false)));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  // Verify capturer has turned off applying rotation.
  ::testing::Mock::VerifyAndClear(&video_source);
  // Verify removing header extension turns on applying rotation.
  parameters.extensions.clear();
  EXPECT_CALL(
      video_source,
      AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true)));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
}
// SetSend(true) must fail until sender codecs have been configured, while
// SetSend(false) is always allowed.
TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) {
  AddSupportedVideoCodecType("VP8");
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123)));
  EXPECT_FALSE(send_channel->SetSend(true))
      << "Channel should not start without codecs.";
  EXPECT_TRUE(send_channel->SetSend(false))
      << "Channel should be stoppable even without set codecs.";
}

// GetStats on both send and receive channels must be safe to call before
// any codecs have been negotiated.
TEST_F(WebRtcVideoEngineTest, GetStatsWithoutCodecsSetDoesNotCrash) {
  AddSupportedVideoCodecType("VP8");
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123)));
  VideoMediaSendInfo send_info;
  send_channel->GetStats(&send_info);
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  EXPECT_TRUE(receive_channel->AddRecvStream(StreamParams::CreateLegacy(123)));
  VideoMediaReceiveInfo receive_info;
  receive_channel->GetStats(&receive_info);
}
// Encoders must be created through the injected encoder factory, only once
// the first frame arrives, not recreated on a no-op codec update, and
// released when the send stream is removed.
TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) {
  AddSupportedVideoCodecType("VP8");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  send_channel->OnReadyToSend(true);
  EXPECT_TRUE(
      send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_EQ(0, encoder_factory_->GetNumCreatedEncoders());
  EXPECT_TRUE(send_channel->SetSend(true));
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  // Sending one frame will allocate the encoder.
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
  EXPECT_GT(encoder_factory_->encoders()[0]->GetNumEncodedFrames(), 0);
  int num_created_encoders = encoder_factory_->GetNumCreatedEncoders();
  EXPECT_EQ(num_created_encoders, 1);
  // Setting codecs of the same type should not reallocate any encoders
  // (expecting a no-op).
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders());
  // Remove stream previously added to free the external encoder instance.
  EXPECT_TRUE(send_channel->RemoveSendStream(kSsrc));
  EXPECT_EQ(0u, encoder_factory_->encoders().size());
}
// Test that when an encoder factory supports H264, we add an RTX
// codec for it.
// TODO(deadbeef): This test should be updated if/when we start
// adding RTX codecs for unrecognized codec names.
TEST_F(WebRtcVideoEngineTest, RtxCodecAddedForH264Codec) {
  using webrtc::H264Level;
  using webrtc::H264Profile;
  using webrtc::H264ProfileLevelId;
  using webrtc::H264ProfileLevelIdToString;
  // Advertise three distinct H264 profiles from the encoder factory.
  webrtc::SdpVideoFormat h264_constrained_baseline("H264");
  h264_constrained_baseline.parameters[kH264FmtpProfileLevelId] =
      *H264ProfileLevelIdToString(H264ProfileLevelId(
          H264Profile::kProfileConstrainedBaseline, H264Level::kLevel1));
  webrtc::SdpVideoFormat h264_constrained_high("H264");
  h264_constrained_high.parameters[kH264FmtpProfileLevelId] =
      *H264ProfileLevelIdToString(H264ProfileLevelId(
          H264Profile::kProfileConstrainedHigh, H264Level::kLevel1));
  webrtc::SdpVideoFormat h264_high("H264");
  h264_high.parameters[kH264FmtpProfileLevelId] = *H264ProfileLevelIdToString(
      H264ProfileLevelId(H264Profile::kProfileHigh, H264Level::kLevel1));
  encoder_factory_->AddSupportedVideoCodec(h264_constrained_baseline);
  encoder_factory_->AddSupportedVideoCodec(h264_constrained_high);
  encoder_factory_->AddSupportedVideoCodec(h264_high);
  // First figure out what payload types the test codecs got assigned.
  const std::vector<cricket::VideoCodec> codecs = engine_.send_codecs();
  // Now search for RTX codecs for them. Expect that they all have associated
  // RTX codecs.
  EXPECT_TRUE(HasRtxCodec(
      codecs, FindMatchingVideoCodec(
                  codecs, cricket::CreateVideoCodec(h264_constrained_baseline))
                  ->id));
  EXPECT_TRUE(HasRtxCodec(
      codecs, FindMatchingVideoCodec(
                  codecs, cricket::CreateVideoCodec(h264_constrained_high))
                  ->id));
  EXPECT_TRUE(HasRtxCodec(
      codecs,
      FindMatchingVideoCodec(codecs, cricket::CreateVideoCodec(h264_high))
          ->id));
}

#if defined(RTC_ENABLE_VP9)
// A receive stream can be created when the decoder factory supports VP9.
TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) {
  AddSupportedVideoCodecType("VP9");
  auto receive_channel = SetRecvParamsWithAllSupportedCodecs();
  EXPECT_TRUE(receive_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
}
#endif  // defined(RTC_ENABLE_VP9)
// Captured frame timestamps must propagate to the send stream: with frames
// captured at 60 fps and then 30 fps, the timestamp interval observed on the
// fake send stream matches the capture interval.
TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) {
  AddSupportedVideoCodecType("VP8");
  // Replace the real call with a FakeCall so send-stream timestamps can be
  // inspected; `call_` takes ownership of `fake_call`.
  FakeCall* fake_call = new FakeCall();
  call_.reset(fake_call);
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  EXPECT_TRUE(
      send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 60);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder));
  send_channel->SetSend(true);
  FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0];
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  int64_t last_timestamp = stream->GetLastTimestamp();
  for (int i = 0; i < 10; i++) {
    frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
    int64_t timestamp = stream->GetLastTimestamp();
    int64_t interval = timestamp - last_timestamp;
    // Precision changes from nanosecond to millisecond.
    // Allow error to be no more than 1.
    EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(60) / 1E6, interval, 1);
    last_timestamp = timestamp;
  }
  // Switch to 30 fps capture and verify the interval follows.
  frame_forwarder.IncomingCapturedFrame(
      frame_source.GetFrame(1280, 720, webrtc::VideoRotation::kVideoRotation_0,
                            rtc::kNumMicrosecsPerSec / 30));
  last_timestamp = stream->GetLastTimestamp();
  for (int i = 0; i < 10; i++) {
    frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame(
        1280, 720, webrtc::VideoRotation::kVideoRotation_0,
        rtc::kNumMicrosecsPerSec / 30));
    int64_t timestamp = stream->GetLastTimestamp();
    int64_t interval = timestamp - last_timestamp;
    // Precision changes from nanosecond to millisecond.
    // Allow error to be no more than 1.
    EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(30) / 1E6, interval, 1);
    last_timestamp = timestamp;
  }
  // Remove stream previously added to free the external encoder instance.
  EXPECT_TRUE(send_channel->RemoveSendStream(kSsrc));
}
void WebRtcVideoEngineTest::AssignDefaultAptRtxTypes() {
std::vector<VideoCodec> engine_codecs = engine_.send_codecs();
RTC_DCHECK(!engine_codecs.empty());
for (const cricket::VideoCodec& codec : engine_codecs) {
if (codec.name == "rtx") {
int associated_payload_type;
if (codec.GetParam(kCodecParamAssociatedPayloadType,
&associated_payload_type)) {
default_apt_rtx_types_[associated_payload_type] = codec.id;
}
}
}
}
void WebRtcVideoEngineTest::AssignDefaultCodec() {
std::vector<VideoCodec> engine_codecs = engine_.send_codecs();
RTC_DCHECK(!engine_codecs.empty());
bool codec_set = false;
for (const cricket::VideoCodec& codec : engine_codecs) {
if (!codec_set && codec.name != "rtx" && codec.name != "red" &&
codec.name != "ulpfec" && codec.name != "flexfec-03") {
default_codec_ = codec;
codec_set = true;
}
}
RTC_DCHECK(codec_set);
}
// Find the index of the codec in the engine with the given name. The codec
// must be present; if it isn't, a test failure is recorded and size_t(-1)
// is returned.
size_t WebRtcVideoEngineTest::GetEngineCodecIndex(
    const std::string& name) const {
  const std::vector<cricket::VideoCodec> codecs = engine_.send_codecs();
  for (size_t i = 0; i < codecs.size(); ++i) {
    // Reference instead of copy: we only read from the codec.
    const cricket::VideoCodec& engine_codec = codecs[i];
    if (!absl::EqualsIgnoreCase(name, engine_codec.name))
      continue;
    // The tests only use H264 Constrained Baseline. Make sure we don't return
    // an internal H264 codec from the engine with a different H264 profile.
    if (absl::EqualsIgnoreCase(name.c_str(), kH264CodecName)) {
      const absl::optional<webrtc::H264ProfileLevelId> profile_level_id =
          webrtc::ParseSdpForH264ProfileLevelId(engine_codec.params);
      // Guard the optional: skip codecs whose profile-level-id fails to
      // parse instead of dereferencing an empty optional (undefined
      // behavior in the previous version).
      if (!profile_level_id.has_value() ||
          profile_level_id->profile !=
              webrtc::H264Profile::kProfileConstrainedBaseline) {
        continue;
      }
    }
    return i;
  }
  // This point should never be reached.
  ADD_FAILURE() << "Unrecognized codec name: " << name;
  return -1;
}
// Find the codec in the engine with the given name. The codec must be
// present (see GetEngineCodecIndex).
cricket::VideoCodec WebRtcVideoEngineTest::GetEngineCodec(
    const std::string& name) const {
  const size_t index = GetEngineCodecIndex(name);
  return engine_.send_codecs()[index];
}
// Register codec `name` (with optional encoder scalability modes) as
// supported by both the fake encoder factory and the fake decoder factory.
void WebRtcVideoEngineTest::AddSupportedVideoCodecType(
    const std::string& name,
    const std::vector<webrtc::ScalabilityMode>& scalability_modes) {
  encoder_factory_->AddSupportedVideoCodecType(name, scalability_modes);
  decoder_factory_->AddSupportedVideoCodecType(name);
}
std::unique_ptr<VideoMediaSendChannelInterface>
WebRtcVideoEngineTest::SetSendParamsWithAllSupportedCodecs() {
std::unique_ptr<VideoMediaSendChannelInterface> channel =
engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
webrtc::CryptoOptions(),
video_bitrate_allocator_factory_.get());
cricket::VideoSenderParameters parameters;
// We need to look up the codec in the engine to get the correct payload type.
for (const webrtc::SdpVideoFormat& format :
encoder_factory_->GetSupportedFormats()) {
cricket::VideoCodec engine_codec = GetEngineCodec(format.name);
if (!absl::c_linear_search(parameters.codecs, engine_codec)) {
parameters.codecs.push_back(engine_codec);
}
}
EXPECT_TRUE(channel->SetSenderParameters(parameters));
return channel;
}
std::unique_ptr<VideoMediaReceiveChannelInterface>
WebRtcVideoEngineTest::SetRecvParamsWithSupportedCodecs(
const std::vector<VideoCodec>& codecs) {
std::unique_ptr<VideoMediaReceiveChannelInterface> channel =
engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
VideoOptions(), webrtc::CryptoOptions());
cricket::VideoReceiverParameters parameters;
parameters.codecs = codecs;
EXPECT_TRUE(channel->SetReceiverParameters(parameters));
return channel;
}
// Create a receive channel configured with every codec the fake decoder
// factory supports (deduplicated), using the engine's payload types.
std::unique_ptr<VideoMediaReceiveChannelInterface>
WebRtcVideoEngineTest::SetRecvParamsWithAllSupportedCodecs() {
  std::vector<VideoCodec> codecs;
  for (const webrtc::SdpVideoFormat& format :
       decoder_factory_->GetSupportedFormats()) {
    cricket::VideoCodec engine_codec = GetEngineCodec(format.name);
    if (absl::c_linear_search(codecs, engine_codec))
      continue;
    codecs.push_back(engine_codec);
  }
  return SetRecvParamsWithSupportedCodecs(codecs);
}
// Expect that header extension `uri` is present among (supported == true) or
// absent from (supported == false) the engine's default enabled RTP header
// extensions.
void WebRtcVideoEngineTest::ExpectRtpCapabilitySupport(const char* uri,
                                                       bool supported) const {
  const std::vector<webrtc::RtpExtension> extensions =
      GetDefaultEnabledRtpHeaderExtensions(engine_);
  if (!supported) {
    EXPECT_THAT(extensions, Each(Field(&RtpExtension::uri, StrNe(uri))));
    return;
  }
  EXPECT_THAT(extensions, Contains(Field(&RtpExtension::uri, uri)));
}
// Verifies that receiving an RTX packet for an unsignaled stream still leads
// to transport-wide feedback being sent over RTCP.
TEST_F(WebRtcVideoEngineTest, SendsFeedbackAfterUnsignaledRtxPacket) {
  // Setup a channel with VP8, RTX and transport sequence number header
  // extension. Receive stream is not explicitly configured.
  AddSupportedVideoCodecType("VP8");
  std::vector<VideoCodec> supported_codecs =
      engine_.recv_codecs(/*include_rtx=*/true);
  // The RTX codec is expected to immediately follow VP8 in the list.
  ASSERT_EQ(supported_codecs[1].name, "rtx");
  int rtx_payload_type = supported_codecs[1].id;
  // Capture all outgoing RTCP so transport feedback can be inspected.
  MockNetworkInterface network;
  RtcpPacketParser rtcp_parser;
  ON_CALL(network, SendRtcp)
      .WillByDefault(
          testing::DoAll(WithArg<0>([&](rtc::CopyOnWriteBuffer* packet) {
                           ASSERT_TRUE(rtcp_parser.Parse(*packet));
                         }),
                         Return(true)));
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  cricket::VideoReceiverParameters parameters;
  parameters.codecs = supported_codecs;
  const int kTransportSeqExtensionId = 1;
  parameters.extensions.push_back(RtpExtension(
      RtpExtension::kTransportSequenceNumberUri, kTransportSeqExtensionId));
  ASSERT_TRUE(receive_channel->SetReceiverParameters(parameters));
  send_channel->SetInterface(&network);
  receive_channel->SetInterface(&network);
  send_channel->OnReadyToSend(true);
  receive_channel->SetReceive(true);
  // Inject a RTX packet.
  webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions);
  webrtc::RtpPacketReceived packet(&extension_map);
  packet.SetMarker(true);
  packet.SetPayloadType(rtx_payload_type);
  packet.SetSsrc(999);
  packet.SetExtension<webrtc::TransportSequenceNumber>(7);
  uint8_t* buf_ptr = packet.AllocatePayload(11);
  memset(buf_ptr, 0, 11);  // Pass MSAN (don't care about bytes 1-9)
  receive_channel->OnPacketReceived(packet);
  // Expect that feedback is sent after a while.
  time_controller_.AdvanceTime(webrtc::TimeDelta::Seconds(1));
  EXPECT_GT(rtcp_parser.transport_feedback()->num_packets(), 0);
  // Detach the mock network before the channels are destroyed.
  send_channel->SetInterface(nullptr);
  receive_channel->SetInterface(nullptr);
}
TEST_F(WebRtcVideoEngineTest, ReceiveBufferSizeViaFieldTrial) {
  // A "WebRTC-ReceiveBufferSize" value within the allowed range must be
  // applied verbatim to the socket receive buffer.
  webrtc::test::ScopedKeyValueConfig override_field_trials(
      field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:10000/");
  std::unique_ptr<VideoMediaReceiveChannelInterface> channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  cricket::FakeNetworkInterface fake_network;
  channel->SetInterface(&fake_network);
  EXPECT_EQ(10000, fake_network.recvbuf_size());
  channel->SetInterface(nullptr);
}
// Tests that an out-of-range (too large) "WebRTC-ReceiveBufferSize" field
// trial value is rejected and the default buffer size is used instead.
// NOTE: the test names below were previously swapped relative to their
// contents (the test named "TooLow" used the too-high value and vice versa);
// they now match the values they exercise.
TEST_F(WebRtcVideoEngineTest, TooHighReceiveBufferSizeViaFieldTrial) {
  // 10000001 is too high, it will revert to the default
  // kVideoRtpRecvBufferSize.
  webrtc::test::ScopedKeyValueConfig override_field_trials(
      field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:10000001/");
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  cricket::FakeNetworkInterface network;
  receive_channel->SetInterface(&network);
  EXPECT_EQ(kVideoRtpRecvBufferSize, network.recvbuf_size());
  receive_channel->SetInterface(nullptr);
}
// Tests that an out-of-range (too small) field trial value is likewise
// rejected in favor of the default.
TEST_F(WebRtcVideoEngineTest, TooLowReceiveBufferSizeViaFieldTrial) {
  // 9999 is too low, it will revert to the default kVideoRtpRecvBufferSize.
  webrtc::test::ScopedKeyValueConfig override_field_trials(
      field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:9999/");
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  cricket::FakeNetworkInterface network;
  receive_channel->SetInterface(&network);
  EXPECT_EQ(kVideoRtpRecvBufferSize, network.recvbuf_size());
  receive_channel->SetInterface(nullptr);
}
// Verifies that an RTX packet arriving on an unsignaled SSRC is associated
// with the unsignaled media stream and the wrapped media packet is recovered,
// completing the frame and triggering decoder creation.
TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) {
  // Setup a channel with VP8, RTX and transport sequence number header
  // extension. Receive stream is not explicitly configured.
  AddSupportedVideoCodecType("VP8");
  std::vector<VideoCodec> supported_codecs =
      engine_.recv_codecs(/*include_rtx=*/true);
  ASSERT_EQ(supported_codecs[1].name, "rtx");
  int rtx_payload_type = supported_codecs[1].id;
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(),
                                   VideoOptions(), webrtc::CryptoOptions());
  cricket::VideoReceiverParameters parameters;
  parameters.codecs = supported_codecs;
  ASSERT_TRUE(receive_channel->SetReceiverParameters(parameters));
  receive_channel->SetReceive(true);
  // Receive a normal payload packet. It is not a complete frame since the
  // marker bit is not set.
  RtpPacketReceived packet_1 =
      BuildVp8KeyFrame(/*ssrc*/ 123, supported_codecs[0].id);
  packet_1.SetMarker(false);
  receive_channel->OnPacketReceived(packet_1);
  time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(100));
  // No complete frame received. No decoder created yet.
  EXPECT_THAT(decoder_factory_->decoders(), IsEmpty());
  RtpPacketReceived packet_2;
  packet_2.SetSsrc(123);
  packet_2.SetPayloadType(supported_codecs[0].id);
  packet_2.SetSequenceNumber(packet_1.SequenceNumber() + 1);
  // Zero the entire allocated payload, not just the first byte, so that no
  // uninitialized memory is read during depacketization (MSAN). The previous
  // code zeroed only 1 of the 500 allocated bytes.
  memset(packet_2.AllocatePayload(500), 0, 500);
  packet_2.SetMarker(true);  // Frame is complete.
  // Wrap the second media packet in RTX on a different, unsignaled SSRC.
  RtpPacketReceived rtx_packet =
      BuildRtxPacket(345, rtx_payload_type, packet_2);
  receive_channel->OnPacketReceived(rtx_packet);
  time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(0));
  // The recovered packet completes the frame: a decoder must now exist and
  // have received exactly one frame.
  ASSERT_THAT(decoder_factory_->decoders(), Not(IsEmpty()));
  EXPECT_EQ(decoder_factory_->decoders()[0]->GetNumFramesReceived(), 1);
}
// Verifies that VP8 simulcast goes through the adapter: each created encoder
// is configured for a single stream, at increasing resolutions.
TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) {
  AddSupportedVideoCodecType("VP8");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
  EXPECT_TRUE(
      send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs)));
  EXPECT_TRUE(send_channel->SetSend(true));
  // Feed a 720p frame so more than one encoder gets created.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 60);
  EXPECT_TRUE(
      send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
  // Verify that encoders are configured for simulcast through adapter
  // (increasing resolution and only configured to send one stream each).
  int prev_width = -1;
  for (size_t i = 0; i < encoder_factory_->encoders().size(); ++i) {
    ASSERT_TRUE(encoder_factory_->encoders()[i]->WaitForInitEncode());
    webrtc::VideoCodec codec_settings =
        encoder_factory_->encoders()[i]->GetCodecSettings();
    EXPECT_EQ(0, codec_settings.numberOfSimulcastStreams);
    EXPECT_GT(codec_settings.width, prev_width);
    prev_width = codec_settings.width;
  }
  EXPECT_TRUE(send_channel->SetVideoSend(ssrcs.front(), nullptr, nullptr));
  // Destroying the channel must release every encoder back to the factory.
  send_channel.reset();
  ASSERT_EQ(0u, encoder_factory_->encoders().size());
}
// Verifies that renegotiating the send codec from H264 to VP8 swaps the
// encoder instance rather than accumulating a second one.
TEST_F(WebRtcVideoEngineTest, ChannelWithH264CanChangeToVp8) {
  AddSupportedVideoCodecType("VP8");
  AddSupportedVideoCodecType("H264");
  // Frame source.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  // Start with H264 as the negotiated send codec.
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("H264"));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  EXPECT_TRUE(
      send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder));
  // Sending one frame will have allocate the encoder.
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  ASSERT_EQ(1u, encoder_factory_->encoders().size());
  // Renegotiate to VP8.
  cricket::VideoSenderParameters new_parameters;
  new_parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel->SetSenderParameters(new_parameters));
  // Sending one frame will switch encoder.
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  // Exactly one live encoder after the switch — the H264 one was destroyed.
  EXPECT_EQ(1u, encoder_factory_->encoders().size());
}
// With a factory offering both VP8 and H264, negotiating VP8 for a simulcast
// stream must still use the simulcast adapter (multiple VP8 encoders).
TEST_F(WebRtcVideoEngineTest,
       UsesSimulcastAdapterForVp8WithCombinedVP8AndH264Factory) {
  AddSupportedVideoCodecType("VP8");
  AddSupportedVideoCodecType("H264");
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  // Negotiate VP8 so the simulcast adapter path is taken.
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
  EXPECT_TRUE(
      send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs)));
  EXPECT_TRUE(send_channel->SetSend(true));
  // Send a fake frame, or else the media engine will configure the simulcast
  // encoder adapter at a low-enough size that it'll only create a single
  // encoder layer.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  EXPECT_TRUE(
      send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  // At least two encoders should have been spawned, running VP8.
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
  ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
  EXPECT_EQ(webrtc::kVideoCodecVP8,
            encoder_factory_->encoders()[0]->GetCodecSettings().codecType);
  send_channel.reset();
  // Make sure DestroyVideoEncoder was called on the factory.
  EXPECT_EQ(0u, encoder_factory_->encoders().size());
}
// With a combined VP8/H264 factory, a single-stream H264 send must create
// exactly one encoder and destroy it when the channel goes away.
TEST_F(WebRtcVideoEngineTest,
       DestroysNonSimulcastEncoderFromCombinedVP8AndH264Factory) {
  AddSupportedVideoCodecType("VP8");
  AddSupportedVideoCodecType("H264");
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  // Negotiate H264 for a single (non-simulcast) stream.
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("H264"));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  EXPECT_TRUE(
      send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  // Send a frame of 720p. This should trigger a "real" encoder initialization.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
  ASSERT_EQ(1u, encoder_factory_->encoders().size());
  ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
  EXPECT_EQ(webrtc::kVideoCodecH264,
            encoder_factory_->encoders()[0]->GetCodecSettings().codecType);
  send_channel.reset();
  // Make sure DestroyVideoEncoder was called on the factory.
  ASSERT_EQ(0u, encoder_factory_->encoders().size());
}
// Verifies that H264 simulcast is configured within a single encoder
// instance (numberOfSimulcastStreams > 1), unlike the VP8 adapter path.
TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) {
  AddSupportedVideoCodecType("H264");
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("H264"));
  EXPECT_TRUE(send_channel->SetSenderParameters(parameters));
  const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3);
  EXPECT_TRUE(send_channel->AddSendStream(
      cricket::CreateSimStreamParams("cname", ssrcs)));
  // Send a frame of 720p. This should trigger a "real" encoder initialization.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  EXPECT_TRUE(send_channel->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
  ASSERT_EQ(1u, encoder_factory_->encoders().size());
  // A single H264 encoder carrying multiple simulcast streams is expected.
  FakeWebRtcVideoEncoder* encoder = encoder_factory_->encoders()[0];
  ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode());
  EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType);
  EXPECT_LT(1u, encoder->GetCodecSettings().numberOfSimulcastStreams);
  EXPECT_TRUE(send_channel->SetVideoSend(ssrcs[0], nullptr, nullptr));
}
// Test that FlexFEC is not supported as a send video codec by default.
// Only enabling field trial should allow advertising FlexFEC send codec.
TEST_F(WebRtcVideoEngineTest, Flexfec03SendCodecEnablesWithFieldTrial) {
  encoder_factory_->AddSupportedVideoCodecType("VP8");
  auto is_flexfec = Field("name", &VideoCodec::name, "flexfec-03");
  // Not advertised until the trial is enabled.
  EXPECT_THAT(engine_.send_codecs(), Not(Contains(is_flexfec)));
  webrtc::test::ScopedKeyValueConfig override_field_trials(
      field_trials_, "WebRTC-FlexFEC-03-Advertised/Enabled/");
  EXPECT_THAT(engine_.send_codecs(), Contains(is_flexfec));
}
// Test that the FlexFEC "codec" gets assigned to the lower payload type range
TEST_F(WebRtcVideoEngineTest, Flexfec03LowerPayloadTypeRange) {
  encoder_factory_->AddSupportedVideoCodecType("VP8");
  // FlexFEC is active with field trial.
  webrtc::test::ScopedKeyValueConfig override_field_trials(
      field_trials_, "WebRTC-FlexFEC-03-Advertised/Enabled/");
  auto send_codecs = engine_.send_codecs();
  // Locate the FlexFEC entry; it must exist and use a payload type from the
  // lower dynamic range [35, 65]. (A previously-declared, unused `flexfec`
  // matcher local has been removed.)
  auto it = absl::c_find_if(send_codecs, [](const cricket::VideoCodec& codec) {
    return codec.name == "flexfec-03";
  });
  ASSERT_NE(it, send_codecs.end());
  EXPECT_LE(35, it->id);
  EXPECT_GE(65, it->id);
}
// Test that codecs are added in the order they are reported from the factory.
TEST_F(WebRtcVideoEngineTest, ReportSupportedCodecs) {
  encoder_factory_->AddSupportedVideoCodecType("VP8");
  const char* kFakeCodecName = "FakeCodec";
  encoder_factory_->AddSupportedVideoCodecType(kFakeCodecName);
  // The codec registered last must come after VP8 in the engine's list.
  const size_t vp8_position = GetEngineCodecIndex("VP8");
  const size_t fake_codec_position = GetEngineCodecIndex(kFakeCodecName);
  EXPECT_LT(vp8_position, fake_codec_position);
}
// Test that a codec that was added after the engine was initialized
// does show up in the codec list after it was added.
TEST_F(WebRtcVideoEngineTest, ReportSupportedAddedCodec) {
  const char* kFakeExternalCodecName1 = "FakeExternalCodec1";
  const char* kFakeExternalCodecName2 = "FakeExternalCodec2";
  // Register the first codec, then snapshot the send codec list.
  encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName1);
  const std::vector<cricket::VideoCodec> codecs_before = engine_.send_codecs();
  // Register a second codec afterwards and query again.
  encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName2);
  const std::vector<cricket::VideoCodec> codecs_after = engine_.send_codecs();
  // The new codec plus its RTX companion should have been appended.
  EXPECT_EQ(codecs_before.size() + 2, codecs_after.size());
  // Both fake codecs must be present, in registration order.
  const size_t first_position = GetEngineCodecIndex(kFakeExternalCodecName1);
  const size_t second_position = GetEngineCodecIndex(kFakeExternalCodecName2);
  EXPECT_LT(first_position, second_position);
}
// An externally registered codec must get an RTX codec placed directly
// after it in the send-codec list.
TEST_F(WebRtcVideoEngineTest, ReportRtxForExternalCodec) {
  const char* kFakeCodecName = "FakeCodec";
  encoder_factory_->AddSupportedVideoCodecType(kFakeCodecName);
  const size_t codec_position = GetEngineCodecIndex(kFakeCodecName);
  EXPECT_EQ("rtx", engine_.send_codecs().at(codec_position + 1).name);
}
// Verifies decoder registration and lifetime when a supported codec is
// configured on a receive stream.
TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) {
  AddSupportedVideoCodecType("VP8");
  cricket::VideoReceiverParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  auto receive_channel = SetRecvParamsWithSupportedCodecs(parameters.codecs);
  EXPECT_TRUE(receive_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  // Decoders are not created until they are used.
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  EXPECT_EQ(0u, decoder_factory_->decoders().size());
  // Setting codecs of the same type should not reallocate the decoder.
  EXPECT_TRUE(receive_channel->SetReceiverParameters(parameters));
  EXPECT_EQ(0, decoder_factory_->GetNumCreatedDecoders());
  // Remove stream previously added to free the external decoder instance.
  EXPECT_TRUE(receive_channel->RemoveRecvStream(kSsrc));
  EXPECT_EQ(0u, decoder_factory_->decoders().size());
}
// Verifies that we can set up decoders.
TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) {
  // TODO(pbos): Do not assume that encoder/decoder support is symmetric. We
  // can't even query the WebRtcVideoDecoderFactory for supported codecs.
  // For now we add a FakeWebRtcVideoEncoderFactory to add H264 to supported
  // codecs.
  AddSupportedVideoCodecType("H264");
  std::vector<cricket::VideoCodec> h264_only = {GetEngineCodec("H264")};
  auto receive_channel = SetRecvParamsWithSupportedCodecs(h264_only);
  EXPECT_TRUE(receive_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  // Decoders are created lazily, on first use — none should exist yet.
  time_controller_.AdvanceTime(webrtc::TimeDelta::Zero());
  ASSERT_EQ(0u, decoder_factory_->decoders().size());
}
// Tests when GetSources is called with non-existing ssrc, it will return an
// empty list of RtpSource without crashing.
TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) {
  // Set up a receive stream listening on `kSsrc`.
  AddSupportedVideoCodecType("VP8");
  std::vector<cricket::VideoCodec> vp8_only = {GetEngineCodec("VP8")};
  auto receive_channel = SetRecvParamsWithSupportedCodecs(vp8_only);
  EXPECT_TRUE(receive_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  // Query a different, unknown ssrc; the result must be empty.
  std::vector<webrtc::RtpSource> sources =
      receive_channel->GetSources(kSsrc + 1);
  EXPECT_TRUE(sources.empty());
}
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullFactories) {
  // An engine constructed with null encoder/decoder factories must simply
  // report empty codec lists instead of crashing.
  std::unique_ptr<webrtc::VideoEncoderFactory> null_encoder_factory;
  std::unique_ptr<webrtc::VideoDecoderFactory> null_decoder_factory;
  webrtc::FieldTrialBasedConfig trials;
  WebRtcVideoEngine engine(std::move(null_encoder_factory),
                           std::move(null_decoder_factory), trials);
  EXPECT_EQ(0u, engine.send_codecs().size());
  EXPECT_EQ(0u, engine.recv_codecs().size());
}
// Factories that report no supported formats must yield empty codec lists.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) {
  // `engine` take ownership of the factories.
  webrtc::MockVideoEncoderFactory* encoder_factory =
      new webrtc::MockVideoEncoderFactory();
  webrtc::MockVideoDecoderFactory* decoder_factory =
      new webrtc::MockVideoDecoderFactory();
  webrtc::FieldTrialBasedConfig trials;
  WebRtcVideoEngine engine(
      (std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
      (std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)), trials);
  // TODO(kron): Change to Times(1) once send and receive codecs are changed
  // to be treated independently.
  // The expectation is set before the codec queries below exercise the mock.
  EXPECT_CALL(*encoder_factory, GetSupportedFormats()).Times(1);
  EXPECT_EQ(0u, engine.send_codecs().size());
  EXPECT_EQ(0u, engine.recv_codecs().size());
  // Both mocks must be destroyed together with `engine`.
  EXPECT_CALL(*encoder_factory, Die());
  EXPECT_CALL(*decoder_factory, Die());
}
// Test full behavior in the video engine when video codec factories of the new
// type are injected supporting the single codec Vp8. Check the returned codecs
// from the engine and that we will create a Vp8 encoder and decoder using the
// new factories.
TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) {
  // `engine` take ownership of the factories.
  webrtc::MockVideoEncoderFactory* encoder_factory =
      new webrtc::MockVideoEncoderFactory();
  webrtc::MockVideoDecoderFactory* decoder_factory =
      new webrtc::MockVideoDecoderFactory();
  std::unique_ptr<webrtc::MockVideoBitrateAllocatorFactory>
      rate_allocator_factory =
          std::make_unique<webrtc::MockVideoBitrateAllocatorFactory>();
  // A bitrate allocator is expected to be created for a VP8 codec config.
  EXPECT_CALL(*rate_allocator_factory,
              CreateVideoBitrateAllocator(Field(&webrtc::VideoCodec::codecType,
                                                webrtc::kVideoCodecVP8)))
      .WillOnce(
          [] { return std::make_unique<webrtc::MockVideoBitrateAllocator>(); });
  webrtc::FieldTrialBasedConfig trials;
  WebRtcVideoEngine engine(
      (std::unique_ptr<webrtc::VideoEncoderFactory>(encoder_factory)),
      (std::unique_ptr<webrtc::VideoDecoderFactory>(decoder_factory)), trials);
  const webrtc::SdpVideoFormat vp8_format("VP8");
  const std::vector<webrtc::SdpVideoFormat> supported_formats = {vp8_format};
  EXPECT_CALL(*encoder_factory, GetSupportedFormats())
      .WillRepeatedly(Return(supported_formats));
  EXPECT_CALL(*decoder_factory, GetSupportedFormats())
      .WillRepeatedly(Return(supported_formats));
  // Verify the codecs from the engine.
  const std::vector<VideoCodec> engine_codecs = engine.send_codecs();
  // Verify default codecs has been added correctly.
  // Expected order: VP8, rtx(VP8), red, rtx(red), ulpfec.
  EXPECT_EQ(5u, engine_codecs.size());
  EXPECT_EQ("VP8", engine_codecs.at(0).name);
  // RTX codec for VP8.
  EXPECT_EQ("rtx", engine_codecs.at(1).name);
  int vp8_associated_payload;
  EXPECT_TRUE(engine_codecs.at(1).GetParam(kCodecParamAssociatedPayloadType,
                                           &vp8_associated_payload));
  EXPECT_EQ(vp8_associated_payload, engine_codecs.at(0).id);
  EXPECT_EQ(kRedCodecName, engine_codecs.at(2).name);
  // RTX codec for RED.
  EXPECT_EQ("rtx", engine_codecs.at(3).name);
  int red_associated_payload;
  EXPECT_TRUE(engine_codecs.at(3).GetParam(kCodecParamAssociatedPayloadType,
                                           &red_associated_payload));
  EXPECT_EQ(red_associated_payload, engine_codecs.at(2).id);
  EXPECT_EQ(kUlpfecCodecName, engine_codecs.at(4).name);
  // NOTE(review): the next three statements re-check the VP8->RTX payload
  // association already verified above; they appear redundant.
  int associated_payload_type;
  EXPECT_TRUE(engine_codecs.at(1).GetParam(
      cricket::kCodecParamAssociatedPayloadType, &associated_payload_type));
  EXPECT_EQ(engine_codecs.at(0).id, associated_payload_type);
  // Verify default parameters has been added to the VP8 codec.
  VerifyCodecHasDefaultFeedbackParams(engine_codecs.at(0),
                                      /*lntf_expected=*/false);
  // Mock encoder creation. `engine` take ownership of the encoder.
  const webrtc::SdpVideoFormat format("VP8");
  EXPECT_CALL(*encoder_factory, CreateVideoEncoder(format)).WillOnce([&] {
    return std::make_unique<FakeWebRtcVideoEncoder>(nullptr);
  });
  // Expect no decoder to be created at this point. The decoder will only be
  // created if we receive payload data.
  EXPECT_CALL(*decoder_factory, CreateVideoDecoder(format)).Times(0);
  // Create a call.
  webrtc::GlobalSimulatedTimeController time_controller(
      webrtc::Timestamp::Millis(4711));
  CallConfig call_config(CreateEnvironment(
      time_controller.CreateTaskQueueFactory(), time_controller.GetClock()));
  const std::unique_ptr<Call> call = Call::Create(call_config);
  // Create send channel.
  const int send_ssrc = 123;
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel =
      engine.CreateSendChannel(call.get(), GetMediaConfig(), VideoOptions(),
                               webrtc::CryptoOptions(),
                               rate_allocator_factory.get());
  cricket::VideoSenderParameters send_parameters;
  send_parameters.codecs.push_back(engine_codecs.at(0));
  EXPECT_TRUE(send_channel->SetSenderParameters(send_parameters));
  send_channel->OnReadyToSend(true);
  EXPECT_TRUE(
      send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc)));
  EXPECT_TRUE(send_channel->SetSend(true));
  // Set capturer.
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  EXPECT_TRUE(send_channel->SetVideoSend(send_ssrc, nullptr, &frame_forwarder));
  // Sending one frame will allocate the encoder.
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller.AdvanceTime(webrtc::TimeDelta::Zero());
  // Create recv channel.
  const int recv_ssrc = 321;
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel =
      engine.CreateReceiveChannel(call.get(), GetMediaConfig(), VideoOptions(),
                                  webrtc::CryptoOptions());
  cricket::VideoReceiverParameters recv_parameters;
  recv_parameters.codecs.push_back(engine_codecs.at(0));
  EXPECT_TRUE(receive_channel->SetReceiverParameters(recv_parameters));
  EXPECT_TRUE(receive_channel->AddRecvStream(
      cricket::StreamParams::CreateLegacy(recv_ssrc)));
  // Remove streams previously added to free the encoder and decoder instance.
  EXPECT_CALL(*encoder_factory, Die());
  EXPECT_CALL(*decoder_factory, Die());
  EXPECT_CALL(*rate_allocator_factory, Die());
  EXPECT_TRUE(send_channel->RemoveSendStream(send_ssrc));
  EXPECT_TRUE(receive_channel->RemoveRecvStream(recv_ssrc));
}
// Verifies that toggling `is_screencast` recreates the encoder with the
// matching VideoCodecMode, while unrelated option changes do not trigger
// a recreation. (An unused local `FakeCall` allocated with raw `new` was
// removed.)
TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
  encoder_factory_->AddSupportedVideoCodecType("VP8");
  auto send_channel = SetSendParamsWithAllSupportedCodecs();
  ASSERT_TRUE(
      send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
  cricket::VideoCodec codec = GetEngineCodec("VP8");
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(codec);
  send_channel->OnReadyToSend(true);
  send_channel->SetSend(true);
  ASSERT_TRUE(send_channel->SetSenderParameters(parameters));
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(1280, 720,
                                        rtc::kNumMicrosecsPerSec / 30);
  VideoOptions options;
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
  // First frame: encoder is created in real-time video mode.
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
  EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo,
            encoder_factory_->encoders().back()->GetCodecSettings().mode);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  // No change in content type, keep current encoder.
  EXPECT_EQ(1, encoder_factory_->GetNumCreatedEncoders());
  options.is_screencast.emplace(true);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  // Change to screen content, recreate encoder. For the simulcast encoder
  // adapter case, this will result in two calls since InitEncode triggers a
  // a new instance.
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
  EXPECT_EQ(webrtc::VideoCodecMode::kScreensharing,
            encoder_factory_->encoders().back()->GetCodecSettings().mode);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  // Still screen content, no need to update encoder.
  EXPECT_EQ(2, encoder_factory_->GetNumCreatedEncoders());
  options.is_screencast.emplace(false);
  options.video_noise_reduction.emplace(false);
  EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder));
  // Change back to regular video content, update encoder. Also change
  // a non `is_screencast` option just to verify it doesn't affect recreation.
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(3));
  EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo,
            encoder_factory_->encoders().back()->GetCodecSettings().mode);
  // Remove stream previously added to free the external encoder instance.
  EXPECT_TRUE(send_channel->RemoveSendStream(kSsrc));
  EXPECT_EQ(0u, encoder_factory_->encoders().size());
}
// Verifies that RTX codecs are included in the send/recv codec lists only
// when explicitly requested. (An unused local ScopedKeyValueConfig was
// removed.)
TEST_F(WebRtcVideoEngineTest, SetVideoRtxEnabled) {
  AddSupportedVideoCodecType("VP8");
  // Don't want RTX
  std::vector<VideoCodec> send_codecs = engine_.send_codecs(false);
  EXPECT_FALSE(HasAnyRtxCodec(send_codecs));
  std::vector<VideoCodec> recv_codecs = engine_.recv_codecs(false);
  EXPECT_FALSE(HasAnyRtxCodec(recv_codecs));
  // Want RTX
  send_codecs = engine_.send_codecs(true);
  EXPECT_TRUE(HasAnyRtxCodec(send_codecs));
  recv_codecs = engine_.recv_codecs(true);
  EXPECT_TRUE(HasAnyRtxCodec(recv_codecs));
}
// Fixture that wires a send channel and a receive channel through a fake
// network interface so recordable-encoded-frame callbacks can be exercised
// end-to-end with simulated time.
class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test {
 protected:
  WebRtcVideoChannelEncodedFrameCallbackTest()
      : env_(CreateEnvironment(&field_trials_,
                               time_controller_.CreateTaskQueueFactory(),
                               time_controller_.GetClock())),
        call_(Call::Create(CallConfig(env_))),
        video_bitrate_allocator_factory_(
            webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
        // Real encoders, but a function-based decoder factory that always
        // hands out FakeDecoder instances for the formats in
        // kSdpVideoFormats.
        engine_(
            std::make_unique<webrtc::VideoEncoderFactoryTemplate<
                webrtc::LibvpxVp8EncoderTemplateAdapter,
                webrtc::LibvpxVp9EncoderTemplateAdapter,
                webrtc::OpenH264EncoderTemplateAdapter,
                webrtc::LibaomAv1EncoderTemplateAdapter>>(),
            std::make_unique<webrtc::test::FunctionVideoDecoderFactory>(
                []() { return std::make_unique<webrtc::test::FakeDecoder>(); },
                kSdpVideoFormats),
            field_trials_) {
    // Create both channels and route all traffic to the receive channel.
    send_channel_ = engine_.CreateSendChannel(
        call_.get(), cricket::MediaConfig(), cricket::VideoOptions(),
        webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get());
    receive_channel_ = engine_.CreateReceiveChannel(
        call_.get(), cricket::MediaConfig(), cricket::VideoOptions(),
        webrtc::CryptoOptions());
    network_interface_.SetDestination(receive_channel_.get());
    send_channel_->SetInterface(&network_interface_);
    receive_channel_->SetInterface(&network_interface_);
    cricket::VideoReceiverParameters parameters;
    parameters.codecs = engine_.recv_codecs();
    receive_channel_->SetReceiverParameters(parameters);
    receive_channel_->SetReceive(true);
  }
  ~WebRtcVideoChannelEncodedFrameCallbackTest() override {
    // Detach the network interface before the channels are destroyed.
    send_channel_->SetInterface(nullptr);
    receive_channel_->SetInterface(nullptr);
    send_channel_.reset();
    receive_channel_.reset();
  }
  // Injects a VP8 key frame with payload type 96 on `ssrc`.
  void DeliverKeyFrame(uint32_t ssrc) {
    receive_channel_->OnPacketReceived(BuildVp8KeyFrame(ssrc, 96));
  }
  // Injects a key frame, advances simulated time, and expects the frame to
  // have reached the renderer.
  void DeliverKeyFrameAndWait(uint32_t ssrc) {
    DeliverKeyFrame(ssrc);
    time_controller_.AdvanceTime(kFrameDuration);
    EXPECT_EQ(renderer_.num_rendered_frames(), 1);
  }
  static const std::vector<webrtc::SdpVideoFormat> kSdpVideoFormats;
  webrtc::GlobalSimulatedTimeController time_controller_{
      Timestamp::Seconds(1000)};
  webrtc::test::ScopedKeyValueConfig field_trials_;
  Environment env_;
  std::unique_ptr<Call> call_;
  std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
      video_bitrate_allocator_factory_;
  WebRtcVideoEngine engine_;
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel_;
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel_;
  cricket::FakeNetworkInterface network_interface_;
  cricket::FakeVideoRenderer renderer_;
};
const std::vector<webrtc::SdpVideoFormat>
WebRtcVideoChannelEncodedFrameCallbackTest::kSdpVideoFormats = {
webrtc::SdpVideoFormat("VP8")};
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest,
       SetEncodedFrameBufferFunction_DefaultStream) {
  // A recorder registered with ssrc 0 targets the default (unsignaled)
  // stream and must fire when a key frame arrives on it.
  testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>
      encoded_frame_callback;
  EXPECT_CALL(encoded_frame_callback, Call);
  EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  receive_channel_->SetRecordableEncodedFrameCallback(
      /*ssrc=*/0, encoded_frame_callback.AsStdFunction());
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  DeliverKeyFrame(kSsrc);
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  receive_channel_->RemoveRecvStream(kSsrc);
}
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest,
       SetEncodedFrameBufferFunction_MatchSsrcWithDefaultStream) {
  // Registering the recorder with the concrete ssrc must also work when the
  // receive stream was created as the default (unsignaled) stream.
  testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>
      encoded_frame_callback;
  EXPECT_CALL(encoded_frame_callback, Call);
  EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  receive_channel_->SetRecordableEncodedFrameCallback(
      kSsrc, encoded_frame_callback.AsStdFunction());
  DeliverKeyFrame(kSsrc);
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  receive_channel_->RemoveRecvStream(kSsrc);
}
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest,
       SetEncodedFrameBufferFunction_MatchSsrc) {
  // The recorder is registered for the same ssrc the stream was signaled
  // with, so it must fire on the first key frame.
  testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>
      encoded_frame_callback;
  EXPECT_CALL(encoded_frame_callback, Call);
  EXPECT_TRUE(receive_channel_->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  receive_channel_->SetRecordableEncodedFrameCallback(
      kSsrc, encoded_frame_callback.AsStdFunction());
  DeliverKeyFrame(kSsrc);
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  receive_channel_->RemoveRecvStream(kSsrc);
}
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest,
       SetEncodedFrameBufferFunction_MismatchSsrc) {
  // StrictMock: the recorder is registered for kSsrc but media only arrives
  // on kSsrc + 1, so any invocation of the callback fails the test.
  testing::StrictMock<
      testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>>
      encoded_frame_callback;
  EXPECT_TRUE(receive_channel_->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc + 1)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_));
  receive_channel_->SetRecordableEncodedFrameCallback(
      kSsrc, encoded_frame_callback.AsStdFunction());
  DeliverKeyFrame(kSsrc);  // Must not trigger the recorder callback.
  DeliverKeyFrameAndWait(kSsrc + 1);
  receive_channel_->RemoveRecvStream(kSsrc + 1);
}
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest,
       SetEncodedFrameBufferFunction_MismatchSsrcWithDefaultStream) {
  // Same mismatch scenario as above, but the receiving stream is the
  // default (unsignaled) one. The strict mock must never be invoked.
  testing::StrictMock<
      testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>>
      encoded_frame_callback;
  EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting(
      cricket::StreamParams::CreateLegacy(kSsrc + 1)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_));
  receive_channel_->SetRecordableEncodedFrameCallback(
      kSsrc, encoded_frame_callback.AsStdFunction());
  receive_channel_->SetDefaultSink(&renderer_);
  DeliverKeyFrame(kSsrc);  // Must not trigger the recorder callback.
  DeliverKeyFrameAndWait(kSsrc + 1);
  receive_channel_->RemoveRecvStream(kSsrc + 1);
}
TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, DoesNotDecodeWhenDisabled) {
  testing::MockFunction<void(const webrtc::RecordableEncodedFrame&)>
      recorder_callback;
  EXPECT_CALL(recorder_callback, Call);
  EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  receive_channel_->SetRecordableEncodedFrameCallback(
      /*ssrc=*/0, recorder_callback.AsStdFunction());
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  // Delivers a key frame and lets the decode queue drain.
  auto deliver_and_advance = [&] {
    DeliverKeyFrame(kSsrc);
    time_controller_.AdvanceTime(kFrameDuration);
  };
  // While receive is disabled nothing may be decoded or rendered.
  receive_channel_->SetReceive(false);
  deliver_and_advance();
  EXPECT_EQ(0, renderer_.num_rendered_frames());
  // Re-enabling receive lets the next key frame through.
  receive_channel_->SetReceive(true);
  deliver_and_advance();
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  // Disabling again stops rendering once more; the count stays at one.
  receive_channel_->SetReceive(false);
  deliver_and_advance();
  EXPECT_EQ(1, renderer_.num_rendered_frames());
  receive_channel_->RemoveRecvStream(kSsrc);
}
// Base fixture for end-to-end WebRtcVideoChannel tests: frames captured via
// a FrameForwarder are encoded with the built-in software codecs, looped back
// through a FakeNetworkInterface, decoded, and rendered into
// FakeVideoRenderer instances, all on a simulated clock.
class WebRtcVideoChannelBaseTest : public ::testing::Test {
 protected:
  WebRtcVideoChannelBaseTest()
      : env_(CreateEnvironment(&field_trials_,
                               time_controller_.CreateTaskQueueFactory(),
                               time_controller_.GetClock())),
        video_bitrate_allocator_factory_(
            webrtc::CreateBuiltinVideoBitrateAllocatorFactory()),
        engine_(std::make_unique<webrtc::VideoEncoderFactoryTemplate<
                    webrtc::LibvpxVp8EncoderTemplateAdapter,
                    webrtc::LibvpxVp9EncoderTemplateAdapter,
                    webrtc::OpenH264EncoderTemplateAdapter,
                    webrtc::LibaomAv1EncoderTemplateAdapter>>(),
                std::make_unique<webrtc::VideoDecoderFactoryTemplate<
                    webrtc::LibvpxVp8DecoderTemplateAdapter,
                    webrtc::LibvpxVp9DecoderTemplateAdapter,
                    webrtc::OpenH264DecoderTemplateAdapter,
                    webrtc::Dav1dDecoderTemplateAdapter>>(),
                field_trials_) {}

  // Creates the send/receive channel pair, wires them together through the
  // fake network, and installs a default send stream (kSsrc) with a frame
  // source attached.
  void SetUp() override {
    // One testcase calls SetUp in a loop, only create call_ once.
    if (!call_) {
      call_ = Call::Create(CallConfig(env_));
    }
    cricket::MediaConfig media_config;
    // Disabling cpu overuse detection actually disables quality scaling too; it
    // implies DegradationPreference kMaintainResolution. Automatic scaling
    // needs to be disabled, otherwise, tests which check the size of received
    // frames become flaky.
    media_config.video.enable_cpu_adaptation = false;
    send_channel_ = engine_.CreateSendChannel(
        call_.get(), media_config, cricket::VideoOptions(),
        webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get());
    receive_channel_ = engine_.CreateReceiveChannel(call_.get(), media_config,
                                                    cricket::VideoOptions(),
                                                    webrtc::CryptoOptions());
    send_channel_->OnReadyToSend(true);
    receive_channel_->SetReceive(true);
    // Packets sent on send_channel_ are looped back into receive_channel_.
    network_interface_.SetDestination(receive_channel_.get());
    send_channel_->SetInterface(&network_interface_);
    receive_channel_->SetInterface(&network_interface_);
    cricket::VideoReceiverParameters parameters;
    parameters.codecs = engine_.send_codecs();
    receive_channel_->SetReceiverParameters(parameters);
    EXPECT_TRUE(send_channel_->AddSendStream(DefaultSendStreamParams()));
    frame_forwarder_ = std::make_unique<webrtc::test::FrameForwarder>();
    frame_source_ = std::make_unique<cricket::FakeFrameSource>(
        640, 480, rtc::kNumMicrosecsPerSec / kFramerate);
    EXPECT_TRUE(
        send_channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get()));
  }

  // Returns pointer to implementation of the send channel.
  WebRtcVideoSendChannel* SendImpl() {
    // Note that this function requires intimate knowledge of how the channel
    // was created.
    return static_cast<cricket::WebRtcVideoSendChannel*>(send_channel_.get());
  }

  // Utility method to setup an additional stream to send and receive video.
  // Used to test send and recv between two streams.
  void SetUpSecondStream() {
    SetUpSecondStreamWithNoRecv();
    // Setup recv for second stream.
    EXPECT_TRUE(receive_channel_->AddRecvStream(
        cricket::StreamParams::CreateLegacy(kSsrc + 2)));
    // Make the second renderer available for use by a new stream.
    EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 2, &renderer2_));
  }

  // Setup an additional stream just to send video. Defer add recv stream.
  // This is required if you want to test unsignalled recv of video rtp packets.
  void SetUpSecondStreamWithNoRecv() {
    // SetUp() already added kSsrc; make sure duplicate SSRCs can't be added.
    EXPECT_TRUE(receive_channel_->AddRecvStream(
        cricket::StreamParams::CreateLegacy(kSsrc)));
    EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
    EXPECT_FALSE(send_channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(kSsrc)));
    EXPECT_TRUE(send_channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(kSsrc + 2)));
    // We don't add recv for the second stream.
    // Setup the receive and renderer for second stream after send.
    frame_forwarder_2_ = std::make_unique<webrtc::test::FrameForwarder>();
    EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc + 2, nullptr,
                                            frame_forwarder_2_.get()));
  }

  void TearDown() override {
    // Detach the network interface before destroying the channels so no
    // packets are delivered mid-destruction.
    send_channel_->SetInterface(nullptr);
    receive_channel_->SetInterface(nullptr);
    send_channel_.reset();
    receive_channel_.reset();
  }

  // Tears down and re-runs SetUp(); call_ survives across the reset.
  void ResetTest() {
    TearDown();
    SetUp();
  }

  bool SetDefaultCodec() { return SetOneCodec(DefaultCodec()); }

  // Negotiates a single send codec, preserving the current sending state.
  bool SetOneCodec(const cricket::VideoCodec& codec) {
    frame_source_ = std::make_unique<cricket::FakeFrameSource>(
        kVideoWidth, kVideoHeight, rtc::kNumMicrosecsPerSec / kFramerate);
    // Sending is paused while parameters change, then restored.
    bool sending = SendImpl()->sending();
    bool success = SetSend(false);
    if (success) {
      cricket::VideoSenderParameters parameters;
      parameters.codecs.push_back(codec);
      success = send_channel_->SetSenderParameters(parameters);
    }
    if (success) {
      success = SetSend(sending);
    }
    return success;
  }

  bool SetSend(bool send) { return send_channel_->SetSend(send); }

  // Pushes one captured frame into every attached forwarder and advances
  // simulated time by one frame interval so it gets encoded and delivered.
  void SendFrame() {
    if (frame_forwarder_2_) {
      frame_forwarder_2_->IncomingCapturedFrame(frame_source_->GetFrame());
    }
    frame_forwarder_->IncomingCapturedFrame(frame_source_->GetFrame());
    time_controller_.AdvanceTime(kFrameDuration);
  }

  // Advances simulated time by `wait_ms` before sending; always succeeds.
  bool WaitAndSendFrame(int wait_ms) {
    time_controller_.AdvanceTime(TimeDelta::Millis(wait_ms));
    SendFrame();
    return true;
  }

  // Counters forwarded from the fake network interface.
  int NumRtpBytes() { return network_interface_.NumRtpBytes(); }
  int NumRtpBytes(uint32_t ssrc) {
    return network_interface_.NumRtpBytes(ssrc);
  }
  int NumRtpPackets() { return network_interface_.NumRtpPackets(); }
  int NumRtpPackets(uint32_t ssrc) {
    return network_interface_.NumRtpPackets(ssrc);
  }
  int NumSentSsrcs() { return network_interface_.NumSentSsrcs(); }
  rtc::CopyOnWriteBuffer GetRtpPacket(int index) {
    return network_interface_.GetRtpPacket(index);
  }

  // Parses `p` as an RTP packet and returns its payload type.
  static int GetPayloadType(rtc::CopyOnWriteBuffer p) {
    RtpPacket header;
    EXPECT_TRUE(header.Parse(std::move(p)));
    return header.PayloadType();
  }

  // Tests that we can send and receive frames.
  void SendAndReceive(const cricket::VideoCodec& codec) {
    EXPECT_TRUE(SetOneCodec(codec));
    EXPECT_TRUE(SetSend(true));
    receive_channel_->SetDefaultSink(&renderer_);
    EXPECT_EQ(0, renderer_.num_rendered_frames());
    SendFrame();
    EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
    EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0)));
  }

  // Sends `duration_sec * fps` frames at the given rate and verifies each
  // frame is rendered; used by the stats tests to accumulate traffic.
  void SendReceiveManyAndGetStats(const cricket::VideoCodec& codec,
                                  int duration_sec,
                                  int fps) {
    EXPECT_TRUE(SetOneCodec(codec));
    EXPECT_TRUE(SetSend(true));
    receive_channel_->SetDefaultSink(&renderer_);
    EXPECT_EQ(0, renderer_.num_rendered_frames());
    for (int i = 0; i < duration_sec; ++i) {
      for (int frame = 1; frame <= fps; ++frame) {
        EXPECT_TRUE(WaitAndSendFrame(1000 / fps));
        EXPECT_FRAME(frame + i * fps, kVideoWidth, kVideoHeight);
      }
    }
    EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0)));
  }

  cricket::VideoSenderInfo GetSenderStats(size_t i) {
    VideoMediaSendInfo send_info;
    EXPECT_TRUE(send_channel_->GetStats(&send_info));
    return send_info.senders[i];
  }

  cricket::VideoReceiverInfo GetReceiverStats(size_t i) {
    cricket::VideoMediaReceiveInfo info;
    EXPECT_TRUE(receive_channel_->GetStats(&info));
    return info.receivers[i];
  }

  // Two streams one channel tests.
  // Tests that we can send and receive frames.
  void TwoStreamsSendAndReceive(const cricket::VideoCodec& codec) {
    SetUpSecondStream();
    // Test sending and receiving on first stream.
    SendAndReceive(codec);
    // Test sending and receiving on second stream.
    EXPECT_EQ(renderer2_.num_rendered_frames(), 1);
    EXPECT_GT(NumRtpPackets(), 0);
  }

  // Looks up a send codec by name (case-insensitive) in the engine's list;
  // fails the test if the codec is unknown.
  cricket::VideoCodec GetEngineCodec(const std::string& name) {
    for (const cricket::VideoCodec& engine_codec : engine_.send_codecs()) {
      if (absl::EqualsIgnoreCase(name, engine_codec.name))
        return engine_codec;
    }
    // This point should never be reached.
    ADD_FAILURE() << "Unrecognized codec name: " << name;
    return cricket::CreateVideoCodec(0, "");
  }

  cricket::VideoCodec DefaultCodec() { return GetEngineCodec("VP8"); }

  cricket::StreamParams DefaultSendStreamParams() {
    return cricket::StreamParams::CreateLegacy(kSsrc);
  }

  // Simulated clock driving all task queues in the fixture.
  webrtc::GlobalSimulatedTimeController time_controller_{
      Timestamp::Seconds(1000)};
  webrtc::test::ScopedKeyValueConfig field_trials_;
  std::unique_ptr<webrtc::test::ScopedKeyValueConfig> override_field_trials_;
  Environment env_;
  std::unique_ptr<Call> call_;
  std::unique_ptr<webrtc::VideoBitrateAllocatorFactory>
      video_bitrate_allocator_factory_;
  WebRtcVideoEngine engine_;
  std::unique_ptr<cricket::FakeFrameSource> frame_source_;
  std::unique_ptr<webrtc::test::FrameForwarder> frame_forwarder_;
  std::unique_ptr<webrtc::test::FrameForwarder> frame_forwarder_2_;
  std::unique_ptr<VideoMediaSendChannelInterface> send_channel_;
  std::unique_ptr<VideoMediaReceiveChannelInterface> receive_channel_;
  cricket::FakeNetworkInterface network_interface_;
  cricket::FakeVideoRenderer renderer_;
  // Used by test cases where 2 streams are run on the same channel.
  cricket::FakeVideoRenderer renderer2_;
};
// Test that SetSend works.
TEST_F(WebRtcVideoChannelBaseTest, SetSend) {
  // Sending stays off until SetSend(true), even with a source and codec set.
  EXPECT_FALSE(SendImpl()->sending());
  EXPECT_TRUE(
      send_channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get()));
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  EXPECT_FALSE(SendImpl()->sending());
  EXPECT_TRUE(SetSend(true));
  EXPECT_TRUE(SendImpl()->sending());
  // A captured frame now produces RTP packets.
  SendFrame();
  EXPECT_GT(NumRtpPackets(), 0);
  // Turning sending off again is reflected in the channel state.
  EXPECT_TRUE(SetSend(false));
  EXPECT_FALSE(SendImpl()->sending());
}
// Test that SetSend fails without codecs being set.
TEST_F(WebRtcVideoChannelBaseTest, SetSendWithoutCodecs) {
  // With no negotiated codecs, enabling send must fail and leave the
  // channel in the non-sending state.
  EXPECT_FALSE(SendImpl()->sending());
  EXPECT_FALSE(SetSend(true));
  EXPECT_FALSE(SendImpl()->sending());
}
// Test that we properly set the send and recv buffer sizes by the time
// SetSend is called.
TEST_F(WebRtcVideoChannelBaseTest, SetSendSetsTransportBufferSizes) {
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  EXPECT_TRUE(SetSend(true));
  // The channel must have configured the socket buffer sizes on the network
  // interface by the time sending is enabled.
  EXPECT_EQ(kVideoRtpSendBufferSize, network_interface_.sendbuf_size());
  EXPECT_EQ(kVideoRtpRecvBufferSize, network_interface_.recvbuf_size());
}
// Test that stats work properly for a 1-1 call.
TEST_F(WebRtcVideoChannelBaseTest, GetStats) {
  const int kDurationSec = 3;
  const int kFps = 10;
  // Generate 3 seconds of traffic so the stats below are non-trivial.
  SendReceiveManyAndGetStats(DefaultCodec(), kDurationSec, kFps);
  cricket::VideoMediaSendInfo send_info;
  cricket::VideoMediaReceiveInfo receive_info;
  EXPECT_TRUE(send_channel_->GetStats(&send_info));
  EXPECT_TRUE(receive_channel_->GetStats(&receive_info));
  // Sender side: exactly one sender with matching byte/packet counters.
  ASSERT_EQ(1U, send_info.senders.size());
  // TODO(whyuan): bytes_sent and bytes_received are different. Are both
  // payload? For webrtc, bytes_sent does not include the RTP header length.
  EXPECT_EQ(send_info.senders[0].payload_bytes_sent,
            NumRtpBytes() - kRtpHeaderSize * NumRtpPackets());
  EXPECT_EQ(NumRtpPackets(), send_info.senders[0].packets_sent);
  EXPECT_EQ(0.0, send_info.senders[0].fraction_lost);
  ASSERT_TRUE(send_info.senders[0].codec_payload_type);
  EXPECT_EQ(DefaultCodec().id, *send_info.senders[0].codec_payload_type);
  // No RTCP feedback (FIR/PLI/NACK) is expected on a clean loopback.
  EXPECT_EQ(0, send_info.senders[0].firs_received);
  EXPECT_EQ(0, send_info.senders[0].plis_received);
  EXPECT_EQ(0u, send_info.senders[0].nacks_received);
  EXPECT_EQ(kVideoWidth, send_info.senders[0].send_frame_width);
  EXPECT_EQ(kVideoHeight, send_info.senders[0].send_frame_height);
  EXPECT_GT(send_info.senders[0].framerate_input, 0);
  EXPECT_GT(send_info.senders[0].framerate_sent, 0);
  EXPECT_EQ(1U, send_info.send_codecs.count(DefaultCodec().id));
  EXPECT_EQ(DefaultCodec().ToCodecParameters(),
            send_info.send_codecs[DefaultCodec().id]);
  // Receiver side: one receiver, mirroring the sender's ssrc and counters.
  ASSERT_EQ(1U, receive_info.receivers.size());
  EXPECT_EQ(1U, send_info.senders[0].ssrcs().size());
  EXPECT_EQ(1U, receive_info.receivers[0].ssrcs().size());
  EXPECT_EQ(send_info.senders[0].ssrcs()[0],
            receive_info.receivers[0].ssrcs()[0]);
  ASSERT_TRUE(receive_info.receivers[0].codec_payload_type);
  EXPECT_EQ(DefaultCodec().id, *receive_info.receivers[0].codec_payload_type);
  EXPECT_EQ(NumRtpBytes() - kRtpHeaderSize * NumRtpPackets(),
            receive_info.receivers[0].payload_bytes_received);
  EXPECT_EQ(NumRtpPackets(), receive_info.receivers[0].packets_received);
  EXPECT_EQ(0, receive_info.receivers[0].packets_lost);
  // TODO(asapersson): Not set for webrtc. Handle missing stats.
  // EXPECT_EQ(0, receive_info.receivers[0].packets_concealed);
  EXPECT_EQ(0, receive_info.receivers[0].firs_sent);
  EXPECT_EQ(0, receive_info.receivers[0].plis_sent);
  EXPECT_EQ(0U, receive_info.receivers[0].nacks_sent);
  EXPECT_EQ(kVideoWidth, receive_info.receivers[0].frame_width);
  EXPECT_EQ(kVideoHeight, receive_info.receivers[0].frame_height);
  EXPECT_GT(receive_info.receivers[0].framerate_received, 0);
  EXPECT_GT(receive_info.receivers[0].framerate_decoded, 0);
  EXPECT_GT(receive_info.receivers[0].framerate_output, 0);
  EXPECT_GT(receive_info.receivers[0].jitter_buffer_delay_seconds, 0.0);
  EXPECT_GT(receive_info.receivers[0].jitter_buffer_emitted_count, 0u);
  EXPECT_EQ(1U, receive_info.receive_codecs.count(DefaultCodec().id));
  EXPECT_EQ(DefaultCodec().ToCodecParameters(),
            receive_info.receive_codecs[DefaultCodec().id]);
}
// Test that stats work properly for a conf call with multiple recv streams.
TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) {
  cricket::FakeVideoRenderer renderer1, renderer2;
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(DefaultCodec());
  parameters.conference_mode = true;
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  EXPECT_TRUE(SetSend(true));
  // Two receive streams (ssrcs 1 and 2), each with its own renderer.
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
  EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1));
  EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2));
  EXPECT_EQ(0, renderer1.num_rendered_frames());
  EXPECT_EQ(0, renderer2.num_rendered_frames());
  // The fake network duplicates sent packets to both ssrcs.
  std::vector<uint32_t> ssrcs;
  ssrcs.push_back(1);
  ssrcs.push_back(2);
  network_interface_.SetConferenceMode(true, ssrcs);
  SendFrame();
  EXPECT_FRAME_ON_RENDERER(renderer1, 1, kVideoWidth, kVideoHeight);
  EXPECT_FRAME_ON_RENDERER(renderer2, 1, kVideoWidth, kVideoHeight);
  // Stop sending before reading stats so the counters are stable.
  EXPECT_TRUE(send_channel_->SetSend(false));
  cricket::VideoMediaSendInfo send_info;
  cricket::VideoMediaReceiveInfo receive_info;
  EXPECT_TRUE(send_channel_->GetStats(&send_info));
  EXPECT_TRUE(receive_channel_->GetStats(&receive_info));
  ASSERT_EQ(1U, send_info.senders.size());
  // TODO(whyuan): bytes_sent and bytes_received are different. Are both
  // payload? For webrtc, bytes_sent does not include the RTP header length.
  EXPECT_EQ(NumRtpBytes() - kRtpHeaderSize * NumRtpPackets(),
            GetSenderStats(0).payload_bytes_sent);
  EXPECT_EQ(NumRtpPackets(), GetSenderStats(0).packets_sent);
  EXPECT_EQ(kVideoWidth, GetSenderStats(0).send_frame_width);
  EXPECT_EQ(kVideoHeight, GetSenderStats(0).send_frame_height);
  // Both receivers must report the full duplicated traffic.
  ASSERT_EQ(2U, receive_info.receivers.size());
  for (size_t i = 0; i < receive_info.receivers.size(); ++i) {
    EXPECT_EQ(1U, GetReceiverStats(i).ssrcs().size());
    EXPECT_EQ(i + 1, GetReceiverStats(i).ssrcs()[0]);
    EXPECT_EQ(NumRtpBytes() - kRtpHeaderSize * NumRtpPackets(),
              GetReceiverStats(i).payload_bytes_received);
    EXPECT_EQ(NumRtpPackets(), GetReceiverStats(i).packets_received);
    EXPECT_EQ(kVideoWidth, GetReceiverStats(i).frame_width);
    EXPECT_EQ(kVideoHeight, GetReceiverStats(i).frame_height);
  }
}
// Test that stats work properly for a conf call with multiple send streams.
TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) {
  // Normal setup; note that we set the SSRC explicitly to ensure that
  // it will come first in the senders map.
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(DefaultCodec());
  parameters.conference_mode = true;
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  EXPECT_TRUE(receive_channel_->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  EXPECT_TRUE(SetSend(true));
  SendFrame();
  EXPECT_GT(NumRtpPackets(), 0);
  EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
  // Add an additional capturer, and hook up a renderer to receive it.
  cricket::FakeVideoRenderer renderer2;
  webrtc::test::FrameForwarder frame_forwarder;
  const int kTestWidth = 160;
  const int kTestHeight = 120;
  cricket::FakeFrameSource frame_source(kTestWidth, kTestHeight,
                                        rtc::kNumMicrosecsPerSec / 5);
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(5678)));
  EXPECT_TRUE(send_channel_->SetVideoSend(5678, nullptr, &frame_forwarder));
  EXPECT_TRUE(receive_channel_->AddRecvStream(
      cricket::StreamParams::CreateLegacy(5678)));
  EXPECT_TRUE(receive_channel_->SetSink(5678, &renderer2));
  frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight);
  // Get stats, and make sure they are correct for two senders.
  cricket::VideoMediaSendInfo send_info;
  EXPECT_TRUE(send_channel_->GetStats(&send_info));
  ASSERT_EQ(2U, send_info.senders.size());
  // Packet totals are split across the two senders.
  EXPECT_EQ(NumRtpPackets(), send_info.senders[0].packets_sent +
                                 send_info.senders[1].packets_sent);
  EXPECT_EQ(1U, send_info.senders[0].ssrcs().size());
  EXPECT_EQ(1234U, send_info.senders[0].ssrcs()[0]);
  EXPECT_EQ(kVideoWidth, send_info.senders[0].send_frame_width);
  EXPECT_EQ(kVideoHeight, send_info.senders[0].send_frame_height);
  EXPECT_EQ(1U, send_info.senders[1].ssrcs().size());
  EXPECT_EQ(5678U, send_info.senders[1].ssrcs()[0]);
  EXPECT_EQ(kTestWidth, send_info.senders[1].send_frame_width);
  EXPECT_EQ(kTestHeight, send_info.senders[1].send_frame_height);
  // The capturer must be unregistered here as it goes out of scope next.
  send_channel_->SetVideoSend(5678, nullptr, nullptr);
}
// Test that we can set the bandwidth.
TEST_F(WebRtcVideoChannelBaseTest, SetSendBandwidth) {
  cricket::VideoSenderParameters send_params;
  send_params.codecs.push_back(DefaultCodec());
  // A non-positive max bandwidth means "unlimited" and must be accepted.
  send_params.max_bandwidth_bps = -1;
  EXPECT_TRUE(send_channel_->SetSenderParameters(send_params));
  // An explicit cap must be accepted as well.
  send_params.max_bandwidth_bps = 128 * 1024;
  EXPECT_TRUE(send_channel_->SetSenderParameters(send_params));
}
// Test that we can set the SSRC for the default send source.
TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrc) {
  EXPECT_TRUE(SetDefaultCodec());
  EXPECT_TRUE(SetSend(true));
  SendFrame();
  EXPECT_GT(NumRtpPackets(), 0);
  // All traffic must be tagged with the default stream's ssrc.
  RtpPacket first_packet;
  EXPECT_TRUE(first_packet.Parse(GetRtpPacket(0)));
  EXPECT_EQ(kSsrc, first_packet.Ssrc());
  // Packets are being paced out, so these can mismatch between the first and
  // second call to NumRtpPackets until pending packets are paced out.
  EXPECT_EQ(NumRtpPackets(), NumRtpPackets(first_packet.Ssrc()));
  EXPECT_EQ(NumRtpBytes(), NumRtpBytes(first_packet.Ssrc()));
  // Nothing may have been sent on any other ssrc.
  EXPECT_EQ(1, NumSentSsrcs());
  EXPECT_EQ(0, NumRtpPackets(kSsrc - 1));
  EXPECT_EQ(0, NumRtpBytes(kSsrc - 1));
}
// Test that we can set the SSRC even after codecs are set.
TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) {
  // Remove stream added in Setup.
  EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc));
  EXPECT_TRUE(SetDefaultCodec());
  // Add a new send stream (ssrc 999) after codec negotiation.
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(999)));
  EXPECT_TRUE(
      send_channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get()));
  EXPECT_TRUE(SetSend(true));
  EXPECT_TRUE(WaitAndSendFrame(0));
  EXPECT_GT(NumRtpPackets(), 0);
  // Traffic must carry the new ssrc, not the removed one.
  RtpPacket header;
  EXPECT_TRUE(header.Parse(GetRtpPacket(0)));
  EXPECT_EQ(999u, header.Ssrc());
  // Packets are being paced out, so these can mismatch between the first and
  // second call to NumRtpPackets until pending packets are paced out.
  EXPECT_EQ(NumRtpPackets(), NumRtpPackets(header.Ssrc()));
  EXPECT_EQ(NumRtpBytes(), NumRtpBytes(header.Ssrc()));
  EXPECT_EQ(1, NumSentSsrcs());
  EXPECT_EQ(0, NumRtpPackets(kSsrc));
  EXPECT_EQ(0, NumRtpBytes(kSsrc));
}
// Test that we can set the default video renderer before and after
// media is received.
TEST_F(WebRtcVideoChannelBaseTest, SetSink) {
  // A packet announcing kSsrc is delivered before the default sink exists;
  // installing the sink afterwards must still render subsequent frames.
  RtpPacketReceived packet;
  packet.SetSsrc(kSsrc);
  // Use nullptr (not NULL) for consistency with the rest of the file.
  receive_channel_->SetDefaultSink(nullptr);
  EXPECT_TRUE(SetDefaultCodec());
  EXPECT_TRUE(SetSend(true));
  EXPECT_EQ(0, renderer_.num_rendered_frames());
  receive_channel_->SetDefaultSink(&renderer_);
  receive_channel_->OnPacketReceived(packet);
  SendFrame();
  EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
}
// Tests setting up and configuring a send stream.
TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) {
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  EXPECT_TRUE(SetSend(true));
  receive_channel_->SetDefaultSink(&renderer_);
  SendFrame();
  EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
  EXPECT_GT(NumRtpPackets(), 0);
  // The last packet sent so far must carry the original ssrc.
  RtpPacket header;
  size_t last_packet = NumRtpPackets() - 1;
  EXPECT_TRUE(header.Parse(GetRtpPacket(static_cast<int>(last_packet))));
  EXPECT_EQ(kSsrc, header.Ssrc());
  // Remove the send stream that was added during Setup.
  EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc));
  int rtp_packets = NumRtpPackets();
  // Replace it with a new stream (ssrc 789); no packets are sent until a
  // frame is captured.
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u)));
  EXPECT_TRUE(
      send_channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get()));
  EXPECT_EQ(rtp_packets, NumRtpPackets());
  // Wait 30ms to guarantee the engine does not drop the frame.
  EXPECT_TRUE(WaitAndSendFrame(30));
  EXPECT_GT(NumRtpPackets(), rtp_packets);
  // New traffic must carry the new ssrc.
  last_packet = NumRtpPackets() - 1;
  EXPECT_TRUE(header.Parse(GetRtpPacket(static_cast<int>(last_packet))));
  EXPECT_EQ(789u, header.Ssrc());
}
// Tests the behavior of incoming streams in a conference scenario.
TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) {
  cricket::FakeVideoRenderer renderer1, renderer2;
  EXPECT_TRUE(SetDefaultCodec());
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(DefaultCodec());
  parameters.conference_mode = true;
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  EXPECT_TRUE(SetSend(true));
  // Two receive streams, each with its own renderer.
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
  EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1));
  EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2));
  EXPECT_EQ(0, renderer1.num_rendered_frames());
  EXPECT_EQ(0, renderer2.num_rendered_frames());
  // The fake network duplicates each sent packet to both ssrcs.
  std::vector<uint32_t> ssrcs;
  ssrcs.push_back(1);
  ssrcs.push_back(2);
  network_interface_.SetConferenceMode(true, ssrcs);
  SendFrame();
  // Both renderers must receive the frame at full resolution with the
  // negotiated payload type.
  EXPECT_FRAME_ON_RENDERER(renderer1, 1, kVideoWidth, kVideoHeight);
  EXPECT_FRAME_ON_RENDERER(renderer2, 1, kVideoWidth, kVideoHeight);
  EXPECT_EQ(DefaultCodec().id, GetPayloadType(GetRtpPacket(0)));
  EXPECT_EQ(kVideoWidth, renderer1.width());
  EXPECT_EQ(kVideoHeight, renderer1.height());
  EXPECT_EQ(kVideoWidth, renderer2.width());
  EXPECT_EQ(kVideoHeight, renderer2.height());
  EXPECT_TRUE(receive_channel_->RemoveRecvStream(2));
  EXPECT_TRUE(receive_channel_->RemoveRecvStream(1));
}
// Tests that we can add and remove capturers and frames are sent out properly
TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) {
  // Note: cricket::FOURCC_I420 was previously imported here but never used.
  using cricket::VideoCodec;
  using cricket::VideoFormat;
  using cricket::VideoOptions;
  VideoCodec codec = DefaultCodec();
  const int time_between_send_ms = VideoFormat::FpsToInterval(kFramerate);
  EXPECT_TRUE(SetOneCodec(codec));
  EXPECT_TRUE(SetSend(true));
  receive_channel_->SetDefaultSink(&renderer_);
  EXPECT_EQ(0, renderer_.num_rendered_frames());
  SendFrame();
  EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
  webrtc::test::FrameForwarder frame_forwarder;
  cricket::FakeFrameSource frame_source(480, 360, rtc::kNumMicrosecsPerSec / 30,
                                        rtc::kNumMicrosecsPerSec / 30);
  // TODO(nisse): This testcase fails if we don't configure
  // screencast. It's unclear why, I see nothing obvious in this
  // test which is related to screencast logic.
  VideoOptions video_options;
  video_options.is_screencast = true;
  send_channel_->SetVideoSend(kSsrc, &video_options, nullptr);
  int captured_frames = 1;
  for (int iterations = 0; iterations < 2; ++iterations) {
    EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, &frame_forwarder));
    time_controller_.AdvanceTime(TimeDelta::Millis(time_between_send_ms));
    frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
    ++captured_frames;
    // Check that a real (non-black) frame of the capturer's size arrived.
    // Individual assertions (rather than one combined EXPECT_TRUE) give
    // precise failure messages.
    EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
    EXPECT_EQ(480, renderer_.width());
    EXPECT_EQ(360, renderer_.height());
    captured_frames = renderer_.num_rendered_frames() + 1;
    EXPECT_FALSE(renderer_.black_frame());
    EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr));
    // Make sure a black frame was generated.
    // The black frame should have the resolution of the previous frame to
    // prevent expensive encoder reconfigurations.
    EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
    EXPECT_EQ(480, renderer_.width());
    EXPECT_EQ(360, renderer_.height());
    EXPECT_TRUE(renderer_.black_frame());
    // The black frame has the same timestamp as the next frame since it's
    // timestamp is set to the last frame's timestamp + interval. WebRTC will
    // not render a frame with the same timestamp so capture another frame
    // with the frame capturer to increment the next frame's timestamp.
    frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame());
  }
}
// Tests that if SetVideoSend is called with a NULL capturer after the
// capturer was already removed, the application doesn't crash (and no black
// frame is sent).
TEST_F(WebRtcVideoChannelBaseTest, RemoveCapturerWithoutAdd) {
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  EXPECT_TRUE(SetSend(true));
  receive_channel_->SetDefaultSink(&renderer_);
  EXPECT_EQ(0, renderer_.num_rendered_frames());
  SendFrame();
  EXPECT_FRAME(1, kVideoWidth, kVideoHeight);
  // Allow one frame so they don't get dropped because we send frames too
  // tightly.
  time_controller_.AdvanceTime(kFrameDuration);
  // Remove the capturer.
  EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr));
  // No capturer was added, so this SetVideoSend shouldn't do anything.
  EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr));
  time_controller_.AdvanceTime(TimeDelta::Millis(300));
  // Verify no more frames were sent.
  EXPECT_EQ(1, renderer_.num_rendered_frames());
}
// Tests that we can add and remove capturer as unique sources.
TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) {
  // Set up the stream associated with the engine.
  EXPECT_TRUE(receive_channel_->AddRecvStream(
      cricket::StreamParams::CreateLegacy(kSsrc)));
  EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_));
  cricket::VideoFormat capture_format(
      kVideoWidth, kVideoHeight,
      cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420);
  // Set up additional stream 1.
  cricket::FakeVideoRenderer renderer1;
  // SetSink must fail until the recv stream exists.
  EXPECT_FALSE(receive_channel_->SetSink(1, &renderer1));
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
  EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1));
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(1)));
  webrtc::test::FrameForwarder frame_forwarder1;
  cricket::FakeFrameSource frame_source(kVideoWidth, kVideoHeight,
                                        rtc::kNumMicrosecsPerSec / kFramerate);
  // Set up additional stream 2.
  cricket::FakeVideoRenderer renderer2;
  EXPECT_FALSE(receive_channel_->SetSink(2, &renderer2));
  EXPECT_TRUE(
      receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
  EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2));
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(2)));
  webrtc::test::FrameForwarder frame_forwarder2;
  // State for all the streams.
  EXPECT_TRUE(SetOneCodec(DefaultCodec()));
  // A limitation in the lmi implementation requires that SetVideoSend() is
  // called after SetOneCodec().
  // TODO(hellner): this seems like an unnecessary constraint, fix it.
  EXPECT_TRUE(send_channel_->SetVideoSend(1, nullptr, &frame_forwarder1));
  EXPECT_TRUE(send_channel_->SetVideoSend(2, nullptr, &frame_forwarder2));
  EXPECT_TRUE(SetSend(true));
  // Test capturer associated with engine.
  const int kTestWidth = 160;
  const int kTestHeight = 120;
  frame_forwarder1.IncomingCapturedFrame(frame_source.GetFrame(
      kTestWidth, kTestHeight, webrtc::VideoRotation::kVideoRotation_0,
      rtc::kNumMicrosecsPerSec / kFramerate));
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_FRAME_ON_RENDERER(renderer1, 1, kTestWidth, kTestHeight);
  // Capture a frame with additional capturer2, frames should be received.
  frame_forwarder2.IncomingCapturedFrame(frame_source.GetFrame(
      kTestWidth, kTestHeight, webrtc::VideoRotation::kVideoRotation_0,
      rtc::kNumMicrosecsPerSec / kFramerate));
  time_controller_.AdvanceTime(kFrameDuration);
  EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight);
  // Successfully remove the capturer.
  EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr));
  // The capturers must be unregistered here as they go out of scope next.
  EXPECT_TRUE(send_channel_->SetVideoSend(1, nullptr, nullptr));
  EXPECT_TRUE(send_channel_->SetVideoSend(2, nullptr, nullptr));
}
// Tests empty StreamParams is rejected.
TEST_F(WebRtcVideoChannelBaseTest, RejectEmptyStreamParams) {
  // Drop the send stream that the fixture created during SetUp().
  EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc));
  // A default-constructed (ssrc-less) StreamParams must be rejected, while a
  // valid one is still accepted afterwards.
  cricket::StreamParams empty_params;
  EXPECT_FALSE(send_channel_->AddSendStream(empty_params));
  EXPECT_TRUE(
      send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u)));
}
// Test that multiple send streams can be created and deleted properly.
TEST_F(WebRtcVideoChannelBaseTest, MultipleSendStreams) {
  // Remove stream added in Setup, i.e. the stream corresponding to the
  // default channel.
  EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc));
  // Add one send stream per ssrc in kSsrcs4 (range-for instead of a manual
  // index loop).
  for (uint32_t ssrc : kSsrcs4) {
    EXPECT_TRUE(send_channel_->AddSendStream(
        cricket::StreamParams::CreateLegacy(ssrc)));
  }
  // Delete one of the non default channel streams, let the destructor delete
  // the remaining ones.
  const size_t kSsrcsSize = sizeof(kSsrcs4) / sizeof(kSsrcs4[0]);
  EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
  // Removing the same stream a second time must fail: it is already gone.
  EXPECT_FALSE(send_channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
}
// Round-trips VP8 through SendAndReceive(). NOTE(review): despite the "Vga"
// suffix, this body is identical to the Qvga/SvcQqvga siblings below — the
// resolution-specific names appear historical; confirm before relying on them.
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Vga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
// Round-trips VP8 through SendAndReceive(). NOTE(review): body is identical
// to the Vga variant above; the "Qvga" in the name does not change the
// capture format here — confirm before relying on it.
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8Qvga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
// Round-trips VP8 through SendAndReceive(). NOTE(review): body is identical
// to the Vga/Qvga variants above; no SVC or QQVGA-specific configuration is
// visible here — confirm before relying on the name.
TEST_F(WebRtcVideoChannelBaseTest, SendAndReceiveVp8SvcQqvga) {
  SendAndReceive(GetEngineCodec("VP8"));
}
// Verifies that two streams can be sent and received simultaneously.
TEST_F(WebRtcVideoChannelBaseTest, TwoStreamsSendAndReceive) {
  // Use a high start bitrate so VP8 does not downscale either stream due to
  // low initial start bitrates: below ~250k, two streams sharing 300k would
  // start at QVGA instead of VGA.
  // TODO(pbos): Set up the quality scaler so that both senders reliably start
  // at QVGA, then verify that instead.
  cricket::VideoCodec vp8 = GetEngineCodec("VP8");
  vp8.params[kCodecParamStartBitrate] = "1000000";
  TwoStreamsSendAndReceive(vp8);
}
#if defined(RTC_ENABLE_VP9)
// Verifies that RequestEncoderFallback() switches the send codec to the next
// negotiated codec, and is a no-op once no further fallback exists.
TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderFallback) {
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  absl::optional<VideoCodec> codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(codec);
  EXPECT_EQ("VP9", codec->name);
  // RequestEncoderFallback will post a task to the worker thread (which is
  // also the current thread); advancing the simulated clock lets that task
  // run before the send codec is re-queried.
  SendImpl()->RequestEncoderFallback();
  time_controller_.AdvanceTime(kFrameDuration);
  codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(codec);
  EXPECT_EQ("VP8", codec->name);
  // No other codec to fall back to, keep using VP8.
  SendImpl()->RequestEncoderFallback();
  time_controller_.AdvanceTime(kFrameDuration);
  codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(codec);
  EXPECT_EQ("VP8", codec->name);
}
// Verifies that requesting a switch to an unavailable encoder, with default
// fallback allowed, moves the send codec to the next negotiated codec.
TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchDefaultFallback) {
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  absl::optional<VideoCodec> codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(codec);
  EXPECT_EQ("VP9", codec->name);
  // RequestEncoderSwitch will post a task to the worker thread (which is also
  // the current thread); advancing the simulated clock lets that task run
  // before the send codec is re-queried.
  SendImpl()->RequestEncoderSwitch(webrtc::SdpVideoFormat("UnavailableCodec"),
                                   /*allow_default_fallback=*/true);
  time_controller_.AdvanceTime(kFrameDuration);
  // Requested encoder is not available. Default fallback is allowed. Switch to
  // the next negotiated codec, VP8.
  codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(codec);
  EXPECT_EQ("VP8", codec->name);
}
// Verifies that RequestEncoderSwitch() with allow_default_fallback=false only
// performs a switch when the requested format matches a negotiated codec.
TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) {
  VideoCodec vp9_profile0 = GetEngineCodec("VP9");
  vp9_profile0.params["profile-id"] = "0";
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  parameters.codecs.push_back(vp9_profile0);
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  absl::optional<VideoCodec> send_codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(send_codec);
  EXPECT_EQ("VP8", send_codec->name);
  // VP9 profile-id=1 was never negotiated, and default fallback is not
  // allowed, so the send codec must remain VP8.
  SendImpl()->RequestEncoderSwitch(
      webrtc::SdpVideoFormat("VP9", {{"profile-id", "1"}}),
      /*allow_default_fallback=*/false);
  time_controller_.AdvanceTime(kFrameDuration);
  send_codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(send_codec);
  EXPECT_EQ("VP8", send_codec->name);
  // VP9 profile-id=0 was negotiated, so this request switches the encoder.
  SendImpl()->RequestEncoderSwitch(
      webrtc::SdpVideoFormat("VP9", {{"profile-id", "0"}}),
      /*allow_default_fallback=*/false);
  time_controller_.AdvanceTime(kFrameDuration);
  send_codec = send_channel_->GetSendCodec();
  ASSERT_TRUE(send_codec);
  EXPECT_EQ("VP9", send_codec->name);
}
// Verifies that after an encoder fallback the active send codec is moved to
// the front of the codec list reported by GetRtpSendParameters().
TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) {
  cricket::VideoSenderParameters parameters;
  parameters.codecs.push_back(GetEngineCodec("VP9"));
  parameters.codecs.push_back(GetEngineCodec("VP8"));
  EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
  auto send_codecs = send_channel_->GetRtpSendParameters(kSsrc).codecs;
  ASSERT_EQ(send_codecs.size(), 2u);
  // EXPECT_EQ (expected, actual) — the original used EXPECT_THAT with the
  // arguments in the value slot/matcher slot swapped.
  EXPECT_EQ("VP9", send_codecs[0].name);
  // RequestEncoderFallback will post a task to the worker thread (which is
  // also the current thread); advancing the simulated clock lets that task
  // run before the parameters are re-queried.
  SendImpl()->RequestEncoderFallback();
  time_controller_.AdvanceTime(kFrameDuration);
  send_codecs = send_channel_->GetRtpSendParameters(kSsrc).codecs;
  ASSERT_EQ(send_codecs.size(), 2u);
  EXPECT_EQ("VP8", send_codecs[0].name);
}
#endif // defined(RTC_ENABLE_VP9)
class WebRtcVideoChannelTest : public WebRtcVideoEngineTest {
public:
// Default constructor: delegates to the field-trial constructor with an
// empty field-trial string.
WebRtcVideoChannelTest() : WebRtcVideoChannelTest("") {}
// `field_trials` is forwarded to the engine fixture. The frame source
// produces 1280x720 frames at 30 fps; last_ssrc_ starts at 0 (reset to 123
// in SetUp()).
explicit WebRtcVideoChannelTest(const char* field_trials)
    : WebRtcVideoEngineTest(field_trials),
      frame_source_(1280, 720, rtc::kNumMicrosecsPerSec / 30),
      last_ssrc_(0) {}
void SetUp() override {
AddSupportedVideoCodecType("VP8");
AddSupportedVideoCodecType("VP9");
AddSupportedVideoCodecType(
"AV1", {ScalabilityMode::kL1T3, ScalabilityMode::kL2T3});
#if defined(WEBRTC_USE_H264)
AddSupportedVideoCodecType("H264");
#endif
fake_call_.reset(new FakeCall(&field_trials_));
send_channel_ = engine_.CreateSendChannel(
fake_call_.get(), GetMediaConfig(), VideoOptions(),
webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get());
receive_channel_ =
engine_.CreateReceiveChannel(fake_call_.get(), GetMediaConfig(),
VideoOptions(), webrtc::CryptoOptions());
send_channel_->SetSsrcListChangedCallback(
[receive_channel =
receive_channel_.get()](const std::set<uint32_t>& choices) {
receive_channel->ChooseReceiverReportSsrc(choices);
});
send_channel_->SetSendCodecChangedCallback([this]() {
receive_channel_->SetReceiverFeedbackParameters(
send_channel_->SendCodecHasLntf(), send_channel_->SendCodecHasNack(),
send_channel_->SendCodecRtcpMode(),
send_channel_->SendCodecRtxTime());
});
send_channel_->OnReadyToSend(true);
receive_channel_->SetReceive(true);
last_ssrc_ = 123;
send_parameters_.codecs = engine_.send_codecs();
recv_parameters_.codecs = engine_.recv_codecs();
ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_));
}
// Tears down in dependency order: detach the channel interfaces first, then
// destroy the channels, and only then release `fake_call_` — the channels
// were created with a raw pointer to it in SetUp().
void TearDown() override {
  send_channel_->SetInterface(nullptr);
  receive_channel_->SetInterface(nullptr);
  send_channel_.reset();
  receive_channel_.reset();
  fake_call_ = nullptr;
}
// Rebuilds the whole fixture (call, channels, callbacks) from scratch by
// chaining TearDown() and SetUp().
void ResetTest() {
  TearDown();
  SetUp();
}
// Returns pointer to implementation of the send channel.
WebRtcVideoSendChannel* SendImpl() {
  // Note that this function requires intimate knowledge of how the channel
  // was created: the cast assumes the channel obtained from
  // engine_.CreateSendChannel() in SetUp() is a WebRtcVideoSendChannel.
  return static_cast<cricket::WebRtcVideoSendChannel*>(send_channel_.get());
}
// Casts a shim channel to a webrtc::Transport. Used once.
// Like SendImpl(), this assumes `channel` is really a WebRtcVideoSendChannel.
webrtc::Transport* ChannelImplAsTransport(
    cricket::VideoMediaSendChannelInterface* channel) {
  return static_cast<cricket::WebRtcVideoSendChannel*>(channel)->transport();
}
// Looks up a codec in the engine's send codec list by case-insensitive name.
// Fails the current test and returns a dummy codec if the name is unknown.
cricket::VideoCodec GetEngineCodec(const std::string& name) {
  const auto& codecs = engine_.send_codecs();
  for (size_t i = 0; i < codecs.size(); ++i) {
    if (absl::EqualsIgnoreCase(name, codecs[i].name)) {
      return codecs[i];
    }
  }
  // This point should never be reached.
  ADD_FAILURE() << "Unrecognized codec name: " << name;
  return cricket::CreateVideoCodec(0, "");
}
// The codec used by tests that do not care which one: VP8.
cricket::VideoCodec DefaultCodec() { return GetEngineCodec("VP8"); }
// After receiving and processing the packet, enough time is advanced that
// the unsignalled receive stream cooldown is no longer in effect.
void ReceivePacketAndAdvanceTime(const RtpPacketReceived& packet) {
  receive_channel_->OnPacketReceived(packet);
  time_controller_.AdvanceTime(
      webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs));
}
protected:
// Adds a send stream on the next unused SSRC (pre-incremented last_ssrc_)
// and returns the fake stream created for it.
FakeVideoSendStream* AddSendStream() {
  return AddSendStream(StreamParams::CreateLegacy(++last_ssrc_));
}
FakeVideoSendStream* AddSendStream(const StreamParams& sp) {
size_t num_streams = fake_call_->GetVideoSendStreams().size();
EXPECT_TRUE(send_channel_->AddSendStream(sp));
std::vector<FakeVideoSendStream*> streams =
fake_call_->GetVideoSendStreams();
EXPECT_EQ(num_streams + 1, streams.size());