Enable use of MediaTransportInterface for video streams.
Bug: webrtc:9719
Change-Id: I8c6027b4b15ed641e42fd210b3ea87d121508a69
Reviewed-on: https://webrtc-review.googlesource.com/c/111751
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Peter Slatala <psla@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26219}
diff --git a/api/media_transport_interface.cc b/api/media_transport_interface.cc
index b201eae..2164198 100644
--- a/api/media_transport_interface.cc
+++ b/api/media_transport_interface.cc
@@ -169,11 +169,15 @@
void MediaTransportInterface::SetNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback) {}
-void MediaTransportInterface::RemoveTargetTransferRateObserver(
- webrtc::TargetTransferRateObserver* observer) {}
-
void MediaTransportInterface::AddTargetTransferRateObserver(
- webrtc::TargetTransferRateObserver* observer) {}
+ TargetTransferRateObserver* observer) {}
+void MediaTransportInterface::RemoveTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) {}
+
+void MediaTransportInterface::AddRttObserver(
+ MediaTransportRttObserver* observer) {}
+void MediaTransportInterface::RemoveRttObserver(
+ MediaTransportRttObserver* observer) {}
size_t MediaTransportInterface::GetAudioPacketOverhead() const {
return 0;
diff --git a/api/media_transport_interface.h b/api/media_transport_interface.h
index e0a762a..4531fe4 100644
--- a/api/media_transport_interface.h
+++ b/api/media_transport_interface.h
@@ -274,6 +274,19 @@
virtual void OnStateChanged(MediaTransportState state) = 0;
};
+// Callback for RTT measurements on the receive side.
+// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's
+// somewhat unclear what type of measurement is needed. It's used to configure
+// NACK generation and playout buffer. Either raw measurement values or recent
+// maximum would make sense for this use. Need consolidation of RTT signalling.
+class MediaTransportRttObserver {
+ public:
+ virtual ~MediaTransportRttObserver() = default;
+
+ // Invoked when a new RTT measurement is available, typically once per ACK.
+ virtual void OnRttUpdated(int64_t rtt_ms) = 0;
+};
+
// Supported types of application data messages.
enum class DataMessageType {
// Application data buffer with the binary bit unset.
@@ -379,12 +392,18 @@
// A newly registered observer will be called back with the latest recorded
// target rate, if available.
virtual void AddTargetTransferRateObserver(
- webrtc::TargetTransferRateObserver* observer);
+ TargetTransferRateObserver* observer);
// Removes an existing |observer| from observers. If observer was never
// registered, an error is logged and method does nothing.
virtual void RemoveTargetTransferRateObserver(
- webrtc::TargetTransferRateObserver* observer);
+ TargetTransferRateObserver* observer);
+
+ // Intended for receive side. AddRttObserver registers an observer to be
+ // called for each RTT measurement, typically once per ACK. Before the media
+ // transport is destroyed, the observer must be unregistered.
+ virtual void AddRttObserver(MediaTransportRttObserver* observer);
+ virtual void RemoveRttObserver(MediaTransportRttObserver* observer);
// Returns the last known target transfer rate as reported to the above
// observers.
diff --git a/api/test/loopback_media_transport.cc b/api/test/loopback_media_transport.cc
index 8fbaea5..40415e2 100644
--- a/api/test/loopback_media_transport.cc
+++ b/api/test/loopback_media_transport.cc
@@ -11,6 +11,7 @@
#include "api/test/loopback_media_transport.h"
#include "absl/memory/memory.h"
+#include "rtc_base/timeutils.h"
namespace webrtc {
@@ -51,6 +52,16 @@
wrapped_->SetReceiveVideoSink(sink);
}
+ void AddTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) override {
+ wrapped_->AddTargetTransferRateObserver(observer);
+ }
+
+ void RemoveTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) override {
+ wrapped_->RemoveTargetTransferRateObserver(observer);
+ }
+
void SetMediaTransportStateCallback(
MediaTransportStateCallback* callback) override {
wrapped_->SetMediaTransportStateCallback(callback);
@@ -98,6 +109,8 @@
RTC_CHECK(audio_sink_ == nullptr);
RTC_CHECK(video_sink_ == nullptr);
RTC_CHECK(data_sink_ == nullptr);
+ RTC_CHECK(target_transfer_rate_observers_.empty());
+ RTC_CHECK(rtt_observers_.empty());
}
RTCError MediaTransportPair::LoopbackMediaTransport::SendAudioFrame(
@@ -165,6 +178,80 @@
video_sink_ = sink;
}
+void MediaTransportPair::LoopbackMediaTransport::AddTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) {
+ RTC_CHECK(observer);
+ {
+ rtc::CritScope cs(&sink_lock_);
+ RTC_CHECK(std::find(target_transfer_rate_observers_.begin(),
+ target_transfer_rate_observers_.end(),
+ observer) == target_transfer_rate_observers_.end());
+ target_transfer_rate_observers_.push_back(observer);
+ }
+ invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this] {
+ RTC_DCHECK_RUN_ON(thread_);
+ const DataRate kBitrate = DataRate::kbps(300);
+ const Timestamp now = Timestamp::us(rtc::TimeMicros());
+
+ TargetTransferRate transfer_rate;
+ transfer_rate.at_time = now;
+ transfer_rate.target_rate = kBitrate;
+ transfer_rate.network_estimate.at_time = now;
+ transfer_rate.network_estimate.round_trip_time = TimeDelta::ms(20);
+ transfer_rate.network_estimate.bwe_period = TimeDelta::seconds(3);
+ transfer_rate.network_estimate.bandwidth = kBitrate;
+
+ rtc::CritScope cs(&sink_lock_);
+
+ for (auto* o : target_transfer_rate_observers_) {
+ o->OnTargetTransferRate(transfer_rate);
+ }
+ });
+}
+
+void MediaTransportPair::LoopbackMediaTransport::
+ RemoveTargetTransferRateObserver(TargetTransferRateObserver* observer) {
+ rtc::CritScope cs(&sink_lock_);
+ auto it = std::find(target_transfer_rate_observers_.begin(),
+ target_transfer_rate_observers_.end(), observer);
+ if (it == target_transfer_rate_observers_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "Attempt to remove an unknown TargetTransferRate observer";
+ return;
+ }
+ target_transfer_rate_observers_.erase(it);
+}
+
+void MediaTransportPair::LoopbackMediaTransport::AddRttObserver(
+ MediaTransportRttObserver* observer) {
+ RTC_CHECK(observer);
+ {
+ rtc::CritScope cs(&sink_lock_);
+ RTC_CHECK(std::find(rtt_observers_.begin(), rtt_observers_.end(),
+ observer) == rtt_observers_.end());
+ rtt_observers_.push_back(observer);
+ }
+ invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this] {
+ RTC_DCHECK_RUN_ON(thread_);
+
+ rtc::CritScope cs(&sink_lock_);
+ for (auto* o : rtt_observers_) {
+ o->OnRttUpdated(20);
+ }
+ });
+}
+
+void MediaTransportPair::LoopbackMediaTransport::RemoveRttObserver(
+ MediaTransportRttObserver* observer) {
+ rtc::CritScope cs(&sink_lock_);
+ auto it = std::find(rtt_observers_.begin(), rtt_observers_.end(), observer);
+ if (it == rtt_observers_.end()) {
+ RTC_LOG(LS_WARNING) << "Attempt to remove an unknown RTT observer";
+ return;
+ }
+ rtt_observers_.erase(it);
+}
+
void MediaTransportPair::LoopbackMediaTransport::SetMediaTransportStateCallback(
MediaTransportStateCallback* callback) {
rtc::CritScope lock(&sink_lock_);
diff --git a/api/test/loopback_media_transport.h b/api/test/loopback_media_transport.h
index e1d2246..bd26305 100644
--- a/api/test/loopback_media_transport.h
+++ b/api/test/loopback_media_transport.h
@@ -13,6 +13,7 @@
#include <memory>
#include <utility>
+#include <vector>
#include "absl/memory/memory.h"
#include "api/media_transport_interface.h"
@@ -100,6 +101,15 @@
void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) override;
+ void AddTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) override;
+
+ void RemoveTargetTransferRateObserver(
+ TargetTransferRateObserver* observer) override;
+
+ void AddRttObserver(MediaTransportRttObserver* observer) override;
+ void RemoveRttObserver(MediaTransportRttObserver* observer) override;
+
void SetMediaTransportStateCallback(
MediaTransportStateCallback* callback) override;
@@ -148,6 +158,11 @@
MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(sink_lock_) =
nullptr;
+ std::vector<TargetTransferRateObserver*> target_transfer_rate_observers_
+ RTC_GUARDED_BY(sink_lock_);
+ std::vector<MediaTransportRttObserver*> rtt_observers_
+ RTC_GUARDED_BY(sink_lock_);
+
MediaTransportState state_ RTC_GUARDED_BY(thread_) =
MediaTransportState::kPending;
diff --git a/call/call.cc b/call/call.cc
index 0bd0a5c..3ffea84 100644
--- a/call/call.cc
+++ b/call/call.cc
@@ -257,6 +257,10 @@
// media transport.
void RegisterRateObserver() RTC_LOCKS_EXCLUDED(target_observer_crit_);
+ // Intended for DCHECKs, to avoid locking in production builds.
+ MediaTransportInterface* media_transport()
+ RTC_LOCKS_EXCLUDED(target_observer_crit_);
+
Clock* const clock_;
const int num_cpu_cores_;
@@ -516,6 +520,11 @@
}
}
+MediaTransportInterface* Call::media_transport() {
+ rtc::CritScope lock(&target_observer_crit_);
+ return media_transport_;
+}
+
void Call::MediaTransportChange(MediaTransportInterface* media_transport) {
rtc::CritScope lock(&target_observer_crit_);
@@ -625,10 +634,7 @@
TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream");
RTC_DCHECK_CALLED_SEQUENTIALLY(&configuration_sequence_checker_);
- {
- rtc::CritScope lock(&target_observer_crit_);
- RTC_DCHECK(media_transport_ == config.media_transport);
- }
+ RTC_DCHECK(media_transport() == config.media_transport);
RegisterRateObserver();
@@ -762,6 +768,8 @@
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
RTC_DCHECK_CALLED_SEQUENTIALLY(&configuration_sequence_checker_);
+ RTC_DCHECK(media_transport() == config.media_transport);
+
RegisterRateObserver();
video_send_delay_stats_->AddSsrcs(config);
diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc
index 4984523..b0156fc 100644
--- a/call/video_receive_stream.cc
+++ b/call/video_receive_stream.cc
@@ -67,8 +67,12 @@
VideoReceiveStream::Config::Config(const Config&) = default;
VideoReceiveStream::Config::Config(Config&&) = default;
+VideoReceiveStream::Config::Config(Transport* rtcp_send_transport,
+ MediaTransportInterface* media_transport)
+ : rtcp_send_transport(rtcp_send_transport),
+ media_transport(media_transport) {}
VideoReceiveStream::Config::Config(Transport* rtcp_send_transport)
- : rtcp_send_transport(rtcp_send_transport) {}
+ : Config(rtcp_send_transport, nullptr) {}
VideoReceiveStream::Config& VideoReceiveStream::Config::operator=(Config&&) =
default;
diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h
index 54af2bd..27f6e95 100644
--- a/call/video_receive_stream.h
+++ b/call/video_receive_stream.h
@@ -18,6 +18,7 @@
#include "api/call/transport.h"
#include "api/crypto/cryptooptions.h"
+#include "api/media_transport_interface.h"
#include "api/rtp_headers.h"
#include "api/rtpparameters.h"
#include "api/rtpreceiverinterface.h"
@@ -113,6 +114,8 @@
public:
Config() = delete;
Config(Config&&);
+ Config(Transport* rtcp_send_transport,
+ MediaTransportInterface* media_transport);
explicit Config(Transport* rtcp_send_transport);
Config& operator=(Config&&);
Config& operator=(const Config&) = delete;
@@ -188,6 +191,8 @@
// Transport for outgoing packets (RTCP).
Transport* rtcp_send_transport = nullptr;
+ MediaTransportInterface* media_transport = nullptr;
+
// Must not be 'nullptr' when the stream is started.
rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;
diff --git a/call/video_send_stream.cc b/call/video_send_stream.cc
index 868b2f3..ebd37a7 100644
--- a/call/video_send_stream.cc
+++ b/call/video_send_stream.cc
@@ -67,8 +67,11 @@
VideoSendStream::Config::Config(const Config&) = default;
VideoSendStream::Config::Config(Config&&) = default;
+VideoSendStream::Config::Config(Transport* send_transport,
+ MediaTransportInterface* media_transport)
+ : send_transport(send_transport), media_transport(media_transport) {}
VideoSendStream::Config::Config(Transport* send_transport)
- : send_transport(send_transport) {}
+ : Config(send_transport, nullptr) {}
VideoSendStream::Config& VideoSendStream::Config::operator=(Config&&) = default;
VideoSendStream::Config::Config::~Config() = default;
@@ -80,6 +83,8 @@
<< (encoder_settings.experiment_cpu_load_estimator ? "on" : "off") << "}}";
ss << ", rtp: " << rtp.ToString();
ss << ", rtcp_report_interval_ms: " << rtcp_report_interval_ms;
+ ss << ", send_transport: " << (send_transport ? "(Transport)" : "nullptr");
+ ss << ", media_transport: " << (media_transport ? "(Transport)" : "nullptr");
ss << ", render_delay_ms: " << render_delay_ms;
ss << ", target_delay_ms: " << target_delay_ms;
ss << ", suspend_below_min_bitrate: "
diff --git a/call/video_send_stream.h b/call/video_send_stream.h
index 74a1e8b..8ee2991 100644
--- a/call/video_send_stream.h
+++ b/call/video_send_stream.h
@@ -19,6 +19,7 @@
#include "absl/types/optional.h"
#include "api/call/transport.h"
#include "api/crypto/cryptooptions.h"
+#include "api/media_transport_interface.h"
#include "api/rtpparameters.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame.h"
@@ -97,6 +98,7 @@
public:
Config() = delete;
Config(Config&&);
+ Config(Transport* send_transport, MediaTransportInterface* media_transport);
explicit Config(Transport* send_transport);
Config& operator=(Config&&);
@@ -119,6 +121,8 @@
// Transport for outgoing packets.
Transport* send_transport = nullptr;
+ MediaTransportInterface* media_transport = nullptr;
+
// Expected delay needed by the renderer, i.e. the frame will be delivered
// this many milliseconds, if possible, earlier than expected render time.
// Only valid if |local_renderer| is set.
diff --git a/media/engine/webrtcvideoengine.cc b/media/engine/webrtcvideoengine.cc
index 69fae56..babe05d 100644
--- a/media/engine/webrtcvideoengine.cc
+++ b/media/engine/webrtcvideoengine.cc
@@ -1072,7 +1072,7 @@
for (uint32_t used_ssrc : sp.ssrcs)
send_ssrcs_.insert(used_ssrc);
- webrtc::VideoSendStream::Config config(this);
+ webrtc::VideoSendStream::Config config(this, media_transport());
config.suspend_below_min_bitrate = video_config_.suspend_below_min_bitrate;
config.periodic_alr_bandwidth_probing =
video_config_.periodic_alr_bandwidth_probing;
@@ -1196,7 +1196,7 @@
for (uint32_t used_ssrc : sp.ssrcs)
receive_ssrcs_.insert(used_ssrc);
- webrtc::VideoReceiveStream::Config config(this);
+ webrtc::VideoReceiveStream::Config config(this, media_transport());
webrtc::FlexfecReceiveStream::Config flexfec_config(this);
ConfigureReceiverRtp(&config, &flexfec_config, sp);
@@ -1330,6 +1330,7 @@
FillSendAndReceiveCodecStats(info);
// TODO(holmer): We should either have rtt available as a metric on
// VideoSend/ReceiveStreams, or we should remove rtt from VideoSenderInfo.
+ // TODO(nisse): Arrange to get correct RTT also when using MediaTransport.
webrtc::Call::Stats stats = call_->GetStats();
if (stats.rtt_ms != -1) {
for (size_t i = 0; i < info->senders.size(); ++i) {
@@ -1476,9 +1477,6 @@
void WebRtcVideoChannel::SetInterface(
NetworkInterface* iface,
webrtc::MediaTransportInterface* media_transport) {
- // TODO(sukhanov): Video is not currently supported with media transport.
- RTC_CHECK(media_transport == nullptr);
-
MediaChannel::SetInterface(iface, media_transport);
// Set the RTP recv/send buffer to a bigger size.
diff --git a/pc/channelmanager.cc b/pc/channelmanager.cc
index 0c86cf6..d5cbd9c 100644
--- a/pc/channelmanager.cc
+++ b/pc/channelmanager.cc
@@ -226,6 +226,7 @@
webrtc::Call* call,
const cricket::MediaConfig& media_config,
webrtc::RtpTransportInternal* rtp_transport,
+ webrtc::MediaTransportInterface* media_transport,
rtc::Thread* signaling_thread,
const std::string& content_name,
bool srtp_required,
@@ -234,8 +235,8 @@
if (!worker_thread_->IsCurrent()) {
return worker_thread_->Invoke<VideoChannel*>(RTC_FROM_HERE, [&] {
return CreateVideoChannel(call, media_config, rtp_transport,
- signaling_thread, content_name, srtp_required,
- crypto_options, options);
+ media_transport, signaling_thread, content_name,
+ srtp_required, crypto_options, options);
});
}
@@ -257,8 +258,7 @@
absl::WrapUnique(media_channel), content_name, srtp_required,
crypto_options);
- // TODO(sukhanov): Add media_transport support for video channel.
- video_channel->Init_w(rtp_transport, /*media_transport=*/nullptr);
+ video_channel->Init_w(rtp_transport, media_transport);
VideoChannel* video_channel_ptr = video_channel.get();
video_channels_.push_back(std::move(video_channel));
diff --git a/pc/channelmanager.h b/pc/channelmanager.h
index 2b7a30a..8bd30d1 100644
--- a/pc/channelmanager.h
+++ b/pc/channelmanager.h
@@ -107,14 +107,16 @@
// Creates a video channel, synced with the specified voice channel, and
// associated with the specified session.
// Version of the above that takes PacketTransportInternal.
- VideoChannel* CreateVideoChannel(webrtc::Call* call,
- const cricket::MediaConfig& media_config,
- webrtc::RtpTransportInternal* rtp_transport,
- rtc::Thread* signaling_thread,
- const std::string& content_name,
- bool srtp_required,
- const webrtc::CryptoOptions& crypto_options,
- const VideoOptions& options);
+ VideoChannel* CreateVideoChannel(
+ webrtc::Call* call,
+ const cricket::MediaConfig& media_config,
+ webrtc::RtpTransportInternal* rtp_transport,
+ webrtc::MediaTransportInterface* media_transport,
+ rtc::Thread* signaling_thread,
+ const std::string& content_name,
+ bool srtp_required,
+ const webrtc::CryptoOptions& crypto_options,
+ const VideoOptions& options);
// Destroys a video channel created by CreateVideoChannel.
void DestroyVideoChannel(VideoChannel* video_channel);
diff --git a/pc/channelmanager_unittest.cc b/pc/channelmanager_unittest.cc
index 4d16614..ed09b72 100644
--- a/pc/channelmanager_unittest.cc
+++ b/pc/channelmanager_unittest.cc
@@ -86,7 +86,7 @@
webrtc::CryptoOptions(), AudioOptions());
EXPECT_TRUE(voice_channel != nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
- &fake_call_, cricket::MediaConfig(), rtp_transport,
+ &fake_call_, cricket::MediaConfig(), rtp_transport, media_transport,
rtc::Thread::Current(), cricket::CN_VIDEO, kDefaultSrtpRequired,
webrtc::CryptoOptions(), VideoOptions());
EXPECT_TRUE(video_channel != nullptr);
diff --git a/pc/peerconnection.cc b/pc/peerconnection.cc
index 518a2e6..2cf14a1 100644
--- a/pc/peerconnection.cc
+++ b/pc/peerconnection.cc
@@ -5922,10 +5922,13 @@
cricket::VideoChannel* PeerConnection::CreateVideoChannel(
const std::string& mid) {
RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
+ MediaTransportInterface* media_transport = nullptr;
+ if (configuration_.use_media_transport) {
+ media_transport = GetMediaTransport(mid);
+ }
- // TODO(sukhanov): Propagate media_transport to video channel.
cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel(
- call_.get(), configuration_.media_config, rtp_transport,
+ call_.get(), configuration_.media_config, rtp_transport, media_transport,
signaling_thread(), mid, SrtpRequired(), GetCryptoOptions(),
video_options_);
if (!video_channel) {
diff --git a/pc/peerconnection_integrationtest.cc b/pc/peerconnection_integrationtest.cc
index ea1b5ea..0bd3d09 100644
--- a/pc/peerconnection_integrationtest.cc
+++ b/pc/peerconnection_integrationtest.cc
@@ -3598,6 +3598,41 @@
EXPECT_GE(first_stats.sent_audio_frames, second_stats.received_audio_frames);
}
+TEST_P(PeerConnectionIntegrationTest, MediaTransportBidirectionalVideo) {
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ rtc_config.use_media_transport = true;
+ rtc_config.enable_dtls_srtp = false; // SDES is required for media transport.
+ ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory(
+ rtc_config, rtc_config, loopback_media_transports()->first_factory(),
+ loopback_media_transports()->second_factory()));
+ ConnectFakeSignaling();
+
+ caller()->AddVideoTrack();
+ callee()->AddVideoTrack();
+ // Start offer/answer exchange and wait for it to complete.
+ caller()->CreateAndSetAndSignalOffer();
+ ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
+
+ // Ensure that the media transport is ready.
+ loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable);
+ loopback_media_transports()->FlushAsyncInvokes();
+
+ MediaExpectations media_expectations;
+ media_expectations.ExpectBidirectionalVideo();
+ ASSERT_TRUE(ExpectNewFrames(media_expectations));
+
+ webrtc::MediaTransportPair::Stats first_stats =
+ loopback_media_transports()->FirstStats();
+ webrtc::MediaTransportPair::Stats second_stats =
+ loopback_media_transports()->SecondStats();
+
+ EXPECT_GT(first_stats.received_video_frames, 0);
+ EXPECT_GE(second_stats.sent_video_frames, first_stats.received_video_frames);
+
+ EXPECT_GT(second_stats.received_video_frames, 0);
+ EXPECT_GE(first_stats.sent_video_frames, second_stats.received_video_frames);
+}
+
// Test that the ICE connection and gathering states eventually reach
// "complete".
TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) {
diff --git a/pc/rtpsenderreceiver_unittest.cc b/pc/rtpsenderreceiver_unittest.cc
index 037446f..ec1e949 100644
--- a/pc/rtpsenderreceiver_unittest.cc
+++ b/pc/rtpsenderreceiver_unittest.cc
@@ -108,8 +108,8 @@
srtp_required, webrtc::CryptoOptions(), cricket::AudioOptions());
video_channel_ = channel_manager_.CreateVideoChannel(
&fake_call_, cricket::MediaConfig(), rtp_transport_.get(),
- rtc::Thread::Current(), cricket::CN_VIDEO, srtp_required,
- webrtc::CryptoOptions(), cricket::VideoOptions());
+ /*media_transport=*/nullptr, rtc::Thread::Current(), cricket::CN_VIDEO,
+ srtp_required, webrtc::CryptoOptions(), cricket::VideoOptions());
voice_channel_->Enable(true);
video_channel_->Enable(true);
voice_media_channel_ = media_engine_->GetVoiceChannel(0);
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
index 5745f65..b34e1b7 100644
--- a/video/video_receive_stream.cc
+++ b/video/video_receive_stream.cc
@@ -12,6 +12,7 @@
#include <stdlib.h>
#include <string.h>
+#include <algorithm>
#include <set>
#include <string>
#include <utility>
@@ -116,6 +117,42 @@
const char* ImplementationName() const override { return "NullVideoDecoder"; }
};
+// Inherit video_coding::EncodedFrame, which is the class used by
+// video_coding::FrameBuffer and other components in the receive pipeline. It's
+// a subclass of EncodedImage, and it always owns the buffer.
+class EncodedFrameForMediaTransport : public video_coding::EncodedFrame {
+ public:
+ explicit EncodedFrameForMediaTransport(
+ MediaTransportEncodedVideoFrame frame) {
+ // TODO(nisse): This is too ugly. We copy the EncodedImage (a base class of
+ // ours, in several steps), to get all the meta data. But we then need to
+ // reset the buffer and allocate a new copy, since EncodedFrame must own it.
+ *static_cast<class EncodedImage*>(this) = frame.encoded_image();
+ // Don't use the copied _buffer pointer.
+ set_buffer(nullptr, 0);
+
+ VerifyAndAllocate(frame.encoded_image().size());
+ set_size(frame.encoded_image().size());
+ memcpy(_buffer, frame.encoded_image()._buffer, size());
+
+ _payloadType = static_cast<uint8_t>(frame.payload_type());
+
+ // TODO(nisse): frame_id and picture_id are probably not the same thing. For
+ // a single layer, this should be good enough.
+ id.picture_id = frame.frame_id();
+ id.spatial_layer = frame.encoded_image().SpatialIndex().value_or(0);
+ num_references = std::min(static_cast<size_t>(kMaxFrameReferences),
+ frame.referenced_frame_ids().size());
+ for (size_t i = 0; i < num_references; i++) {
+ references[i] = frame.referenced_frame_ids()[i];
+ }
+ }
+
+ // TODO(nisse): Implement. Not sure how they are used.
+ int64_t ReceivedTime() const override { return 0; }
+ int64_t RenderTime() const override { return 0; }
+};
+
// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround.
// Maximum time between frames before resetting the FrameBuffer to avoid RTP
// timestamps wraparound to affect FrameBuffer.
@@ -185,18 +222,23 @@
process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE);
- // Register with RtpStreamReceiverController.
- media_receiver_ = receiver_controller->CreateReceiver(
- config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
- if (config_.rtp.rtx_ssrc) {
- rtx_receive_stream_ = absl::make_unique<RtxReceiveStream>(
- &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types,
- config_.rtp.remote_ssrc, rtp_receive_statistics_.get());
- rtx_receiver_ = receiver_controller->CreateReceiver(
- config_.rtp.rtx_ssrc, rtx_receive_stream_.get());
+ if (config_.media_transport) {
+ config_.media_transport->SetReceiveVideoSink(this);
+ config_.media_transport->AddRttObserver(this);
} else {
- rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc,
- true);
+ // Register with RtpStreamReceiverController.
+ media_receiver_ = receiver_controller->CreateReceiver(
+ config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
+ if (config_.rtp.rtx_ssrc) {
+ rtx_receive_stream_ = absl::make_unique<RtxReceiveStream>(
+ &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types,
+ config_.rtp.remote_ssrc, rtp_receive_statistics_.get());
+ rtx_receiver_ = receiver_controller->CreateReceiver(
+ config_.rtp.rtx_ssrc, rtx_receive_stream_.get());
+ } else {
+ rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc,
+ true);
+ }
}
}
@@ -204,7 +246,10 @@
RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
RTC_LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString();
Stop();
-
+ if (config_.media_transport) {
+ config_.media_transport->SetReceiveVideoSink(nullptr);
+ config_.media_transport->RemoveRttObserver(this);
+ }
process_thread_->DeRegisterModule(&rtp_stream_sync_);
}
@@ -376,7 +421,11 @@
}
void VideoReceiveStream::RequestKeyFrame() {
- rtp_video_stream_receiver_.RequestKeyFrame();
+ if (config_.media_transport) {
+ config_.media_transport->RequestKeyFrame(config_.rtp.remote_ssrc);
+ } else {
+ rtp_video_stream_receiver_.RequestKeyFrame();
+ }
}
void VideoReceiveStream::OnCompleteFrame(
@@ -394,6 +443,12 @@
rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
}
+void VideoReceiveStream::OnData(uint64_t channel_id,
+ MediaTransportEncodedVideoFrame frame) {
+ OnCompleteFrame(
+ absl::make_unique<EncodedFrameForMediaTransport>(std::move(frame)));
+}
+
void VideoReceiveStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&module_process_sequence_checker_);
frame_buffer_->UpdateRtt(max_rtt_ms);
@@ -401,6 +456,10 @@
video_stream_decoder_->UpdateRtt(max_rtt_ms);
}
+void VideoReceiveStream::OnRttUpdated(int64_t rtt_ms) {
+ frame_buffer_->UpdateRtt(rtt_ms);
+}
+
int VideoReceiveStream::id() const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&worker_sequence_checker_);
return config_.rtp.remote_ssrc;
diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h
index 5c9fd83..30b227d 100644
--- a/video/video_receive_stream.h
+++ b/video/video_receive_stream.h
@@ -14,6 +14,7 @@
#include <memory>
#include <vector>
+#include "api/media_transport_interface.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
#include "call/video_receive_stream.h"
@@ -47,7 +48,9 @@
public KeyFrameRequestSender,
public video_coding::OnCompleteFrameCallback,
public Syncable,
- public CallStatsObserver {
+ public CallStatsObserver,
+ public MediaTransportVideoSinkInterface,
+ public MediaTransportRttObserver {
public:
VideoReceiveStream(RtpStreamReceiverControllerInterface* receiver_controller,
int num_cpu_cores,
@@ -86,9 +89,17 @@
void OnCompleteFrame(
std::unique_ptr<video_coding::EncodedFrame> frame) override;
+ // Implements MediaTransportVideoSinkInterface; forwards the received frame
+ // to the registered OnCompleteFrameCallback.
+ void OnData(uint64_t channel_id,
+ MediaTransportEncodedVideoFrame frame) override;
+
// Implements CallStatsObserver::OnRttUpdate
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
+ // Implements MediaTransportRttObserver::OnRttUpdated
+ void OnRttUpdated(int64_t rtt_ms) override;
+
// Implements Syncable.
int id() const override;
absl::optional<Syncable::Info> GetInfo() const override;
diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc
index 2ec348b..db90e3f 100644
--- a/video/video_send_stream.cc
+++ b/video/video_send_stream.cc
@@ -102,7 +102,7 @@
event_log, &config_, encoder_config.max_bitrate_bps,
encoder_config.bitrate_priority, suspended_ssrcs,
suspended_payload_states, encoder_config.content_type,
- std::move(fec_controller)));
+ std::move(fec_controller), config_.media_transport));
},
[this]() { thread_sync_event_.Set(); }));
diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc
index 64ab6e1..42ec4a0 100644
--- a/video/video_send_stream_impl.cc
+++ b/video/video_send_stream_impl.cc
@@ -246,7 +246,8 @@
std::map<uint32_t, RtpState> suspended_ssrcs,
std::map<uint32_t, RtpPayloadState> suspended_payload_states,
VideoEncoderConfig::ContentType content_type,
- std::unique_ptr<FecController> fec_controller)
+ std::unique_ptr<FecController> fec_controller,
+ MediaTransportInterface* media_transport)
: has_alr_probing_(config->periodic_alr_bandwidth_probing ||
GetAlrSettings(content_type)),
pacing_config_(PacingConfig()),
@@ -280,12 +281,19 @@
event_log,
std::move(fec_controller),
CreateFrameEncryptionConfig(config_))),
- weak_ptr_factory_(this) {
+ weak_ptr_factory_(this),
+ media_transport_(media_transport) {
RTC_DCHECK_RUN_ON(worker_queue_);
RTC_LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString();
weak_ptr_ = weak_ptr_factory_.GetWeakPtr();
- RTC_DCHECK(!config_->rtp.ssrcs.empty());
+ if (media_transport_) {
+ // The configured ssrc is interpreted as a channel id, so there must be
+ // exactly one.
+ RTC_DCHECK_EQ(config_->rtp.ssrcs.size(), 1);
+ } else {
+ RTC_DCHECK(!config_->rtp.ssrcs.empty());
+ }
RTC_DCHECK(call_stats_);
RTC_DCHECK(transport_);
RTC_DCHECK_NE(initial_encoder_max_bitrate, 0);
@@ -599,9 +607,32 @@
check_encoder_activity_task_->UpdateEncoderActivity();
}
- EncodedImageCallback::Result result = rtp_video_sender_->OnEncodedImage(
- encoded_image, codec_specific_info, fragmentation);
-
+ EncodedImageCallback::Result result(EncodedImageCallback::Result::OK);
+ if (media_transport_) {
+ int64_t frame_id;
+ {
+ // TODO(nisse): Responsibility for allocation of frame ids should move to
+ // VideoStreamEncoder.
+ rtc::CritScope cs(&media_transport_id_lock_);
+ frame_id = media_transport_frame_id_++;
+ }
+ // TODO(nisse): Responsibility for reference meta data should be moved
+ // upstream, ideally close to the encoders, but probably VideoStreamEncoder
+ // will need to do some translation to produce reference info using frame
+ // ids.
+ std::vector<int64_t> referenced_frame_ids;
+ if (encoded_image._frameType != kVideoFrameKey) {
+ RTC_DCHECK_GT(frame_id, 0);
+ referenced_frame_ids.push_back(frame_id - 1);
+ }
+ media_transport_->SendVideoFrame(
+ config_->rtp.ssrcs[0], webrtc::MediaTransportEncodedVideoFrame(
+ frame_id, referenced_frame_ids,
+ config_->rtp.payload_type, encoded_image));
+ } else {
+ result = rtp_video_sender_->OnEncodedImage(
+ encoded_image, codec_specific_info, fragmentation);
+ }
// Check if there's a throttled VideoBitrateAllocation that we should try
// sending.
rtc::WeakPtr<VideoSendStreamImpl> send_stream = weak_ptr_;
diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h
index b8ba293..fc89bf1 100644
--- a/video/video_send_stream_impl.h
+++ b/video/video_send_stream_impl.h
@@ -82,7 +82,8 @@
std::map<uint32_t, RtpState> suspended_ssrcs,
std::map<uint32_t, RtpPayloadState> suspended_payload_states,
VideoEncoderConfig::ContentType content_type,
- std::unique_ptr<FecController> fec_controller);
+ std::unique_ptr<FecController> fec_controller,
+ MediaTransportInterface* media_transport);
~VideoSendStreamImpl() override;
// RegisterProcessThread register |module_process_thread| with those objects
@@ -185,6 +186,10 @@
};
absl::optional<VbaSendContext> video_bitrate_allocation_context_
RTC_GUARDED_BY(worker_queue_);
+ MediaTransportInterface* const media_transport_;
+ rtc::CriticalSection media_transport_id_lock_;
+ int64_t media_transport_frame_id_ RTC_GUARDED_BY(media_transport_id_lock_) =
+ 0;
};
} // namespace internal
} // namespace webrtc
diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc
index 750b6cd..5eb6d7b 100644
--- a/video/video_send_stream_impl_unittest.cc
+++ b/video/video_send_stream_impl_unittest.cc
@@ -125,7 +125,8 @@
&event_log_, &config_, initial_encoder_max_bitrate,
initial_encoder_bitrate_priority, suspended_ssrcs,
suspended_payload_states, content_type,
- absl::make_unique<FecControllerDefault>(&clock_));
+ absl::make_unique<FecControllerDefault>(&clock_),
+ /*media_transport=*/nullptr);
}
protected: