Fixes two issues in how we handle OfferToReceiveX for CreateOffer:
1. The options set in the first CreateOffer call should not affect the result of a second CreateOffer call if SetLocalDescription is not called after the first CreateOffer. The member variable options_ of MediaStreamSignaling is therefore removed so that each CreateOffer call is independent.
Instead, MediaSession is responsible for ensuring that an m-line in the current local description is never removed from the newly created offer.
2. OfferToReceiveAudio used to default to true, i.e. an m=audio line was always added even if there was no audio track. This is changed so that, by default, m=audio is added only if there are local audio tracks (see the sketch below).
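Illustrative sketch of the combined effect of both points. This is not part of the patch: OfferOptions and the free function GetOptionsForOffer below are simplified stand-ins for RTCOfferAnswerOptions and the has_audio/has_video handling in MediaStreamSignaling::GetOptionsForOffer, reduced to the decision logic only.

  #include <cassert>

  // Simplified stand-in for RTCOfferAnswerOptions.
  struct OfferOptions {
    static constexpr int kUndefined = -1;
    int offer_to_receive_audio = kUndefined;
    int offer_to_receive_video = kUndefined;
  };

  // After this change the flags are recomputed from the local tracks on every
  // call, and OfferToReceiveX only overrides them when it is explicitly set.
  void GetOptionsForOffer(const OfferOptions& rtc_options,
                          bool has_local_audio_track,
                          bool has_local_video_track,
                          bool* has_audio,
                          bool* has_video) {
    *has_audio = has_local_audio_track;
    *has_video = has_local_video_track;
    if (rtc_options.offer_to_receive_audio != OfferOptions::kUndefined)
      *has_audio = rtc_options.offer_to_receive_audio > 0;
    if (rtc_options.offer_to_receive_video != OfferOptions::kUndefined)
      *has_video = rtc_options.offer_to_receive_video > 0;
  }

  int main() {
    bool has_audio = false;
    bool has_video = false;
    // Point 2: with default options and no local tracks, neither m=audio nor
    // m=video is offered (previously m=audio was always offered).
    OfferOptions defaults;
    GetOptionsForOffer(defaults, false, false, &has_audio, &has_video);
    assert(!has_audio && !has_video);
    // Point 1: each call starts from scratch, so an earlier call that set
    // OfferToReceiveAudio cannot leak into a later call with default options.
    OfferOptions receive_audio;
    receive_audio.offer_to_receive_audio = 1;
    GetOptionsForOffer(receive_audio, false, false, &has_audio, &has_video);
    assert(has_audio);
    GetOptionsForOffer(defaults, false, false, &has_audio, &has_video);
    assert(!has_audio);
    return 0;
  }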
BUG=2108
R=pthatcher@webrtc.org
Committed: https://code.google.com/p/webrtc/source/detail?r=7068
Review URL: https://webrtc-codereview.appspot.com/16309004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@7124 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/app/webrtc/mediastreamsignaling.cc b/talk/app/webrtc/mediastreamsignaling.cc
index 3a63a60..df51ba1 100644
--- a/talk/app/webrtc/mediastreamsignaling.cc
+++ b/talk/app/webrtc/mediastreamsignaling.cc
@@ -51,9 +51,9 @@
using rtc::scoped_ptr;
using rtc::scoped_refptr;
-static bool ParseConstraints(
+static bool ParseConstraintsForAnswer(
const MediaConstraintsInterface* constraints,
- cricket::MediaSessionOptions* options, bool is_answer) {
+ cricket::MediaSessionOptions* options) {
bool value;
size_t mandatory_constraints_satisfied = 0;
@@ -82,7 +82,7 @@
// kOfferToReceiveVideo defaults to false according to spec. But
// if it is an answer and video is offered, we should still accept video
// per default.
- options->has_video |= is_answer;
+ options->has_video = true;
}
if (FindConstraint(constraints,
@@ -133,6 +133,55 @@
(value <= Options::kMaxOfferToReceiveMedia);
}
+// Add the stream and RTP data channel info to |session_options|.
+static void SetStreams(
+ cricket::MediaSessionOptions* session_options,
+ rtc::scoped_refptr<StreamCollection> streams,
+ const MediaStreamSignaling::RtpDataChannels& rtp_data_channels) {
+ session_options->streams.clear();
+ if (streams != NULL) {
+ for (size_t i = 0; i < streams->count(); ++i) {
+ MediaStreamInterface* stream = streams->at(i);
+
+ AudioTrackVector audio_tracks(stream->GetAudioTracks());
+
+ // For each audio track in the stream, add it to the MediaSessionOptions.
+ for (size_t j = 0; j < audio_tracks.size(); ++j) {
+ scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
+ session_options->AddStream(
+ cricket::MEDIA_TYPE_AUDIO, track->id(), stream->label());
+ }
+
+ VideoTrackVector video_tracks(stream->GetVideoTracks());
+
+ // For each video track in the stream, add it to the MediaSessionOptions.
+ for (size_t j = 0; j < video_tracks.size(); ++j) {
+ scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
+ session_options->AddStream(
+ cricket::MEDIA_TYPE_VIDEO, track->id(), stream->label());
+ }
+ }
+ }
+
+ // Check for data channels.
+ MediaStreamSignaling::RtpDataChannels::const_iterator data_channel_it =
+ rtp_data_channels.begin();
+ for (; data_channel_it != rtp_data_channels.end(); ++data_channel_it) {
+ const DataChannel* channel = data_channel_it->second;
+ if (channel->state() == DataChannel::kConnecting ||
+ channel->state() == DataChannel::kOpen) {
+ // |streamid| and |sync_label| are both set to the DataChannel label
+ // here so they can be signaled the same way as MediaStreams and Tracks.
+ // For MediaStreams, the sync_label is the MediaStream label and the
+ // track label is the same as |streamid|.
+ const std::string& streamid = channel->label();
+ const std::string& sync_label = channel->label();
+ session_options->AddStream(
+ cricket::MEDIA_TYPE_DATA, streamid, sync_label);
+ }
+ }
+}
+
// Factory class for creating remote MediaStreams and MediaStreamTracks.
class RemoteMediaStreamFactory {
public:
@@ -192,8 +241,6 @@
channel_manager)),
last_allocated_sctp_even_sid_(-2),
last_allocated_sctp_odd_sid_(-1) {
- options_.has_video = false;
- options_.has_audio = false;
}
MediaStreamSignaling::~MediaStreamSignaling() {
@@ -378,40 +425,38 @@
return false;
}
- UpdateSessionOptions();
+ session_options->has_audio = false;
+ session_options->has_video = false;
+ SetStreams(session_options, local_streams_, rtp_data_channels_);
- // |options.has_audio| and |options.has_video| can only change from false to
- // true, but never change from true to false. This is to make sure
- // CreateOffer / CreateAnswer doesn't remove a media content
- // description that has been created.
- if (rtc_options.offer_to_receive_audio > 0) {
- options_.has_audio = true;
+ // If |offer_to_receive_[audio/video]| is undefined, respect the flags set
+ // from SetStreams. Otherwise, overwrite it based on |rtc_options|.
+ if (rtc_options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) {
+ session_options->has_audio = rtc_options.offer_to_receive_audio > 0;
}
- if (rtc_options.offer_to_receive_video > 0) {
- options_.has_video = true;
+ if (rtc_options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) {
+ session_options->has_video = rtc_options.offer_to_receive_video > 0;
}
- options_.vad_enabled = rtc_options.voice_activity_detection;
- options_.transport_options.ice_restart = rtc_options.ice_restart;
- options_.bundle_enabled = rtc_options.use_rtp_mux;
- options_.bundle_enabled = EvaluateNeedForBundle(options_);
- *session_options = options_;
+ session_options->vad_enabled = rtc_options.voice_activity_detection;
+ session_options->transport_options.ice_restart = rtc_options.ice_restart;
+ session_options->bundle_enabled = rtc_options.use_rtp_mux;
+
+ session_options->bundle_enabled = EvaluateNeedForBundle(*session_options);
return true;
}
bool MediaStreamSignaling::GetOptionsForAnswer(
const MediaConstraintsInterface* constraints,
cricket::MediaSessionOptions* options) {
- UpdateSessionOptions();
+ options->has_audio = false;
+ options->has_video = false;
+ SetStreams(options, local_streams_, rtp_data_channels_);
- // Copy the |options_| to not let the flag MediaSessionOptions::has_audio and
- // MediaSessionOptions::has_video affect subsequent offers.
- cricket::MediaSessionOptions current_options = options_;
- if (!ParseConstraints(constraints, &current_options, true)) {
+ if (!ParseConstraintsForAnswer(constraints, options)) {
return false;
}
- current_options.bundle_enabled = EvaluateNeedForBundle(current_options);
- *options = current_options;
+ options->bundle_enabled = EvaluateNeedForBundle(*options);
return true;
}
@@ -546,54 +591,6 @@
}
}
-void MediaStreamSignaling::UpdateSessionOptions() {
- options_.streams.clear();
- if (local_streams_ != NULL) {
- for (size_t i = 0; i < local_streams_->count(); ++i) {
- MediaStreamInterface* stream = local_streams_->at(i);
-
- AudioTrackVector audio_tracks(stream->GetAudioTracks());
- if (!audio_tracks.empty()) {
- options_.has_audio = true;
- }
-
- // For each audio track in the stream, add it to the MediaSessionOptions.
- for (size_t j = 0; j < audio_tracks.size(); ++j) {
- scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
- options_.AddStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
- stream->label());
- }
-
- VideoTrackVector video_tracks(stream->GetVideoTracks());
- if (!video_tracks.empty()) {
- options_.has_video = true;
- }
- // For each video track in the stream, add it to the MediaSessionOptions.
- for (size_t j = 0; j < video_tracks.size(); ++j) {
- scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
- options_.AddStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
- stream->label());
- }
- }
- }
-
- // Check for data channels.
- RtpDataChannels::const_iterator data_channel_it = rtp_data_channels_.begin();
- for (; data_channel_it != rtp_data_channels_.end(); ++data_channel_it) {
- const DataChannel* channel = data_channel_it->second;
- if (channel->state() == DataChannel::kConnecting ||
- channel->state() == DataChannel::kOpen) {
- // |streamid| and |sync_label| are both set to the DataChannel label
- // here so they can be signaled the same way as MediaStreams and Tracks.
- // For MediaStreams, the sync_label is the MediaStream label and the
- // track label is the same as |streamid|.
- const std::string& streamid = channel->label();
- const std::string& sync_label = channel->label();
- options_.AddStream(cricket::MEDIA_TYPE_DATA, streamid, sync_label);
- }
- }
-}
-
void MediaStreamSignaling::UpdateRemoteStreamsList(
const cricket::StreamParamsVec& streams,
cricket::MediaType media_type,
diff --git a/talk/app/webrtc/mediastreamsignaling.h b/talk/app/webrtc/mediastreamsignaling.h
index 7f17971..d4b1be8 100644
--- a/talk/app/webrtc/mediastreamsignaling.h
+++ b/talk/app/webrtc/mediastreamsignaling.h
@@ -160,6 +160,9 @@
class MediaStreamSignaling : public sigslot::has_slots<> {
public:
+ typedef std::map<std::string, rtc::scoped_refptr<DataChannel> >
+ RtpDataChannels;
+
MediaStreamSignaling(rtc::Thread* signaling_thread,
MediaStreamSignalingObserver* stream_observer,
cricket::ChannelManager* channel_manager);
@@ -289,8 +292,6 @@
};
typedef std::vector<TrackInfo> TrackInfos;
- void UpdateSessionOptions();
-
// Makes sure a MediaStream Track is created for each StreamParam in
// |streams|. |media_type| is the type of the |streams| and can be either
// audio or video.
@@ -378,7 +379,6 @@
RemotePeerInfo remote_info_;
rtc::Thread* signaling_thread_;
DataChannelFactory* data_channel_factory_;
- cricket::MediaSessionOptions options_;
MediaStreamSignalingObserver* stream_observer_;
rtc::scoped_refptr<StreamCollection> local_streams_;
rtc::scoped_refptr<StreamCollection> remote_streams_;
@@ -392,8 +392,6 @@
int last_allocated_sctp_even_sid_;
int last_allocated_sctp_odd_sid_;
- typedef std::map<std::string, rtc::scoped_refptr<DataChannel> >
- RtpDataChannels;
typedef std::vector<rtc::scoped_refptr<DataChannel> > SctpDataChannels;
RtpDataChannels rtp_data_channels_;
diff --git a/talk/app/webrtc/mediastreamsignaling_unittest.cc b/talk/app/webrtc/mediastreamsignaling_unittest.cc
index 39f3c4d..84f67b9 100644
--- a/talk/app/webrtc/mediastreamsignaling_unittest.cc
+++ b/talk/app/webrtc/mediastreamsignaling_unittest.cc
@@ -782,8 +782,10 @@
RTCOfferAnswerOptions default_rtc_options;
EXPECT_TRUE(signaling_->GetOptionsForOffer(default_rtc_options,
&updated_offer_options));
- EXPECT_TRUE(updated_offer_options.has_audio);
- EXPECT_TRUE(updated_offer_options.has_video);
+ // By default, |has_audio| and |has_video| are false if there are no media
+ // tracks.
+ EXPECT_FALSE(updated_offer_options.has_audio);
+ EXPECT_FALSE(updated_offer_options.has_video);
}
// This test verifies that the remote MediaStreams corresponding to a received
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
index 3fba862..3698381 100644
--- a/talk/app/webrtc/peerconnection.cc
+++ b/talk/app/webrtc/peerconnection.cc
@@ -508,10 +508,6 @@
return;
}
RTCOfferAnswerOptions options;
- // Defaults to receiving audio and not receiving video.
- options.offer_to_receive_audio =
- RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
- options.offer_to_receive_video = 0;
bool value;
size_t mandatory_constraints = 0;
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
index 2cf422b..7aa87fb 100644
--- a/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -512,7 +512,7 @@
}
void VerifyAnswerFromNonCryptoOffer() {
- // Create a SDP without Crypto.
+ // Create an SDP without Crypto.
cricket::MediaSessionOptions options;
options.has_video = true;
JsepSessionDescription* offer(
@@ -1272,14 +1272,13 @@
rtc::FromString<uint64>(offer->session_version()));
SetLocalDescriptionWithoutError(offer);
+ EXPECT_EQ(0u, video_channel_->send_streams().size());
+ EXPECT_EQ(0u, voice_channel_->send_streams().size());
mediastream_signaling_.SendAudioVideoStream2();
answer = CreateRemoteAnswer(session_->local_description());
SetRemoteDescriptionWithoutError(answer);
- EXPECT_EQ(0u, video_channel_->send_streams().size());
- EXPECT_EQ(0u, voice_channel_->send_streams().size());
-
// Make sure the receive streams have not changed.
ASSERT_EQ(1u, video_channel_->recv_streams().size());
EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
@@ -2053,13 +2052,21 @@
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(offer->description());
-
EXPECT_TRUE(content != NULL);
+
content = cricket::GetFirstVideoContent(offer->description());
EXPECT_TRUE(content != NULL);
- // TODO(perkj): Should the direction be set to SEND_ONLY if
- // The constraints is set to not receive audio or video but a track is added?
+ // Sets constraints to false and verifies that audio/video contents are
+ // removed.
+ options.offer_to_receive_audio = 0;
+ options.offer_to_receive_video = 0;
+ offer.reset(CreateOffer(options));
+
+ content = cricket::GetFirstAudioContent(offer->description());
+ EXPECT_TRUE(content == NULL);
+ content = cricket::GetFirstVideoContent(offer->description());
+ EXPECT_TRUE(content == NULL);
}
// Test that an answer can not be created if the last remote description is not
@@ -2098,8 +2105,7 @@
Init(NULL);
// Create a remote offer with audio only.
cricket::MediaSessionOptions options;
- options.has_audio = true;
- options.has_video = false;
+
rtc::scoped_ptr<JsepSessionDescription> offer(
CreateRemoteOffer(options));
ASSERT_TRUE(cricket::GetFirstVideoContent(offer->description()) == NULL);
@@ -2235,7 +2241,6 @@
SessionDescriptionInterface* offer = CreateOffer();
cricket::MediaSessionOptions options;
- options.has_video = false;
SessionDescriptionInterface* answer = CreateRemoteAnswer(offer, options);
// SetLocalDescription and SetRemoteDescriptions takes ownership of offer
@@ -2948,7 +2953,6 @@
TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
Init(NULL);
cricket::MediaSessionOptions options;
- options.has_audio = true;
options.has_video = true;
rtc::scoped_ptr<JsepSessionDescription> offer(
CreateRemoteOffer(options));
@@ -2980,7 +2984,6 @@
TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
Init(NULL);
cricket::MediaSessionOptions options;
- options.has_audio = true;
options.has_video = true;
rtc::scoped_ptr<JsepSessionDescription> offer(
CreateRemoteOffer(options));
@@ -3046,7 +3049,6 @@
TEST_F(WebRtcSessionTest, SetSdpFailedOnSessionError) {
Init(NULL);
cricket::MediaSessionOptions options;
- options.has_audio = true;
options.has_video = true;
cricket::BaseSession::Error error_code = cricket::BaseSession::ERROR_CONTENT;
diff --git a/talk/session/media/mediasession.cc b/talk/session/media/mediasession.cc
index 45e321f..92dd257 100644
--- a/talk/session/media/mediasession.cc
+++ b/talk/session/media/mediasession.cc
@@ -1170,28 +1170,28 @@
if (current_description) {
ContentInfos::const_iterator it = current_description->contents().begin();
for (; it != current_description->contents().end(); ++it) {
- if (IsMediaContentOfType(&*it, MEDIA_TYPE_AUDIO) && options.has_audio) {
+ if (IsMediaContentOfType(&*it, MEDIA_TYPE_AUDIO)) {
if (!AddAudioContentForOffer(options, current_description,
audio_rtp_extensions, audio_codecs,
&current_streams, offer.get())) {
return NULL;
}
audio_added = true;
- } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_VIDEO) &&
- options.has_video) {
+ } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_VIDEO)) {
if (!AddVideoContentForOffer(options, current_description,
video_rtp_extensions, video_codecs,
&current_streams, offer.get())) {
return NULL;
}
video_added = true;
- } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_DATA) &&
- options.has_data()) {
+ } else if (IsMediaContentOfType(&*it, MEDIA_TYPE_DATA)) {
if (!AddDataContentForOffer(options, current_description, &data_codecs,
&current_streams, offer.get())) {
return NULL;
}
data_added = true;
+ } else {
+ ASSERT(false);
}
}
}
@@ -1459,6 +1459,7 @@
bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED);
SetMediaProtocol(secure_transport, audio.get());
+
desc->AddContent(CN_AUDIO, NS_JINGLE_RTP, audio.release());
if (!AddTransportOffer(CN_AUDIO, options.transport_options,
current_description, desc)) {