Change RtpReceivers to interact with the media channel directly

Currently, the RtpReceivers take a BaseChannel, which is (mostly)
just used for proxying calls to the MediaChannel. This change
removes that extra layer and moves the proxying logic into the
RtpReceivers themselves.
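
For illustration, a condensed sketch of the new call path (simplified
from the pc/rtpreceiver.{h,cc} changes below; the real receiver also
manages the track, streams, volume caching, and first-packet
notification):

  // Simplified sketch only, not the full class.
  class AudioRtpReceiver {
   public:
    AudioRtpReceiver(rtc::Thread* worker_thread,
                     cricket::VoiceMediaChannel* media_channel)
        : worker_thread_(worker_thread), media_channel_(media_channel) {}

    // The worker-thread Invoke that VoiceChannel used to perform on the
    // receiver's behalf now lives in the receiver itself.
    bool SetOutputVolume(uint32_t ssrc, double volume) {
      RTC_DCHECK(media_channel_);
      return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
        return media_channel_->SetOutputVolume(ssrc, volume);
      });
    }

   private:
    rtc::Thread* const worker_thread_;
    cricket::VoiceMediaChannel* media_channel_;  // Not owned; may be null.
  };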

Bug: webrtc:8587
Change-Id: I01b0e3d57b4629e43d9d148cc94d6dd2941d320e
Reviewed-on: https://webrtc-review.googlesource.com/38120
Commit-Queue: Steve Anton <steveanton@webrtc.org>
Reviewed-by: Taylor Brandstetter <deadbeef@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21562}
diff --git a/ortc/ortcrtpreceiveradapter.cc b/ortc/ortcrtpreceiveradapter.cc
index 403ddca..67f4de6 100644
--- a/ortc/ortcrtpreceiveradapter.cc
+++ b/ortc/ortcrtpreceiveradapter.cc
@@ -150,17 +150,20 @@
   }
   internal_receiver_ = nullptr;
   switch (kind_) {
-    case cricket::MEDIA_TYPE_AUDIO:
-      internal_receiver_ =
-          new AudioRtpReceiver(rtc::CreateRandomUuid(), {}, ssrc,
-                               rtp_transport_controller_->voice_channel());
+    case cricket::MEDIA_TYPE_AUDIO: {
+      auto* voice_channel = rtp_transport_controller_->voice_channel();
+      internal_receiver_ = new AudioRtpReceiver(
+          rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
+          {}, ssrc, (voice_channel ? voice_channel->media_channel() : nullptr));
       break;
-    case cricket::MEDIA_TYPE_VIDEO:
-      internal_receiver_ =
-          new VideoRtpReceiver(rtc::CreateRandomUuid(), {},
-                               rtp_transport_controller_->worker_thread(), ssrc,
-                               rtp_transport_controller_->video_channel());
+    }
+    case cricket::MEDIA_TYPE_VIDEO: {
+      auto* video_channel = rtp_transport_controller_->video_channel();
+      internal_receiver_ = new VideoRtpReceiver(
+          rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
+          {}, ssrc, (video_channel ? video_channel->media_channel() : nullptr));
       break;
+    }
     case cricket::MEDIA_TYPE_DATA:
       RTC_NOTREACHED();
   }
diff --git a/pc/channel.cc b/pc/channel.cc
index 14ab3df..5c91c0b 100644
--- a/pc/channel.cc
+++ b/pc/channel.cc
@@ -38,13 +38,6 @@
 using webrtc::SdpType;
 
 namespace {
-// See comment below for why we need to use a pointer to a unique_ptr.
-bool SetRawAudioSink_w(VoiceMediaChannel* channel,
-                       uint32_t ssrc,
-                       std::unique_ptr<webrtc::AudioSinkInterface>* sink) {
-  channel->SetRawAudioSink(ssrc, std::move(*sink));
-  return true;
-}
 
 struct SendPacketMessageData : public rtc::MessageData {
   rtc::CopyOnWriteBuffer packet;
@@ -1232,22 +1225,6 @@
       Bind(&VoiceChannel::InsertDtmf_w, this, ssrc, event_code, duration));
 }
 
-bool VoiceChannel::SetOutputVolume(uint32_t ssrc, double volume) {
-  return InvokeOnWorker<bool>(
-      RTC_FROM_HERE,
-      Bind(&VoiceMediaChannel::SetOutputVolume, media_channel(), ssrc, volume));
-}
-
-void VoiceChannel::SetRawAudioSink(
-    uint32_t ssrc,
-    std::unique_ptr<webrtc::AudioSinkInterface> sink) {
-  // We need to work around Bind's lack of support for unique_ptr and ownership
-  // passing.  So we invoke to our own little routine that gets a pointer to
-  // our local variable.  This is OK since we're synchronously invoking.
-  InvokeOnWorker<bool>(RTC_FROM_HERE,
-                       Bind(&SetRawAudioSink_w, media_channel(), ssrc, &sink));
-}
-
 webrtc::RtpParameters VoiceChannel::GetRtpSendParameters(uint32_t ssrc) const {
   return worker_thread()->Invoke<webrtc::RtpParameters>(
       RTC_FROM_HERE, Bind(&VoiceChannel::GetRtpSendParameters_w, this, ssrc));
@@ -1271,46 +1248,11 @@
   return media_channel()->SetRtpSendParameters(ssrc, parameters);
 }
 
-webrtc::RtpParameters VoiceChannel::GetRtpReceiveParameters(
-    uint32_t ssrc) const {
-  return worker_thread()->Invoke<webrtc::RtpParameters>(
-      RTC_FROM_HERE,
-      Bind(&VoiceChannel::GetRtpReceiveParameters_w, this, ssrc));
-}
-
-webrtc::RtpParameters VoiceChannel::GetRtpReceiveParameters_w(
-    uint32_t ssrc) const {
-  return media_channel()->GetRtpReceiveParameters(ssrc);
-}
-
-bool VoiceChannel::SetRtpReceiveParameters(
-    uint32_t ssrc,
-    const webrtc::RtpParameters& parameters) {
-  return InvokeOnWorker<bool>(
-      RTC_FROM_HERE,
-      Bind(&VoiceChannel::SetRtpReceiveParameters_w, this, ssrc, parameters));
-}
-
-bool VoiceChannel::SetRtpReceiveParameters_w(uint32_t ssrc,
-                                             webrtc::RtpParameters parameters) {
-  return media_channel()->SetRtpReceiveParameters(ssrc, parameters);
-}
-
 bool VoiceChannel::GetStats(VoiceMediaInfo* stats) {
   return InvokeOnWorker<bool>(RTC_FROM_HERE, Bind(&VoiceMediaChannel::GetStats,
                                                   media_channel(), stats));
 }
 
-std::vector<webrtc::RtpSource> VoiceChannel::GetSources(uint32_t ssrc) const {
-  return worker_thread()->Invoke<std::vector<webrtc::RtpSource>>(
-      RTC_FROM_HERE, Bind(&VoiceChannel::GetSources_w, this, ssrc));
-}
-
-std::vector<webrtc::RtpSource> VoiceChannel::GetSources_w(uint32_t ssrc) const {
-  RTC_DCHECK(worker_thread()->IsCurrent());
-  return media_channel()->GetSources(ssrc);
-}
-
 void VoiceChannel::StartMediaMonitor(int cms) {
   media_monitor_.reset(new VoiceMediaMonitor(media_channel(), worker_thread(),
       rtc::Thread::Current()));
@@ -1559,14 +1501,6 @@
   Deinit();
 }
 
-bool VideoChannel::SetSink(uint32_t ssrc,
-                           rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
-  worker_thread()->Invoke<void>(
-      RTC_FROM_HERE,
-      Bind(&VideoMediaChannel::SetSink, media_channel(), ssrc, sink));
-  return true;
-}
-
 bool VideoChannel::SetVideoSend(
     uint32_t ssrc,
     bool mute,
@@ -1600,31 +1534,6 @@
   return media_channel()->SetRtpSendParameters(ssrc, parameters);
 }
 
-webrtc::RtpParameters VideoChannel::GetRtpReceiveParameters(
-    uint32_t ssrc) const {
-  return worker_thread()->Invoke<webrtc::RtpParameters>(
-      RTC_FROM_HERE,
-      Bind(&VideoChannel::GetRtpReceiveParameters_w, this, ssrc));
-}
-
-webrtc::RtpParameters VideoChannel::GetRtpReceiveParameters_w(
-    uint32_t ssrc) const {
-  return media_channel()->GetRtpReceiveParameters(ssrc);
-}
-
-bool VideoChannel::SetRtpReceiveParameters(
-    uint32_t ssrc,
-    const webrtc::RtpParameters& parameters) {
-  return InvokeOnWorker<bool>(
-      RTC_FROM_HERE,
-      Bind(&VideoChannel::SetRtpReceiveParameters_w, this, ssrc, parameters));
-}
-
-bool VideoChannel::SetRtpReceiveParameters_w(uint32_t ssrc,
-                                             webrtc::RtpParameters parameters) {
-  return media_channel()->SetRtpReceiveParameters(ssrc, parameters);
-}
-
 void VideoChannel::UpdateMediaSendRecvState_w() {
   // Send outgoing data if we're the active call, we have the remote content,
   // and we have had some form of connectivity.
diff --git a/pc/channel.h b/pc/channel.h
index a7d140e..ec9f913 100644
--- a/pc/channel.h
+++ b/pc/channel.h
@@ -490,22 +490,13 @@
   // The valid value for the |event| are 0 which corresponding to DTMF
   // event 0-9, *, #, A-D.
   bool InsertDtmf(uint32_t ssrc, int event_code, int duration);
-  bool SetOutputVolume(uint32_t ssrc, double volume);
-  void SetRawAudioSink(uint32_t ssrc,
-                       std::unique_ptr<webrtc::AudioSinkInterface> sink);
   webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const;
   bool SetRtpSendParameters(uint32_t ssrc,
                             const webrtc::RtpParameters& parameters);
-  webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const;
-  bool SetRtpReceiveParameters(uint32_t ssrc,
-                               const webrtc::RtpParameters& parameters);
 
   // Get statistics about the current media session.
   bool GetStats(VoiceMediaInfo* stats);
 
-  std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const;
-  std::vector<webrtc::RtpSource> GetSources_w(uint32_t ssrc) const;
-
   // Monitoring functions
   sigslot::signal2<VoiceChannel*, const std::vector<ConnectionInfo>&>
       SignalConnectionMonitor;
@@ -524,9 +515,6 @@
   void GetActiveStreams_w(AudioInfo::StreamList* actives);
   webrtc::RtpParameters GetRtpSendParameters_w(uint32_t ssrc) const;
   bool SetRtpSendParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
-  webrtc::RtpParameters GetRtpReceiveParameters_w(uint32_t ssrc) const;
-  bool SetRtpReceiveParameters_w(uint32_t ssrc,
-                                 webrtc::RtpParameters parameters);
   cricket::MediaType media_type() override { return cricket::MEDIA_TYPE_AUDIO; }
 
  private:
@@ -543,7 +531,6 @@
                           std::string* error_desc) override;
   void HandleEarlyMediaTimeout();
   bool InsertDtmf_w(uint32_t ssrc, int event, int duration);
-  bool SetOutputVolume_w(uint32_t ssrc, double volume);
 
   void OnMessage(rtc::Message* pmsg) override;
   void OnConnectionMonitorUpdate(
@@ -584,8 +571,6 @@
     return static_cast<VideoMediaChannel*>(BaseChannel::media_channel());
   }
 
-  bool SetSink(uint32_t ssrc,
-               rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
   void FillBitrateInfo(BandwidthEstimationInfo* bwe_info);
   // Get statistics about the current media session.
   bool GetStats(VideoMediaInfo* stats);
@@ -606,9 +591,6 @@
   webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const;
   bool SetRtpSendParameters(uint32_t ssrc,
                             const webrtc::RtpParameters& parameters);
-  webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const;
-  bool SetRtpReceiveParameters(uint32_t ssrc,
-                               const webrtc::RtpParameters& parameters);
   cricket::MediaType media_type() override { return cricket::MEDIA_TYPE_VIDEO; }
 
  private:
@@ -623,9 +605,6 @@
   bool GetStats_w(VideoMediaInfo* stats);
   webrtc::RtpParameters GetRtpSendParameters_w(uint32_t ssrc) const;
   bool SetRtpSendParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
-  webrtc::RtpParameters GetRtpReceiveParameters_w(uint32_t ssrc) const;
-  bool SetRtpReceiveParameters_w(uint32_t ssrc,
-                                 webrtc::RtpParameters parameters);
 
   void OnConnectionMonitorUpdate(
       ConnectionMonitor* monitor,
diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc
index 0d155c4..ae5782c 100644
--- a/pc/channel_unittest.cc
+++ b/pc/channel_unittest.cc
@@ -2399,68 +2399,6 @@
   Base::TestOnTransportReadyToSendWithRtcpMux();
 }
 
-// Test that we can scale the output volume properly for 1:1 calls.
-TEST_F(VoiceChannelSingleThreadTest, TestScaleVolume1to1Call) {
-  CreateChannels(0, 0);
-  EXPECT_TRUE(SendInitiate());
-  EXPECT_TRUE(SendAccept());
-  double volume;
-
-  // Default is (1.0).
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  // invalid ssrc.
-  EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
-
-  // Set scale to (1.5).
-  EXPECT_TRUE(channel1_->SetOutputVolume(0, 1.5));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.5, volume);
-
-  // Set scale to (0).
-  EXPECT_TRUE(channel1_->SetOutputVolume(0, 0.0));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-}
-
-// Test that we can scale the output volume properly for multiway calls.
-TEST_F(VoiceChannelSingleThreadTest, TestScaleVolumeMultiwayCall) {
-  CreateChannels(0, 0);
-  EXPECT_TRUE(SendInitiate());
-  EXPECT_TRUE(SendAccept());
-  EXPECT_TRUE(AddStream1(1));
-  EXPECT_TRUE(AddStream1(2));
-
-  double volume;
-  // Default is (1.0).
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  // invalid ssrc.
-  EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
-
-  // Set scale to (1.5) for ssrc = 1.
-  EXPECT_TRUE(channel1_->SetOutputVolume(1, 1.5));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(1.5, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-
-  // Set scale to (0) for all ssrcs.
-  EXPECT_TRUE(channel1_->SetOutputVolume(0,  0.0));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-}
-
 TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundle) {
   Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
 }
@@ -2750,68 +2688,6 @@
   Base::TestOnTransportReadyToSendWithRtcpMux();
 }
 
-// Test that we can scale the output volume properly for 1:1 calls.
-TEST_F(VoiceChannelDoubleThreadTest, TestScaleVolume1to1Call) {
-  CreateChannels(0, 0);
-  EXPECT_TRUE(SendInitiate());
-  EXPECT_TRUE(SendAccept());
-  double volume;
-
-  // Default is (1.0).
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  // invalid ssrc.
-  EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
-
-  // Set scale to (1.5).
-  EXPECT_TRUE(channel1_->SetOutputVolume(0, 1.5));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.5, volume);
-
-  // Set scale to (0).
-  EXPECT_TRUE(channel1_->SetOutputVolume(0, 0.0));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-}
-
-// Test that we can scale the output volume properly for multiway calls.
-TEST_F(VoiceChannelDoubleThreadTest, TestScaleVolumeMultiwayCall) {
-  CreateChannels(0, 0);
-  EXPECT_TRUE(SendInitiate());
-  EXPECT_TRUE(SendAccept());
-  EXPECT_TRUE(AddStream1(1));
-  EXPECT_TRUE(AddStream1(2));
-
-  double volume;
-  // Default is (1.0).
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  // invalid ssrc.
-  EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
-
-  // Set scale to (1.5) for ssrc = 1.
-  EXPECT_TRUE(channel1_->SetOutputVolume(1, 1.5));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(1.5, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(1.0, volume);
-
-  // Set scale to (0) for all ssrcs.
-  EXPECT_TRUE(channel1_->SetOutputVolume(0, 0.0));
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-  EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
-  EXPECT_DOUBLE_EQ(0.0, volume);
-}
-
 TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundle) {
   Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
 }
diff --git a/pc/peerconnection.cc b/pc/peerconnection.cc
index 020f3c3..3bab2e1d 100644
--- a/pc/peerconnection.cc
+++ b/pc/peerconnection.cc
@@ -1283,14 +1283,15 @@
     sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
         signaling_thread(), new AudioRtpSender(nullptr, stats_.get()));
     receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
-        signaling_thread(), new AudioRtpReceiver(receiver_id, {}, 0, nullptr));
+        signaling_thread(),
+        new AudioRtpReceiver(worker_thread(), receiver_id, {}, 0, nullptr));
   } else {
     RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, media_type);
     sender = RtpSenderProxyWithInternal<RtpSenderInternal>::Create(
         signaling_thread(), new VideoRtpSender(nullptr));
     receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
         signaling_thread(),
-        new VideoRtpReceiver(receiver_id, {}, worker_thread(), 0, nullptr));
+        new VideoRtpReceiver(worker_thread(), receiver_id, {}, 0, nullptr));
   }
   rtc::scoped_refptr<RtpTransceiverProxyWithInternal<RtpTransceiver>>
       transceiver = RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
@@ -2713,8 +2714,9 @@
   rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
       receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
           signaling_thread(),
-          new AudioRtpReceiver(remote_sender_info.sender_id, streams,
-                               remote_sender_info.first_ssrc, voice_channel()));
+          new AudioRtpReceiver(worker_thread(), remote_sender_info.sender_id,
+                               streams, remote_sender_info.first_ssrc,
+                               voice_media_channel()));
   stream->AddTrack(
       static_cast<AudioTrackInterface*>(receiver->internal()->track().get()));
   GetAudioTransceiver()->internal()->AddReceiver(receiver);
@@ -2729,9 +2731,9 @@
   rtc::scoped_refptr<RtpReceiverProxyWithInternal<RtpReceiverInternal>>
       receiver = RtpReceiverProxyWithInternal<RtpReceiverInternal>::Create(
           signaling_thread(),
-          new VideoRtpReceiver(remote_sender_info.sender_id, streams,
-                               worker_thread(), remote_sender_info.first_ssrc,
-                               video_channel()));
+          new VideoRtpReceiver(worker_thread(), remote_sender_info.sender_id,
+                               streams, remote_sender_info.first_ssrc,
+                               video_media_channel()));
   stream->AddTrack(
       static_cast<VideoTrackInterface*>(receiver->internal()->track().get()));
   GetVideoTransceiver()->internal()->AddReceiver(receiver);
diff --git a/pc/peerconnection.h b/pc/peerconnection.h
index 3fd944c..4c7973b 100644
--- a/pc/peerconnection.h
+++ b/pc/peerconnection.h
@@ -317,6 +317,14 @@
   // Implements MessageHandler.
   void OnMessage(rtc::Message* msg) override;
 
+  cricket::VoiceMediaChannel* voice_media_channel() const {
+    return voice_channel() ? voice_channel()->media_channel() : nullptr;
+  }
+
+  cricket::VideoMediaChannel* video_media_channel() const {
+    return video_channel() ? video_channel()->media_channel() : nullptr;
+  }
+
   std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>
   GetSendersInternal() const;
   std::vector<
diff --git a/pc/remoteaudiosource.cc b/pc/remoteaudiosource.cc
index d88fa69..a260be8 100644
--- a/pc/remoteaudiosource.cc
+++ b/pc/remoteaudiosource.cc
@@ -18,6 +18,7 @@
 #include "rtc_base/checks.h"
 #include "rtc_base/constructormagic.h"
 #include "rtc_base/logging.h"
+#include "rtc_base/ptr_util.h"
 #include "rtc_base/thread.h"
 
 namespace webrtc {
@@ -38,11 +39,12 @@
 };
 
 rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create(
-    uint32_t ssrc,
-    cricket::VoiceChannel* channel) {
+    rtc::Thread* worker_thread,
+    cricket::VoiceMediaChannel* media_channel,
+    uint32_t ssrc) {
   rtc::scoped_refptr<RemoteAudioSource> ret(
       new rtc::RefCountedObject<RemoteAudioSource>());
-  ret->Initialize(ssrc, channel);
+  ret->Initialize(worker_thread, media_channel, ssrc);
   return ret;
 }
 
@@ -58,14 +60,16 @@
   RTC_DCHECK(sinks_.empty());
 }
 
-void RemoteAudioSource::Initialize(uint32_t ssrc,
-                                   cricket::VoiceChannel* channel) {
+void RemoteAudioSource::Initialize(rtc::Thread* worker_thread,
+                                   cricket::VoiceMediaChannel* media_channel,
+                                   uint32_t ssrc) {
   RTC_DCHECK(main_thread_->IsCurrent());
   // To make sure we always get notified when the channel goes out of scope,
   // we register for callbacks here and not on demand in AddSink.
-  if (channel) {  // May be null in tests.
-    channel->SetRawAudioSink(
-        ssrc, std::unique_ptr<AudioSinkInterface>(new Sink(this)));
+  if (media_channel) {  // May be null in tests.
+    worker_thread->Invoke<void>(RTC_FROM_HERE, [&] {
+      media_channel->SetRawAudioSink(ssrc, rtc::MakeUnique<Sink>(this));
+    });
   }
 }
 
diff --git a/pc/remoteaudiosource.h b/pc/remoteaudiosource.h
index d35fb04..eadbcca 100644
--- a/pc/remoteaudiosource.h
+++ b/pc/remoteaudiosource.h
@@ -33,8 +33,9 @@
  public:
   // Creates an instance of RemoteAudioSource.
   static rtc::scoped_refptr<RemoteAudioSource> Create(
-      uint32_t ssrc,
-      cricket::VoiceChannel* channel);
+      rtc::Thread* worker_thread,
+      cricket::VoiceMediaChannel* media_channel,
+      uint32_t ssrc);
 
   // MediaSourceInterface implementation.
   MediaSourceInterface::SourceState state() const override;
@@ -49,7 +50,9 @@
 
   // Post construction initialize where we can do things like save a reference
   // to ourselves (need to be fully constructed).
-  void Initialize(uint32_t ssrc, cricket::VoiceChannel* channel);
+  void Initialize(rtc::Thread* worker_thread,
+                  cricket::VoiceMediaChannel* media_channel,
+                  uint32_t ssrc);
 
  private:
   typedef std::list<AudioObserver*> AudioObserverList;
diff --git a/pc/rtpreceiver.cc b/pc/rtpreceiver.cc
index 4968ac1..33ad899 100644
--- a/pc/rtpreceiver.cc
+++ b/pc/rtpreceiver.cc
@@ -22,27 +22,27 @@
 namespace webrtc {
 
 AudioRtpReceiver::AudioRtpReceiver(
+    rtc::Thread* worker_thread,
     const std::string& receiver_id,
     std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams,
     uint32_t ssrc,
-    cricket::VoiceChannel* channel)
-    : id_(receiver_id),
+    cricket::VoiceMediaChannel* media_channel)
+    : worker_thread_(worker_thread),
+      id_(receiver_id),
       ssrc_(ssrc),
-      channel_(channel),
       track_(AudioTrackProxy::Create(
           rtc::Thread::Current(),
-          AudioTrack::Create(receiver_id,
-                             RemoteAudioSource::Create(ssrc, channel)))),
+          AudioTrack::Create(
+              receiver_id,
+              RemoteAudioSource::Create(worker_thread, media_channel, ssrc)))),
       streams_(std::move(streams)),
       cached_track_enabled_(track_->enabled()) {
+  RTC_DCHECK(worker_thread_);
   RTC_DCHECK(track_->GetSource()->remote());
   track_->RegisterObserver(this);
   track_->GetSource()->RegisterAudioObserver(this);
+  SetMediaChannel(media_channel);
   Reconfigure();
-  if (channel_) {
-    channel_->SignalFirstPacketReceived.connect(
-        this, &AudioRtpReceiver::OnFirstPacketReceived);
-  }
 }
 
 AudioRtpReceiver::~AudioRtpReceiver() {
@@ -58,11 +58,20 @@
   }
 }
 
+bool AudioRtpReceiver::SetOutputVolume(double volume) {
+  RTC_DCHECK_GE(volume, 0.0);
+  RTC_DCHECK_LE(volume, 10.0);
+  RTC_DCHECK(media_channel_);
+  return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
+    return media_channel_->SetOutputVolume(ssrc_, volume);
+  });
+}
+
 void AudioRtpReceiver::OnSetVolume(double volume) {
   RTC_DCHECK_GE(volume, 0);
   RTC_DCHECK_LE(volume, 10);
   cached_volume_ = volume;
-  if (!channel_) {
+  if (!media_channel_) {
     RTC_LOG(LS_ERROR)
         << "AudioRtpReceiver::OnSetVolume: No audio channel exists.";
     return;
@@ -71,25 +80,29 @@
   // corresponding WebRtc Voice Engine channel will be 0. So we do not allow
   // setting the volume to the source when the track is disabled.
   if (!stopped_ && track_->enabled()) {
-    if (!channel_->SetOutputVolume(ssrc_, cached_volume_)) {
+    if (!SetOutputVolume(cached_volume_)) {
       RTC_NOTREACHED();
     }
   }
 }
 
 RtpParameters AudioRtpReceiver::GetParameters() const {
-  if (!channel_ || stopped_) {
+  if (!media_channel_ || stopped_) {
     return RtpParameters();
   }
-  return channel_->GetRtpReceiveParameters(ssrc_);
+  return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
+    return media_channel_->GetRtpReceiveParameters(ssrc_);
+  });
 }
 
 bool AudioRtpReceiver::SetParameters(const RtpParameters& parameters) {
   TRACE_EVENT0("webrtc", "AudioRtpReceiver::SetParameters");
-  if (!channel_ || stopped_) {
+  if (!media_channel_ || stopped_) {
     return false;
   }
-  return channel_->SetRtpReceiveParameters(ssrc_, parameters);
+  return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
+    return media_channel_->SetRtpReceiveParameters(ssrc_, parameters);
+  });
 }
 
 void AudioRtpReceiver::Stop() {
@@ -97,27 +110,27 @@
   if (stopped_) {
     return;
   }
-  if (channel_) {
+  if (media_channel_) {
     // Allow that SetOutputVolume fail. This is the normal case when the
     // underlying media channel has already been deleted.
-    channel_->SetOutputVolume(ssrc_, 0);
+    SetOutputVolume(0.0);
   }
   stopped_ = true;
 }
 
 std::vector<RtpSource> AudioRtpReceiver::GetSources() const {
-  return channel_->GetSources(ssrc_);
+  return worker_thread_->Invoke<std::vector<RtpSource>>(
+      RTC_FROM_HERE, [&] { return media_channel_->GetSources(ssrc_); });
 }
 
 void AudioRtpReceiver::Reconfigure() {
   RTC_DCHECK(!stopped_);
-  if (!channel_) {
+  if (!media_channel_) {
     RTC_LOG(LS_ERROR)
         << "AudioRtpReceiver::Reconfigure: No audio channel exists.";
     return;
   }
-  if (!channel_->SetOutputVolume(ssrc_,
-                                 track_->enabled() ? cached_volume_ : 0)) {
+  if (!SetOutputVolume(track_->enabled() ? cached_volume_ : 0)) {
     RTC_NOTREACHED();
   }
 }
@@ -130,18 +143,12 @@
   }
 }
 
-void AudioRtpReceiver::SetChannel(cricket::VoiceChannel* channel) {
-  if (channel_) {
-    channel_->SignalFirstPacketReceived.disconnect(this);
-  }
-  channel_ = channel;
-  if (channel_) {
-    channel_->SignalFirstPacketReceived.connect(
-        this, &AudioRtpReceiver::OnFirstPacketReceived);
-  }
+void AudioRtpReceiver::SetMediaChannel(
+    cricket::VoiceMediaChannel* media_channel) {
+  media_channel_ = media_channel;
 }
 
-void AudioRtpReceiver::OnFirstPacketReceived(cricket::BaseChannel* channel) {
+void AudioRtpReceiver::NotifyFirstPacketReceived() {
   if (observer_) {
     observer_->OnFirstPacketReceived(media_type());
   }
@@ -149,14 +156,14 @@
 }
 
 VideoRtpReceiver::VideoRtpReceiver(
+    rtc::Thread* worker_thread,
     const std::string& track_id,
     std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams,
-    rtc::Thread* worker_thread,
     uint32_t ssrc,
-    cricket::VideoChannel* channel)
-    : id_(track_id),
+    cricket::VideoMediaChannel* media_channel)
+    : worker_thread_(worker_thread),
+      id_(track_id),
       ssrc_(ssrc),
-      channel_(channel),
       source_(new RefCountedObject<VideoTrackSource>(&broadcaster_,
                                                      true /* remote */)),
       track_(VideoTrackProxy::Create(
@@ -169,19 +176,9 @@
                                             source_),
               worker_thread))),
       streams_(std::move(streams)) {
+  RTC_DCHECK(worker_thread_);
   source_->SetState(MediaSourceInterface::kLive);
-  if (!channel_) {
-    RTC_LOG(LS_ERROR)
-        << "VideoRtpReceiver::VideoRtpReceiver: No video channel exists.";
-  } else {
-    if (!channel_->SetSink(ssrc_, &broadcaster_)) {
-      RTC_NOTREACHED();
-    }
-  }
-  if (channel_) {
-    channel_->SignalFirstPacketReceived.connect(
-        this, &VideoRtpReceiver::OnFirstPacketReceived);
-  }
+  SetMediaChannel(media_channel);
 }
 
 VideoRtpReceiver::~VideoRtpReceiver() {
@@ -190,19 +187,29 @@
   Stop();
 }
 
+bool VideoRtpReceiver::SetSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+  RTC_DCHECK(media_channel_);
+  return worker_thread_->Invoke<bool>(
+      RTC_FROM_HERE, [&] { return media_channel_->SetSink(ssrc_, sink); });
+}
+
 RtpParameters VideoRtpReceiver::GetParameters() const {
-  if (!channel_ || stopped_) {
+  if (!media_channel_ || stopped_) {
     return RtpParameters();
   }
-  return channel_->GetRtpReceiveParameters(ssrc_);
+  return worker_thread_->Invoke<RtpParameters>(RTC_FROM_HERE, [&] {
+    return media_channel_->GetRtpReceiveParameters(ssrc_);
+  });
 }
 
 bool VideoRtpReceiver::SetParameters(const RtpParameters& parameters) {
   TRACE_EVENT0("webrtc", "VideoRtpReceiver::SetParameters");
-  if (!channel_ || stopped_) {
+  if (!media_channel_ || stopped_) {
     return false;
   }
-  return channel_->SetRtpReceiveParameters(ssrc_, parameters);
+  return worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
+    return media_channel_->SetRtpReceiveParameters(ssrc_, parameters);
+  });
 }
 
 void VideoRtpReceiver::Stop() {
@@ -212,12 +219,12 @@
   }
   source_->SetState(MediaSourceInterface::kEnded);
   source_->OnSourceDestroyed();
-  if (!channel_) {
+  if (!media_channel_) {
     RTC_LOG(LS_WARNING) << "VideoRtpReceiver::Stop: No video channel exists.";
   } else {
     // Allow that SetSink fail. This is the normal case when the underlying
     // media channel has already been deleted.
-    channel_->SetSink(ssrc_, nullptr);
+    SetSink(nullptr);
   }
   stopped_ = true;
 }
@@ -230,22 +237,20 @@
   }
 }
 
-void VideoRtpReceiver::SetChannel(cricket::VideoChannel* channel) {
-  if (channel_) {
-    channel_->SignalFirstPacketReceived.disconnect(this);
-    channel_->SetSink(ssrc_, nullptr);
+void VideoRtpReceiver::SetMediaChannel(
+    cricket::VideoMediaChannel* media_channel) {
+  if (media_channel_) {
+    SetSink(nullptr);
   }
-  channel_ = channel;
-  if (channel_) {
-    if (!channel_->SetSink(ssrc_, &broadcaster_)) {
+  media_channel_ = media_channel;
+  if (media_channel_) {
+    if (!SetSink(&broadcaster_)) {
       RTC_NOTREACHED();
     }
-    channel_->SignalFirstPacketReceived.connect(
-        this, &VideoRtpReceiver::OnFirstPacketReceived);
   }
 }
 
-void VideoRtpReceiver::OnFirstPacketReceived(cricket::BaseChannel* channel) {
+void VideoRtpReceiver::NotifyFirstPacketReceived() {
   if (observer_) {
     observer_->OnFirstPacketReceived(media_type());
   }
diff --git a/pc/rtpreceiver.h b/pc/rtpreceiver.h
index 6df4642..0ce6373 100644
--- a/pc/rtpreceiver.h
+++ b/pc/rtpreceiver.h
@@ -23,11 +23,9 @@
 #include "api/mediastreaminterface.h"
 #include "api/rtpreceiverinterface.h"
 #include "media/base/videobroadcaster.h"
-#include "pc/channel.h"
 #include "pc/remoteaudiosource.h"
 #include "pc/videotracksource.h"
 #include "rtc_base/basictypes.h"
-#include "rtc_base/sigslot.h"
 
 namespace webrtc {
 
@@ -38,22 +36,26 @@
   // This SSRC is used as an identifier for the receiver between the API layer
   // and the WebRtcVideoEngine, WebRtcVoiceEngine layer.
   virtual uint32_t ssrc() const = 0;
+
+  // Call this to notify the RtpReceiver when the first packet has been received
+  // on the corresponding channel.
+  virtual void NotifyFirstPacketReceived() = 0;
 };
 
 class AudioRtpReceiver : public ObserverInterface,
                          public AudioSourceInterface::AudioObserver,
-                         public rtc::RefCountedObject<RtpReceiverInternal>,
-                         public sigslot::has_slots<> {
+                         public rtc::RefCountedObject<RtpReceiverInternal> {
  public:
   // An SSRC of 0 will create a receiver that will match the first SSRC it
   // sees.
   // TODO(deadbeef): Use rtc::Optional, or have another constructor that
   // doesn't take an SSRC, and make this one DCHECK(ssrc != 0).
   AudioRtpReceiver(
+      rtc::Thread* worker_thread,
       const std::string& receiver_id,
       std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams,
       uint32_t ssrc,
-      cricket::VoiceChannel* channel);
+      cricket::VoiceMediaChannel* media_channel);
   virtual ~AudioRtpReceiver();
 
   // ObserverInterface implementation
@@ -87,22 +89,24 @@
   // RtpReceiverInternal implementation.
   void Stop() override;
   uint32_t ssrc() const override { return ssrc_; }
+  void NotifyFirstPacketReceived() override;
 
   void SetObserver(RtpReceiverObserverInterface* observer) override;
 
   // Does not take ownership.
-  // Should call SetChannel(nullptr) before |channel| is destroyed.
-  void SetChannel(cricket::VoiceChannel* channel);
+  // Should call SetMediaChannel(nullptr) before |media_channel| is destroyed.
+  void SetMediaChannel(cricket::VoiceMediaChannel* media_channel);
 
   std::vector<RtpSource> GetSources() const override;
 
  private:
   void Reconfigure();
-  void OnFirstPacketReceived(cricket::BaseChannel* channel);
+  bool SetOutputVolume(double volume);
 
+  rtc::Thread* const worker_thread_;
   const std::string id_;
   const uint32_t ssrc_;
-  cricket::VoiceChannel* channel_;
+  cricket::VoiceMediaChannel* media_channel_ = nullptr;
   const rtc::scoped_refptr<AudioTrackInterface> track_;
   std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_;
   bool cached_track_enabled_;
@@ -112,17 +116,16 @@
   bool received_first_packet_ = false;
 };
 
-class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal>,
-                         public sigslot::has_slots<> {
+class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInternal> {
  public:
   // An SSRC of 0 will create a receiver that will match the first SSRC it
   // sees.
   VideoRtpReceiver(
+      rtc::Thread* worker_thread,
       const std::string& track_id,
       std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams,
-      rtc::Thread* worker_thread,
       uint32_t ssrc,
-      cricket::VideoChannel* channel);
+      cricket::VideoMediaChannel* media_channel);
 
   virtual ~VideoRtpReceiver();
 
@@ -151,19 +154,21 @@
   // RtpReceiverInternal implementation.
   void Stop() override;
   uint32_t ssrc() const override { return ssrc_; }
+  void NotifyFirstPacketReceived() override;
 
   void SetObserver(RtpReceiverObserverInterface* observer) override;
 
   // Does not take ownership.
-  // Should call SetChannel(nullptr) before |channel| is destroyed.
-  void SetChannel(cricket::VideoChannel* channel);
+  // Should call SetMediaChannel(nullptr) before |media_channel| is destroyed.
+  void SetMediaChannel(cricket::VideoMediaChannel* media_channel);
 
  private:
-  void OnFirstPacketReceived(cricket::BaseChannel* channel);
+  bool SetSink(rtc::VideoSinkInterface<VideoFrame>* sink);
 
+  rtc::Thread* const worker_thread_;
   std::string id_;
   uint32_t ssrc_;
-  cricket::VideoChannel* channel_;
+  cricket::VideoMediaChannel* media_channel_ = nullptr;
   // |broadcaster_| is needed since the decoder can only handle one sink.
   // It might be better if the decoder can handle multiple sinks and consider
   // the VideoSinkWants.
diff --git a/pc/rtpsenderreceiver_unittest.cc b/pc/rtpsenderreceiver_unittest.cc
index 6334f72..f1ba6d8 100644
--- a/pc/rtpsenderreceiver_unittest.cc
+++ b/pc/rtpsenderreceiver_unittest.cc
@@ -169,7 +169,8 @@
   void CreateAudioRtpReceiver(
       std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
     audio_rtp_receiver_ = new AudioRtpReceiver(
-        kAudioTrackId, std::move(streams), kAudioSsrc, voice_channel_);
+        rtc::Thread::Current(), kAudioTrackId, std::move(streams), kAudioSsrc,
+        voice_media_channel_);
     audio_track_ = audio_rtp_receiver_->audio_track();
     VerifyVoiceChannelOutput();
   }
@@ -177,8 +178,8 @@
   void CreateVideoRtpReceiver(
       std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams = {}) {
     video_rtp_receiver_ = new VideoRtpReceiver(
-        kVideoTrackId, std::move(streams), rtc::Thread::Current(), kVideoSsrc,
-        video_channel_);
+        rtc::Thread::Current(), kVideoTrackId, std::move(streams), kVideoSsrc,
+        video_media_channel_);
     video_track_ = video_rtp_receiver_->video_track();
     VerifyVideoChannelOutput();
   }
diff --git a/pc/rtptransceiver.cc b/pc/rtptransceiver.cc
index 61ebdc7..0dfc8b5 100644
--- a/pc/rtptransceiver.cc
+++ b/pc/rtptransceiver.cc
@@ -46,7 +46,18 @@
   if (channel) {
     RTC_DCHECK_EQ(media_type(), channel->media_type());
   }
+
+  if (channel_) {
+    channel_->SignalFirstPacketReceived.disconnect(this);
+  }
+
   channel_ = channel;
+
+  if (channel_) {
+    channel_->SignalFirstPacketReceived.connect(
+        this, &RtpTransceiver::OnFirstPacketReceived);
+  }
+
   for (auto sender : senders_) {
     if (media_type() == cricket::MEDIA_TYPE_AUDIO) {
       static_cast<AudioRtpSender*>(sender->internal())
@@ -56,16 +67,21 @@
           ->SetChannel(static_cast<cricket::VideoChannel*>(channel));
     }
   }
+
   for (auto receiver : receivers_) {
     if (!channel) {
       receiver->internal()->Stop();
     }
     if (media_type() == cricket::MEDIA_TYPE_AUDIO) {
+      auto* voice_channel = static_cast<cricket::VoiceChannel*>(channel);
       static_cast<AudioRtpReceiver*>(receiver->internal())
-          ->SetChannel(static_cast<cricket::VoiceChannel*>(channel));
+          ->SetMediaChannel(voice_channel ? voice_channel->media_channel()
+                                          : nullptr);
     } else {
+      auto* video_channel = static_cast<cricket::VideoChannel*>(channel);
       static_cast<VideoRtpReceiver*>(receiver->internal())
-          ->SetChannel(static_cast<cricket::VideoChannel*>(channel));
+          ->SetMediaChannel(video_channel ? video_channel->media_channel()
+                                          : nullptr);
     }
   }
 }
@@ -136,6 +152,12 @@
   return mid_;
 }
 
+void RtpTransceiver::OnFirstPacketReceived(cricket::BaseChannel* channel) {
+  for (auto receiver : receivers_) {
+    receiver->internal()->NotifyFirstPacketReceived();
+  }
+}
+
 rtc::scoped_refptr<RtpSenderInterface> RtpTransceiver::sender() const {
   RTC_DCHECK(unified_plan_);
   RTC_CHECK_EQ(1u, senders_.size());
diff --git a/pc/rtptransceiver.h b/pc/rtptransceiver.h
index 9e8565b..40ab2f0 100644
--- a/pc/rtptransceiver.h
+++ b/pc/rtptransceiver.h
@@ -51,7 +51,8 @@
 // AudioRtpSenders, AudioRtpReceivers, and a VoiceChannel. Video RtpTransceivers
 // will have VideoRtpSenders, VideoRtpReceivers, and a VideoChannel.
 class RtpTransceiver final
-    : public rtc::RefCountedObject<RtpTransceiverInterface> {
+    : public rtc::RefCountedObject<RtpTransceiverInterface>,
+      public sigslot::has_slots<> {
  public:
   // Construct a Plan B-style RtpTransceiver with no senders, receivers, or
   // channel set.
@@ -167,6 +168,8 @@
   void SetCodecPreferences(rtc::ArrayView<RtpCodecCapability> codecs) override;
 
  private:
+  void OnFirstPacketReceived(cricket::BaseChannel* channel);
+
   const bool unified_plan_;
   const cricket::MediaType media_type_;
   std::vector<rtc::scoped_refptr<RtpSenderProxyWithInternal<RtpSenderInternal>>>