This reverts commit 8eb37a39e79fe1098d3503dcb8c8c2d196203fed. Chrome now has its own implementation of TaskQueues that is based on Chrome threads.

The CL was originally reviewed here:
https://codereview.webrtc.org/2060403002/

- Add task queue to Call with the intent of replacing the use of one of the process threads.

- Split VideoSendStream in two. VideoSendStreamInternal is created and used on the new task queue.

- BitrateAllocator is now created on libjingle's worker thread but always used on the new task queue instead of both encoder threads and the process thread.

- VideoEncoderConfig and VideoSendStream::Config support move semantics.

- The encoder thread is moved from VideoSendStream to ViEEncoder. Frames are forwarded directly to ViEEncoder, which is responsible for timestamping and encoding the frames.

TBR=mflodman@webrtc.org
BUG=webrtc:5687

Review-Url: https://codereview.webrtc.org/2250123002
Cr-Commit-Position: refs/heads/master@{#14014}
diff --git a/webrtc/audio/audio_send_stream.cc b/webrtc/audio/audio_send_stream.cc
index 17979d5..417720c 100644
--- a/webrtc/audio/audio_send_stream.cc
+++ b/webrtc/audio/audio_send_stream.cc
@@ -16,7 +16,9 @@
 #include "webrtc/audio/conversion.h"
 #include "webrtc/audio/scoped_voe_interface.h"
 #include "webrtc/base/checks.h"
+#include "webrtc/base/event.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/base/task_queue.h"
 #include "webrtc/modules/congestion_controller/include/congestion_controller.h"
 #include "webrtc/modules/pacing/paced_sender.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -59,9 +61,11 @@
 AudioSendStream::AudioSendStream(
     const webrtc::AudioSendStream::Config& config,
     const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
+    rtc::TaskQueue* worker_queue,
     CongestionController* congestion_controller,
     BitrateAllocator* bitrate_allocator)
-    : config_(config),
+    : worker_queue_(worker_queue),
+      config_(config),
       audio_state_(audio_state),
       bitrate_allocator_(bitrate_allocator) {
   LOG(LS_INFO) << "AudioSendStream: " << config_.ToString();
@@ -109,8 +113,13 @@
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   if (config_.min_bitrate_kbps != -1 && config_.max_bitrate_kbps != -1) {
     RTC_DCHECK_GE(config_.max_bitrate_kbps, config_.min_bitrate_kbps);
-    bitrate_allocator_->AddObserver(this, config_.min_bitrate_kbps * 1000,
-                                    config_.max_bitrate_kbps * 1000, 0, true);
+    rtc::Event thread_sync_event(false /* manual_reset */, false);
+    worker_queue_->PostTask([this, &thread_sync_event] {
+      bitrate_allocator_->AddObserver(this, config_.min_bitrate_kbps * 1000,
+                                      config_.max_bitrate_kbps * 1000, 0, true);
+      thread_sync_event.Set();
+    });
+    thread_sync_event.Wait(rtc::Event::kForever);
   }
 
   ScopedVoEInterface<VoEBase> base(voice_engine());
@@ -122,7 +131,13 @@
 
 void AudioSendStream::Stop() {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
-  bitrate_allocator_->RemoveObserver(this);
+  rtc::Event thread_sync_event(false /* manual_reset */, false);
+  worker_queue_->PostTask([this, &thread_sync_event] {
+    bitrate_allocator_->RemoveObserver(this);
+    thread_sync_event.Set();
+  });
+  thread_sync_event.Wait(rtc::Event::kForever);
+
   ScopedVoEInterface<VoEBase> base(voice_engine());
   int error = base->StopSend(config_.voe_channel_id);
   if (error != 0) {
diff --git a/webrtc/audio/audio_send_stream.h b/webrtc/audio/audio_send_stream.h
index ec2a4db..e92c326 100644
--- a/webrtc/audio/audio_send_stream.h
+++ b/webrtc/audio/audio_send_stream.h
@@ -33,6 +33,7 @@
  public:
   AudioSendStream(const webrtc::AudioSendStream::Config& config,
                   const rtc::scoped_refptr<webrtc::AudioState>& audio_state,
+                  rtc::TaskQueue* worker_queue,
                   CongestionController* congestion_controller,
                   BitrateAllocator* bitrate_allocator);
   ~AudioSendStream() override;
@@ -59,6 +60,7 @@
   VoiceEngine* voice_engine() const;
 
   rtc::ThreadChecker thread_checker_;
+  rtc::TaskQueue* worker_queue_;
   const webrtc::AudioSendStream::Config config_;
   rtc::scoped_refptr<webrtc::AudioState> audio_state_;
   std::unique_ptr<voe::ChannelProxy> channel_proxy_;
diff --git a/webrtc/audio/audio_send_stream_unittest.cc b/webrtc/audio/audio_send_stream_unittest.cc
index 7f940fc..9172064 100644
--- a/webrtc/audio/audio_send_stream_unittest.cc
+++ b/webrtc/audio/audio_send_stream_unittest.cc
@@ -16,6 +16,7 @@
 #include "webrtc/audio/audio_send_stream.h"
 #include "webrtc/audio/audio_state.h"
 #include "webrtc/audio/conversion.h"
+#include "webrtc/base/task_queue.h"
 #include "webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h"
 #include "webrtc/call/mock/mock_rtc_event_log.h"
 #include "webrtc/modules/congestion_controller/include/congestion_controller.h"
@@ -65,7 +66,8 @@
                                &bitrate_observer_,
                                &remote_bitrate_observer_,
                                &event_log_),
-        bitrate_allocator_(&limit_observer_) {
+        bitrate_allocator_(&limit_observer_),
+        worker_queue_("ConfigHelper_worker_queue") {
     using testing::Invoke;
     using testing::StrEq;
 
@@ -125,6 +127,7 @@
     return &congestion_controller_;
   }
   BitrateAllocator* bitrate_allocator() { return &bitrate_allocator_; }
+  rtc::TaskQueue* worker_queue() { return &worker_queue_; }
 
   void SetupMockForSendTelephoneEvent() {
     EXPECT_TRUE(channel_proxy_);
@@ -181,6 +184,9 @@
   MockRtcEventLog event_log_;
   testing::NiceMock<MockLimitObserver> limit_observer_;
   BitrateAllocator bitrate_allocator_;
+  // |worker_queue| is defined last to ensure all pending tasks are cancelled
+  // and deleted before any other members.
+  rtc::TaskQueue worker_queue_;
 };
 }  // namespace
 
@@ -202,16 +208,16 @@
 
 TEST(AudioSendStreamTest, ConstructDestruct) {
   ConfigHelper helper;
-  internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
-                                        helper.congestion_controller(),
-                                        helper.bitrate_allocator());
+  internal::AudioSendStream send_stream(
+      helper.config(), helper.audio_state(), helper.worker_queue(),
+      helper.congestion_controller(), helper.bitrate_allocator());
 }
 
 TEST(AudioSendStreamTest, SendTelephoneEvent) {
   ConfigHelper helper;
-  internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
-                                        helper.congestion_controller(),
-                                        helper.bitrate_allocator());
+  internal::AudioSendStream send_stream(
+      helper.config(), helper.audio_state(), helper.worker_queue(),
+      helper.congestion_controller(), helper.bitrate_allocator());
   helper.SetupMockForSendTelephoneEvent();
   EXPECT_TRUE(send_stream.SendTelephoneEvent(kTelephoneEventPayloadType,
       kTelephoneEventCode, kTelephoneEventDuration));
@@ -219,18 +225,18 @@
 
 TEST(AudioSendStreamTest, SetMuted) {
   ConfigHelper helper;
-  internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
-                                        helper.congestion_controller(),
-                                        helper.bitrate_allocator());
+  internal::AudioSendStream send_stream(
+      helper.config(), helper.audio_state(), helper.worker_queue(),
+      helper.congestion_controller(), helper.bitrate_allocator());
   EXPECT_CALL(*helper.channel_proxy(), SetInputMute(true));
   send_stream.SetMuted(true);
 }
 
 TEST(AudioSendStreamTest, GetStats) {
   ConfigHelper helper;
-  internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
-                                        helper.congestion_controller(),
-                                        helper.bitrate_allocator());
+  internal::AudioSendStream send_stream(
+      helper.config(), helper.audio_state(), helper.worker_queue(),
+      helper.congestion_controller(), helper.bitrate_allocator());
   helper.SetupMockForGetStats();
   AudioSendStream::Stats stats = send_stream.GetStats();
   EXPECT_EQ(kSsrc, stats.local_ssrc);
@@ -257,9 +263,9 @@
 
 TEST(AudioSendStreamTest, GetStatsTypingNoiseDetected) {
   ConfigHelper helper;
-  internal::AudioSendStream send_stream(helper.config(), helper.audio_state(),
-                                        helper.congestion_controller(),
-                                        helper.bitrate_allocator());
+  internal::AudioSendStream send_stream(
+      helper.config(), helper.audio_state(), helper.worker_queue(),
+      helper.congestion_controller(), helper.bitrate_allocator());
   helper.SetupMockForGetStats();
   EXPECT_FALSE(send_stream.GetStats().typing_noise_detected);
 
diff --git a/webrtc/call.h b/webrtc/call.h
index 5f64590..193124e 100644
--- a/webrtc/call.h
+++ b/webrtc/call.h
@@ -113,8 +113,8 @@
       AudioReceiveStream* receive_stream) = 0;
 
   virtual VideoSendStream* CreateVideoSendStream(
-      const VideoSendStream::Config& config,
-      const VideoEncoderConfig& encoder_config) = 0;
+      VideoSendStream::Config config,
+      VideoEncoderConfig encoder_config) = 0;
   virtual void DestroyVideoSendStream(VideoSendStream* send_stream) = 0;
 
   virtual VideoReceiveStream* CreateVideoReceiveStream(
diff --git a/webrtc/call/BUILD.gn b/webrtc/call/BUILD.gn
index 2f3ce8c..eed2b07 100644
--- a/webrtc/call/BUILD.gn
+++ b/webrtc/call/BUILD.gn
@@ -30,6 +30,7 @@
     "..:webrtc_common",
     "../api:call_api",
     "../audio",
+    "../base:rtc_task_queue",
     "../modules/congestion_controller",
     "../modules/rtp_rtcp",
     "../system_wrappers",
@@ -52,6 +53,7 @@
       "//testing/gmock",
       "//testing/gtest",
     ]
+    configs += [ "..:common_config" ]
     if (is_clang) {
       # Suppress warnings from the Chromium Clang plugin.
       # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
diff --git a/webrtc/call/bitrate_allocator.cc b/webrtc/call/bitrate_allocator.cc
index 34b06b1..085fdf9 100644
--- a/webrtc/call/bitrate_allocator.cc
+++ b/webrtc/call/bitrate_allocator.cc
@@ -54,7 +54,9 @@
       last_rtt_(0),
       num_pause_events_(0),
       clock_(Clock::GetRealTimeClock()),
-      last_bwe_log_time_(0) {}
+      last_bwe_log_time_(0) {
+  sequenced_checker_.Detach();
+}
 
 BitrateAllocator::~BitrateAllocator() {
   RTC_LOGGED_HISTOGRAM_COUNTS_100("WebRTC.Call.NumberOfPauseEvents",
@@ -64,7 +66,7 @@
 void BitrateAllocator::OnNetworkChanged(uint32_t target_bitrate_bps,
                                         uint8_t fraction_loss,
                                         int64_t rtt) {
-  rtc::CritScope lock(&crit_sect_);
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   last_bitrate_bps_ = target_bitrate_bps;
   last_non_zero_bitrate_bps_ =
       target_bitrate_bps > 0 ? target_bitrate_bps : last_non_zero_bitrate_bps_;
@@ -117,7 +119,7 @@
                                    uint32_t max_bitrate_bps,
                                    uint32_t pad_up_bitrate_bps,
                                    bool enforce_min_bitrate) {
-  rtc::CritScope lock(&crit_sect_);
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   auto it = FindObserverConfig(observer);
 
   // Update settings if the observer already exists, create a new one otherwise.
@@ -155,17 +157,15 @@
 }
 
 void BitrateAllocator::UpdateAllocationLimits() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   uint32_t total_requested_padding_bitrate = 0;
   uint32_t total_requested_min_bitrate = 0;
 
-  {
-    rtc::CritScope lock(&crit_sect_);
-    for (const auto& config : bitrate_observer_configs_) {
-      if (config.enforce_min_bitrate) {
-        total_requested_min_bitrate += config.min_bitrate_bps;
-      }
-      total_requested_padding_bitrate += config.pad_up_bitrate_bps;
+  for (const auto& config : bitrate_observer_configs_) {
+    if (config.enforce_min_bitrate) {
+      total_requested_min_bitrate += config.min_bitrate_bps;
     }
+    total_requested_padding_bitrate += config.pad_up_bitrate_bps;
   }
 
   LOG(LS_INFO) << "UpdateAllocationLimits : total_requested_min_bitrate: "
@@ -177,27 +177,26 @@
 }
 
 void BitrateAllocator::RemoveObserver(BitrateAllocatorObserver* observer) {
-  {
-    rtc::CritScope lock(&crit_sect_);
-    auto it = FindObserverConfig(observer);
-    if (it != bitrate_observer_configs_.end()) {
-      bitrate_observer_configs_.erase(it);
-    }
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
+  auto it = FindObserverConfig(observer);
+  if (it != bitrate_observer_configs_.end()) {
+    bitrate_observer_configs_.erase(it);
   }
+
   UpdateAllocationLimits();
 }
 
 int BitrateAllocator::GetStartBitrate(BitrateAllocatorObserver* observer) {
-  rtc::CritScope lock(&crit_sect_);
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   const auto& it = FindObserverConfig(observer);
   if (it == bitrate_observer_configs_.end()) {
     // This observer hasn't been added yet, just give it its fair share.
     return last_non_zero_bitrate_bps_ /
-             static_cast<int>((bitrate_observer_configs_.size() + 1));
+           static_cast<int>((bitrate_observer_configs_.size() + 1));
   } else if (it->allocated_bitrate_bps == -1) {
     // This observer hasn't received an allocation yet, so do the same.
     return last_non_zero_bitrate_bps_ /
-             static_cast<int>(bitrate_observer_configs_.size());
+           static_cast<int>(bitrate_observer_configs_.size());
   } else {
     // This observer already has an allocation.
     return it->allocated_bitrate_bps;
@@ -205,8 +204,8 @@
 }
 
 BitrateAllocator::ObserverConfigs::iterator
-BitrateAllocator::FindObserverConfig(
-    const BitrateAllocatorObserver* observer) {
+BitrateAllocator::FindObserverConfig(const BitrateAllocatorObserver* observer) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   for (auto it = bitrate_observer_configs_.begin();
        it != bitrate_observer_configs_.end(); ++it) {
     if (it->observer == observer)
@@ -217,6 +216,7 @@
 
 BitrateAllocator::ObserverAllocation BitrateAllocator::AllocateBitrates(
     uint32_t bitrate) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   if (bitrate_observer_configs_.empty())
     return ObserverAllocation();
 
@@ -245,6 +245,7 @@
 }
 
 BitrateAllocator::ObserverAllocation BitrateAllocator::ZeroRateAllocation() {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   ObserverAllocation allocation;
   for (const auto& observer_config : bitrate_observer_configs_)
     allocation[observer_config.observer] = 0;
@@ -253,8 +254,8 @@
 
 BitrateAllocator::ObserverAllocation BitrateAllocator::LowRateAllocation(
     uint32_t bitrate) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   ObserverAllocation allocation;
-
   // Start by allocating bitrate to observers enforcing a min bitrate, hence
   // remaining_bitrate might turn negative.
   int64_t remaining_bitrate = bitrate;
@@ -308,7 +309,7 @@
 BitrateAllocator::ObserverAllocation BitrateAllocator::NormalRateAllocation(
     uint32_t bitrate,
     uint32_t sum_min_bitrates) {
-
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   ObserverAllocation allocation;
   for (const auto& observer_config : bitrate_observer_configs_)
     allocation[observer_config.observer] = observer_config.min_bitrate_bps;
@@ -321,7 +322,9 @@
 }
 
 BitrateAllocator::ObserverAllocation BitrateAllocator::MaxRateAllocation(
-    uint32_t bitrate, uint32_t sum_max_bitrates) {
+    uint32_t bitrate,
+    uint32_t sum_max_bitrates) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   ObserverAllocation allocation;
 
   for (const auto& observer_config : bitrate_observer_configs_) {
@@ -335,12 +338,12 @@
 
 uint32_t BitrateAllocator::LastAllocatedBitrate(
     const ObserverConfig& observer_config) {
-
   // Return the configured minimum bitrate for newly added observers, to avoid
   // requiring an extra high bitrate for the observer to get an allocated
   // bitrate.
-  return observer_config.allocated_bitrate_bps == -1 ?
-      observer_config.min_bitrate_bps : observer_config.allocated_bitrate_bps;
+  return observer_config.allocated_bitrate_bps == -1
+             ? observer_config.min_bitrate_bps
+             : observer_config.allocated_bitrate_bps;
 }
 
 uint32_t BitrateAllocator::MinBitrateWithHysteresis(
@@ -366,6 +369,7 @@
                                                bool include_zero_allocations,
                                                int max_multiplier,
                                                ObserverAllocation* allocation) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   RTC_DCHECK_EQ(allocation->size(), bitrate_observer_configs_.size());
 
   ObserverSortingMap list_max_bitrates;
@@ -398,10 +402,12 @@
 
 bool BitrateAllocator::EnoughBitrateForAllObservers(uint32_t bitrate,
                                                     uint32_t sum_min_bitrates) {
+  RTC_DCHECK_CALLED_SEQUENTIALLY(&sequenced_checker_);
   if (bitrate < sum_min_bitrates)
     return false;
 
-  uint32_t extra_bitrate_per_observer = (bitrate - sum_min_bitrates) /
+  uint32_t extra_bitrate_per_observer =
+      (bitrate - sum_min_bitrates) /
       static_cast<uint32_t>(bitrate_observer_configs_.size());
   for (const auto& observer_config : bitrate_observer_configs_) {
     if (observer_config.min_bitrate_bps + extra_bitrate_per_observer <
diff --git a/webrtc/call/bitrate_allocator.h b/webrtc/call/bitrate_allocator.h
index 37e15b4..a5ed26c 100644
--- a/webrtc/call/bitrate_allocator.h
+++ b/webrtc/call/bitrate_allocator.h
@@ -17,8 +17,7 @@
 #include <utility>
 #include <vector>
 
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/sequenced_task_checker.h"
 
 namespace webrtc {
 
@@ -121,31 +120,24 @@
 
   typedef std::vector<ObserverConfig> ObserverConfigs;
   ObserverConfigs::iterator FindObserverConfig(
-      const BitrateAllocatorObserver* observer)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+      const BitrateAllocatorObserver* observer);
 
   typedef std::multimap<uint32_t, const ObserverConfig*> ObserverSortingMap;
   typedef std::map<BitrateAllocatorObserver*, int> ObserverAllocation;
 
-  ObserverAllocation AllocateBitrates(uint32_t bitrate)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+  ObserverAllocation AllocateBitrates(uint32_t bitrate);
 
-  ObserverAllocation ZeroRateAllocation() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-  ObserverAllocation LowRateAllocation(uint32_t bitrate)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+  ObserverAllocation ZeroRateAllocation();
+  ObserverAllocation LowRateAllocation(uint32_t bitrate);
   ObserverAllocation NormalRateAllocation(uint32_t bitrate,
-                                          uint32_t sum_min_bitrates)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+                                          uint32_t sum_min_bitrates);
   ObserverAllocation MaxRateAllocation(uint32_t bitrate,
-                                       uint32_t sum_max_bitrates)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+                                       uint32_t sum_max_bitrates);
 
-  uint32_t LastAllocatedBitrate(const ObserverConfig& observer_config)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+  uint32_t LastAllocatedBitrate(const ObserverConfig& observer_config);
   // The minimum bitrate required by this observer, including enable-hysteresis
   // if the observer is in a paused state.
-  uint32_t MinBitrateWithHysteresis(const ObserverConfig& observer_config)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+  uint32_t MinBitrateWithHysteresis(const ObserverConfig& observer_config);
   // Splits |bitrate| evenly to observers already in |allocation|.
   // |include_zero_allocations| decides if zero allocations should be part of
   // the distribution or not. The allowed max bitrate is |max_multiplier| x
@@ -153,24 +145,22 @@
   void DistributeBitrateEvenly(uint32_t bitrate,
                                bool include_zero_allocations,
                                int max_multiplier,
-                               ObserverAllocation* allocation)
-          EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-  bool EnoughBitrateForAllObservers(uint32_t bitrate, uint32_t sum_min_bitrates)
-      EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+                               ObserverAllocation* allocation);
+  bool EnoughBitrateForAllObservers(uint32_t bitrate,
+                                    uint32_t sum_min_bitrates);
 
-  LimitObserver* const limit_observer_;
-
-  rtc::CriticalSection crit_sect_;
+  rtc::SequencedTaskChecker sequenced_checker_;
+  LimitObserver* const limit_observer_ GUARDED_BY(&sequenced_checker_);
   // Stored in a list to keep track of the insertion order.
-  ObserverConfigs bitrate_observer_configs_ GUARDED_BY(crit_sect_);
-  uint32_t last_bitrate_bps_ GUARDED_BY(crit_sect_);
-  uint32_t last_non_zero_bitrate_bps_ GUARDED_BY(crit_sect_);
-  uint8_t last_fraction_loss_ GUARDED_BY(crit_sect_);
-  int64_t last_rtt_ GUARDED_BY(crit_sect_);
+  ObserverConfigs bitrate_observer_configs_ GUARDED_BY(&sequenced_checker_);
+  uint32_t last_bitrate_bps_ GUARDED_BY(&sequenced_checker_);
+  uint32_t last_non_zero_bitrate_bps_ GUARDED_BY(&sequenced_checker_);
+  uint8_t last_fraction_loss_ GUARDED_BY(&sequenced_checker_);
+  int64_t last_rtt_ GUARDED_BY(&sequenced_checker_);
   // Number of mute events based on too low BWE, not network up/down.
-  int num_pause_events_ GUARDED_BY(crit_sect_);
-  Clock* const clock_;
-  int64_t last_bwe_log_time_;
+  int num_pause_events_ GUARDED_BY(&sequenced_checker_);
+  Clock* const clock_ GUARDED_BY(&sequenced_checker_);
+  int64_t last_bwe_log_time_ GUARDED_BY(&sequenced_checker_);
 };
 }  // namespace webrtc
 #endif  // WEBRTC_CALL_BITRATE_ALLOCATOR_H_
diff --git a/webrtc/call/bitrate_estimator_tests.cc b/webrtc/call/bitrate_estimator_tests.cc
index ab8643d..b8a148c 100644
--- a/webrtc/call/bitrate_estimator_tests.cc
+++ b/webrtc/call/bitrate_estimator_tests.cc
@@ -173,7 +173,8 @@
       test_->video_send_config_.rtp.ssrcs[0]++;
       test_->video_send_config_.encoder_settings.encoder = &fake_encoder_;
       send_stream_ = test_->sender_call_->CreateVideoSendStream(
-          test_->video_send_config_, test_->video_encoder_config_);
+          test_->video_send_config_.Copy(),
+          test_->video_encoder_config_.Copy());
       RTC_DCHECK_EQ(1u, test_->video_encoder_config_.streams.size());
       frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
           send_stream_->Input(), test_->video_encoder_config_.streams[0].width,
diff --git a/webrtc/call/call.cc b/webrtc/call/call.cc
index ceec963..5aa7228 100644
--- a/webrtc/call/call.cc
+++ b/webrtc/call/call.cc
@@ -9,7 +9,6 @@
  */
 
 #include <string.h>
-
 #include <algorithm>
 #include <map>
 #include <memory>
@@ -22,6 +21,7 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/constructormagic.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/base/task_queue.h"
 #include "webrtc/base/thread_annotations.h"
 #include "webrtc/base/thread_checker.h"
 #include "webrtc/base/trace_event.h"
@@ -74,8 +74,8 @@
       webrtc::AudioReceiveStream* receive_stream) override;
 
   webrtc::VideoSendStream* CreateVideoSendStream(
-      const webrtc::VideoSendStream::Config& config,
-      const VideoEncoderConfig& encoder_config) override;
+      webrtc::VideoSendStream::Config config,
+      VideoEncoderConfig encoder_config) override;
   void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override;
 
   webrtc::VideoReceiveStream* CreateVideoReceiveStream(
@@ -198,6 +198,11 @@
   const std::unique_ptr<CongestionController> congestion_controller_;
   const std::unique_ptr<SendDelayStats> video_send_delay_stats_;
   const int64_t start_ms_;
+  // TODO(perkj): |worker_queue_| is supposed to replace
+  // |module_process_thread_|.
+  // |worker_queue| is defined last to ensure all pending tasks are cancelled
+  // and deleted before any other members.
+  rtc::TaskQueue worker_queue_;
 
   RTC_DISALLOW_COPY_AND_ASSIGN(Call);
 };
@@ -249,7 +254,8 @@
       congestion_controller_(
           new CongestionController(clock_, this, &remb_, event_log_.get())),
       video_send_delay_stats_(new SendDelayStats(clock_)),
-      start_ms_(clock_->TimeInMilliseconds()) {
+      start_ms_(clock_->TimeInMilliseconds()),
+      worker_queue_("call_worker_queue") {
   RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
   RTC_DCHECK_GE(config.bitrate_config.min_bitrate_bps, 0);
   RTC_DCHECK_GE(config.bitrate_config.start_bitrate_bps,
@@ -279,6 +285,7 @@
 Call::~Call() {
   RTC_DCHECK(!remb_.InUse());
   RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
+
   RTC_CHECK(audio_send_ssrcs_.empty());
   RTC_CHECK(video_send_ssrcs_.empty());
   RTC_CHECK(video_send_streams_.empty());
@@ -297,7 +304,10 @@
 
   // Only update histograms after process threads have been shut down, so that
   // they won't try to concurrently update stats.
-  UpdateSendHistograms();
+  {
+    rtc::CritScope lock(&bitrate_crit_);
+    UpdateSendHistograms();
+  }
   UpdateReceiveHistograms();
   UpdateHistograms();
 
@@ -369,7 +379,7 @@
   TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream");
   RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
   AudioSendStream* send_stream = new AudioSendStream(
-      config, config_.audio_state, congestion_controller_.get(),
+      config, config_.audio_state, &worker_queue_, congestion_controller_.get(),
       bitrate_allocator_.get());
   {
     WriteLockScoped write_lock(*send_crit_);
@@ -445,22 +455,28 @@
 }
 
 webrtc::VideoSendStream* Call::CreateVideoSendStream(
-    const webrtc::VideoSendStream::Config& config,
-    const VideoEncoderConfig& encoder_config) {
+    webrtc::VideoSendStream::Config config,
+    VideoEncoderConfig encoder_config) {
   TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
   RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
 
   video_send_delay_stats_->AddSsrcs(config);
+  event_log_->LogVideoSendStreamConfig(config);
+
   // TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if
   // the call has already started.
+  // Copy ssrcs from |config| since |config| is moved.
+  std::vector<uint32_t> ssrcs = config.rtp.ssrcs;
   VideoSendStream* send_stream = new VideoSendStream(
-      num_cpu_cores_, module_process_thread_.get(), call_stats_.get(),
-      congestion_controller_.get(), bitrate_allocator_.get(),
-      video_send_delay_stats_.get(), &remb_, event_log_.get(), config,
-      encoder_config, suspended_video_send_ssrcs_);
+      num_cpu_cores_, module_process_thread_.get(), &worker_queue_,
+      call_stats_.get(), congestion_controller_.get(), bitrate_allocator_.get(),
+      video_send_delay_stats_.get(), &remb_, event_log_.get(),
+      std::move(config), std::move(encoder_config),
+      suspended_video_send_ssrcs_);
+
   {
     WriteLockScoped write_lock(*send_crit_);
-    for (uint32_t ssrc : config.rtp.ssrcs) {
+    for (uint32_t ssrc : ssrcs) {
       RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end());
       video_send_ssrcs_[ssrc] = send_stream;
     }
@@ -468,7 +484,7 @@
   }
   send_stream->SignalNetworkState(video_network_state_);
   UpdateAggregateNetworkState();
-  event_log_->LogVideoSendStreamConfig(config);
+
   return send_stream;
 }
 
@@ -495,11 +511,11 @@
   }
   RTC_CHECK(send_stream_impl != nullptr);
 
-  VideoSendStream::RtpStateMap rtp_state = send_stream_impl->GetRtpStates();
+  VideoSendStream::RtpStateMap rtp_state =
+      send_stream_impl->StopPermanentlyAndGetRtpStates();
 
   for (VideoSendStream::RtpStateMap::iterator it = rtp_state.begin();
-       it != rtp_state.end();
-       ++it) {
+       it != rtp_state.end(); ++it) {
     suspended_video_send_ssrcs_[it->first] = it->second;
   }
 
@@ -729,6 +745,15 @@
 
 void Call::OnNetworkChanged(uint32_t target_bitrate_bps, uint8_t fraction_loss,
                             int64_t rtt_ms) {
+  // TODO(perkj): Consider making sure CongestionController operates on
+  // |worker_queue_|.
+  if (!worker_queue_.IsCurrent()) {
+    worker_queue_.PostTask([this, target_bitrate_bps, fraction_loss, rtt_ms] {
+      OnNetworkChanged(target_bitrate_bps, fraction_loss, rtt_ms);
+    });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&worker_queue_);
   bitrate_allocator_->OnNetworkChanged(target_bitrate_bps, fraction_loss,
                                        rtt_ms);
 
diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc
index 12cafd9..81fbdb7 100644
--- a/webrtc/call/call_perf_tests.cc
+++ b/webrtc/call/call_perf_tests.cc
@@ -672,7 +672,7 @@
       encoder_config->streams[0].target_bitrate_bps =
           encoder_config->streams[0].max_bitrate_bps = 2000000;
 
-      encoder_config_ = *encoder_config;
+      encoder_config_ = encoder_config->Copy();
     }
 
     void OnVideoStreamsCreated(
@@ -686,7 +686,7 @@
           << "Timed out before receiving an initial high bitrate.";
       encoder_config_.streams[0].width *= 2;
       encoder_config_.streams[0].height *= 2;
-      send_stream_->ReconfigureVideoEncoder(encoder_config_);
+      send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
       EXPECT_TRUE(Wait())
           << "Timed out while waiting for a couple of high bitrate estimates "
              "after reconfiguring the send stream.";
diff --git a/webrtc/config.h b/webrtc/config.h
index 1550a9f..d932eda 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -125,12 +125,20 @@
 };
 
 struct VideoEncoderConfig {
+ public:
   enum class ContentType {
     kRealtimeVideo,
     kScreen,
   };
 
+  VideoEncoderConfig& operator=(VideoEncoderConfig&&) = default;
+  VideoEncoderConfig& operator=(const VideoEncoderConfig&) = delete;
+
+  // Mostly used by tests.  Avoid creating copies if you can.
+  VideoEncoderConfig Copy() const { return VideoEncoderConfig(*this); }
+
   VideoEncoderConfig();
+  VideoEncoderConfig(VideoEncoderConfig&&) = default;
   ~VideoEncoderConfig();
   std::string ToString() const;
 
@@ -145,6 +153,11 @@
   // unless the estimated bandwidth indicates that the link can handle it.
   int min_transmit_bitrate_bps;
   bool expect_encode_from_texture;
+
+ private:
+  // Access to the copy constructor is private to force use of the Copy()
+  // method for those exceptional cases where we do use it.
+  VideoEncoderConfig(const VideoEncoderConfig&) = default;
 };
 
 struct VideoDecoderH264Settings {
diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc
index d0655cf..cb904df 100644
--- a/webrtc/media/engine/fakewebrtccall.cc
+++ b/webrtc/media/engine/fakewebrtccall.cc
@@ -98,21 +98,22 @@
 }
 
 FakeVideoSendStream::FakeVideoSendStream(
-    const webrtc::VideoSendStream::Config& config,
-    const webrtc::VideoEncoderConfig& encoder_config)
+    webrtc::VideoSendStream::Config config,
+    webrtc::VideoEncoderConfig encoder_config)
     : sending_(false),
-      config_(config),
+      config_(std::move(config)),
       codec_settings_set_(false),
       num_swapped_frames_(0) {
   RTC_DCHECK(config.encoder_settings.encoder != NULL);
-  ReconfigureVideoEncoder(encoder_config);
+  ReconfigureVideoEncoder(std::move(encoder_config));
 }
 
-webrtc::VideoSendStream::Config FakeVideoSendStream::GetConfig() const {
+const webrtc::VideoSendStream::Config& FakeVideoSendStream::GetConfig() const {
   return config_;
 }
 
-webrtc::VideoEncoderConfig FakeVideoSendStream::GetEncoderConfig() const {
+const webrtc::VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig()
+    const {
   return encoder_config_;
 }
 
@@ -177,8 +178,7 @@
 }
 
 void FakeVideoSendStream::ReconfigureVideoEncoder(
-    const webrtc::VideoEncoderConfig& config) {
-  encoder_config_ = config;
+    webrtc::VideoEncoderConfig config) {
   if (config.encoder_specific_settings != NULL) {
     if (config_.encoder_settings.payload_name == "VP8") {
       vpx_settings_.vp8 = *reinterpret_cast<const webrtc::VideoCodecVP8*>(
@@ -199,6 +199,7 @@
                     << config_.encoder_settings.payload_name;
     }
   }
+  encoder_config_ = std::move(config);
-  codec_settings_set_ = config.encoder_specific_settings != NULL;
+  codec_settings_set_ = encoder_config_.encoder_specific_settings != NULL;
   ++num_encoder_reconfigurations_;
 }
@@ -359,10 +360,10 @@
 }
 
 webrtc::VideoSendStream* FakeCall::CreateVideoSendStream(
-    const webrtc::VideoSendStream::Config& config,
-    const webrtc::VideoEncoderConfig& encoder_config) {
+    webrtc::VideoSendStream::Config config,
+    webrtc::VideoEncoderConfig encoder_config) {
   FakeVideoSendStream* fake_stream =
-      new FakeVideoSendStream(config, encoder_config);
+      new FakeVideoSendStream(std::move(config), std::move(encoder_config));
   video_send_streams_.push_back(fake_stream);
   ++num_created_send_streams_;
   return fake_stream;
diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h
index f723d7e..d3af222 100644
--- a/webrtc/media/engine/fakewebrtccall.h
+++ b/webrtc/media/engine/fakewebrtccall.h
@@ -102,10 +102,10 @@
 class FakeVideoSendStream final : public webrtc::VideoSendStream,
                                   public webrtc::VideoCaptureInput {
  public:
-  FakeVideoSendStream(const webrtc::VideoSendStream::Config& config,
-                      const webrtc::VideoEncoderConfig& encoder_config);
-  webrtc::VideoSendStream::Config GetConfig() const;
-  webrtc::VideoEncoderConfig GetEncoderConfig() const;
+  FakeVideoSendStream(webrtc::VideoSendStream::Config config,
+                      webrtc::VideoEncoderConfig encoder_config);
+  const webrtc::VideoSendStream::Config& GetConfig() const;
+  const webrtc::VideoEncoderConfig& GetEncoderConfig() const;
   std::vector<webrtc::VideoStream> GetVideoStreams();
 
   bool IsSending() const;
@@ -128,8 +128,7 @@
   void Start() override;
   void Stop() override;
   webrtc::VideoSendStream::Stats GetStats() override;
-  void ReconfigureVideoEncoder(
-      const webrtc::VideoEncoderConfig& config) override;
+  void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override;
   webrtc::VideoCaptureInput* Input() override;
 
   bool sending_;
@@ -208,8 +207,8 @@
       webrtc::AudioReceiveStream* receive_stream) override;
 
   webrtc::VideoSendStream* CreateVideoSendStream(
-      const webrtc::VideoSendStream::Config& config,
-      const webrtc::VideoEncoderConfig& encoder_config) override;
+      webrtc::VideoSendStream::Config config,
+      webrtc::VideoEncoderConfig encoder_config) override;
   void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override;
 
   webrtc::VideoReceiveStream* CreateVideoReceiveStream(
diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc
index f5d0b4b..2dec8f4 100644
--- a/webrtc/media/engine/webrtcvideoengine2.cc
+++ b/webrtc/media/engine/webrtcvideoengine2.cc
@@ -1144,8 +1144,8 @@
   webrtc::VideoSendStream::Config config(this);
   config.suspend_below_min_bitrate = video_config_.suspend_below_min_bitrate;
   WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
-      call_, sp, config, default_send_options_, external_encoder_factory_,
-      video_config_.enable_cpu_overuse_detection,
+      call_, sp, std::move(config), default_send_options_,
+      external_encoder_factory_, video_config_.enable_cpu_overuse_detection,
       bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_,
       send_params_);
 
@@ -1534,11 +1534,11 @@
 
 WebRtcVideoChannel2::WebRtcVideoSendStream::VideoSendStreamParameters::
     VideoSendStreamParameters(
-        const webrtc::VideoSendStream::Config& config,
+        webrtc::VideoSendStream::Config config,
         const VideoOptions& options,
         int max_bitrate_bps,
         const rtc::Optional<VideoCodecSettings>& codec_settings)
-    : config(config),
+    : config(std::move(config)),
       options(options),
       max_bitrate_bps(max_bitrate_bps),
       codec_settings(codec_settings) {}
@@ -1561,7 +1561,7 @@
 WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
     webrtc::Call* call,
     const StreamParams& sp,
-    const webrtc::VideoSendStream::Config& config,
+    webrtc::VideoSendStream::Config config,
     const VideoOptions& options,
     WebRtcVideoEncoderFactory* external_encoder_factory,
     bool enable_cpu_overuse_detection,
@@ -1582,7 +1582,7 @@
       source_(nullptr),
       external_encoder_factory_(external_encoder_factory),
       stream_(nullptr),
-      parameters_(config, options, max_bitrate_bps, codec_settings),
+      parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
       rtp_parameters_(CreateRtpParametersWithOneEncoding()),
       pending_encoder_reconfiguration_(false),
       allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
@@ -2052,11 +2052,11 @@
   encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(
       codec_settings.codec);
 
-  stream_->ReconfigureVideoEncoder(encoder_config);
+  stream_->ReconfigureVideoEncoder(encoder_config.Copy());
 
   encoder_config.encoder_specific_settings = NULL;
 
-  parameters_.encoder_config = encoder_config;
+  parameters_.encoder_config = std::move(encoder_config);
 }
 
 void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSend(bool send) {
@@ -2249,13 +2249,14 @@
   parameters_.encoder_config.encoder_specific_settings =
       ConfigureVideoEncoderSettings(parameters_.codec_settings->codec);
 
-  webrtc::VideoSendStream::Config config = parameters_.config;
+  webrtc::VideoSendStream::Config config = parameters_.config.Copy();
   if (!config.rtp.rtx.ssrcs.empty() && config.rtp.rtx.payload_type == -1) {
     LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX "
                        "payload type the set codec. Ignoring RTX.";
     config.rtp.rtx.ssrcs.clear();
   }
-  stream_ = call_->CreateVideoSendStream(config, parameters_.encoder_config);
+  stream_ = call_->CreateVideoSendStream(std::move(config),
+                                         parameters_.encoder_config.Copy());
 
   parameters_.encoder_config.encoder_specific_settings = NULL;
   pending_encoder_reconfiguration_ = false;
diff --git a/webrtc/media/engine/webrtcvideoengine2.h b/webrtc/media/engine/webrtcvideoengine2.h
index f99c64d..4f8fcd8 100644
--- a/webrtc/media/engine/webrtcvideoengine2.h
+++ b/webrtc/media/engine/webrtcvideoengine2.h
@@ -248,7 +248,7 @@
     WebRtcVideoSendStream(
         webrtc::Call* call,
         const StreamParams& sp,
-        const webrtc::VideoSendStream::Config& config,
+        webrtc::VideoSendStream::Config config,
         const VideoOptions& options,
         WebRtcVideoEncoderFactory* external_encoder_factory,
         bool enable_cpu_overuse_detection,
@@ -284,7 +284,7 @@
     // similar parameters depending on which options changed etc.
     struct VideoSendStreamParameters {
       VideoSendStreamParameters(
-          const webrtc::VideoSendStream::Config& config,
+          webrtc::VideoSendStream::Config config,
           const VideoOptions& options,
           int max_bitrate_bps,
           const rtc::Optional<VideoCodecSettings>& codec_settings);
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index b0a7513..26b13ed 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -1153,7 +1153,8 @@
     EXPECT_TRUE(streams.size() > 0);
     FakeVideoSendStream* stream = streams[streams.size() - 1];
 
-    webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig();
+    webrtc::VideoEncoderConfig encoder_config =
+        stream->GetEncoderConfig().Copy();
     EXPECT_EQ(1, encoder_config.streams.size());
     return encoder_config.streams[0].max_bitrate_bps;
   }
@@ -1645,7 +1646,8 @@
   EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
 
   // Verify non-screencast settings.
-  webrtc::VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig();
+  webrtc::VideoEncoderConfig encoder_config =
+      send_stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
             encoder_config.content_type);
   EXPECT_EQ(codec.width, encoder_config.streams.front().width);
@@ -1666,7 +1668,7 @@
   EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
 
   // Verify screencast settings.
-  encoder_config = send_stream->GetEncoderConfig();
+  encoder_config = send_stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
             encoder_config.content_type);
   EXPECT_EQ(kScreenshareMinBitrateKbps * 1000,
@@ -1693,7 +1695,7 @@
 
   ASSERT_EQ(1, fake_call_->GetNumCreatedSendStreams());
   FakeVideoSendStream* stream = fake_call_->GetVideoSendStreams().front();
-  webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig();
+  webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
             encoder_config.content_type);
 
@@ -1710,7 +1712,7 @@
   ASSERT_EQ(stream, fake_call_->GetVideoSendStreams().front());
   EXPECT_EQ(2, stream->GetNumberOfSwappedFrames());
 
-  encoder_config = stream->GetEncoderConfig();
+  encoder_config = stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
             encoder_config.content_type);
 
@@ -1723,7 +1725,7 @@
   ASSERT_EQ(stream, fake_call_->GetVideoSendStreams().front());
   EXPECT_EQ(3, stream->GetNumberOfSwappedFrames());
 
-  encoder_config = stream->GetEncoderConfig();
+  encoder_config = stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
             encoder_config.content_type);
 
@@ -1752,10 +1754,11 @@
   ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
   FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
 
-  webrtc::VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig();
+  webrtc::VideoEncoderConfig encoder_config =
+      send_stream->GetEncoderConfig().Copy();
 
   // Verify screencast settings.
-  encoder_config = send_stream->GetEncoderConfig();
+  encoder_config = send_stream->GetEncoderConfig().Copy();
   EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen,
             encoder_config.content_type);
   ASSERT_EQ(1u, encoder_config.streams.size());
@@ -2310,7 +2313,7 @@
   const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1);
   FakeVideoSendStream* stream = AddSendStream(
       cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));
-  webrtc::VideoSendStream::Config config = stream->GetConfig();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
 
   // Make sure NACK and FEC are enabled on the correct payload types.
   EXPECT_EQ(1000, config.rtp.nack.rtp_history_ms);
@@ -2329,7 +2332,7 @@
   ASSERT_TRUE(channel_->SetSendParameters(parameters));
 
   FakeVideoSendStream* stream = AddSendStream();
-  webrtc::VideoSendStream::Config config = stream->GetConfig();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
 
   EXPECT_EQ(-1, config.rtp.fec.ulpfec_payload_type);
   EXPECT_EQ(-1, config.rtp.fec.red_payload_type);
@@ -2368,7 +2371,7 @@
   ASSERT_TRUE(channel_->SetSendParameters(parameters));
 
   FakeVideoSendStream* stream = AddSendStream();
-  webrtc::VideoSendStream::Config config = stream->GetConfig();
+  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
 
   EXPECT_EQ(kUlpfecCodec.id, config.rtp.fec.ulpfec_payload_type);
 
@@ -2376,7 +2379,7 @@
   ASSERT_TRUE(channel_->SetSendParameters(parameters));
   stream = fake_call_->GetVideoSendStreams()[0];
   ASSERT_TRUE(stream != NULL);
-  config = stream->GetConfig();
+  config = stream->GetConfig().Copy();
   EXPECT_EQ(-1, config.rtp.fec.ulpfec_payload_type)
       << "SetSendCodec without FEC should disable current FEC.";
 }
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index 590479f..a766f79 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -267,7 +267,7 @@
   RTC_DCHECK(audio_receive_streams_.empty());
 
   video_send_stream_ = sender_call_->CreateVideoSendStream(
-      video_send_config_, video_encoder_config_);
+      video_send_config_.Copy(), video_encoder_config_.Copy());
   for (size_t i = 0; i < video_receive_configs_.size(); ++i) {
     video_receive_streams_.push_back(receiver_call_->CreateVideoReceiveStream(
         video_receive_configs_[i].Copy()));
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index 0a3f34e..29f844c 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -12,6 +12,7 @@
 
 #include "testing/gtest/include/gtest/gtest.h"
 
+#include "webrtc/base/checks.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/system_wrappers/include/sleep.h"
 
@@ -33,7 +34,7 @@
 FakeEncoder::~FakeEncoder() {}
 
 void FakeEncoder::SetMaxBitrate(int max_kbps) {
-  assert(max_kbps >= -1);  // max_kbps == -1 disables it.
+  RTC_DCHECK_GE(max_kbps, -1);  // max_kbps == -1 disables it.
   max_target_bitrate_kbps_ = max_kbps;
 }
 
@@ -48,7 +49,7 @@
 int32_t FakeEncoder::Encode(const VideoFrame& input_image,
                             const CodecSpecificInfo* codec_specific_info,
                             const std::vector<FrameType>* frame_types) {
-  assert(config_.maxFramerate > 0);
+  RTC_DCHECK_GT(config_.maxFramerate, 0);
   int64_t time_since_last_encode_ms = 1000 / config_.maxFramerate;
   int64_t time_now_ms = clock_->TimeInMilliseconds();
   const bool first_encode = last_encode_time_ms_ == 0;
@@ -75,7 +76,7 @@
     bits_available = max_bits;
   last_encode_time_ms_ = time_now_ms;
 
-  assert(config_.numberOfSimulcastStreams > 0);
+  RTC_DCHECK_GT(config_.numberOfSimulcastStreams, 0);
   for (unsigned char i = 0; i < config_.numberOfSimulcastStreams; ++i) {
     CodecSpecificInfo specifics;
     memset(&specifics, 0, sizeof(specifics));
@@ -97,6 +98,9 @@
     if (stream_bytes > sizeof(encoded_buffer_))
       stream_bytes = sizeof(encoded_buffer_);
 
+    // Always encode something on the first frame.
+    if (min_stream_bits > bits_available && i > 0)
+      continue;
     EncodedImage encoded(
         encoded_buffer_, stream_bytes, sizeof(encoded_buffer_));
     encoded._timeStamp = input_image.timestamp();
@@ -104,10 +108,7 @@
     encoded._frameType = (*frame_types)[i];
     encoded._encodedWidth = config_.simulcastStream[i].width;
     encoded._encodedHeight = config_.simulcastStream[i].height;
-    // Always encode something on the first frame.
-    if (min_stream_bits > bits_available && i > 0)
-      continue;
-    assert(callback_ != NULL);
+    RTC_DCHECK(callback_ != NULL);
     specifics.codec_name = ImplementationName();
     if (callback_->Encoded(encoded, &specifics, NULL) != 0)
       return -1;
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index 84cab26..76c2ea6 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -35,8 +35,6 @@
     "stats_counter.h",
     "stream_synchronization.cc",
     "stream_synchronization.h",
-    "video_capture_input.cc",
-    "video_capture_input.h",
     "video_decoder.cc",
     "video_encoder.cc",
     "video_receive_stream.cc",
@@ -64,6 +62,7 @@
     "..:rtc_event_log",
     "..:webrtc_common",
     "../base:rtc_base_approved",
+    "../base:rtc_task_queue",
     "../common_video",
     "../modules/bitrate_controller",
     "../modules/congestion_controller",
@@ -94,10 +93,10 @@
       "send_statistics_proxy_unittest.cc",
       "stats_counter_unittest.cc",
       "stream_synchronization_unittest.cc",
-      "video_capture_input_unittest.cc",
       "video_decoder_unittest.cc",
       "video_encoder_unittest.cc",
       "video_send_stream_tests.cc",
+      "vie_encoder_unittest.cc",
       "vie_remb_unittest.cc",
     ]
     configs += [ "..:common_config" ]
diff --git a/webrtc/video/encoder_state_feedback_unittest.cc b/webrtc/video/encoder_state_feedback_unittest.cc
index 5351e15..cf92813 100644
--- a/webrtc/video/encoder_state_feedback_unittest.cc
+++ b/webrtc/video/encoder_state_feedback_unittest.cc
@@ -21,9 +21,14 @@
 
 class MockVieEncoder : public ViEEncoder {
  public:
-  explicit MockVieEncoder(ProcessThread* process_thread)
-      : ViEEncoder(1, process_thread, nullptr, nullptr, nullptr) {}
-  ~MockVieEncoder() {}
+  MockVieEncoder()
+      : ViEEncoder(1,
+                   nullptr,
+                   VideoSendStream::Config::EncoderSettings("fake", 0, nullptr),
+                   nullptr,
+                   nullptr,
+                   nullptr) {}
+  ~MockVieEncoder() { Stop(); }
 
   MOCK_METHOD1(OnReceivedIntraFrameRequest, void(size_t));
   MOCK_METHOD1(OnReceivedSLI, void(uint8_t picture_id));
@@ -33,8 +38,7 @@
 class VieKeyRequestTest : public ::testing::Test {
  public:
   VieKeyRequestTest()
-      : encoder_(&process_thread_),
-        simulated_clock_(123456789),
+      : simulated_clock_(123456789),
         encoder_state_feedback_(
             &simulated_clock_,
             std::vector<uint32_t>(1, VieKeyRequestTest::kSsrc),
@@ -42,7 +46,6 @@
 
  protected:
   const uint32_t kSsrc = 1234;
-  NiceMock<MockProcessThread> process_thread_;
   MockVieEncoder encoder_;
   SimulatedClock simulated_clock_;
   EncoderStateFeedback encoder_state_feedback_;
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index 60063f4..26ba26a 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -1284,8 +1284,8 @@
 
       UpdateSendConfig(i, &send_config, &encoder_config, &frame_generators[i]);
 
-      send_streams[i] =
-          sender_call->CreateVideoSendStream(send_config, encoder_config);
+      send_streams[i] = sender_call->CreateVideoSendStream(
+          send_config.Copy(), encoder_config.Copy());
       send_streams[i]->Start();
 
       VideoReceiveStream::Config receive_config(receiver_transport.get());
@@ -2489,7 +2489,7 @@
         }
       }
 
-      video_encoder_config_all_streams_ = *encoder_config;
+      video_encoder_config_all_streams_ = encoder_config->Copy();
       if (send_single_ssrc_first_)
         encoder_config->streams.resize(1);
     }
@@ -2508,7 +2508,7 @@
       if (send_single_ssrc_first_) {
         // Set full simulcast and continue with the rest of the SSRCs.
         send_stream_->ReconfigureVideoEncoder(
-            video_encoder_config_all_streams_);
+            std::move(video_encoder_config_all_streams_));
         EXPECT_TRUE(Wait()) << "Timed out while waiting on additional SSRCs.";
       }
     }
@@ -3203,7 +3203,7 @@
 
   // Use the same total bitrates when sending a single stream to avoid lowering
   // the bitrate estimate and requiring a subsequent rampup.
-  VideoEncoderConfig one_stream = video_encoder_config_;
+  VideoEncoderConfig one_stream = video_encoder_config_.Copy();
   one_stream.streams.resize(1);
   for (size_t i = 1; i < video_encoder_config_.streams.size(); ++i) {
     one_stream.streams.front().min_bitrate_bps +=
@@ -3230,8 +3230,8 @@
     sender_call_->DestroyVideoSendStream(video_send_stream_);
 
     // Re-create VideoSendStream with only one stream.
-    video_send_stream_ =
-        sender_call_->CreateVideoSendStream(video_send_config_, one_stream);
+    video_send_stream_ = sender_call_->CreateVideoSendStream(
+        video_send_config_.Copy(), one_stream.Copy());
     video_send_stream_->Start();
     if (provoke_rtcpsr_before_rtp) {
       // Rapid Resync Request forces sending RTCP Sender Report back.
@@ -3249,18 +3249,18 @@
     EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
 
     // Reconfigure back to use all streams.
-    video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
+    video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
     observer.ResetExpectedSsrcs(kNumSsrcs);
     EXPECT_TRUE(observer.Wait())
         << "Timed out waiting for all SSRCs to send packets.";
 
     // Reconfigure down to one stream.
-    video_send_stream_->ReconfigureVideoEncoder(one_stream);
+    video_send_stream_->ReconfigureVideoEncoder(one_stream.Copy());
     observer.ResetExpectedSsrcs(1);
     EXPECT_TRUE(observer.Wait()) << "Timed out waiting for single RTP packet.";
 
     // Reconfigure back to use all streams.
-    video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
+    video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
     observer.ResetExpectedSsrcs(kNumSsrcs);
     EXPECT_TRUE(observer.Wait())
         << "Timed out waiting for all SSRCs to send packets.";
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
index 8498008..cc4c000 100644
--- a/webrtc/video/overuse_frame_detector.cc
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -193,7 +193,6 @@
       in_quick_rampup_(false),
       current_rampup_delay_ms_(kStandardRampUpDelayMs),
       usage_(new SendProcessingUsage(options)) {
-  RTC_DCHECK(metrics_observer);
   processing_thread_.DetachFromThread();
 }
 
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 72417f2..82ee20a 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -75,7 +75,8 @@
     const VideoSendStream::Config& config,
     VideoEncoderConfig::ContentType content_type)
     : clock_(clock),
-      config_(config),
+      payload_name_(config.encoder_settings.payload_name),
+      rtp_config_(config.rtp),
       content_type_(content_type),
       start_ms_(clock->TimeInMilliseconds()),
       last_sent_frame_timestamp_(0),
@@ -86,14 +87,14 @@
 
 SendStatisticsProxy::~SendStatisticsProxy() {
   rtc::CritScope lock(&crit_);
-  uma_container_->UpdateHistograms(config_, stats_);
+  uma_container_->UpdateHistograms(rtp_config_, stats_);
 
   int64_t elapsed_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000;
   RTC_LOGGED_HISTOGRAM_COUNTS_100000("WebRTC.Video.SendStreamLifetimeInSeconds",
                                      elapsed_sec);
 
   if (elapsed_sec >= metrics::kMinRunTimeInSeconds)
-    UpdateCodecTypeHistogram(config_.encoder_settings.payload_name);
+    UpdateCodecTypeHistogram(payload_name_);
 }
 
 SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
@@ -112,12 +113,11 @@
 
 SendStatisticsProxy::UmaSamplesContainer::~UmaSamplesContainer() {}
 
-void AccumulateRtpStats(const VideoSendStream::Stats& stats,
-                        const VideoSendStream::Config& config,
+void AccumulateRtxStats(const VideoSendStream::Stats& stats,
+                        const std::vector<uint32_t>& rtx_ssrcs,
                         StreamDataCounters* total_rtp_stats,
                         StreamDataCounters* rtx_stats) {
   for (auto it : stats.substreams) {
-    const std::vector<uint32_t> rtx_ssrcs = config.rtp.rtx.ssrcs;
     if (std::find(rtx_ssrcs.begin(), rtx_ssrcs.end(), it.first) !=
         rtx_ssrcs.end()) {
       rtx_stats->Add(it.second.rtp_stats);
@@ -128,7 +128,7 @@
 }
 
 void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
-    const VideoSendStream::Config& config,
+    const VideoSendStream::Config::Rtp& rtp_config,
     const VideoSendStream::Stats& current_stats) {
   RTC_DCHECK(uma_prefix_ == kRealtimePrefix || uma_prefix_ == kScreenPrefix);
   const int kIndex = uma_prefix_ == kScreenPrefix ? 1 : 0;
@@ -262,7 +262,7 @@
       // UmaSamplesContainer, we save the initial state of the counters, so that
       // we can calculate the delta here and aggregate over all ssrcs.
       RtcpPacketTypeCounter counters;
-      for (uint32_t ssrc : config.rtp.ssrcs) {
+      for (uint32_t ssrc : rtp_config.ssrcs) {
         auto kv = current_stats.substreams.find(ssrc);
         if (kv == current_stats.substreams.end())
           continue;
@@ -298,10 +298,11 @@
     if (elapsed_sec >= metrics::kMinRunTimeInSeconds) {
       StreamDataCounters rtp;
       StreamDataCounters rtx;
-      AccumulateRtpStats(current_stats, config, &rtp, &rtx);
+      AccumulateRtxStats(current_stats, rtp_config.rtx.ssrcs, &rtp, &rtx);
       StreamDataCounters start_rtp;
       StreamDataCounters start_rtx;
-      AccumulateRtpStats(start_stats_, config, &start_rtp, &start_rtx);
+      AccumulateRtxStats(start_stats_, rtp_config.rtx.ssrcs, &start_rtp,
+                         &start_rtx);
       rtp.Subtract(start_rtp);
       rtx.Subtract(start_rtx);
       StreamDataCounters rtp_rtx = rtp;
@@ -322,13 +323,13 @@
           kIndex, uma_prefix_ + "RetransmittedBitrateSentInKbps",
           static_cast<int>(rtp_rtx.retransmitted.TotalBytes() * 8 /
                            elapsed_sec / 1000));
-      if (!config.rtp.rtx.ssrcs.empty()) {
+      if (!rtp_config.rtx.ssrcs.empty()) {
         RTC_LOGGED_HISTOGRAMS_COUNTS_10000(
             kIndex, uma_prefix_ + "RtxBitrateSentInKbps",
             static_cast<int>(rtx.transmitted.TotalBytes() * 8 / elapsed_sec /
                              1000));
       }
-      if (config.rtp.fec.red_payload_type != -1) {
+      if (rtp_config.fec.red_payload_type != -1) {
         RTC_LOGGED_HISTOGRAMS_COUNTS_10000(
             kIndex, uma_prefix_ + "FecBitrateSentInKbps",
             static_cast<int>(rtp_rtx.fec.TotalBytes() * 8 / elapsed_sec /
@@ -342,7 +343,7 @@
     VideoEncoderConfig::ContentType content_type) {
   rtc::CritScope lock(&crit_);
   if (content_type_ != content_type) {
-    uma_container_->UpdateHistograms(config_, stats_);
+    uma_container_->UpdateHistograms(rtp_config_, stats_);
     uma_container_.reset(
         new UmaSamplesContainer(GetUmaPrefix(content_type), stats_, clock_));
     content_type_ = content_type;
@@ -400,10 +401,10 @@
     return &it->second;
 
   bool is_rtx = false;
-  if (std::find(config_.rtp.ssrcs.begin(), config_.rtp.ssrcs.end(), ssrc) ==
-      config_.rtp.ssrcs.end()) {
-    if (std::find(config_.rtp.rtx.ssrcs.begin(), config_.rtp.rtx.ssrcs.end(),
-                  ssrc) == config_.rtp.rtx.ssrcs.end()) {
+  if (std::find(rtp_config_.ssrcs.begin(), rtp_config_.ssrcs.end(), ssrc) ==
+      rtp_config_.ssrcs.end()) {
+    if (std::find(rtp_config_.rtx.ssrcs.begin(), rtp_config_.rtx.ssrcs.end(),
+                  ssrc) == rtp_config_.rtx.ssrcs.end()) {
       return nullptr;
     }
     is_rtx = true;
@@ -450,12 +451,12 @@
     }
   }
 
-  if (simulcast_idx >= config_.rtp.ssrcs.size()) {
+  if (simulcast_idx >= rtp_config_.ssrcs.size()) {
     LOG(LS_ERROR) << "Encoded image outside simulcast range (" << simulcast_idx
-                  << " >= " << config_.rtp.ssrcs.size() << ").";
+                  << " >= " << rtp_config_.ssrcs.size() << ").";
     return;
   }
-  uint32_t ssrc = config_.rtp.ssrcs[simulcast_idx];
+  uint32_t ssrc = rtp_config_.ssrcs[simulcast_idx];
 
   VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc);
   if (!stats)
@@ -492,7 +493,7 @@
 
   if (encoded_image.qp_ != -1 && codec_info) {
     if (codec_info->codecType == kVideoCodecVP8) {
-      int spatial_idx = (config_.rtp.ssrcs.size() == 1)
+      int spatial_idx = (rtp_config_.ssrcs.size() == 1)
                             ? -1
                             : static_cast<int>(simulcast_idx);
       uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index 5d9dbf0..88a0e32 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -134,7 +134,8 @@
       EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
   Clock* const clock_;
-  const VideoSendStream::Config config_;
+  const std::string payload_name_;
+  const VideoSendStream::Config::Rtp rtp_config_;
   rtc::CriticalSection crit_;
   VideoEncoderConfig::ContentType content_type_ GUARDED_BY(crit_);
   const int64_t start_ms_;
@@ -152,7 +153,7 @@
                         Clock* clock);
     ~UmaSamplesContainer();
 
-    void UpdateHistograms(const VideoSendStream::Config& config,
+    void UpdateHistograms(const VideoSendStream::Config::Rtp& rtp_config,
                           const VideoSendStream::Stats& current_stats);
 
     const std::string uma_prefix_;
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
deleted file mode 100644
index 8f574e2..0000000
--- a/webrtc/video/video_capture_input.cc
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video/video_capture_input.h"
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/trace_event.h"
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_capture/video_capture_factory.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/video/overuse_frame_detector.h"
-#include "webrtc/video/send_statistics_proxy.h"
-#include "webrtc/video/vie_encoder.h"
-
-namespace webrtc {
-
-namespace internal {
-VideoCaptureInput::VideoCaptureInput(
-    rtc::Event* capture_event,
-    rtc::VideoSinkInterface<VideoFrame>* local_renderer,
-    SendStatisticsProxy* stats_proxy,
-    OveruseFrameDetector* overuse_detector)
-    : local_renderer_(local_renderer),
-      stats_proxy_(stats_proxy),
-      capture_event_(capture_event),
-      // TODO(danilchap): Pass clock from outside to ensure it is same clock
-      // rtcp module use to calculate offset since last frame captured
-      // to estimate rtp timestamp for SenderReport.
-      clock_(Clock::GetRealTimeClock()),
-      last_captured_timestamp_(0),
-      delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
-                             clock_->TimeInMilliseconds()),
-      overuse_detector_(overuse_detector) {}
-
-VideoCaptureInput::~VideoCaptureInput() {
-}
-
-void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
-  // TODO(pbos): Remove local rendering, it should be handled by the client code
-  // if required.
-  if (local_renderer_)
-    local_renderer_->OnFrame(video_frame);
-
-  stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());
-
-  VideoFrame incoming_frame = video_frame;
-
-  // Local time in webrtc time base.
-  int64_t current_time = clock_->TimeInMilliseconds();
-  incoming_frame.set_render_time_ms(current_time);
-
-  // Capture time may come from clock with an offset and drift from clock_.
-  int64_t capture_ntp_time_ms;
-  if (video_frame.ntp_time_ms() != 0) {
-    capture_ntp_time_ms = video_frame.ntp_time_ms();
-  } else if (video_frame.render_time_ms() != 0) {
-    capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_;
-  } else {
-    capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
-  }
-  incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);
-
-  // Convert NTP time, in ms, to RTP timestamp.
-  const int kMsToRtpTimestamp = 90;
-  incoming_frame.set_timestamp(
-      kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));
-
-  rtc::CritScope lock(&crit_);
-  if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
-    // We don't allow the same capture time for two frames, drop this one.
-    LOG(LS_WARNING) << "Same/old NTP timestamp ("
-                    << incoming_frame.ntp_time_ms()
-                    << " <= " << last_captured_timestamp_
-                    << ") for incoming frame. Dropping.";
-    return;
-  }
-
-  captured_frame_.reset(new VideoFrame);
-  captured_frame_->ShallowCopy(incoming_frame);
-  last_captured_timestamp_ = incoming_frame.ntp_time_ms();
-
-  overuse_detector_->FrameCaptured(*captured_frame_);
-
-  TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
-                           "render_time", video_frame.render_time_ms());
-
-  capture_event_->Set();
-}
-
-bool VideoCaptureInput::GetVideoFrame(VideoFrame* video_frame) {
-  rtc::CritScope lock(&crit_);
-  if (!captured_frame_)
-    return false;
-
-  *video_frame = *captured_frame_;
-  captured_frame_.reset();
-  return true;
-}
-
-}  // namespace internal
-}  // namespace webrtc
diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h
deleted file mode 100644
index 5877f6c..0000000
--- a/webrtc/video/video_capture_input.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
-#define WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
-
-#include <memory>
-#include <vector>
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/event.h"
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_capture/video_capture.h"
-#include "webrtc/modules/video_coding/include/video_codec_interface.h"
-#include "webrtc/modules/video_coding/include/video_coding.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_send_stream.h"
-
-namespace webrtc {
-
-class Config;
-class OveruseFrameDetector;
-class SendStatisticsProxy;
-
-namespace internal {
-class VideoCaptureInput : public webrtc::VideoCaptureInput {
- public:
-  VideoCaptureInput(rtc::Event* capture_event,
-                    rtc::VideoSinkInterface<VideoFrame>* local_renderer,
-                    SendStatisticsProxy* send_stats_proxy,
-                    OveruseFrameDetector* overuse_detector);
-  ~VideoCaptureInput();
-
-  void IncomingCapturedFrame(const VideoFrame& video_frame) override;
-
-  bool GetVideoFrame(VideoFrame* frame);
-
- private:
-  rtc::CriticalSection crit_;
-
-  rtc::VideoSinkInterface<VideoFrame>* const local_renderer_;
-  SendStatisticsProxy* const stats_proxy_;
-  rtc::Event* const capture_event_;
-
-  std::unique_ptr<VideoFrame> captured_frame_ GUARDED_BY(crit_);
-  Clock* const clock_;
-  // Used to make sure incoming time stamp is increasing for every frame.
-  int64_t last_captured_timestamp_;
-  // Delta used for translating between NTP and internal timestamps.
-  const int64_t delta_ntp_internal_ms_;
-
-  OveruseFrameDetector* const overuse_detector_;
-};
-
-}  // namespace internal
-}  // namespace webrtc
-
-#endif  // WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
deleted file mode 100644
index 2da722b..0000000
--- a/webrtc/video/video_capture_input_unittest.cc
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/video/video_capture_input.h"
-
-#include <memory>
-#include <vector>
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/event.h"
-#include "webrtc/base/refcount.h"
-#include "webrtc/test/fake_texture_frame.h"
-#include "webrtc/test/frame_utils.h"
-#include "webrtc/video/send_statistics_proxy.h"
-
-// If an output frame does not arrive in 500ms, the test will fail.
-#define FRAME_TIMEOUT_MS 500
-
-namespace webrtc {
-
-bool EqualFramesVector(const std::vector<std::unique_ptr<VideoFrame>>& frames1,
-                       const std::vector<std::unique_ptr<VideoFrame>>& frames2);
-std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t length);
-
-class VideoCaptureInputTest : public ::testing::Test {
- protected:
-  VideoCaptureInputTest()
-      : stats_proxy_(Clock::GetRealTimeClock(),
-                     webrtc::VideoSendStream::Config(nullptr),
-                     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo),
-        capture_event_(false, false) {}
-
-  virtual void SetUp() {
-    overuse_detector_.reset(
-        new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(),
-                                 nullptr, nullptr, &stats_proxy_));
-    input_.reset(new internal::VideoCaptureInput(
-        &capture_event_, nullptr, &stats_proxy_, overuse_detector_.get()));
-  }
-
-  void AddInputFrame(VideoFrame* frame) {
-    input_->IncomingCapturedFrame(*frame);
-  }
-
-  void WaitOutputFrame() {
-    EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS));
-    VideoFrame frame;
-    EXPECT_TRUE(input_->GetVideoFrame(&frame));
-    ASSERT_TRUE(frame.video_frame_buffer());
-    if (!frame.video_frame_buffer()->native_handle()) {
-      output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY());
-    }
-    output_frames_.push_back(
-        std::unique_ptr<VideoFrame>(new VideoFrame(frame)));
-  }
-
-  SendStatisticsProxy stats_proxy_;
-
-  rtc::Event capture_event_;
-
-  std::unique_ptr<OveruseFrameDetector> overuse_detector_;
-
-  // Used to send input capture frames to VideoCaptureInput.
-  std::unique_ptr<internal::VideoCaptureInput> input_;
-
-  // Input capture frames of VideoCaptureInput.
-  std::vector<std::unique_ptr<VideoFrame>> input_frames_;
-
-  // Output delivered frames of VideoCaptureInput.
-  std::vector<std::unique_ptr<VideoFrame>> output_frames_;
-
-  // The pointers of Y plane buffers of output frames. This is used to verify
-  // the frame are swapped and not copied.
-  std::vector<const uint8_t*> output_frame_ybuffers_;
-};
-
-TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
-  // Indicate an output frame has arrived.
-  rtc::Event frame_destroyed_event(false, false);
-  class TestBuffer : public webrtc::I420Buffer {
-   public:
-    explicit TestBuffer(rtc::Event* event) : I420Buffer(5, 5), event_(event) {}
-
-   private:
-    friend class rtc::RefCountedObject<TestBuffer>;
-    ~TestBuffer() override { event_->Set(); }
-    rtc::Event* const event_;
-  };
-
-  {
-    VideoFrame frame(
-        new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
-        kVideoRotation_0);
-
-    AddInputFrame(&frame);
-    WaitOutputFrame();
-
-    EXPECT_EQ(output_frames_[0]->video_frame_buffer().get(),
-              frame.video_frame_buffer().get());
-    output_frames_.clear();
-  }
-  EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS));
-}
-
-TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) {
-  input_frames_.push_back(CreateVideoFrame(0));
-  input_frames_[0]->set_render_time_ms(5);
-  input_frames_[0]->set_ntp_time_ms(0);
-
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-  EXPECT_GT(output_frames_[0]->ntp_time_ms(),
-            input_frames_[0]->render_time_ms());
-}
-
-TEST_F(VideoCaptureInputTest, TestRtpTimeStampSet) {
-  input_frames_.push_back(CreateVideoFrame(0));
-  input_frames_[0]->set_render_time_ms(0);
-  input_frames_[0]->set_ntp_time_ms(1);
-  input_frames_[0]->set_timestamp(0);
-
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-  EXPECT_EQ(output_frames_[0]->timestamp(),
-            input_frames_[0]->ntp_time_ms() * 90);
-}
-
-TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) {
-  input_frames_.push_back(CreateVideoFrame(0));
-
-  input_frames_[0]->set_ntp_time_ms(17);
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-  EXPECT_EQ(output_frames_[0]->timestamp(),
-            input_frames_[0]->ntp_time_ms() * 90);
-
-  // Repeat frame with the same NTP timestamp should drop.
-  AddInputFrame(input_frames_[0].get());
-  EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS));
-
-  // As should frames with a decreased NTP timestamp.
-  input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1);
-  AddInputFrame(input_frames_[0].get());
-  EXPECT_FALSE(capture_event_.Wait(FRAME_TIMEOUT_MS));
-
-  // But delivering with an increased NTP timestamp should succeed.
-  input_frames_[0]->set_ntp_time_ms(4711);
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-  EXPECT_EQ(output_frames_[1]->timestamp(),
-            input_frames_[0]->ntp_time_ms() * 90);
-}
-
-TEST_F(VideoCaptureInputTest, TestTextureFrames) {
-  const int kNumFrame = 3;
-  for (int i = 0 ; i < kNumFrame; ++i) {
-    test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
-    // Add one to |i| so that width/height > 0.
-    input_frames_.push_back(std::unique_ptr<VideoFrame>(new VideoFrame(
-        test::FakeNativeHandle::CreateFrame(dummy_handle, i + 1, i + 1, i + 1,
-                                            i + 1, webrtc::kVideoRotation_0))));
-    AddInputFrame(input_frames_[i].get());
-    WaitOutputFrame();
-    ASSERT_TRUE(output_frames_[i]->video_frame_buffer());
-    EXPECT_EQ(dummy_handle,
-              output_frames_[i]->video_frame_buffer()->native_handle());
-  }
-
-  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
-}
-
-TEST_F(VideoCaptureInputTest, TestI420Frames) {
-  const int kNumFrame = 4;
-  std::vector<const uint8_t*> ybuffer_pointers;
-  for (int i = 0; i < kNumFrame; ++i) {
-    input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1)));
-    ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY());
-    AddInputFrame(input_frames_[i].get());
-    WaitOutputFrame();
-  }
-
-  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
-  // Make sure the buffer is not copied.
-  for (int i = 0; i < kNumFrame; ++i)
-    EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
-}
-
-TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
-  test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
-  input_frames_.push_back(std::unique_ptr<VideoFrame>(
-      new VideoFrame(test::FakeNativeHandle::CreateFrame(
-          dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0))));
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-  ASSERT_TRUE(output_frames_[0]->video_frame_buffer());
-  EXPECT_EQ(dummy_handle,
-            output_frames_[0]->video_frame_buffer()->native_handle());
-
-  input_frames_.push_back(CreateVideoFrame(2));
-  AddInputFrame(input_frames_[1].get());
-  WaitOutputFrame();
-
-  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
-}
-
-TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
-  input_frames_.push_back(CreateVideoFrame(1));
-  AddInputFrame(input_frames_[0].get());
-  WaitOutputFrame();
-
-  test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
-  input_frames_.push_back(std::unique_ptr<VideoFrame>(
-      new VideoFrame(test::FakeNativeHandle::CreateFrame(
-          dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0))));
-  AddInputFrame(input_frames_[1].get());
-  WaitOutputFrame();
-
-  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
-}
-
-bool EqualFramesVector(
-    const std::vector<std::unique_ptr<VideoFrame>>& frames1,
-    const std::vector<std::unique_ptr<VideoFrame>>& frames2) {
-  if (frames1.size() != frames2.size())
-    return false;
-  for (size_t i = 0; i < frames1.size(); ++i) {
-    // Compare frame buffers, since we don't care about differing timestamps.
-    if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(),
-                              frames2[i]->video_frame_buffer())) {
-      return false;
-    }
-  }
-  return true;
-}
-
-std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t data) {
-  std::unique_ptr<VideoFrame> frame(new VideoFrame());
-  const int width = 36;
-  const int height = 24;
-  const int kSizeY = width * height * 2;
-  uint8_t buffer[kSizeY];
-  memset(buffer, data, kSizeY);
-  frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2,
-                     width / 2, kVideoRotation_0);
-  frame->set_render_time_ms(data);
-  return frame;
-}
-
-}  // namespace webrtc
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 51160c9..baccc71 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -1198,7 +1198,7 @@
 
   SetupCommon(&transport, &transport);
 
-  video_send_config_.local_renderer = local_preview.get();
+  video_send_config_.pre_encode_callback = local_preview.get();
   video_receive_configs_[stream_id].renderer = loopback_video.get();
   if (params_.audio && params_.audio_video_sync)
     video_receive_configs_[stream_id].sync_group = kSyncGroup;
@@ -1218,8 +1218,8 @@
   if (params_.screenshare.enabled)
     SetupScreenshare();
 
-  video_send_stream_ =
-      call->CreateVideoSendStream(video_send_config_, video_encoder_config_);
+  video_send_stream_ = call->CreateVideoSendStream(
+      video_send_config_.Copy(), video_encoder_config_.Copy());
   VideoReceiveStream* video_receive_stream =
       call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy());
   CreateCapturer(video_send_stream_->Input());
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index a46bc85..2f8241e 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -7,7 +7,6 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
-
 #include "webrtc/video/video_send_stream.h"
 
 #include <algorithm>
@@ -19,7 +18,6 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/trace_event.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
 #include "webrtc/modules/congestion_controller/include/congestion_controller.h"
 #include "webrtc/modules/pacing/packet_router.h"
@@ -27,18 +25,12 @@
 #include "webrtc/modules/utility/include/process_thread.h"
 #include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
 #include "webrtc/video/call_stats.h"
-#include "webrtc/video/video_capture_input.h"
 #include "webrtc/video/vie_remb.h"
 #include "webrtc/video_send_stream.h"
 
 namespace webrtc {
 
-class RtcpIntraFrameObserver;
-class TransportFeedbackObserver;
-
 static const int kMinSendSidePacketHistorySize = 600;
-static const int kEncoderTimeOutMs = 2000;
-
 namespace {
 
 std::vector<RtpRtcp*> CreateRtpRtcpModules(
@@ -152,8 +144,6 @@
      << (pre_encode_callback ? "(I420FrameCallback)" : "nullptr");
   ss << ", post_encode_callback: "
      << (post_encode_callback ? "(EncodedFrameObserver)" : "nullptr");
-  ss << ", local_renderer: "
-     << (local_renderer ? "(VideoRenderer)" : "nullptr");
   ss << ", render_delay_ms: " << render_delay_ms;
   ss << ", target_delay_ms: " << target_delay_ms;
   ss << ", suspend_below_min_bitrate: " << (suspend_below_min_bitrate ? "on"
@@ -204,192 +194,15 @@
 
 namespace {
 
-VideoCodecType PayloadNameToCodecType(const std::string& payload_name) {
-  if (payload_name == "VP8")
-    return kVideoCodecVP8;
-  if (payload_name == "VP9")
-    return kVideoCodecVP9;
-  if (payload_name == "H264")
-    return kVideoCodecH264;
-  return kVideoCodecGeneric;
-}
-
 bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) {
-  switch (PayloadNameToCodecType(payload_name)) {
-    case kVideoCodecVP8:
-    case kVideoCodecVP9:
-      return true;
-    case kVideoCodecH264:
-    case kVideoCodecGeneric:
-      return false;
-    case kVideoCodecI420:
-    case kVideoCodecRED:
-    case kVideoCodecULPFEC:
-    case kVideoCodecUnknown:
-      RTC_NOTREACHED();
-      return false;
-  }
-  RTC_NOTREACHED();
+  if (payload_name == "VP8" || payload_name == "VP9")
+    return true;
+  RTC_DCHECK(payload_name == "H264" || payload_name == "FAKE")
+      << "unknown payload_name " << payload_name;
   return false;
 }
 
-// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
-// pipelining encoders better (multiple input frames before something comes
-// out). This should effectively turn off CPU adaptations for systems that
-// remotely cope with the load right now.
-CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
-  CpuOveruseOptions options;
-  if (full_overuse_time) {
-    options.low_encode_usage_threshold_percent = 150;
-    options.high_encode_usage_threshold_percent = 200;
-  }
-  return options;
-}
-
-VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
-                                          const std::string& payload_name,
-                                          int payload_type) {
-  const std::vector<VideoStream>& streams = config.streams;
-  static const int kEncoderMinBitrateKbps = 30;
-  RTC_DCHECK(!streams.empty());
-  RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
-
-  VideoCodec video_codec;
-  memset(&video_codec, 0, sizeof(video_codec));
-  video_codec.codecType = PayloadNameToCodecType(payload_name);
-
-  switch (config.content_type) {
-    case VideoEncoderConfig::ContentType::kRealtimeVideo:
-      video_codec.mode = kRealtimeVideo;
-      break;
-    case VideoEncoderConfig::ContentType::kScreen:
-      video_codec.mode = kScreensharing;
-      if (config.streams.size() == 1 &&
-          config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
-        video_codec.targetBitrate =
-            config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
-      }
-      break;
-  }
-
-  switch (video_codec.codecType) {
-    case kVideoCodecVP8: {
-      if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
-            config.encoder_specific_settings);
-      } else {
-        video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
-      }
-      video_codec.codecSpecific.VP8.numberOfTemporalLayers =
-          static_cast<unsigned char>(
-              streams.back().temporal_layer_thresholds_bps.size() + 1);
-      break;
-    }
-    case kVideoCodecVP9: {
-      if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
-            config.encoder_specific_settings);
-        if (video_codec.mode == kScreensharing) {
-          video_codec.codecSpecific.VP9.flexibleMode = true;
-          // For now VP9 screensharing use 1 temporal and 2 spatial layers.
-          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
-                        1);
-          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
-        }
-      } else {
-        video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
-      }
-      video_codec.codecSpecific.VP9.numberOfTemporalLayers =
-          static_cast<unsigned char>(
-              streams.back().temporal_layer_thresholds_bps.size() + 1);
-      break;
-    }
-    case kVideoCodecH264: {
-      if (config.encoder_specific_settings) {
-        video_codec.codecSpecific.H264 =
-            *reinterpret_cast<const VideoCodecH264*>(
-                config.encoder_specific_settings);
-      } else {
-        video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
-      }
-      break;
-    }
-    default:
-      // TODO(pbos): Support encoder_settings codec-agnostically.
-      RTC_DCHECK(!config.encoder_specific_settings)
-          << "Encoder-specific settings for codec type not wired up.";
-      break;
-  }
-
-  strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1);
-  video_codec.plName[kPayloadNameSize - 1] = '\0';
-  video_codec.plType = payload_type;
-  video_codec.numberOfSimulcastStreams =
-      static_cast<unsigned char>(streams.size());
-  video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
-  if (video_codec.minBitrate < kEncoderMinBitrateKbps)
-    video_codec.minBitrate = kEncoderMinBitrateKbps;
-  RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
-  if (video_codec.codecType == kVideoCodecVP9) {
-    // If the vector is empty, bitrates will be configured automatically.
-    RTC_DCHECK(config.spatial_layers.empty() ||
-               config.spatial_layers.size() ==
-                   video_codec.codecSpecific.VP9.numberOfSpatialLayers);
-    RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
-                  kMaxSimulcastStreams);
-    for (size_t i = 0; i < config.spatial_layers.size(); ++i)
-      video_codec.spatialLayers[i] = config.spatial_layers[i];
-  }
-  for (size_t i = 0; i < streams.size(); ++i) {
-    SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
-    RTC_DCHECK_GT(streams[i].width, 0u);
-    RTC_DCHECK_GT(streams[i].height, 0u);
-    RTC_DCHECK_GT(streams[i].max_framerate, 0);
-    // Different framerates not supported per stream at the moment.
-    RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
-    RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
-    RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
-    RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
-    RTC_DCHECK_GE(streams[i].max_qp, 0);
-
-    sim_stream->width = static_cast<uint16_t>(streams[i].width);
-    sim_stream->height = static_cast<uint16_t>(streams[i].height);
-    sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
-    sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
-    sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
-    sim_stream->qpMax = streams[i].max_qp;
-    sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
-        streams[i].temporal_layer_thresholds_bps.size() + 1);
-
-    video_codec.width = std::max(video_codec.width,
-                                 static_cast<uint16_t>(streams[i].width));
-    video_codec.height = std::max(
-        video_codec.height, static_cast<uint16_t>(streams[i].height));
-    video_codec.minBitrate =
-        std::min(static_cast<uint16_t>(video_codec.minBitrate),
-                 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
-    video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
-    video_codec.qpMax = std::max(video_codec.qpMax,
-                                 static_cast<unsigned int>(streams[i].max_qp));
-  }
-
-  if (video_codec.maxBitrate == 0) {
-    // Unset max bitrate -> cap to one bit per pixel.
-    video_codec.maxBitrate =
-        (video_codec.width * video_codec.height * video_codec.maxFramerate) /
-        1000;
-  }
-  if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
-    video_codec.maxBitrate = kEncoderMinBitrateKbps;
-
-  RTC_DCHECK_GT(streams[0].max_framerate, 0);
-  video_codec.maxFramerate = streams[0].max_framerate;
-  video_codec.expect_encode_from_texture = config.expect_encode_from_texture;
-
-  return video_codec;
-}
-
-int CalulcateMaxPadBitrateBps(const VideoEncoderConfig& config,
+int CalculateMaxPadBitrateBps(const VideoEncoderConfig& config,
                               bool pad_to_min_bitrate) {
   int pad_up_to_bitrate_bps = 0;
   // Calculate max padding bitrate for a multi layer codec.
@@ -413,87 +226,443 @@
 }  // namespace
 
 namespace internal {
+
+// VideoSendStreamImpl implements internal::VideoSendStream.
+// It is created and destroyed on |worker_queue|. The intent is to decrease the
+// need for locking and to ensure methods are called in sequence.
+// Public methods except |DeliverRtcp| must be called on |worker_queue|.
+// DeliverRtcp is called on the libjingle worker thread or a network thread.
+// An encoder may deliver frames through the EncodedImageCallback on an
+// arbitrary thread.
+class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
+                            public webrtc::VCMProtectionCallback,
+                            public EncodedImageCallback {
+ public:
+  VideoSendStreamImpl(SendStatisticsProxy* stats_proxy,
+                      rtc::TaskQueue* worker_queue,
+                      CallStats* call_stats,
+                      CongestionController* congestion_controller,
+                      BitrateAllocator* bitrate_allocator,
+                      SendDelayStats* send_delay_stats,
+                      VieRemb* remb,
+                      ViEEncoder* vie_encoder,
+                      RtcEventLog* event_log,
+                      const VideoSendStream::Config* config,
+                      std::map<uint32_t, RtpState> suspended_ssrcs);
+  ~VideoSendStreamImpl() override;
+
+  // RegisterProcessThread register |module_process_thread| with those objects
+  // that use it. Registration has to happen on the thread where
+  // |module_process_thread| was created (libjingle's worker thread).
+  // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue,
+  // maybe |worker_queue|.
+  void RegisterProcessThread(ProcessThread* module_process_thread);
+  void DeRegisterProcessThread();
+
+  void SignalNetworkState(NetworkState state);
+  bool DeliverRtcp(const uint8_t* packet, size_t length);
+  void Start();
+  void Stop();
+
+  void SignalEncoderConfigurationChanged(const VideoEncoderConfig& config);
+  VideoSendStream::RtpStateMap GetRtpStates() const;
+
+ private:
+  class CheckEncoderActivityTask;
+
+  // Implements BitrateAllocatorObserver.
+  uint32_t OnBitrateUpdated(uint32_t bitrate_bps,
+                            uint8_t fraction_loss,
+                            int64_t rtt) override;
+
+  // Implements webrtc::VCMProtectionCallback.
+  int ProtectionRequest(const FecProtectionParams* delta_params,
+                        const FecProtectionParams* key_params,
+                        uint32_t* sent_video_rate_bps,
+                        uint32_t* sent_nack_rate_bps,
+                        uint32_t* sent_fec_rate_bps) override;
+
+  // Implements EncodedImageCallback. The implementation routes encoded frames
+  // to the |payload_router_| and |config.pre_encode_callback| if set.
+  // Called on an arbitrary encoder callback thread.
+  EncodedImageCallback::Result OnEncodedImage(
+      const EncodedImage& encoded_image,
+      const CodecSpecificInfo* codec_specific_info,
+      const RTPFragmentationHeader* fragmentation) override;
+
+  void ConfigureProtection();
+  void ConfigureSsrcs();
+  void SignalEncoderTimedOut();
+  void SignalEncoderActive();
+
+  SendStatisticsProxy* const stats_proxy_;
+  const VideoSendStream::Config* const config_;
+  std::map<uint32_t, RtpState> suspended_ssrcs_;
+
+  ProcessThread* module_process_thread_;
+  rtc::ThreadChecker module_process_thread_checker_;
+  rtc::TaskQueue* const worker_queue_;
+
+  rtc::CriticalSection encoder_activity_crit_sect_;
+  CheckEncoderActivityTask* check_encoder_activity_task_
+      GUARDED_BY(encoder_activity_crit_sect_);
+  CallStats* const call_stats_;
+  CongestionController* const congestion_controller_;
+  BitrateAllocator* const bitrate_allocator_;
+  VieRemb* const remb_;
+
+  static const bool kEnableFrameRecording = false;
+  static const int kMaxLayers = 3;
+  std::unique_ptr<IvfFileWriter> file_writers_[kMaxLayers];
+
+  int max_padding_bitrate_;
+  int encoder_min_bitrate_bps_;
+  uint32_t encoder_max_bitrate_bps_;
+  uint32_t encoder_target_rate_bps_;
+
+  ViEEncoder* const vie_encoder_;
+  EncoderStateFeedback encoder_feedback_;
+  ProtectionBitrateCalculator protection_bitrate_calculator_;
+
+  const std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+  // RtpRtcp modules, declared here as they use other members on construction.
+  const std::vector<RtpRtcp*> rtp_rtcp_modules_;
+  PayloadRouter payload_router_;
+};
+
+// TODO(tommi): See if there's a more elegant way to create a task that creates
+// an object on the correct task queue.
+class VideoSendStream::ConstructionTask : public rtc::QueuedTask {
+ public:
+  ConstructionTask(std::unique_ptr<VideoSendStreamImpl>* send_stream,
+                   rtc::Event* done_event,
+                   SendStatisticsProxy* stats_proxy,
+                   ViEEncoder* vie_encoder,
+                   ProcessThread* module_process_thread,
+                   CallStats* call_stats,
+                   CongestionController* congestion_controller,
+                   BitrateAllocator* bitrate_allocator,
+                   SendDelayStats* send_delay_stats,
+                   VieRemb* remb,
+                   RtcEventLog* event_log,
+                   const VideoSendStream::Config* config,
+                   const std::map<uint32_t, RtpState>& suspended_ssrcs)
+      : send_stream_(send_stream),
+        done_event_(done_event),
+        stats_proxy_(stats_proxy),
+        vie_encoder_(vie_encoder),
+        call_stats_(call_stats),
+        congestion_controller_(congestion_controller),
+        bitrate_allocator_(bitrate_allocator),
+        send_delay_stats_(send_delay_stats),
+        remb_(remb),
+        event_log_(event_log),
+        config_(config),
+        suspended_ssrcs_(suspended_ssrcs) {}
+
+  ~ConstructionTask() override { done_event_->Set(); }
+
+ private:
+  bool Run() override {
+    send_stream_->reset(new VideoSendStreamImpl(
+        stats_proxy_, rtc::TaskQueue::Current(), call_stats_,
+        congestion_controller_, bitrate_allocator_, send_delay_stats_, remb_,
+        vie_encoder_, event_log_, config_, std::move(suspended_ssrcs_)));
+    return true;
+  }
+
+  std::unique_ptr<VideoSendStreamImpl>* const send_stream_;
+  rtc::Event* const done_event_;
+  SendStatisticsProxy* const stats_proxy_;
+  ViEEncoder* const vie_encoder_;
+  CallStats* const call_stats_;
+  CongestionController* const congestion_controller_;
+  BitrateAllocator* const bitrate_allocator_;
+  SendDelayStats* const send_delay_stats_;
+  VieRemb* const remb_;
+  RtcEventLog* const event_log_;
+  const VideoSendStream::Config* config_;
+  std::map<uint32_t, RtpState> suspended_ssrcs_;
+};
+
+class VideoSendStream::DestructAndGetRtpStateTask : public rtc::QueuedTask {
+ public:
+  DestructAndGetRtpStateTask(VideoSendStream::RtpStateMap* state_map,
+                             std::unique_ptr<VideoSendStreamImpl> send_stream,
+                             rtc::Event* done_event)
+      : state_map_(state_map),
+        send_stream_(std::move(send_stream)),
+        done_event_(done_event) {}
+
+  ~DestructAndGetRtpStateTask() override { RTC_CHECK(!send_stream_); }
+
+ private:
+  bool Run() override {
+    send_stream_->Stop();
+    *state_map_ = send_stream_->GetRtpStates();
+    send_stream_.reset();
+    done_event_->Set();
+    return true;
+  }
+
+  VideoSendStream::RtpStateMap* state_map_;
+  std::unique_ptr<VideoSendStreamImpl> send_stream_;
+  rtc::Event* done_event_;
+};
+
+// CheckEncoderActivityTask is used for tracking when the encoder last produced
+// an encoded video frame. If the encoder has not produced anything in the last
+// kEncoderTimeOutMs, we also want to stop sending padding.
+class VideoSendStreamImpl::CheckEncoderActivityTask : public rtc::QueuedTask {
+ public:
+  static const int kEncoderTimeOutMs = 2000;
+  explicit CheckEncoderActivityTask(VideoSendStreamImpl* send_stream)
+      : activity_(0), send_stream_(send_stream), timed_out_(false) {}
+
+  void Stop() {
+    RTC_CHECK(task_checker_.CalledSequentially());
+    send_stream_ = nullptr;
+  }
+
+  void UpdateEncoderActivity() {
+    // UpdateEncoderActivity is called from VideoSendStreamImpl::Encoded on
+    // whatever thread the real encoder implementation runs on. In the case of
+    // hardware encoders, there might be several encoders
+    // running in parallel on different threads.
+    rtc::AtomicOps::ReleaseStore(&activity_, 1);
+  }
+
+ private:
+  bool Run() override {
+    RTC_CHECK(task_checker_.CalledSequentially());
+    if (!send_stream_)
+      return true;
+    if (!rtc::AtomicOps::AcquireLoad(&activity_)) {
+      if (!timed_out_) {
+        send_stream_->SignalEncoderTimedOut();
+      }
+      timed_out_ = true;
+    } else if (timed_out_) {
+      send_stream_->SignalEncoderActive();
+      timed_out_ = false;
+    }
+    rtc::AtomicOps::ReleaseStore(&activity_, 0);
+
+    rtc::TaskQueue::Current()->PostDelayedTask(
+        std::unique_ptr<rtc::QueuedTask>(this), kEncoderTimeOutMs);
+    // Return false to prevent this task from being deleted. Ownership has been
+    // transferred to the task queue when PostDelayedTask was called.
+    return false;
+  }
+  volatile int activity_;
+
+  rtc::SequencedTaskChecker task_checker_;
+  VideoSendStreamImpl* send_stream_;
+  bool timed_out_;
+};
+
+class ReconfigureVideoEncoderTask : public rtc::QueuedTask {
+ public:
+  ReconfigureVideoEncoderTask(VideoSendStreamImpl* send_stream,
+                              VideoEncoderConfig config)
+      : send_stream_(send_stream), config_(std::move(config)) {}
+
+ private:
+  bool Run() override {
+    send_stream_->SignalEncoderConfigurationChanged(std::move(config_));
+    return true;
+  }
+
+  VideoSendStreamImpl* send_stream_;
+  VideoEncoderConfig config_;
+};
+
 VideoSendStream::VideoSendStream(
     int num_cpu_cores,
     ProcessThread* module_process_thread,
+    rtc::TaskQueue* worker_queue,
     CallStats* call_stats,
     CongestionController* congestion_controller,
     BitrateAllocator* bitrate_allocator,
     SendDelayStats* send_delay_stats,
     VieRemb* remb,
     RtcEventLog* event_log,
-    const VideoSendStream::Config& config,
-    const VideoEncoderConfig& encoder_config,
+    VideoSendStream::Config config,
+    VideoEncoderConfig encoder_config,
     const std::map<uint32_t, RtpState>& suspended_ssrcs)
-    : stats_proxy_(Clock::GetRealTimeClock(),
+    : worker_queue_(worker_queue),
+      thread_sync_event_(false /* manual_reset */, false),
+      stats_proxy_(Clock::GetRealTimeClock(),
                    config,
                    encoder_config.content_type),
+      config_(std::move(config)) {
+  vie_encoder_.reset(
+      new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings,
+                     config_.pre_encode_callback, config_.overuse_callback,
+                     config_.post_encode_callback));
+
+  worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask(
+      &send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(),
+      module_process_thread, call_stats, congestion_controller,
+      bitrate_allocator, send_delay_stats, remb, event_log, &config_,
+      suspended_ssrcs)));
+
+  // Wait for ConstructionTask to complete so that |send_stream_| can be used.
+  // |module_process_thread| must be registered and deregistered on the thread
+  // it was created on.
+  thread_sync_event_.Wait(rtc::Event::kForever);
+  send_stream_->RegisterProcessThread(module_process_thread);
+
+  vie_encoder_->RegisterProcessThread(module_process_thread);
+
+  ReconfigureVideoEncoder(std::move(encoder_config));
+}
+
+VideoSendStream::~VideoSendStream() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  RTC_DCHECK(!send_stream_);
+}
+
+void VideoSendStream::Start() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  LOG(LS_INFO) << "VideoSendStream::Start";
+  VideoSendStreamImpl* send_stream = send_stream_.get();
+  worker_queue_->PostTask([this, send_stream] {
+    send_stream->Start();
+    thread_sync_event_.Set();
+  });
+
+  // It is expected that after VideoSendStream::Start has been called, incoming
+  // frames are not dropped in ViEEncoder. To ensure this, Start has to be
+  // synchronized.
+  thread_sync_event_.Wait(rtc::Event::kForever);
+}
+
+void VideoSendStream::Stop() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  LOG(LS_INFO) << "VideoSendStream::Stop";
+  VideoSendStreamImpl* send_stream = send_stream_.get();
+  worker_queue_->PostTask([send_stream] { send_stream->Stop(); });
+}
+
+VideoCaptureInput* VideoSendStream::Input() {
+  // Input() will be called on the thread that delivers video frames from
+  // libjingle.
+  // TODO(perkj): Refactor ViEEncoder to register directly as a VideoSink to the
+  // VideoSource.
+  return vie_encoder_.get();
+}
+
+void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
+  // ReconfigureVideoEncoder will be called on the thread that delivers video
+  // frames. We must change the encoder settings immediately so that
+  // the codec settings match the next frame.
+  // TODO(perkj): Move logic for reconfiguring the encoder due to frame size
+  // change from WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame to
+  // be internally handled by ViEEncoder.
+  vie_encoder_->ConfigureEncoder(config, config_.rtp.max_packet_size);
+
+  worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(
+      new ReconfigureVideoEncoderTask(send_stream_.get(), std::move(config))));
+}
+
+VideoSendStream::Stats VideoSendStream::GetStats() {
+  // TODO(perkj, solenberg): Some test cases in EndToEndTest call GetStats from
+  // a network thread. See comment in Call::GetStats().
+  // RTC_DCHECK_RUN_ON(&thread_checker_);
+  return stats_proxy_.GetStats();
+}
+
+void VideoSendStream::SignalNetworkState(NetworkState state) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  VideoSendStreamImpl* send_stream = send_stream_.get();
+  worker_queue_->PostTask(
+      [send_stream, state] { send_stream->SignalNetworkState(state); });
+}
+
+VideoSendStream::RtpStateMap VideoSendStream::StopPermanentlyAndGetRtpStates() {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  vie_encoder_->Stop();
+  vie_encoder_->DeRegisterProcessThread();
+  VideoSendStream::RtpStateMap state_map;
+  send_stream_->DeRegisterProcessThread();
+  worker_queue_->PostTask(
+      std::unique_ptr<rtc::QueuedTask>(new DestructAndGetRtpStateTask(
+          &state_map, std::move(send_stream_), &thread_sync_event_)));
+  thread_sync_event_.Wait(rtc::Event::kForever);
+  return state_map;
+}
+
+bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+  // Called on a network thread.
+  return send_stream_->DeliverRtcp(packet, length);
+}
+
+VideoSendStreamImpl::VideoSendStreamImpl(
+    SendStatisticsProxy* stats_proxy,
+    rtc::TaskQueue* worker_queue,
+    CallStats* call_stats,
+    CongestionController* congestion_controller,
+    BitrateAllocator* bitrate_allocator,
+    SendDelayStats* send_delay_stats,
+    VieRemb* remb,
+    ViEEncoder* vie_encoder,
+    RtcEventLog* event_log,
+    const VideoSendStream::Config* config,
+    std::map<uint32_t, RtpState> suspended_ssrcs)
+    : stats_proxy_(stats_proxy),
       config_(config),
-      suspended_ssrcs_(suspended_ssrcs),
-      module_process_thread_(module_process_thread),
+      suspended_ssrcs_(std::move(suspended_ssrcs)),
+      module_process_thread_(nullptr),
+      worker_queue_(worker_queue),
+      check_encoder_activity_task_(nullptr),
       call_stats_(call_stats),
       congestion_controller_(congestion_controller),
       bitrate_allocator_(bitrate_allocator),
       remb_(remb),
-      encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
-      encoder_wakeup_event_(false, false),
-      stop_encoder_thread_(0),
+      max_padding_bitrate_(0),
+      encoder_min_bitrate_bps_(0),
       encoder_max_bitrate_bps_(0),
       encoder_target_rate_bps_(0),
-      state_(State::kStopped),
-      overuse_detector_(
-          Clock::GetRealTimeClock(),
-          GetCpuOveruseOptions(config.encoder_settings.full_overuse_time),
-          this,
-          config.post_encode_callback,
-          &stats_proxy_),
-      vie_encoder_(num_cpu_cores,
-                   module_process_thread_,
-                   &stats_proxy_,
-                   &overuse_detector_,
-                   this),
+      vie_encoder_(vie_encoder),
       encoder_feedback_(Clock::GetRealTimeClock(),
-                        config.rtp.ssrcs,
-                        &vie_encoder_),
+                        config_->rtp.ssrcs,
+                        vie_encoder),
       protection_bitrate_calculator_(Clock::GetRealTimeClock(), this),
-      video_sender_(vie_encoder_.video_sender()),
       bandwidth_observer_(congestion_controller_->GetBitrateController()
                               ->CreateRtcpBandwidthObserver()),
       rtp_rtcp_modules_(CreateRtpRtcpModules(
-          config.send_transport,
+          config_->send_transport,
           &encoder_feedback_,
           bandwidth_observer_.get(),
           congestion_controller_->GetTransportFeedbackObserver(),
           call_stats_->rtcp_rtt_stats(),
           congestion_controller_->pacer(),
           congestion_controller_->packet_router(),
-          &stats_proxy_,
+          stats_proxy_,
           send_delay_stats,
           event_log,
           congestion_controller_->GetRetransmissionRateLimiter(),
-          config_.rtp.ssrcs.size())),
-      payload_router_(rtp_rtcp_modules_, config.encoder_settings.payload_type),
-      input_(&encoder_wakeup_event_,
-             config_.local_renderer,
-             &stats_proxy_,
-             &overuse_detector_) {
-  LOG(LS_INFO) << "VideoSendStream: " << config_.ToString();
+          config_->rtp.ssrcs.size())),
+      payload_router_(rtp_rtcp_modules_,
+                      config_->encoder_settings.payload_type) {
+  RTC_DCHECK_RUN_ON(worker_queue_);
+  LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString();
+  module_process_thread_checker_.DetachFromThread();
 
-  RTC_DCHECK(!config_.rtp.ssrcs.empty());
-  RTC_DCHECK(module_process_thread_);
+  RTC_DCHECK(!config_->rtp.ssrcs.empty());
   RTC_DCHECK(call_stats_);
   RTC_DCHECK(congestion_controller_);
   RTC_DCHECK(remb_);
 
   // RTP/RTCP initialization.
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
-    module_process_thread_->RegisterModule(rtp_rtcp);
     congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp);
   }
 
-  for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
-    const std::string& extension = config_.rtp.extensions[i].uri;
-    int id = config_.rtp.extensions[i].id;
+  for (size_t i = 0; i < config_->rtp.extensions.size(); ++i) {
+    const std::string& extension = config_->rtp.extensions[i].uri;
+    int id = config_->rtp.extensions[i].id;
     // One-byte-extension local identifiers are in the range 1-14 inclusive.
     RTC_DCHECK_GE(id, 1);
     RTC_DCHECK_LE(id, 14);
@@ -511,264 +680,185 @@
   ConfigureSsrcs();
 
   // TODO(pbos): Should we set CNAME on all RTP modules?
-  rtp_rtcp_modules_.front()->SetCNAME(config_.rtp.c_name.c_str());
+  rtp_rtcp_modules_.front()->SetCNAME(config_->rtp.c_name.c_str());
   // 28 to match packet overhead in ModuleRtpRtcpImpl.
   static const size_t kRtpPacketSizeOverhead = 28;
-  RTC_DCHECK_LE(config_.rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead);
-  const uint16_t mtu = static_cast<uint16_t>(config_.rtp.max_packet_size +
+  RTC_DCHECK_LE(config_->rtp.max_packet_size, 0xFFFFu + kRtpPacketSizeOverhead);
+  const uint16_t mtu = static_cast<uint16_t>(config_->rtp.max_packet_size +
                                              kRtpPacketSizeOverhead);
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
-    rtp_rtcp->RegisterRtcpStatisticsCallback(&stats_proxy_);
-    rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(&stats_proxy_);
+    rtp_rtcp->RegisterRtcpStatisticsCallback(stats_proxy_);
+    rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(stats_proxy_);
     rtp_rtcp->SetMaxTransferUnit(mtu);
     rtp_rtcp->RegisterVideoSendPayload(
-        config_.encoder_settings.payload_type,
-        config_.encoder_settings.payload_name.c_str());
+        config_->encoder_settings.payload_type,
+        config_->encoder_settings.payload_name.c_str());
   }
 
-  RTC_DCHECK(config.encoder_settings.encoder);
-  RTC_DCHECK_GE(config.encoder_settings.payload_type, 0);
-  RTC_DCHECK_LE(config.encoder_settings.payload_type, 127);
-  ReconfigureVideoEncoder(encoder_config);
+  RTC_DCHECK(config_->encoder_settings.encoder);
+  RTC_DCHECK_GE(config_->encoder_settings.payload_type, 0);
+  RTC_DCHECK_LE(config_->encoder_settings.payload_type, 127);
 
-  module_process_thread_->RegisterModule(&overuse_detector_);
-
-  encoder_thread_checker_.DetachFromThread();
-  encoder_thread_.Start();
-  encoder_thread_.SetPriority(rtc::kHighPriority);
+  vie_encoder_->SetStartBitrate(bitrate_allocator_->GetStartBitrate(this));
+  vie_encoder_->SetSink(this);
 }
 
-VideoSendStream::~VideoSendStream() {
-  LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString();
+void VideoSendStreamImpl::RegisterProcessThread(
+    ProcessThread* module_process_thread) {
+  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
+  RTC_DCHECK(!module_process_thread_);
+  module_process_thread_ = module_process_thread;
 
-  Stop();
+  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+    module_process_thread_->RegisterModule(rtp_rtcp);
+}
 
-  // Stop the encoder thread permanently.
-  rtc::AtomicOps::ReleaseStore(&stop_encoder_thread_, 1);
-  encoder_wakeup_event_.Set();
-  encoder_thread_.Stop();
+void VideoSendStreamImpl::DeRegisterProcessThread() {
+  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
+  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+    module_process_thread_->DeRegisterModule(rtp_rtcp);
+}
 
-  // This needs to happen after stopping the encoder thread,
-  // since the encoder thread calls AddObserver.
-  bitrate_allocator_->RemoveObserver(this);
-
-  module_process_thread_->DeRegisterModule(&overuse_detector_);
+VideoSendStreamImpl::~VideoSendStreamImpl() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
+  RTC_DCHECK(!payload_router_.active())
+      << "VideoSendStreamImpl::Stop not called";
+  LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString();
 
   rtp_rtcp_modules_[0]->SetREMBStatus(false);
   remb_->RemoveRembSender(rtp_rtcp_modules_[0]);
 
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
     congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp);
-    module_process_thread_->DeRegisterModule(rtp_rtcp);
     delete rtp_rtcp;
   }
 }
 
-bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+bool VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) {
+  // Runs on a network thread.
+  RTC_DCHECK(!worker_queue_->IsCurrent());
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
     rtp_rtcp->IncomingRtcpPacket(packet, length);
   return true;
 }
 
-void VideoSendStream::Start() {
+void VideoSendStreamImpl::Start() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   LOG(LS_INFO) << "VideoSendStream::Start";
   if (payload_router_.active())
     return;
   TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start");
   payload_router_.set_active(true);
+
+  bitrate_allocator_->AddObserver(
+      this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_,
+      max_padding_bitrate_, !config_->suspend_below_min_bitrate);
+
+  // Start monitoring encoder activity.
   {
-    rtc::CritScope lock(&encoder_settings_crit_);
-    pending_state_change_ = rtc::Optional<State>(State::kStarted);
+    rtc::CritScope lock(&encoder_activity_crit_sect_);
+    RTC_DCHECK(!check_encoder_activity_task_);
+    check_encoder_activity_task_ = new CheckEncoderActivityTask(this);
+    worker_queue_->PostDelayedTask(
+        std::unique_ptr<rtc::QueuedTask>(check_encoder_activity_task_),
+        CheckEncoderActivityTask::kEncoderTimeOutMs);
   }
-  encoder_wakeup_event_.Set();
+
+  vie_encoder_->SendKeyFrame();
 }
 
-void VideoSendStream::Stop() {
+void VideoSendStreamImpl::Stop() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   LOG(LS_INFO) << "VideoSendStream::Stop";
   if (!payload_router_.active())
     return;
   TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop");
   payload_router_.set_active(false);
+  bitrate_allocator_->RemoveObserver(this);
   {
-    rtc::CritScope lock(&encoder_settings_crit_);
-    pending_state_change_ = rtc::Optional<State>(State::kStopped);
+    rtc::CritScope lock(&encoder_activity_crit_sect_);
+    check_encoder_activity_task_->Stop();
+    check_encoder_activity_task_ = nullptr;
   }
-  encoder_wakeup_event_.Set();
+  vie_encoder_->OnBitrateUpdated(0, 0, 0);
+  stats_proxy_->OnSetEncoderTargetRate(0);
 }
 
-VideoCaptureInput* VideoSendStream::Input() {
-  return &input_;
-}
-
-bool VideoSendStream::EncoderThreadFunction(void* obj) {
-  static_cast<VideoSendStream*>(obj)->EncoderProcess();
-  // We're done, return false to abort.
-  return false;
-}
-
-void VideoSendStream::EncoderProcess() {
-  RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder(
-                      config_.encoder_settings.encoder,
-                      config_.encoder_settings.payload_type,
-                      config_.encoder_settings.internal_source));
-  RTC_DCHECK_RUN_ON(&encoder_thread_checker_);
-  while (true) {
-    // Wake up every kEncodeCheckForActivityPeriodMs to check if the encoder is
-    // active. If not, deregister as BitrateAllocatorObserver.
-    const int kEncodeCheckForActivityPeriodMs = 1000;
-    encoder_wakeup_event_.Wait(kEncodeCheckForActivityPeriodMs);
-    if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_))
-      break;
-    bool change_settings = false;
-    rtc::Optional<State> pending_state_change;
-    {
-      rtc::CritScope lock(&encoder_settings_crit_);
-      if (pending_encoder_settings_) {
-        std::swap(current_encoder_settings_, pending_encoder_settings_);
-        pending_encoder_settings_.reset();
-        change_settings = true;
-      } else if (pending_state_change_) {
-        swap(pending_state_change, pending_state_change_);
-      }
-    }
-    if (change_settings) {
-      current_encoder_settings_->video_codec.startBitrate = std::max(
-          bitrate_allocator_->GetStartBitrate(this) / 1000,
-          static_cast<int>(current_encoder_settings_->video_codec.minBitrate));
-
-      if (state_ == State::kStarted) {
-        bitrate_allocator_->AddObserver(
-            this, current_encoder_settings_->video_codec.minBitrate * 1000,
-            current_encoder_settings_->video_codec.maxBitrate * 1000,
-            CalulcateMaxPadBitrateBps(current_encoder_settings_->config,
-                                      config_.suspend_below_min_bitrate),
-            !config_.suspend_below_min_bitrate);
-      }
-
-      payload_router_.SetSendStreams(current_encoder_settings_->config.streams);
-      vie_encoder_.SetEncoder(current_encoder_settings_->video_codec,
-                              payload_router_.MaxPayloadLength());
-
-      // Clear stats for disabled layers.
-      for (size_t i = current_encoder_settings_->config.streams.size();
-           i < config_.rtp.ssrcs.size(); ++i) {
-        stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
-      }
-
-      size_t number_of_temporal_layers =
-          current_encoder_settings_->config.streams.back()
-              .temporal_layer_thresholds_bps.size() +
-          1;
-      protection_bitrate_calculator_.SetEncodingData(
-          current_encoder_settings_->video_codec.width,
-          current_encoder_settings_->video_codec.height,
-          number_of_temporal_layers, payload_router_.MaxPayloadLength());
-
-      // We might've gotten new settings while configuring the encoder settings,
-      // restart from the top to see if that's the case before trying to encode
-      // a frame (which might correspond to the last frame size).
-      encoder_wakeup_event_.Set();
-      continue;
-    }
-
-    if (pending_state_change) {
-      if (*pending_state_change == State::kStarted &&
-          state_ == State::kStopped) {
-        bitrate_allocator_->AddObserver(
-            this, current_encoder_settings_->video_codec.minBitrate * 1000,
-            current_encoder_settings_->video_codec.maxBitrate * 1000,
-            CalulcateMaxPadBitrateBps(current_encoder_settings_->config,
-                                      config_.suspend_below_min_bitrate),
-            !config_.suspend_below_min_bitrate);
-        vie_encoder_.SendKeyFrame();
-        state_ = State::kStarted;
-        LOG_F(LS_INFO) << "Encoder started.";
-      } else if (*pending_state_change == State::kStopped) {
-        bitrate_allocator_->RemoveObserver(this);
-        vie_encoder_.OnBitrateUpdated(0, 0, 0);
-        stats_proxy_.OnSetEncoderTargetRate(0);
-        state_ = State::kStopped;
-        LOG_F(LS_INFO) << "Encoder stopped.";
-      }
-      encoder_wakeup_event_.Set();
-      continue;
-    }
-
-    // Check if the encoder has produced anything the last kEncoderTimeOutMs.
-    // If not, deregister as BitrateAllocatorObserver.
-    if (state_ == State::kStarted &&
-        vie_encoder_.time_of_last_frame_activity_ms() <
-            rtc::TimeMillis() - kEncoderTimeOutMs) {
-      // The encoder has timed out.
-      LOG_F(LS_INFO) << "Encoder timed out.";
-      bitrate_allocator_->RemoveObserver(this);
-      state_ = State::kEncoderTimedOut;
-    }
-    if (state_ == State::kEncoderTimedOut &&
-        vie_encoder_.time_of_last_frame_activity_ms() >
-            rtc::TimeMillis() - kEncoderTimeOutMs) {
-      LOG_F(LS_INFO) << "Encoder is active.";
-      bitrate_allocator_->AddObserver(
-          this, current_encoder_settings_->video_codec.minBitrate * 1000,
-          current_encoder_settings_->video_codec.maxBitrate * 1000,
-          CalulcateMaxPadBitrateBps(current_encoder_settings_->config,
-                                    config_.suspend_below_min_bitrate),
-          !config_.suspend_below_min_bitrate);
-      state_ = State::kStarted;
-    }
-
-    VideoFrame frame;
-    if (input_.GetVideoFrame(&frame)) {
-      // TODO(perkj): |pre_encode_callback| is only used by tests. Tests should
-      // register as a sink to the VideoSource instead.
-      if (config_.pre_encode_callback) {
-        config_.pre_encode_callback->OnFrame(frame);
-      }
-      vie_encoder_.EncodeVideoFrame(frame);
-    }
+void VideoSendStreamImpl::SignalEncoderTimedOut() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
+  // If the encoder has not produced anything in the last kEncoderTimeOutMs and
+  // it is supposed to, deregister as BitrateAllocatorObserver. This can happen
+  // if a camera stops producing frames.
+  if (encoder_target_rate_bps_ > 0) {
+    LOG(LS_INFO) << "SignalEncoderTimedOut, Encoder timed out.";
+    bitrate_allocator_->RemoveObserver(this);
   }
-  vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type);
 }
 
-void VideoSendStream::ReconfigureVideoEncoder(
+void VideoSendStreamImpl::SignalEncoderActive() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
+  LOG(LS_INFO) << "SignalEncoderActive, Encoder is active.";
+  bitrate_allocator_->AddObserver(
+      this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_,
+      max_padding_bitrate_, !config_->suspend_below_min_bitrate);
+}
+
+void VideoSendStreamImpl::SignalEncoderConfigurationChanged(
     const VideoEncoderConfig& config) {
-  TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder");
-  LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString();
-  RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size());
-  VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
-      config, config_.encoder_settings.payload_name,
-      config_.encoder_settings.payload_type);
-  {
-    rtc::CritScope lock(&encoder_settings_crit_);
-    encoder_max_bitrate_bps_ = video_codec.maxBitrate * 1000;
-    pending_encoder_settings_.reset(new EncoderSettings({video_codec, config}));
+  RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size());
+  TRACE_EVENT0("webrtc", "VideoSendStream::SignalEncoderConfigurationChanged");
+  LOG(LS_INFO) << "SignalEncoderConfigurationChanged: " << config.ToString();
+  RTC_DCHECK_GE(config_->rtp.ssrcs.size(), config.streams.size());
+  RTC_DCHECK_RUN_ON(worker_queue_);
+
+  const int kEncoderMinBitrateBps = 30000;
+  encoder_min_bitrate_bps_ =
+      std::max(config.streams[0].min_bitrate_bps, kEncoderMinBitrateBps);
+  encoder_max_bitrate_bps_ = 0;
+  for (const auto& stream : config.streams)
+    encoder_max_bitrate_bps_ += stream.max_bitrate_bps;
+  max_padding_bitrate_ =
+      CalculateMaxPadBitrateBps(config, config_->suspend_below_min_bitrate);
+
+  payload_router_.SetSendStreams(config.streams);
+
+  // Clear stats for disabled layers.
+  for (size_t i = config.streams.size(); i < config_->rtp.ssrcs.size(); ++i) {
+    stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]);
   }
-  encoder_wakeup_event_.Set();
+
+  size_t number_of_temporal_layers =
+      config.streams.back().temporal_layer_thresholds_bps.size() + 1;
+  protection_bitrate_calculator_.SetEncodingData(
+      config.streams[0].width, config.streams[0].height,
+      number_of_temporal_layers, config_->rtp.max_packet_size);
+
+  if (payload_router_.active()) {
+    // The send stream is started already. Update the allocator with new bitrate
+    // limits.
+    bitrate_allocator_->AddObserver(
+        this, encoder_min_bitrate_bps_, encoder_max_bitrate_bps_,
+        max_padding_bitrate_, !config_->suspend_below_min_bitrate);
+  }
 }
 
-VideoSendStream::Stats VideoSendStream::GetStats() {
-  return stats_proxy_.GetStats();
-}
-
-void VideoSendStream::OveruseDetected() {
-  if (config_.overuse_callback)
-    config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse);
-}
-
-void VideoSendStream::NormalUsage() {
-  if (config_.overuse_callback)
-    config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse);
-}
-
-EncodedImageCallback::Result VideoSendStream::OnEncodedImage(
+EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific_info,
     const RTPFragmentationHeader* fragmentation) {
-  if (config_.post_encode_callback) {
-    config_.post_encode_callback->EncodedFrameCallback(
+  // Encoded is called on whatever thread the real encoder implementation runs
+  // on. In the case of hardware encoders, there might be several encoders
+  // running in parallel on different threads.
+  if (config_->post_encode_callback) {
+    config_->post_encode_callback->EncodedFrameCallback(
         EncodedFrame(encoded_image._buffer, encoded_image._length,
                      encoded_image._frameType));
   }
+  {
+    rtc::CritScope lock(&encoder_activity_crit_sect_);
+    if (check_encoder_activity_task_)
+      check_encoder_activity_task_->UpdateEncoderActivity();
+  }
 
   protection_bitrate_calculator_.UpdateWithEncodedData(encoded_image);
   EncodedImageCallback::Result result = payload_router_.OnEncodedImage(
@@ -783,7 +873,7 @@
       if (file_writers_[layer] == nullptr) {
         std::ostringstream oss;
         oss << "send_bitstream_ssrc";
-        for (uint32_t ssrc : config_.rtp.ssrcs)
+        for (uint32_t ssrc : config_->rtp.ssrcs)
           oss << "_" << ssrc;
         oss << "_layer" << layer << ".ivf";
         file_writers_[layer] =
@@ -800,17 +890,18 @@
   return result;
 }
 
-void VideoSendStream::ConfigureProtection() {
+void VideoSendStreamImpl::ConfigureProtection() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   // Enable NACK, FEC or both.
-  const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0;
-  bool enable_protection_fec = config_.rtp.fec.ulpfec_payload_type != -1;
+  const bool enable_protection_nack = config_->rtp.nack.rtp_history_ms > 0;
+  bool enable_protection_fec = config_->rtp.fec.ulpfec_payload_type != -1;
   // Payload types without picture ID cannot determine that a stream is complete
   // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is
   // a waste of bandwidth since FEC packets still have to be transmitted. Note
   // that this is not the case with FLEXFEC.
   if (enable_protection_nack &&
       !PayloadTypeSupportsSkippingFecPackets(
-          config_.encoder_settings.payload_name)) {
+          config_->encoder_settings.payload_name)) {
     LOG(LS_WARNING) << "Transmitting payload type without picture ID using"
                        "NACK+FEC is a waste of bandwidth since FEC packets "
                        "also have to be retransmitted. Disabling FEC.";
@@ -824,21 +915,21 @@
   // TODO(changbin): Should set RTX for RED mapping in RTP sender in future.
   // Validate payload types. If either RED or FEC payload types are set then
   // both should be. If FEC is enabled then they both have to be set.
-  if (config_.rtp.fec.red_payload_type != -1) {
-    RTC_DCHECK_GE(config_.rtp.fec.red_payload_type, 0);
-    RTC_DCHECK_LE(config_.rtp.fec.red_payload_type, 127);
+  if (config_->rtp.fec.red_payload_type != -1) {
+    RTC_DCHECK_GE(config_->rtp.fec.red_payload_type, 0);
+    RTC_DCHECK_LE(config_->rtp.fec.red_payload_type, 127);
     // TODO(holmer): We should only enable red if ulpfec is also enabled, but
     // but due to an incompatibility issue with previous versions the receiver
     // assumes rtx packets are containing red if it has been configured to
     // receive red. Remove this in a few versions once the incompatibility
     // issue is resolved (M53 timeframe).
-    payload_type_red = static_cast<uint8_t>(config_.rtp.fec.red_payload_type);
+    payload_type_red = static_cast<uint8_t>(config_->rtp.fec.red_payload_type);
   }
-  if (config_.rtp.fec.ulpfec_payload_type != -1) {
-    RTC_DCHECK_GE(config_.rtp.fec.ulpfec_payload_type, 0);
-    RTC_DCHECK_LE(config_.rtp.fec.ulpfec_payload_type, 127);
+  if (config_->rtp.fec.ulpfec_payload_type != -1) {
+    RTC_DCHECK_GE(config_->rtp.fec.ulpfec_payload_type, 0);
+    RTC_DCHECK_LE(config_->rtp.fec.ulpfec_payload_type, 127);
     payload_type_fec =
-        static_cast<uint8_t>(config_.rtp.fec.ulpfec_payload_type);
+        static_cast<uint8_t>(config_->rtp.fec.ulpfec_payload_type);
   }
 
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
@@ -857,107 +948,102 @@
                                                      enable_protection_nack);
 }
 
-void VideoSendStream::ConfigureSsrcs() {
+void VideoSendStreamImpl::ConfigureSsrcs() {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   // Configure regular SSRCs.
-  for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
-    uint32_t ssrc = config_.rtp.ssrcs[i];
+  for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) {
+    uint32_t ssrc = config_->rtp.ssrcs[i];
     RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i];
     rtp_rtcp->SetSSRC(ssrc);
 
     // Restore RTP state if previous existed.
-    RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
+    VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
     if (it != suspended_ssrcs_.end())
       rtp_rtcp->SetRtpState(it->second);
   }
 
   // Set up RTX if available.
-  if (config_.rtp.rtx.ssrcs.empty())
+  if (config_->rtp.rtx.ssrcs.empty())
     return;
 
   // Configure RTX SSRCs.
-  RTC_DCHECK_EQ(config_.rtp.rtx.ssrcs.size(), config_.rtp.ssrcs.size());
-  for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
-    uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
+  RTC_DCHECK_EQ(config_->rtp.rtx.ssrcs.size(), config_->rtp.ssrcs.size());
+  for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) {
+    uint32_t ssrc = config_->rtp.rtx.ssrcs[i];
     RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i];
     rtp_rtcp->SetRtxSsrc(ssrc);
-    RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
+    VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
     if (it != suspended_ssrcs_.end())
       rtp_rtcp->SetRtxState(it->second);
   }
 
   // Configure RTX payload types.
-  RTC_DCHECK_GE(config_.rtp.rtx.payload_type, 0);
+  RTC_DCHECK_GE(config_->rtp.rtx.payload_type, 0);
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
-    rtp_rtcp->SetRtxSendPayloadType(config_.rtp.rtx.payload_type,
-                                    config_.encoder_settings.payload_type);
+    rtp_rtcp->SetRtxSendPayloadType(config_->rtp.rtx.payload_type,
+                                    config_->encoder_settings.payload_type);
     rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads);
   }
-  if (config_.rtp.fec.red_payload_type != -1 &&
-      config_.rtp.fec.red_rtx_payload_type != -1) {
+  if (config_->rtp.fec.red_payload_type != -1 &&
+      config_->rtp.fec.red_rtx_payload_type != -1) {
     for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
-      rtp_rtcp->SetRtxSendPayloadType(config_.rtp.fec.red_rtx_payload_type,
-                                      config_.rtp.fec.red_payload_type);
+      rtp_rtcp->SetRtxSendPayloadType(config_->rtp.fec.red_rtx_payload_type,
+                                      config_->rtp.fec.red_payload_type);
     }
   }
 }
 
-std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const {
+std::map<uint32_t, RtpState> VideoSendStreamImpl::GetRtpStates() const {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   std::map<uint32_t, RtpState> rtp_states;
-  for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
-    uint32_t ssrc = config_.rtp.ssrcs[i];
+  for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) {
+    uint32_t ssrc = config_->rtp.ssrcs[i];
     RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC());
     rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState();
   }
 
-  for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
-    uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
+  for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) {
+    uint32_t ssrc = config_->rtp.rtx.ssrcs[i];
     rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState();
   }
 
   return rtp_states;
 }
 
-void VideoSendStream::SignalNetworkState(NetworkState state) {
+void VideoSendStreamImpl::SignalNetworkState(NetworkState state) {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
-    rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
+    rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_->rtp.rtcp_mode
                                                 : RtcpMode::kOff);
   }
 }
 
-uint32_t VideoSendStream::OnBitrateUpdated(uint32_t bitrate_bps,
-                                           uint8_t fraction_loss,
-                                           int64_t rtt) {
+uint32_t VideoSendStreamImpl::OnBitrateUpdated(uint32_t bitrate_bps,
+                                               uint8_t fraction_loss,
+                                               int64_t rtt) {
+  RTC_DCHECK_RUN_ON(worker_queue_);
+  RTC_DCHECK(payload_router_.active())
+      << "VideoSendStream::Start has not been called.";
   // Get the encoder target rate. It is the estimated network rate -
   // protection overhead.
-  uint32_t encoder_target_rate_bps =
-      protection_bitrate_calculator_.SetTargetRates(
-          bitrate_bps, stats_proxy_.GetSendFrameRate(), fraction_loss, rtt);
+  encoder_target_rate_bps_ = protection_bitrate_calculator_.SetTargetRates(
+      bitrate_bps, stats_proxy_->GetSendFrameRate(), fraction_loss, rtt);
+  uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps_;
 
-  uint32_t protection_bitrate = bitrate_bps - encoder_target_rate_bps;
-  {
-    // Limit the target bitrate to the configured max bitrate.
-    rtc::CritScope lock(&encoder_settings_crit_);
-    encoder_target_rate_bps =
-        std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps);
-    if ((encoder_target_rate_bps_ == 0 && encoder_target_rate_bps > 0) ||
-        (encoder_target_rate_bps_ > 0 && encoder_target_rate_bps == 0)) {
-      LOG(LS_INFO)
-          << "OnBitrateUpdated: Encoder state changed, target bitrate "
-          << encoder_target_rate_bps << " bps.";
-    }
-    encoder_target_rate_bps_ = encoder_target_rate_bps;
-  }
-  vie_encoder_.OnBitrateUpdated(encoder_target_rate_bps, fraction_loss, rtt);
-  stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps);
-
+  encoder_target_rate_bps_ =
+      std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_);
+  vie_encoder_->OnBitrateUpdated(encoder_target_rate_bps_, fraction_loss, rtt);
+  stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_);
   return protection_bitrate;
 }
 
-int VideoSendStream::ProtectionRequest(const FecProtectionParams* delta_params,
-                                       const FecProtectionParams* key_params,
-                                       uint32_t* sent_video_rate_bps,
-                                       uint32_t* sent_nack_rate_bps,
-                                       uint32_t* sent_fec_rate_bps) {
+int VideoSendStreamImpl::ProtectionRequest(
+    const FecProtectionParams* delta_params,
+    const FecProtectionParams* key_params,
+    uint32_t* sent_video_rate_bps,
+    uint32_t* sent_nack_rate_bps,
+    uint32_t* sent_fec_rate_bps) {
+  RTC_DCHECK_RUN_ON(worker_queue_);
   *sent_video_rate_bps = 0;
   *sent_nack_rate_bps = 0;
   *sent_fec_rate_bps = 0;
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index c67dc70..9322642 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -17,6 +17,8 @@
 
 #include "webrtc/call/bitrate_allocator.h"
 #include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/task_queue.h"
 #include "webrtc/call.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/protection_bitrate_calculator.h"
@@ -24,7 +26,6 @@
 #include "webrtc/video/payload_router.h"
 #include "webrtc/video/send_delay_stats.h"
 #include "webrtc/video/send_statistics_proxy.h"
-#include "webrtc/video/video_capture_input.h"
 #include "webrtc/video/vie_encoder.h"
 #include "webrtc/video_receive_stream.h"
 #include "webrtc/video_send_stream.h"
@@ -37,32 +38,29 @@
 class IvfFileWriter;
 class ProcessThread;
 class RtpRtcp;
-class ViEEncoder;
 class VieRemb;
 class RtcEventLog;
 
-namespace vcm {
-class VideoSender;
-}  // namespace vcm
-
 namespace internal {
 
-class VideoSendStream : public webrtc::VideoSendStream,
-                        public webrtc::CpuOveruseObserver,
-                        public webrtc::BitrateAllocatorObserver,
-                        public webrtc::VCMProtectionCallback,
-                        public EncodedImageCallback {
+class VideoSendStreamImpl;
+
+// VideoSendStream implements webrtc::VideoSendStream.
+// Internally, it delegates all public methods to VideoSendStreamImpl and/or
+// ViEEncoder. VideoSendStreamImpl is created and deleted on |worker_queue|.
+class VideoSendStream : public webrtc::VideoSendStream {
  public:
   VideoSendStream(int num_cpu_cores,
                   ProcessThread* module_process_thread,
+                  rtc::TaskQueue* worker_queue,
                   CallStats* call_stats,
                   CongestionController* congestion_controller,
                   BitrateAllocator* bitrate_allocator,
                   SendDelayStats* send_delay_stats,
                   VieRemb* remb,
                   RtcEventLog* event_log,
-                  const VideoSendStream::Config& config,
-                  const VideoEncoderConfig& encoder_config,
+                  VideoSendStream::Config config,
+                  VideoEncoderConfig encoder_config,
                   const std::map<uint32_t, RtpState>& suspended_ssrcs);
 
   ~VideoSendStream() override;
@@ -74,101 +72,26 @@
   void Start() override;
   void Stop() override;
   VideoCaptureInput* Input() override;
-  void ReconfigureVideoEncoder(const VideoEncoderConfig& config) override;
+  void ReconfigureVideoEncoder(VideoEncoderConfig) override;
   Stats GetStats() override;
 
-  // webrtc::CpuOveruseObserver implementation.
-  void OveruseDetected() override;
-  void NormalUsage() override;
-
   typedef std::map<uint32_t, RtpState> RtpStateMap;
-  RtpStateMap GetRtpStates() const;
-
-  int GetPaddingNeededBps() const;
-
-  // Implements BitrateAllocatorObserver.
-  uint32_t OnBitrateUpdated(uint32_t bitrate_bps,
-                            uint8_t fraction_loss,
-                            int64_t rtt) override;
-
- protected:
-  // Implements webrtc::VCMProtectionCallback.
-  int ProtectionRequest(const FecProtectionParams* delta_params,
-                        const FecProtectionParams* key_params,
-                        uint32_t* sent_video_rate_bps,
-                        uint32_t* sent_nack_rate_bps,
-                        uint32_t* sent_fec_rate_bps) override;
+  RtpStateMap StopPermanentlyAndGetRtpStates();
 
  private:
-  struct EncoderSettings {
-    VideoCodec video_codec;
-    VideoEncoderConfig config;
-  };
+  class ConstructionTask;
+  class DestructAndGetRtpStateTask;
 
-  // Implements EncodedImageCallback. The implementation routes encoded frames
-  // to the |payload_router_| and |config.pre_encode_callback| if set.
-  // Called on an arbitrary encoder callback thread.
-  EncodedImageCallback::Result OnEncodedImage(
-      const EncodedImage& encoded_image,
-      const CodecSpecificInfo* codec_specific_info,
-      const RTPFragmentationHeader* fragmentation) override;
-
-  static bool EncoderThreadFunction(void* obj);
-  void EncoderProcess();
-
-  void ConfigureProtection();
-  void ConfigureSsrcs();
+  rtc::ThreadChecker thread_checker_;
+  rtc::TaskQueue* const worker_queue_;
+  rtc::Event thread_sync_event_;
 
   SendStatisticsProxy stats_proxy_;
   const VideoSendStream::Config config_;
-  std::map<uint32_t, RtpState> suspended_ssrcs_;
-
-  ProcessThread* const module_process_thread_;
-  CallStats* const call_stats_;
-  CongestionController* const congestion_controller_;
-  BitrateAllocator* const bitrate_allocator_;
-  VieRemb* const remb_;
-
-  static const bool kEnableFrameRecording = false;
-  static const int kMaxLayers = 3;
-  std::unique_ptr<IvfFileWriter> file_writers_[kMaxLayers];
-
-  rtc::PlatformThread encoder_thread_;
-  rtc::Event encoder_wakeup_event_;
-  volatile int stop_encoder_thread_;
-  rtc::CriticalSection encoder_settings_crit_;
-  std::unique_ptr<EncoderSettings> pending_encoder_settings_
-      GUARDED_BY(encoder_settings_crit_);
-  uint32_t encoder_max_bitrate_bps_ GUARDED_BY(encoder_settings_crit_);
-  uint32_t encoder_target_rate_bps_ GUARDED_BY(encoder_settings_crit_);
-
-  enum class State {
-    kStopped,  // VideoSendStream::Start has not yet been called.
-    kStarted,  // VideoSendStream::Start has been called.
-    // VideoSendStream::Start has been called but the encoder have timed out.
-    kEncoderTimedOut,
-  };
-  rtc::Optional<State> pending_state_change_ GUARDED_BY(encoder_settings_crit_);
-
-  // Only used on the encoder thread.
-  rtc::ThreadChecker encoder_thread_checker_;
-  State state_ ACCESS_ON(&encoder_thread_checker_);
-  std::unique_ptr<EncoderSettings> current_encoder_settings_
-      ACCESS_ON(&encoder_thread_checker_);
-
-  OveruseFrameDetector overuse_detector_;
-  ViEEncoder vie_encoder_;
-  EncoderStateFeedback encoder_feedback_;
-  ProtectionBitrateCalculator protection_bitrate_calculator_;
-
-  vcm::VideoSender* const video_sender_;
-
-  const std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
-  // RtpRtcp modules, declared here as they use other members on construction.
-  const std::vector<RtpRtcp*> rtp_rtcp_modules_;
-  PayloadRouter payload_router_;
-  VideoCaptureInput input_;
+  std::unique_ptr<VideoSendStreamImpl> send_stream_;
+  std::unique_ptr<ViEEncoder> vie_encoder_;
 };
+
 }  // namespace internal
 }  // namespace webrtc
 
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index 623cd34..654784c 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -866,7 +866,8 @@
       return SEND_PACKET;
     }
 
-    // This method implements the rtc::VideoSinkInterface
+    // This method implements the rtc::VideoSinkInterface. This is called when
+    // a frame is provided to the VideoSendStream.
     void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       if (test_state_ == kDuringSuspend &&
@@ -1205,7 +1206,7 @@
       encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps;
       encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
     }
-    encoder_config_ = *encoder_config;
+    encoder_config_ = encoder_config->Copy();
   }
 
   void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
@@ -1229,7 +1230,7 @@
       packets_sent_ = 0;
       encoder_config_.min_transmit_bitrate_bps = kMinTransmitBitrateBps;
       encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
-      send_stream_->ReconfigureVideoEncoder(encoder_config_);
+      send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
       running_without_padding_ = false;
       return SEND_PACKET;
     }
@@ -1324,7 +1325,7 @@
 
   video_encoder_config_.streams[0].max_bitrate_bps =
       2 * bitrate_config.start_bitrate_bps;
-  video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_);
+  video_send_stream_->ReconfigureVideoEncoder(video_encoder_config_.Copy());
 
   // New bitrate should be reconfigured above the previous max. As there's no
   // network connection this shouldn't be flaky, as no bitrate should've been
@@ -1589,13 +1590,13 @@
         std::vector<VideoReceiveStream::Config>* receive_configs,
         VideoEncoderConfig* encoder_config) override {
       send_config->encoder_settings.encoder = this;
-      encoder_config_ = *encoder_config;
+      encoder_config_ = encoder_config->Copy();
     }
 
     void PerformTest() override {
       EXPECT_TRUE(Wait()) << "Timed out while waiting for Encode.";
       EXPECT_EQ(0u, num_releases());
-      stream_->ReconfigureVideoEncoder(encoder_config_);
+      stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
       EXPECT_EQ(0u, num_releases());
       stream_->Stop();
       // Encoder should not be released before destroying the VideoSendStream.
@@ -1638,7 +1639,7 @@
         std::vector<VideoReceiveStream::Config>* receive_configs,
         VideoEncoderConfig* encoder_config) override {
       send_config->encoder_settings.encoder = this;
-      encoder_config_ = *encoder_config;
+      encoder_config_ = encoder_config->Copy();
     }
 
     void OnVideoStreamsCreated(
@@ -1667,7 +1668,7 @@
       EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
 
       encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
-      stream_->ReconfigureVideoEncoder(encoder_config_);
+      stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
       EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs));
       EXPECT_EQ(2u, num_initializations_)
           << "ReconfigureVideoEncoder did not reinitialize the encoder with "
@@ -1714,7 +1715,7 @@
     }
 
     encoder_config->encoder_specific_settings = &encoder_settings_;
-    encoder_config_ = *encoder_config;
+    encoder_config_ = encoder_config->Copy();
   }
 
   void OnVideoStreamsCreated(
@@ -1741,7 +1742,7 @@
     ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
 
     encoder_settings_.frameDroppingOn = true;
-    stream_->ReconfigureVideoEncoder(encoder_config_);
+    stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
     ASSERT_TRUE(
         init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
     EXPECT_EQ(2u, num_initializations_)
@@ -1938,6 +1939,8 @@
         : SendTest(kDefaultTimeoutMs),
           FakeEncoder(Clock::GetRealTimeClock()),
           init_encode_event_(false, false),
+          bitrate_changed_event_(false, false),
+          target_bitrate_(0),
           num_initializations_(0),
           call_(nullptr),
           send_stream_(nullptr) {}
@@ -1946,6 +1949,8 @@
     int32_t InitEncode(const VideoCodec* codecSettings,
                        int32_t numberOfCores,
                        size_t maxPayloadSize) override {
+      EXPECT_GE(codecSettings->startBitrate, codecSettings->minBitrate);
+      EXPECT_LE(codecSettings->startBitrate, codecSettings->maxBitrate);
       if (num_initializations_ == 0) {
         EXPECT_EQ(static_cast<unsigned int>(kMinBitrateKbps),
                   codecSettings->minBitrate);
@@ -1964,8 +1969,9 @@
       } else if (num_initializations_ == 2) {
         EXPECT_EQ(static_cast<unsigned int>(kIncreasedMaxBitrateKbps),
                   codecSettings->maxBitrate);
-        EXPECT_EQ(static_cast<unsigned int>(kIncreasedStartBitrateKbps),
-                  codecSettings->startBitrate);
+        // The start bitrate will be whatever rate the BitrateController has
+        // currently configured, but constrained to the span between the set
+        // min and max bitrate.
       }
       ++num_initializations_;
       init_encode_event_.Set();
@@ -1973,6 +1979,23 @@
                                      maxPayloadSize);
     }
 
+    int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
+      {
+        rtc::CritScope lock(&crit_);
+        target_bitrate_ = newBitRate;
+      }
+      bitrate_changed_event_.Set();
+      return FakeEncoder::SetRates(newBitRate, frameRate);
+    }
+
+    void WaitForSetRates(uint32_t expected_bitrate) {
+      EXPECT_TRUE(
+          bitrate_changed_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs))
+          << "Timed out while waiting for encoder rate to be set.";
+      rtc::CritScope lock(&crit_);
+      EXPECT_EQ(expected_bitrate, target_bitrate_);
+    }
+
     Call::Config GetSenderCallConfig() override {
       Call::Config config;
       config.bitrate_config.min_bitrate_bps = kMinBitrateKbps * 1000;
@@ -1990,7 +2013,7 @@
       // capped.
       encoder_config->streams.front().min_bitrate_bps = kMinBitrateKbps * 1000;
       encoder_config->streams.front().max_bitrate_bps = kMaxBitrateKbps * 1000;
-      encoder_config_ = *encoder_config;
+      encoder_config_ = encoder_config->Copy();
     }
 
     void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
@@ -2006,32 +2029,42 @@
     void PerformTest() override {
       ASSERT_TRUE(
           init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs))
-          << "Timed out while waiting encoder to be configured.";
+          << "Timed out while waiting for encoder to be configured.";
+      WaitForSetRates(kStartBitrateKbps);
       Call::Config::BitrateConfig bitrate_config;
       bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
       bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
       call_->SetBitrateConfig(bitrate_config);
-      EXPECT_TRUE(Wait())
-          << "Timed out while waiting encoder to be configured.";
+      // Encoder rate is capped by EncoderConfig max_bitrate_bps.
+      WaitForSetRates(kMaxBitrateKbps);
+
       encoder_config_.streams[0].min_bitrate_bps = 0;
       encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
-      send_stream_->ReconfigureVideoEncoder(encoder_config_);
+      send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
       ASSERT_TRUE(
           init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
       EXPECT_EQ(2, num_initializations_)
           << "Encoder should have been reconfigured with the new value.";
+      WaitForSetRates(kLowerMaxBitrateKbps);
+
       encoder_config_.streams[0].target_bitrate_bps =
           encoder_config_.streams[0].min_bitrate_bps;
       encoder_config_.streams[0].max_bitrate_bps =
           kIncreasedMaxBitrateKbps * 1000;
-      send_stream_->ReconfigureVideoEncoder(encoder_config_);
+      send_stream_->ReconfigureVideoEncoder(encoder_config_.Copy());
       ASSERT_TRUE(
           init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
       EXPECT_EQ(3, num_initializations_)
           << "Encoder should have been reconfigured with the new value.";
+      // Expected target bitrate is the start bitrate set in the call to
+      // call_->SetBitrateConfig.
+      WaitForSetRates(kIncreasedStartBitrateKbps);
     }
 
     rtc::Event init_encode_event_;
+    rtc::Event bitrate_changed_event_;
+    rtc::CriticalSection crit_;
+    uint32_t target_bitrate_ GUARDED_BY(&crit_);
     int num_initializations_;
     webrtc::Call* call_;
     webrtc::VideoSendStream* send_stream_;
@@ -2153,7 +2186,7 @@
     EXPECT_EQ(1u, encoder_config->streams.size());
     encoder_config->streams[0].temporal_layer_thresholds_bps.resize(
         vp9_settings_.numberOfTemporalLayers - 1);
-    encoder_config_ = *encoder_config;
+    encoder_config_ = encoder_config->Copy();
   }
 
   void PerformTest() override {
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index 1afbed0..f654543 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -27,64 +27,315 @@
 
 namespace webrtc {
 
+namespace {
+
+VideoCodecType PayloadNameToCodecType(const std::string& payload_name) {
+  if (payload_name == "VP8")
+    return kVideoCodecVP8;
+  if (payload_name == "VP9")
+    return kVideoCodecVP9;
+  if (payload_name == "H264")
+    return kVideoCodecH264;
+  return kVideoCodecGeneric;
+}
+
+VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
+                                          const std::string& payload_name,
+                                          int payload_type) {
+  const std::vector<VideoStream>& streams = config.streams;
+  static const int kEncoderMinBitrateKbps = 30;
+  RTC_DCHECK(!streams.empty());
+  RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
+
+  VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(video_codec));
+  video_codec.codecType = PayloadNameToCodecType(payload_name);
+
+  switch (config.content_type) {
+    case VideoEncoderConfig::ContentType::kRealtimeVideo:
+      video_codec.mode = kRealtimeVideo;
+      break;
+    case VideoEncoderConfig::ContentType::kScreen:
+      video_codec.mode = kScreensharing;
+      if (config.streams.size() == 1 &&
+          config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
+        video_codec.targetBitrate =
+            config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
+      }
+      break;
+  }
+
+  switch (video_codec.codecType) {
+    case kVideoCodecVP8: {
+      if (config.encoder_specific_settings) {
+        video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
+            config.encoder_specific_settings);
+      } else {
+        video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+      }
+      video_codec.codecSpecific.VP8.numberOfTemporalLayers =
+          static_cast<unsigned char>(
+              streams.back().temporal_layer_thresholds_bps.size() + 1);
+      break;
+    }
+    case kVideoCodecVP9: {
+      if (config.encoder_specific_settings) {
+        video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
+            config.encoder_specific_settings);
+        if (video_codec.mode == kScreensharing) {
+          video_codec.codecSpecific.VP9.flexibleMode = true;
+          // For now VP9 screensharing use 1 temporal and 2 spatial layers.
+          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
+                        1);
+          RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+        }
+      } else {
+        video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
+      }
+      video_codec.codecSpecific.VP9.numberOfTemporalLayers =
+          static_cast<unsigned char>(
+              streams.back().temporal_layer_thresholds_bps.size() + 1);
+      break;
+    }
+    case kVideoCodecH264: {
+      if (config.encoder_specific_settings) {
+        video_codec.codecSpecific.H264 =
+            *reinterpret_cast<const VideoCodecH264*>(
+                config.encoder_specific_settings);
+      } else {
+        video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
+      }
+      break;
+    }
+    default:
+      // TODO(pbos): Support encoder_settings codec-agnostically.
+      RTC_DCHECK(!config.encoder_specific_settings)
+          << "Encoder-specific settings for codec type not wired up.";
+      break;
+  }
+
+  strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1);
+  video_codec.plName[kPayloadNameSize - 1] = '\0';
+  video_codec.plType = payload_type;
+  video_codec.numberOfSimulcastStreams =
+      static_cast<unsigned char>(streams.size());
+  video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
+  if (video_codec.minBitrate < kEncoderMinBitrateKbps)
+    video_codec.minBitrate = kEncoderMinBitrateKbps;
+  RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
+  if (video_codec.codecType == kVideoCodecVP9) {
+    // If the vector is empty, bitrates will be configured automatically.
+    RTC_DCHECK(config.spatial_layers.empty() ||
+               config.spatial_layers.size() ==
+                   video_codec.codecSpecific.VP9.numberOfSpatialLayers);
+    RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
+                  kMaxSimulcastStreams);
+    for (size_t i = 0; i < config.spatial_layers.size(); ++i)
+      video_codec.spatialLayers[i] = config.spatial_layers[i];
+  }
+  for (size_t i = 0; i < streams.size(); ++i) {
+    SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
+    RTC_DCHECK_GT(streams[i].width, 0u);
+    RTC_DCHECK_GT(streams[i].height, 0u);
+    RTC_DCHECK_GT(streams[i].max_framerate, 0);
+    // Different framerates not supported per stream at the moment.
+    RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
+    RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
+    RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
+    RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
+    RTC_DCHECK_GE(streams[i].max_qp, 0);
+
+    sim_stream->width = static_cast<uint16_t>(streams[i].width);
+    sim_stream->height = static_cast<uint16_t>(streams[i].height);
+    sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
+    sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
+    sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
+    sim_stream->qpMax = streams[i].max_qp;
+    sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
+        streams[i].temporal_layer_thresholds_bps.size() + 1);
+
+    video_codec.width =
+        std::max(video_codec.width, static_cast<uint16_t>(streams[i].width));
+    video_codec.height =
+        std::max(video_codec.height, static_cast<uint16_t>(streams[i].height));
+    video_codec.minBitrate =
+        std::min(static_cast<uint16_t>(video_codec.minBitrate),
+                 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
+    video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
+    video_codec.qpMax = std::max(video_codec.qpMax,
+                                 static_cast<unsigned int>(streams[i].max_qp));
+  }
+
+  if (video_codec.maxBitrate == 0) {
+    // Unset max bitrate -> cap to one bit per pixel.
+    video_codec.maxBitrate =
+        (video_codec.width * video_codec.height * video_codec.maxFramerate) /
+        1000;
+  }
+  if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
+    video_codec.maxBitrate = kEncoderMinBitrateKbps;
+
+  RTC_DCHECK_GT(streams[0].max_framerate, 0);
+  video_codec.maxFramerate = streams[0].max_framerate;
+  video_codec.expect_encode_from_texture = config.expect_encode_from_texture;
+
+  return video_codec;
+}
+
+// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
+// pipelining encoders better (multiple input frames before something comes
+// out). This should effectively turn off CPU adaptations for systems that
+// remotely cope with the load right now.
+CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
+  CpuOveruseOptions options;
+  if (full_overuse_time) {
+    options.low_encode_usage_threshold_percent = 150;
+    options.high_encode_usage_threshold_percent = 200;
+  }
+  return options;
+}
+
+}  //  namespace
+
+class ViEEncoder::EncodeTask : public rtc::QueuedTask {
+ public:
+  EncodeTask(const VideoFrame& frame, ViEEncoder* vie_encoder)
+      : vie_encoder_(vie_encoder) {
+    frame_.ShallowCopy(frame);
+    ++vie_encoder_->posted_frames_waiting_for_encode_;
+  }
+
+ private:
+  bool Run() override {
+    RTC_DCHECK_GT(vie_encoder_->posted_frames_waiting_for_encode_.Value(), 0);
+    if (--vie_encoder_->posted_frames_waiting_for_encode_ == 0) {
+      vie_encoder_->EncodeVideoFrame(frame_);
+    } else {
+      // There is a newer frame in flight. Do not encode this frame.
+      LOG(LS_VERBOSE)
+          << "Incoming frame dropped because the encoder is blocked.";
+    }
+    return true;
+  }
+  VideoFrame frame_;
+  ViEEncoder* vie_encoder_;
+};
+
 ViEEncoder::ViEEncoder(uint32_t number_of_cores,
-                       ProcessThread* module_process_thread,
                        SendStatisticsProxy* stats_proxy,
-                       OveruseFrameDetector* overuse_detector,
-                       EncodedImageCallback* sink)
-    : number_of_cores_(number_of_cores),
-      sink_(sink),
+                       const VideoSendStream::Config::EncoderSettings& settings,
+                       rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
+                       LoadObserver* overuse_callback,
+                       EncodedFrameObserver* encoder_timing)
+    : shutdown_event_(true /* manual_reset */, false),
+      number_of_cores_(number_of_cores),
+      settings_(settings),
       vp_(VideoProcessing::Create()),
       video_sender_(Clock::GetRealTimeClock(), this, this),
+      overuse_detector_(Clock::GetRealTimeClock(),
+                        GetCpuOveruseOptions(settings.full_overuse_time),
+                        this,
+                        encoder_timing,
+                        stats_proxy),
+      load_observer_(overuse_callback),
       stats_proxy_(stats_proxy),
-      overuse_detector_(overuse_detector),
-      time_of_last_frame_activity_ms_(std::numeric_limits<int64_t>::max()),
+      pre_encode_callback_(pre_encode_callback),
+      module_process_thread_(nullptr),
       encoder_config_(),
+      encoder_start_bitrate_bps_(0),
       last_observed_bitrate_bps_(0),
       encoder_paused_and_dropped_frame_(false),
-      module_process_thread_(module_process_thread),
       has_received_sli_(false),
       picture_id_sli_(0),
       has_received_rpsi_(false),
       picture_id_rpsi_(0),
-      video_suspended_(false) {
-  module_process_thread_->RegisterModule(&video_sender_);
-  vp_->EnableTemporalDecimation(true);
-}
+      clock_(Clock::GetRealTimeClock()),
+      last_captured_timestamp_(0),
+      delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
+                             clock_->TimeInMilliseconds()),
+      encoder_queue_("EncoderQueue") {
+  vp_->EnableTemporalDecimation(false);
 
-vcm::VideoSender* ViEEncoder::video_sender() {
-  return &video_sender_;
+  encoder_queue_.PostTask([this] {
+    RTC_DCHECK_RUN_ON(&encoder_queue_);
+    video_sender_.RegisterExternalEncoder(
+        settings_.encoder, settings_.payload_type, settings_.internal_source);
+  });
 }
 
 ViEEncoder::~ViEEncoder() {
+  RTC_DCHECK(shutdown_event_.Wait(0))
+      << "Must call ::Stop() before destruction.";
+}
+
+void ViEEncoder::Stop() {
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask([this] { Stop(); });
+    shutdown_event_.Wait(rtc::Event::kForever);
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
+  video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type, false);
+  shutdown_event_.Set();
+}
+
+void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) {
+  RTC_DCHECK(!module_process_thread_);
+  module_process_thread_ = module_process_thread;
+  module_process_thread_->RegisterModule(&overuse_detector_);
+  module_process_thread_->RegisterModule(&video_sender_);
+  module_process_thread_checker_.DetachFromThread();
+}
+
+void ViEEncoder::DeRegisterProcessThread() {
+  module_process_thread_->DeRegisterModule(&overuse_detector_);
   module_process_thread_->DeRegisterModule(&video_sender_);
 }
 
-int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
-                                            uint8_t pl_type,
-                                            bool internal_source) {
-  video_sender_.RegisterExternalEncoder(encoder, pl_type, internal_source);
-  return 0;
+void ViEEncoder::SetSink(EncodedImageCallback* sink) {
+  encoder_queue_.PostTask([this, sink] {
+    RTC_DCHECK_RUN_ON(&encoder_queue_);
+    sink_ = sink;
+  });
 }
 
-int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
-  video_sender_.RegisterExternalEncoder(nullptr, pl_type, false);
-  return 0;
+void ViEEncoder::SetStartBitrate(int start_bitrate_bps) {
+  encoder_queue_.PostTask([this, start_bitrate_bps] {
+    RTC_DCHECK_RUN_ON(&encoder_queue_);
+    encoder_start_bitrate_bps_ = start_bitrate_bps;
+  });
 }
 
-void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec,
-                            size_t max_data_payload_length) {
+void ViEEncoder::ConfigureEncoder(const VideoEncoderConfig& config,
+                                  size_t max_data_payload_length) {
+  VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
+      config, settings_.payload_name, settings_.payload_type);
+  encoder_queue_.PostTask([this, video_codec, max_data_payload_length] {
+    ConfigureEncoderInternal(video_codec, max_data_payload_length);
+  });
+  return;
+}
+
+void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec,
+                                          size_t max_data_payload_length) {
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
+  RTC_DCHECK_GE(encoder_start_bitrate_bps_, 0);
+  RTC_DCHECK(sink_);
+
   // Setting target width and height for VPM.
   RTC_CHECK_EQ(VPM_OK,
                vp_->SetTargetResolution(video_codec.width, video_codec.height,
                                         video_codec.maxFramerate));
-  {
-    rtc::CritScope lock(&data_cs_);
-    encoder_config_ = video_codec;
-  }
+
+  encoder_config_ = video_codec;
+  encoder_config_.startBitrate = encoder_start_bitrate_bps_ / 1000;
+  encoder_config_.startBitrate =
+      std::max(encoder_config_.startBitrate, video_codec.minBitrate);
+  encoder_config_.startBitrate =
+      std::min(encoder_config_.startBitrate, video_codec.maxBitrate);
 
   bool success = video_sender_.RegisterSendCodec(
-                     &video_codec, number_of_cores_,
+                     &encoder_config_, number_of_cores_,
                      static_cast<uint32_t>(max_data_payload_length)) == VCM_OK;
 
   if (!success) {
@@ -110,15 +361,58 @@
   }
 }
 
+void ViEEncoder::IncomingCapturedFrame(const VideoFrame& video_frame) {
+  RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
+  stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());
+
+  VideoFrame incoming_frame = video_frame;
+
+  // Local time in webrtc time base.
+  int64_t current_time = clock_->TimeInMilliseconds();
+  incoming_frame.set_render_time_ms(current_time);
+
+  // Capture time may come from clock with an offset and drift from clock_.
+  int64_t capture_ntp_time_ms;
+  if (video_frame.ntp_time_ms() != 0) {
+    capture_ntp_time_ms = video_frame.ntp_time_ms();
+  } else if (video_frame.render_time_ms() != 0) {
+    capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_;
+  } else {
+    capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
+  }
+  incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);
+
+  // Convert NTP time, in ms, to RTP timestamp.
+  const int kMsToRtpTimestamp = 90;
+  incoming_frame.set_timestamp(
+      kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));
+
+  if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
+    // We don't allow the same capture time for two frames, drop this one.
+    LOG(LS_WARNING) << "Same/old NTP timestamp ("
+                    << incoming_frame.ntp_time_ms()
+                    << " <= " << last_captured_timestamp_
+                    << ") for incoming frame. Dropping.";
+    return;
+  }
+
+  last_captured_timestamp_ = incoming_frame.ntp_time_ms();
+  overuse_detector_.FrameCaptured(incoming_frame);
+  encoder_queue_.PostTask(
+      std::unique_ptr<rtc::QueuedTask>(new EncodeTask(incoming_frame, this)));
+}
+
 bool ViEEncoder::EncoderPaused() const {
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   // Pause video if paused by caller or as long as the network is down or the
   // pacer queue has grown too large in buffered mode.
   // If the pacer queue has grown too large or the network is down,
   // last_observed_bitrate_bps_ will be 0.
-  return video_suspended_ || last_observed_bitrate_bps_ == 0;
+  return last_observed_bitrate_bps_ == 0;
 }
 
 void ViEEncoder::TraceFrameDropStart() {
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   // Start trace event only on the first frame after encoder is paused.
   if (!encoder_paused_and_dropped_frame_) {
     TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
@@ -128,6 +422,7 @@
 }
 
 void ViEEncoder::TraceFrameDropEnd() {
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   // End trace event on first frame after encoder resumes, if frame was dropped.
   if (encoder_paused_and_dropped_frame_) {
     TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
@@ -136,17 +431,15 @@
 }
 
 void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
-  VideoCodecType codec_type;
-  {
-    rtc::CritScope lock(&data_cs_);
-    time_of_last_frame_activity_ms_ = rtc::TimeMillis();
-    if (EncoderPaused()) {
-      TraceFrameDropStart();
-      return;
-    }
-    TraceFrameDropEnd();
-    codec_type = encoder_config_.codecType;
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
+  if (pre_encode_callback_)
+    pre_encode_callback_->OnFrame(video_frame);
+
+  if (EncoderPaused()) {
+    TraceFrameDropStart();
+    return;
   }
+  TraceFrameDropEnd();
 
   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
                           "Encode");
@@ -161,11 +454,10 @@
     }
   }
 
-  if (codec_type == webrtc::kVideoCodecVP8) {
+  if (encoder_config_.codecType == webrtc::kVideoCodecVP8) {
     webrtc::CodecSpecificInfo codec_specific_info;
     codec_specific_info.codecType = webrtc::kVideoCodecVP8;
-    {
-      rtc::CritScope lock(&data_cs_);
+
       codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
           has_received_rpsi_;
       codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
@@ -176,7 +468,6 @@
           picture_id_sli_;
       has_received_sli_ = false;
       has_received_rpsi_ = false;
-    }
 
     video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info);
     return;
@@ -185,22 +476,21 @@
 }
 
 void ViEEncoder::SendKeyFrame() {
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask([this] { SendKeyFrame(); });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   video_sender_.IntraFrameRequest(0);
 }
 
-int64_t ViEEncoder::time_of_last_frame_activity_ms() {
-  rtc::CritScope lock(&data_cs_);
-  return time_of_last_frame_activity_ms_;
-}
-
 EncodedImageCallback::Result ViEEncoder::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific_info,
     const RTPFragmentationHeader* fragmentation) {
-  {
-    rtc::CritScope lock(&data_cs_);
-    time_of_last_frame_activity_ms_ = rtc::TimeMillis();
-  }
+  // OnEncodedImage is called on whatever thread the real encoder
+  // implementation runs on. In the case of hardware encoders, there might be
+  // several encoders running in parallel on different threads.
   if (stats_proxy_) {
     stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info);
   }
@@ -208,28 +498,43 @@
   EncodedImageCallback::Result result =
       sink_->OnEncodedImage(encoded_image, codec_specific_info, fragmentation);
 
-  overuse_detector_->FrameSent(encoded_image._timeStamp);
+  overuse_detector_.FrameSent(encoded_image._timeStamp);
   return result;
 }
 
 void ViEEncoder::SendStatistics(uint32_t bit_rate, uint32_t frame_rate) {
+  RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
   if (stats_proxy_)
     stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate);
 }
 
 void ViEEncoder::OnReceivedSLI(uint8_t picture_id) {
-  rtc::CritScope lock(&data_cs_);
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask([this, picture_id] { OnReceivedSLI(picture_id); });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   picture_id_sli_ = picture_id;
   has_received_sli_ = true;
 }
 
 void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) {
-  rtc::CritScope lock(&data_cs_);
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask([this, picture_id] { OnReceivedRPSI(picture_id); });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   picture_id_rpsi_ = picture_id;
   has_received_rpsi_ = true;
 }
 
 void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask(
+        [this, stream_index] { OnReceivedIntraFrameRequest(stream_index); });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   // Key frame request from remote side, signal to VCM.
   TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
   video_sender_.IntraFrameRequest(stream_index);
@@ -238,29 +543,29 @@
 void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
                                   uint8_t fraction_lost,
                                   int64_t round_trip_time_ms) {
+  if (!encoder_queue_.IsCurrent()) {
+    encoder_queue_.PostTask(
+        [this, bitrate_bps, fraction_lost, round_trip_time_ms] {
+          OnBitrateUpdated(bitrate_bps, fraction_lost, round_trip_time_ms);
+        });
+    return;
+  }
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
+  RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active.";
+
   LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps
                   << " packet loss " << static_cast<int>(fraction_lost)
                   << " rtt " << round_trip_time_ms;
+
   video_sender_.SetChannelParameters(bitrate_bps, fraction_lost,
                                      round_trip_time_ms);
-  bool video_suspension_changed;
+
+  encoder_start_bitrate_bps_ =
+      bitrate_bps != 0 ? bitrate_bps : encoder_start_bitrate_bps_;
   bool video_is_suspended = bitrate_bps == 0;
-  {
-    rtc::CritScope lock(&data_cs_);
-    last_observed_bitrate_bps_ = bitrate_bps;
-    video_suspension_changed = video_suspended_ != video_is_suspended;
-    video_suspended_ = video_is_suspended;
-    // Set |time_of_last_frame_activity_ms_| to now if this is the first time
-    // the encoder is supposed to produce encoded frames.
-    // TODO(perkj): Remove this hack. It is here to avoid a race that the
-    // encoder report that it has timed out before it has processed the first
-    // frame.
-    if (last_observed_bitrate_bps_ != 0 &&
-        time_of_last_frame_activity_ms_ ==
-            std::numeric_limits<int64_t>::max()) {
-      time_of_last_frame_activity_ms_ = rtc::TimeMillis();
-    }
-  }
+  bool video_suspension_changed =
+      video_is_suspended != (last_observed_bitrate_bps_ == 0);
+  last_observed_bitrate_bps_ = bitrate_bps;
 
   if (stats_proxy_ && video_suspension_changed) {
     LOG(LS_INFO) << "Video suspend state changed to: "
@@ -269,4 +574,19 @@
   }
 }
 
+void ViEEncoder::OveruseDetected() {
+  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
+  // TODO(perkj): When ViEEncoder inherits rtc::VideoSink instead of
+  // VideoCaptureInput, |load_observer_| should be removed and overuse be
+  // expressed as rtc::VideoSinkWants instead.
+  if (load_observer_)
+    load_observer_->OnLoadUpdate(LoadObserver::kOveruse);
+}
+
+void ViEEncoder::NormalUsage() {
+  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
+  if (load_observer_)
+    load_observer_->OnLoadUpdate(LoadObserver::kUnderuse);
+}
+
 }  // namespace webrtc
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
index b302f9e..7408a53 100644
--- a/webrtc/video/vie_encoder.h
+++ b/webrtc/video/vie_encoder.h
@@ -16,80 +16,72 @@
 #include <vector>
 
 #include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/sequenced_task_checker.h"
+#include "webrtc/base/task_queue.h"
+#include "webrtc/call.h"
 #include "webrtc/common_types.h"
-#include "webrtc/video_encoder.h"
 #include "webrtc/media/base/videosinkinterface.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "webrtc/modules/video_coding/include/video_coding_defines.h"
 #include "webrtc/modules/video_coding/video_coding_impl.h"
 #include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/system_wrappers/include/atomic32.h"
+#include "webrtc/video/overuse_frame_detector.h"
+#include "webrtc/video_encoder.h"
+#include "webrtc/video_send_stream.h"
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
 
-class Config;
-class EncodedImageCallback;
-class OveruseFrameDetector;
-class PacedSender;
 class ProcessThread;
 class SendStatisticsProxy;
-class ViEBitrateObserver;
-class ViEEffectFilter;
-class VideoEncoder;
 
 // VieEncoder represent a video encoder that accepts raw video frames as input
 // and produces an encoded bit stream.
 // Usage:
-// 1. Instantiate
-// 2. Call Init
-// 3. Call RegisterExternalEncoder if available.
-// 4. Call SetEncoder with the codec settings and the object that shall receive
-//    the encoded bit stream.
-// 5. For each available raw video frame call EncodeVideoFrame.
-class ViEEncoder : public EncodedImageCallback,
-                   public VCMSendStatisticsCallback {
+//  Instantiate.
+//  Call SetStartRate and SetSink.
+//  Call ConfigureEncoder with the codec settings.
+//  Provide frames to encode by calling IncomingCapturedFrame.
+//  Call Stop() when done.
+class ViEEncoder : public VideoCaptureInput,
+                   public EncodedImageCallback,
+                   public VCMSendStatisticsCallback,
+                   public CpuOveruseObserver {
  public:
-  friend class ViEBitrateObserver;
-
   ViEEncoder(uint32_t number_of_cores,
-             ProcessThread* module_process_thread,
              SendStatisticsProxy* stats_proxy,
-             OveruseFrameDetector* overuse_detector,
-             EncodedImageCallback* sink);
+             const webrtc::VideoSendStream::Config::EncoderSettings& settings,
+             rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
+             LoadObserver* overuse_callback,
+             EncodedFrameObserver* encoder_timing);
   ~ViEEncoder();
+  // RegisterProcessThread registers |module_process_thread| with those objects
+  // that use it. Registration has to happen on the thread where
+  // |module_process_thread| was created (libjingle's worker thread).
+  // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue.
+  void RegisterProcessThread(ProcessThread* module_process_thread);
+  void DeRegisterProcessThread();
 
-  vcm::VideoSender* video_sender();
+  void SetSink(EncodedImageCallback* sink);
 
-  // Returns the id of the owning channel.
-  int Owner() const;
+  // TODO(perkj): Can we remove VideoCodec.startBitrate ?
+  void SetStartBitrate(int start_bitrate_bps);
 
-  // Codec settings.
-  int32_t RegisterExternalEncoder(VideoEncoder* encoder,
-                                  uint8_t pl_type,
-                                  bool internal_source);
-  int32_t DeRegisterExternalEncoder(uint8_t pl_type);
-  void SetEncoder(const VideoCodec& video_codec,
-                  size_t max_data_payload_length);
+  void ConfigureEncoder(const VideoEncoderConfig& config,
+                        size_t max_data_payload_length);
 
-  void EncodeVideoFrame(const VideoFrame& video_frame);
+  // Permanently stop encoding. After this method has returned, it is
+  // guaranteed that no encoded frames will be delivered to the sink.
+  void Stop();
+
+  // Implements VideoCaptureInput.
+  // TODO(perkj): Refactor ViEEncoder to inherit rtc::VideoSink instead of
+  // VideoCaptureInput.
+  void IncomingCapturedFrame(const VideoFrame& video_frame) override;
+
   void SendKeyFrame();
 
-  // Returns the time when the encoder last received an input frame or produced
-  // an encoded frame.
-  int64_t time_of_last_frame_activity_ms();
-
-
-  // Implements EncodedImageCallback.
-  EncodedImageCallback::Result OnEncodedImage(
-      const EncodedImage& encoded_image,
-      const CodecSpecificInfo* codec_specific_info,
-      const RTPFragmentationHeader* fragmentation) override;
-
-  // Implements VideoSendStatisticsCallback.
-  void SendStatistics(uint32_t bit_rate, uint32_t frame_rate) override;
-
   // virtual to test EncoderStateFeedback with mocks.
   virtual void OnReceivedIntraFrameRequest(size_t stream_index);
   virtual void OnReceivedSLI(uint8_t picture_id);
@@ -100,37 +92,68 @@
                         int64_t round_trip_time_ms);
 
  private:
-  bool EncoderPaused() const EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
-  void TraceFrameDropStart() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
-  void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
+  class EncodeTask;
+
+  void ConfigureEncoderInternal(const VideoCodec& video_codec,
+                                size_t max_data_payload_length);
+
+  // Implements VideoSendStatisticsCallback.
+  void SendStatistics(uint32_t bit_rate,
+                      uint32_t frame_rate) override;
+
+  void EncodeVideoFrame(const VideoFrame& frame);
+
+  // Implements EncodedImageCallback.
+  EncodedImageCallback::Result OnEncodedImage(
+      const EncodedImage& encoded_image,
+      const CodecSpecificInfo* codec_specific_info,
+      const RTPFragmentationHeader* fragmentation) override;
+
+  // webrtc::CpuOveruseObserver implementation.
+  void OveruseDetected() override;
+  void NormalUsage() override;
+
+  bool EncoderPaused() const;
+  void TraceFrameDropStart();
+  void TraceFrameDropEnd();
+
+  rtc::Event shutdown_event_;
 
   const uint32_t number_of_cores_;
-  EncodedImageCallback* const sink_;
+  EncodedImageCallback* sink_;
+  const VideoSendStream::Config::EncoderSettings settings_;
 
   const std::unique_ptr<VideoProcessing> vp_;
-  vcm::VideoSender video_sender_;
-
-  rtc::CriticalSection data_cs_;
+  vcm::VideoSender video_sender_ ACCESS_ON(&encoder_queue_);
+  OveruseFrameDetector overuse_detector_;
+  LoadObserver* const load_observer_ ACCESS_ON(&module_process_thread_checker_);
 
   SendStatisticsProxy* const stats_proxy_;
-  OveruseFrameDetector* const overuse_detector_;
-
-  // The time we last received an input frame or encoded frame. This is used to
-  // track when video is stopped long enough that we also want to stop sending
-  // padding.
-  int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
-  VideoCodec encoder_config_ GUARDED_BY(data_cs_);
-  uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
-  bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
-
+  rtc::VideoSinkInterface<VideoFrame>* const pre_encode_callback_;
   ProcessThread* module_process_thread_;
+  rtc::ThreadChecker module_process_thread_checker_;
 
-  bool has_received_sli_ GUARDED_BY(data_cs_);
-  uint8_t picture_id_sli_ GUARDED_BY(data_cs_);
-  bool has_received_rpsi_ GUARDED_BY(data_cs_);
-  uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
+  VideoCodec encoder_config_ ACCESS_ON(&encoder_queue_);
 
-  bool video_suspended_ GUARDED_BY(data_cs_);
+  int encoder_start_bitrate_bps_ ACCESS_ON(&encoder_queue_);
+  uint32_t last_observed_bitrate_bps_ ACCESS_ON(&encoder_queue_);
+  bool encoder_paused_and_dropped_frame_ ACCESS_ON(&encoder_queue_);
+  bool has_received_sli_ ACCESS_ON(&encoder_queue_);
+  uint8_t picture_id_sli_ ACCESS_ON(&encoder_queue_);
+  bool has_received_rpsi_ ACCESS_ON(&encoder_queue_);
+  uint64_t picture_id_rpsi_ ACCESS_ON(&encoder_queue_);
+  Clock* const clock_;
+
+  rtc::RaceChecker incoming_frame_race_checker_;
+  Atomic32 posted_frames_waiting_for_encode_;
+  // Used to make sure incoming time stamp is increasing for every frame.
+  int64_t last_captured_timestamp_ GUARDED_BY(incoming_frame_race_checker_);
+  // Delta used for translating between NTP and internal timestamps.
+  const int64_t delta_ntp_internal_ms_;
+
+  // All public methods are proxied to |encoder_queue_|. It must be destroyed
+  // first to make sure no tasks are run that use other members.
+  rtc::TaskQueue encoder_queue_;
 };
 
 }  // namespace webrtc
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
new file mode 100644
index 0000000..698dd3a
--- /dev/null
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/test/encoder_settings.h"
+#include "webrtc/test/fake_encoder.h"
+#include "webrtc/video/send_statistics_proxy.h"
+#include "webrtc/video/vie_encoder.h"
+
+namespace webrtc {
+
+class ViEEncoderTest : public ::testing::Test {
+ public:
+  static const int kDefaultTimeoutMs = 30 * 1000;
+
+  ViEEncoderTest()
+      : video_send_config_(VideoSendStream::Config(nullptr)),
+        fake_encoder_(),
+        stats_proxy_(Clock::GetRealTimeClock(),
+                     video_send_config_,
+                     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo),
+        sink_(&fake_encoder_) {}
+
+  void SetUp() override {
+    video_send_config_ = VideoSendStream::Config(nullptr);
+    video_send_config_.encoder_settings.encoder = &fake_encoder_;
+    video_send_config_.encoder_settings.payload_name = "FAKE";
+    video_send_config_.encoder_settings.payload_type = 125;
+
+    video_encoder_config_.streams = test::CreateVideoStreams(1);
+
+    vie_encoder_.reset(new ViEEncoder(
+        1 /* number_of_cores */, &stats_proxy_,
+        video_send_config_.encoder_settings, nullptr /* pre_encode_callback */,
+        nullptr /* overuse_callback */, nullptr /* encoder_timing */));
+    vie_encoder_->SetSink(&sink_);
+    vie_encoder_->SetStartBitrate(10000);
+    vie_encoder_->ConfigureEncoder(video_encoder_config_, 1440);
+  }
+
+  VideoFrame CreateFrame(int64_t ntp_ts, rtc::Event* destruction_event) const {
+    class TestBuffer : public webrtc::I420Buffer {
+     public:
+      TestBuffer(rtc::Event* event, int width, int height)
+          : I420Buffer(width, height), event_(event) {}
+
+     private:
+      friend class rtc::RefCountedObject<TestBuffer>;
+      ~TestBuffer() override {
+        if (event_)
+          event_->Set();
+      }
+      rtc::Event* const event_;
+    };
+
+    VideoFrame frame(
+        new rtc::RefCountedObject<TestBuffer>(
+            destruction_event,
+            static_cast<int>(video_encoder_config_.streams[0].width),
+            static_cast<int>(video_encoder_config_.streams[0].height)),
+        99, 99, kVideoRotation_0);
+    frame.set_ntp_time_ms(ntp_ts);
+    return frame;
+  }
+
+  class TestEncoder : public test::FakeEncoder {
+   public:
+    TestEncoder()
+        : FakeEncoder(Clock::GetRealTimeClock()),
+          continue_encode_event_(false, false) {}
+
+    int32_t Encode(const VideoFrame& input_image,
+                   const CodecSpecificInfo* codec_specific_info,
+                   const std::vector<FrameType>* frame_types) override {
+      bool block_encode;
+      {
+        rtc::CritScope lock(&crit_);
+        EXPECT_GT(input_image.timestamp(), timestamp_);
+        EXPECT_GT(input_image.ntp_time_ms(), ntp_time_ms_);
+        EXPECT_EQ(input_image.timestamp(), input_image.ntp_time_ms() * 90);
+
+        timestamp_ = input_image.timestamp();
+        ntp_time_ms_ = input_image.ntp_time_ms();
+        block_encode = block_next_encode_;
+        block_next_encode_ = false;
+      }
+      int32_t result =
+          FakeEncoder::Encode(input_image, codec_specific_info, frame_types);
+      if (block_encode)
+        continue_encode_event_.Wait(kDefaultTimeoutMs);
+      return result;
+    }
+
+    void BlockNextEncode() {
+      rtc::CritScope lock(&crit_);
+      block_next_encode_ = true;
+    }
+
+    void ContinueEncode() { continue_encode_event_.Set(); }
+
+    void CheckLastTimeStampsMatch(int64_t ntp_time_ms,
+                                  uint32_t timestamp) const {
+      rtc::CritScope lock(&crit_);
+      EXPECT_EQ(timestamp_, timestamp);
+      EXPECT_EQ(ntp_time_ms_, ntp_time_ms);
+    }
+
+   private:
+    rtc::CriticalSection crit_;
+    bool block_next_encode_ = false;
+    rtc::Event continue_encode_event_;
+    uint32_t timestamp_ = 0;
+    int64_t ntp_time_ms_ = 0;
+  };
+
+  class TestSink : public EncodedImageCallback {
+   public:
+    explicit TestSink(TestEncoder* test_encoder)
+        : test_encoder_(test_encoder), encoded_frame_event_(false, false) {}
+
+    int32_t Encoded(const EncodedImage& encoded_image,
+                    const CodecSpecificInfo* codec_specific_info,
+                    const RTPFragmentationHeader* fragmentation) override {
+      rtc::CritScope lock(&crit_);
+      EXPECT_TRUE(expect_frames_);
+      timestamp_ = encoded_image._timeStamp;
+      encoded_frame_event_.Set();
+      return 0;
+    }
+
+    void WaitForEncodedFrame(int64_t expected_ntp_time) {
+      uint32_t timestamp = 0;
+      encoded_frame_event_.Wait(kDefaultTimeoutMs);
+      {
+        rtc::CritScope lock(&crit_);
+        timestamp = timestamp_;
+      }
+      test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
+    }
+
+    void SetExpectNoFrames() {
+      rtc::CritScope lock(&crit_);
+      expect_frames_ = false;
+    }
+
+   private:
+    rtc::CriticalSection crit_;
+    TestEncoder* test_encoder_;
+    rtc::Event encoded_frame_event_;
+    uint32_t timestamp_ = 0;
+    bool expect_frames_ = true;
+  };
+
+  VideoSendStream::Config video_send_config_;
+  VideoEncoderConfig video_encoder_config_;
+  TestEncoder fake_encoder_;
+  SendStatisticsProxy stats_proxy_;
+  TestSink sink_;
+  std::unique_ptr<ViEEncoder> vie_encoder_;
+};
+
+TEST_F(ViEEncoderTest, EncodeOneFrame) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  rtc::Event frame_destroyed_event(false, false);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
+  sink_.WaitForEncodedFrame(1);
+  frame_destroyed_event.Wait(kDefaultTimeoutMs);
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) {
+  // Dropped since no target bitrate has been set.
+  rtc::Event frame_destroyed_event(false, false);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
+  frame_destroyed_event.Wait(kDefaultTimeoutMs);
+
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
+  sink_.WaitForEncodedFrame(2);
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
+  sink_.WaitForEncodedFrame(1);
+
+  vie_encoder_->OnBitrateUpdated(0, 0, 0);
+  // Dropped since bitrate is zero.
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
+
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr));
+  sink_.WaitForEncodedFrame(3);
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
+  sink_.WaitForEncodedFrame(1);
+
+  // This frame will be dropped since it has the same ntp timestamp.
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
+
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
+  sink_.WaitForEncodedFrame(2);
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, DropsFrameAfterStop) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
+  sink_.WaitForEncodedFrame(1);
+
+  vie_encoder_->Stop();
+  sink_.SetExpectNoFrames();
+  rtc::Event frame_destroyed_event(false, false);
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event));
+  frame_destroyed_event.Wait(kDefaultTimeoutMs);
+}
+
+TEST_F(ViEEncoderTest, DropsPendingFramesOnSlowEncode) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  fake_encoder_.BlockNextEncode();
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(1, nullptr));
+  sink_.WaitForEncodedFrame(1);
+  // Here, the encoder thread will be blocked in the TestEncoder waiting for a
+  // call to ContinueEncode.
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(2, nullptr));
+  vie_encoder_->IncomingCapturedFrame(CreateFrame(3, nullptr));
+  fake_encoder_.ContinueEncode();
+  sink_.WaitForEncodedFrame(3);
+
+  vie_encoder_->Stop();
+}
+
+}  // namespace webrtc
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index 6b00e69..af162de 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -48,8 +48,6 @@
       'video/stats_counter.h',
       'video/stream_synchronization.cc',
       'video/stream_synchronization.h',
-      'video/video_capture_input.cc',
-      'video/video_capture_input.h',
       'video/video_decoder.cc',
       'video/video_encoder.cc',
       'video/video_receive_stream.cc',
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index b79f6dd..afdec43 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -13,13 +13,13 @@
 
 #include <map>
 #include <string>
+#include <vector>
 
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/include/frame_callback.h"
 #include "webrtc/config.h"
 #include "webrtc/media/base/videosinkinterface.h"
 #include "webrtc/transport.h"
-#include "webrtc/media/base/videosinkinterface.h"
 
 namespace webrtc {
 
@@ -72,13 +72,28 @@
   };
 
   struct Config {
+   public:
     Config() = delete;
+    Config(Config&&) = default;
     explicit Config(Transport* send_transport)
         : send_transport(send_transport) {}
 
+    Config& operator=(Config&&) = default;
+    Config& operator=(const Config&) = delete;
+
+    // Mostly used by tests.  Avoid creating copies if you can.
+    Config Copy() const { return Config(*this); }
+
     std::string ToString() const;
 
     struct EncoderSettings {
+      EncoderSettings() = default;
+      EncoderSettings(std::string payload_name,
+                      int payload_type,
+                      VideoEncoder* encoder)
+          : payload_name(std::move(payload_name)),
+            payload_type(payload_type),
+            encoder(encoder) {}
       std::string ToString() const;
 
       std::string payload_name;
@@ -151,10 +166,6 @@
     // than the measuring window, since the sample data will have been dropped.
     EncodedFrameObserver* post_encode_callback = nullptr;
 
-    // Renderer for local preview. The local renderer will be called even if
-    // sending hasn't started. 'nullptr' disables local rendering.
-    rtc::VideoSinkInterface<VideoFrame>* local_renderer = nullptr;
-
     // Expected delay needed by the renderer, i.e. the frame will be delivered
     // this many milliseconds, if possible, earlier than expected render time.
     // Only valid if |local_renderer| is set.
@@ -168,6 +179,11 @@
     // below the minimum configured bitrate. If this variable is false, the
     // stream may send at a rate higher than the estimated available bitrate.
     bool suspend_below_min_bitrate = false;
+
+   private:
+    // Access to the copy constructor is private to force use of the Copy()
+    // method for those exceptional cases where we do use it.
+    Config(const Config&) = default;
   };
 
   // Starts stream activity.
@@ -184,7 +200,7 @@
   // Set which streams to send. Must have at least as many SSRCs as configured
   // in the config. Encoder settings are passed on to the encoder instance along
   // with the VideoStream settings.
-  virtual void ReconfigureVideoEncoder(const VideoEncoderConfig& config) = 0;
+  virtual void ReconfigureVideoEncoder(VideoEncoderConfig config) = 0;
 
   virtual Stats GetStats() = 0;
 
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index 93c6c88..1765ba7 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -376,10 +376,10 @@
         'video/send_statistics_proxy_unittest.cc',
         'video/stats_counter_unittest.cc',
         'video/stream_synchronization_unittest.cc',
-        'video/video_capture_input_unittest.cc',
         'video/video_decoder_unittest.cc',
         'video/video_encoder_unittest.cc',
         'video/video_send_stream_tests.cc',
+        'video/vie_encoder_unittest.cc',
         'video/vie_remb_unittest.cc',
       ],
       'dependencies': [