Move frame input (ViECapturer) to webrtc/video/.

Renames ViECapturer to VideoCaptureInput and initializes several
parameters on construction instead of setters.

Also removes old ViECapturer-related suppressions: a TSan deadlock
suppression and two valgrind memcheck leak suppressions.

BUG=1695, 2999
R=asapersson@webrtc.org, mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/53559004.

Cr-Commit-Position: refs/heads/master@{#9508}
diff --git a/talk/media/webrtc/fakewebrtccall.cc b/talk/media/webrtc/fakewebrtccall.cc
index 00f1864..9f6a0ff 100644
--- a/talk/media/webrtc/fakewebrtccall.cc
+++ b/talk/media/webrtc/fakewebrtccall.cc
@@ -145,7 +145,7 @@
   return true;
 }
 
-webrtc::VideoSendStreamInput* FakeVideoSendStream::Input() {
+webrtc::VideoCaptureInput* FakeVideoSendStream::Input() {
   return this;
 }
 
diff --git a/talk/media/webrtc/fakewebrtccall.h b/talk/media/webrtc/fakewebrtccall.h
index 22b805b..c500416 100644
--- a/talk/media/webrtc/fakewebrtccall.h
+++ b/talk/media/webrtc/fakewebrtccall.h
@@ -55,7 +55,7 @@
 };
 
 class FakeVideoSendStream : public webrtc::VideoSendStream,
-                            public webrtc::VideoSendStreamInput {
+                            public webrtc::VideoCaptureInput {
  public:
   FakeVideoSendStream(const webrtc::VideoSendStream::Config& config,
                       const webrtc::VideoEncoderConfig& encoder_config);
@@ -79,7 +79,7 @@
   bool ReconfigureVideoEncoder(
       const webrtc::VideoEncoderConfig& config) override;
 
-  webrtc::VideoSendStreamInput* Input() override;
+  webrtc::VideoCaptureInput* Input() override;
 
   void Start() override;
   void Stop() override;
diff --git a/talk/media/webrtc/fakewebrtcvideoengine.h b/talk/media/webrtc/fakewebrtcvideoengine.h
index 8006677..755a3de 100644
--- a/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -56,8 +56,6 @@
 // renderer for a channel or it is adding a renderer for a capturer.
 static const int kViEChannelIdBase = 0;
 static const int kViEChannelIdMax = 1000;
-static const int kViECaptureIdBase = 10000;  // Make sure there is a gap.
-static const int kViECaptureIdMax = 11000;
 
 // Fake class for mocking out webrtc::VideoDecoder
 class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder {
diff --git a/tools/valgrind-webrtc/memcheck/suppressions.txt b/tools/valgrind-webrtc/memcheck/suppressions.txt
index 4e290aa..97f3240 100644
--- a/tools/valgrind-webrtc/memcheck/suppressions.txt
+++ b/tools/valgrind-webrtc/memcheck/suppressions.txt
@@ -133,19 +133,6 @@
    fun:_ZN3rtc41unstarted_task_test_DoNotDeleteTask2_Test8TestBodyEv
 }
 {
-   bug_716
-   Memcheck:Leak
-   fun:_Znw*
-   fun:_ZN6webrtc11ThreadPosix6CreateEPFbPvES1_NS_14ThreadPriorityEPKc
-   fun:_ZN6webrtc13ThreadWrapper12CreateThreadEPFbPvES1_NS_14ThreadPriorityEPKc
-   fun:_ZN6webrtc18videocapturemodule22VideoCaptureModuleV4L212StartCaptureERKNS_22VideoCaptureCapabilityE
-   fun:_ZN6webrtc11ViECapturer5StartERKNS_17CaptureCapabilityE
-   fun:_ZN6webrtc14ViECaptureImpl12StartCaptureEiRKNS_17CaptureCapabilityE
-   fun:_ZN15TbCaptureDeviceC1ER12TbInterfaces
-   fun:_ZN12_GLOBAL__N_114ViERtpFuzzTest5SetUpEv
-}
-
-{
    bug_329_1
    Memcheck:Unaddressable
    fun:I422ToARGBRow_SSSE3
@@ -161,16 +148,6 @@
 }
 
 {
-   bug_329_2
-   Memcheck:Leak
-   fun:_Znw*
-   fun:_ZN6webrtc18videocapturemodule16VideoCaptureImpl16CreateDeviceInfoEi
-   fun:_ZN6webrtc19VideoCaptureFactory16CreateDeviceInfoEi
-   fun:_ZN11ViEAutoTest22ViECaptureStandardTestEv
-   fun:_ZN12_GLOBAL__N_160ViEStandardIntegrationTest_RunsCaptureTestWithoutErrors_Test8TestBodyEv
-}
-
-{
    bug_329_3
    Memcheck:Unaddressable
    fun:I422ToARGBRow_SSSE3
diff --git a/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc b/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
index f2ad6a5..ca006ee 100644
--- a/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
+++ b/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
@@ -73,7 +73,6 @@
 "deadlock:webrtc::RTCPReceiver::SetSsrcs\n"
 "deadlock:webrtc::test::UdpSocketManagerPosixImpl::RemoveSocket\n"
 "deadlock:webrtc::vcm::VideoReceiver::RegisterPacketRequestCallback\n"
-"deadlock:webrtc::ViECaptureImpl::ConnectCaptureDevice\n"
 "deadlock:webrtc::ViEChannel::StartSend\n"
 "deadlock:webrtc::ViEEncoder::OnLocalSsrcChanged\n"
 
diff --git a/webrtc/test/frame_generator_capturer.cc b/webrtc/test/frame_generator_capturer.cc
index 66d9a9b..fcab602 100644
--- a/webrtc/test/frame_generator_capturer.cc
+++ b/webrtc/test/frame_generator_capturer.cc
@@ -21,12 +21,11 @@
 namespace webrtc {
 namespace test {
 
-FrameGeneratorCapturer* FrameGeneratorCapturer::Create(
-    VideoSendStreamInput* input,
-    size_t width,
-    size_t height,
-    int target_fps,
-    Clock* clock) {
+FrameGeneratorCapturer* FrameGeneratorCapturer::Create(VideoCaptureInput* input,
+                                                       size_t width,
+                                                       size_t height,
+                                                       int target_fps,
+                                                       Clock* clock) {
   FrameGeneratorCapturer* capturer = new FrameGeneratorCapturer(
       clock, input, FrameGenerator::CreateChromaGenerator(width, height),
       target_fps);
@@ -39,7 +38,7 @@
 }
 
 FrameGeneratorCapturer* FrameGeneratorCapturer::CreateFromYuvFile(
-    VideoSendStreamInput* input,
+    VideoCaptureInput* input,
     const std::string& file_name,
     size_t width,
     size_t height,
@@ -59,7 +58,7 @@
 }
 
 FrameGeneratorCapturer::FrameGeneratorCapturer(Clock* clock,
-                                               VideoSendStreamInput* input,
+                                               VideoCaptureInput* input,
                                                FrameGenerator* frame_generator,
                                                int target_fps)
     : VideoCapturer(input),
diff --git a/webrtc/test/frame_generator_capturer.h b/webrtc/test/frame_generator_capturer.h
index a1ca336..86df689 100644
--- a/webrtc/test/frame_generator_capturer.h
+++ b/webrtc/test/frame_generator_capturer.h
@@ -29,13 +29,13 @@
 
 class FrameGeneratorCapturer : public VideoCapturer {
  public:
-  static FrameGeneratorCapturer* Create(VideoSendStreamInput* input,
+  static FrameGeneratorCapturer* Create(VideoCaptureInput* input,
                                         size_t width,
                                         size_t height,
                                         int target_fps,
                                         Clock* clock);
 
-  static FrameGeneratorCapturer* CreateFromYuvFile(VideoSendStreamInput* input,
+  static FrameGeneratorCapturer* CreateFromYuvFile(VideoCaptureInput* input,
                                                    const std::string& file_name,
                                                    size_t width,
                                                    size_t height,
@@ -49,7 +49,7 @@
   int64_t first_frame_capture_time() const { return first_frame_capture_time_; }
 
   FrameGeneratorCapturer(Clock* clock,
-                         VideoSendStreamInput* input,
+                         VideoCaptureInput* input,
                          FrameGenerator* frame_generator,
                          int target_fps);
   bool Init();
diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc
index dbb28ab..c37140f 100644
--- a/webrtc/test/vcm_capturer.cc
+++ b/webrtc/test/vcm_capturer.cc
@@ -16,8 +16,9 @@
 namespace webrtc {
 namespace test {
 
-VcmCapturer::VcmCapturer(webrtc::VideoSendStreamInput* input)
-    : VideoCapturer(input), started_(false), vcm_(NULL) {}
+VcmCapturer::VcmCapturer(webrtc::VideoCaptureInput* input)
+    : VideoCapturer(input), started_(false), vcm_(NULL) {
+}
 
 bool VcmCapturer::Init(size_t width, size_t height, size_t target_fps) {
   VideoCaptureModule::DeviceInfo* device_info =
@@ -53,8 +54,9 @@
   return true;
 }
 
-VcmCapturer* VcmCapturer::Create(VideoSendStreamInput* input,
-                                 size_t width, size_t height,
+VcmCapturer* VcmCapturer::Create(VideoCaptureInput* input,
+                                 size_t width,
+                                 size_t height,
                                  size_t target_fps) {
   VcmCapturer* vcm__capturer = new VcmCapturer(input);
   if (!vcm__capturer->Init(width, height, target_fps)) {
diff --git a/webrtc/test/vcm_capturer.h b/webrtc/test/vcm_capturer.h
index 3b5d9e3..93321f0 100644
--- a/webrtc/test/vcm_capturer.h
+++ b/webrtc/test/vcm_capturer.h
@@ -20,8 +20,10 @@
 
 class VcmCapturer : public VideoCapturer, public VideoCaptureDataCallback {
  public:
-  static VcmCapturer* Create(VideoSendStreamInput* input, size_t width,
-                             size_t height, size_t target_fps);
+  static VcmCapturer* Create(VideoCaptureInput* input,
+                             size_t width,
+                             size_t height,
+                             size_t target_fps);
   virtual ~VcmCapturer();
 
   void Start() override;
@@ -32,7 +34,7 @@
   void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
 
  private:
-  explicit VcmCapturer(VideoSendStreamInput* input);
+  explicit VcmCapturer(VideoCaptureInput* input);
   bool Init(size_t width, size_t height, size_t target_fps);
   void Destroy();
 
diff --git a/webrtc/test/video_capturer.cc b/webrtc/test/video_capturer.cc
index fc37648..840378f 100644
--- a/webrtc/test/video_capturer.cc
+++ b/webrtc/test/video_capturer.cc
@@ -26,10 +26,10 @@
   virtual void Stop() {}
 };
 
-VideoCapturer::VideoCapturer(VideoSendStreamInput* input)
-    : input_(input) {}
+VideoCapturer::VideoCapturer(VideoCaptureInput* input) : input_(input) {
+}
 
-VideoCapturer* VideoCapturer::Create(VideoSendStreamInput* input,
+VideoCapturer* VideoCapturer::Create(VideoCaptureInput* input,
                                      size_t width,
                                      size_t height,
                                      int fps,
diff --git a/webrtc/test/video_capturer.h b/webrtc/test/video_capturer.h
index ec576a0..3fe86f1 100644
--- a/webrtc/test/video_capturer.h
+++ b/webrtc/test/video_capturer.h
@@ -16,13 +16,13 @@
 
 class Clock;
 
-class VideoSendStreamInput;
+class VideoCaptureInput;
 
 namespace test {
 
 class VideoCapturer {
  public:
-  static VideoCapturer* Create(VideoSendStreamInput* input,
+  static VideoCapturer* Create(VideoCaptureInput* input,
                                size_t width,
                                size_t height,
                                int fps,
@@ -33,8 +33,8 @@
   virtual void Stop() = 0;
 
  protected:
-  explicit VideoCapturer(VideoSendStreamInput* input);
-  VideoSendStreamInput* input_;
+  explicit VideoCapturer(VideoCaptureInput* input);
+  VideoCaptureInput* input_;
 };
 }  // test
 }  // webrtc
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index 152d5f4..10f9510 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -22,8 +22,6 @@
     "../video_engine/report_block_stats.h",
     "../video_engine/stream_synchronization.cc",
     "../video_engine/stream_synchronization.h",
-    "../video_engine/vie_capturer.cc",
-    "../video_engine/vie_capturer.h",
     "../video_engine/vie_channel.cc",
     "../video_engine/vie_channel.h",
     "../video_engine/vie_channel_group.cc",
@@ -48,6 +46,8 @@
     "send_statistics_proxy.h",
     "transport_adapter.cc",
     "transport_adapter.h",
+    "video_capture_input.cc",
+    "video_capture_input.h",
     "video_decoder.cc",
     "video_encoder.cc",
     "video_receive_stream.cc",
diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc
index f0c5d79..ad1b5c6 100644
--- a/webrtc/video/full_stack.cc
+++ b/webrtc/video/full_stack.cc
@@ -63,9 +63,9 @@
 class VideoAnalyzer : public PacketReceiver,
                       public newapi::Transport,
                       public VideoRenderer,
-                      public VideoSendStreamInput {
+                      public VideoCaptureInput {
  public:
-  VideoAnalyzer(VideoSendStreamInput* input,
+  VideoAnalyzer(VideoCaptureInput* input,
                 Transport* transport,
                 const char* test_label,
                 double avg_psnr_threshold,
@@ -224,7 +224,7 @@
     }
   }
 
-  VideoSendStreamInput* input_;
+  VideoCaptureInput* input_;
   Transport* transport_;
   PacketReceiver* receiver_;
 
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video/video_capture_input.cc
similarity index 64%
rename from webrtc/video_engine/vie_capturer.cc
rename to webrtc/video/video_capture_input.cc
index 0692fef..4bf80e3 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -8,10 +8,9 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/video/video_capture_input.h"
 
 #include "webrtc/base/checks.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/modules/utility/interface/process_thread.h"
 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
@@ -23,48 +22,27 @@
 #include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/video/send_statistics_proxy.h"
 #include "webrtc/video_engine/overuse_frame_detector.h"
-#include "webrtc/video_engine/vie_defines.h"
 #include "webrtc/video_engine/vie_encoder.h"
 
 namespace webrtc {
 
-const int kThreadWaitTimeMs = 100;
-
-class RegistrableCpuOveruseMetricsObserver : public CpuOveruseMetricsObserver {
- public:
-  void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
-    rtc::CritScope lock(&crit_);
-    if (observer_)
-      observer_->CpuOveruseMetricsUpdated(metrics);
-    metrics_ = metrics;
-  }
-
-  CpuOveruseMetrics GetCpuOveruseMetrics() const {
-    rtc::CritScope lock(&crit_);
-    return metrics_;
-  }
-
-  void Set(CpuOveruseMetricsObserver* observer) {
-    rtc::CritScope lock(&crit_);
-    observer_ = observer;
-  }
-
- private:
-  mutable rtc::CriticalSection crit_;
-  CpuOveruseMetricsObserver* observer_ GUARDED_BY(crit_) = nullptr;
-  CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
-};
-
-ViECapturer::ViECapturer(ProcessThread* module_process_thread,
-                         ViEFrameCallback* frame_callback)
+namespace internal {
+VideoCaptureInput::VideoCaptureInput(ProcessThread* module_process_thread,
+                                     VideoCaptureCallback* frame_callback,
+                                     VideoRenderer* local_renderer,
+                                     SendStatisticsProxy* stats_proxy,
+                                     CpuOveruseObserver* overuse_observer)
     : capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
       module_process_thread_(module_process_thread),
       frame_callback_(frame_callback),
+      local_renderer_(local_renderer),
+      stats_proxy_(stats_proxy),
       incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
-      capture_thread_(ThreadWrapper::CreateThread(ViECaptureThreadFunction,
+      capture_thread_(ThreadWrapper::CreateThread(CaptureThreadFunction,
                                                   this,
-                                                  "ViECaptureThread")),
+                                                  "CaptureThread")),
       capture_event_(*EventWrapper::Create()),
       deliver_event_(*EventWrapper::Create()),
       stop_(0),
@@ -72,16 +50,16 @@
       delta_ntp_internal_ms_(
           Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
           TickTime::MillisecondTimestamp()),
-      cpu_overuse_metrics_observer_(new RegistrableCpuOveruseMetricsObserver()),
-      overuse_detector_(
-          new OveruseFrameDetector(Clock::GetRealTimeClock(),
-                                   cpu_overuse_metrics_observer_.get())) {
+      overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
+                                                 CpuOveruseOptions(),
+                                                 overuse_observer,
+                                                 stats_proxy)) {
   capture_thread_->Start();
   capture_thread_->SetPriority(kHighPriority);
   module_process_thread_->RegisterModule(overuse_detector_.get());
 }
 
-ViECapturer::~ViECapturer() {
+VideoCaptureInput::~VideoCaptureInput() {
   module_process_thread_->DeRegisterModule(overuse_detector_.get());
 
   // Stop the thread.
@@ -94,35 +72,33 @@
   delete &deliver_event_;
 }
 
-void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
-  overuse_detector_->SetObserver(observer);
-}
+void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
+  // TODO(pbos): Remove local rendering, it should be handled by the client code
+  // if required.
+  if (local_renderer_)
+    local_renderer_->RenderFrame(video_frame, 0);
 
-void ViECapturer::RegisterCpuOveruseMetricsObserver(
-    CpuOveruseMetricsObserver* observer) {
-  cpu_overuse_metrics_observer_->Set(observer);
-}
+  stats_proxy_->OnIncomingFrame();
 
-void ViECapturer::IncomingFrame(const VideoFrame& video_frame) {
   VideoFrame incoming_frame = video_frame;
 
   if (incoming_frame.ntp_time_ms() != 0) {
     // If a NTP time stamp is set, this is the time stamp we will use.
-    incoming_frame.set_render_time_ms(
-        incoming_frame.ntp_time_ms() - delta_ntp_internal_ms_);
+    incoming_frame.set_render_time_ms(incoming_frame.ntp_time_ms() -
+                                      delta_ntp_internal_ms_);
   } else {  // NTP time stamp not set.
-    int64_t render_time = incoming_frame.render_time_ms() != 0 ?
-        incoming_frame.render_time_ms() : TickTime::MillisecondTimestamp();
+    int64_t render_time = incoming_frame.render_time_ms() != 0
+                              ? incoming_frame.render_time_ms()
+                              : TickTime::MillisecondTimestamp();
 
     incoming_frame.set_render_time_ms(render_time);
-    incoming_frame.set_ntp_time_ms(
-        render_time + delta_ntp_internal_ms_);
+    incoming_frame.set_ntp_time_ms(render_time + delta_ntp_internal_ms_);
   }
 
   // Convert NTP time, in ms, to RTP timestamp.
   const int kMsToRtpTimestamp = 90;
-  incoming_frame.set_timestamp(kMsToRtpTimestamp *
-      static_cast<uint32_t>(incoming_frame.ntp_time_ms()));
+  incoming_frame.set_timestamp(
+      kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));
 
   CriticalSectionScoped cs(capture_cs_.get());
   if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
@@ -144,11 +120,12 @@
   capture_event_.Set();
 }
 
-bool ViECapturer::ViECaptureThreadFunction(void* obj) {
-  return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
+bool VideoCaptureInput::CaptureThreadFunction(void* obj) {
+  return static_cast<VideoCaptureInput*>(obj)->CaptureProcess();
 }
 
-bool ViECapturer::ViECaptureProcess() {
+bool VideoCaptureInput::CaptureProcess() {
+  static const int kThreadWaitTimeMs = 100;
   int64_t capture_time = -1;
   if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
     if (rtc::AtomicOps::Load(&stop_))
@@ -182,4 +159,5 @@
   return true;
 }
 
+}  // namespace internal
 }  // namespace webrtc
diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video/video_capture_input.h
similarity index 67%
rename from webrtc/video_engine/vie_capturer.h
rename to webrtc/video/video_capture_input.h
index 201cc56..7ef1094 100644
--- a/webrtc/video_engine/vie_capturer.h
+++ b/webrtc/video/video_capture_input.h
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
-#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+#ifndef WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
+#define WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
 
 #include <vector>
 
@@ -26,6 +26,7 @@
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/typedefs.h"
 #include "webrtc/video_engine/vie_defines.h"
+#include "webrtc/video_send_stream.h"
 
 namespace webrtc {
 
@@ -37,44 +38,41 @@
 class OveruseFrameDetector;
 class ProcessThread;
 class RegistrableCpuOveruseMetricsObserver;
-class ViEEffectFilter;
+class SendStatisticsProxy;
+class VideoRenderer;
 
-class ViEFrameCallback {
+class VideoCaptureCallback {
  public:
-  virtual ~ViEFrameCallback() {}
+  virtual ~VideoCaptureCallback() {}
 
   virtual void DeliverFrame(VideoFrame video_frame) = 0;
 };
 
-class ViECapturer {
+namespace internal {
+class VideoCaptureInput : public webrtc::VideoCaptureInput {
  public:
-  ViECapturer(ProcessThread* module_process_thread,
-              ViEFrameCallback* frame_callback);
-  ~ViECapturer();
+  VideoCaptureInput(ProcessThread* module_process_thread,
+                    VideoCaptureCallback* frame_callback,
+                    VideoRenderer* local_renderer,
+                    SendStatisticsProxy* send_stats_proxy,
+                    CpuOveruseObserver* overuse_observer);
+  ~VideoCaptureInput();
 
-  void IncomingFrame(const VideoFrame& frame);
-
-  void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
-  void RegisterCpuOveruseMetricsObserver(CpuOveruseMetricsObserver* observer);
-
- protected:
-  // Help function used for keeping track of VideoImageProcesingModule.
-  // Creates the module if it is needed, returns 0 on success and guarantees
-  // that the image proc module exist.
-  int32_t IncImageProcRefCount();
-  int32_t DecImageProcRefCount();
-
-  // Thread functions for deliver captured frames to receivers.
-  static bool ViECaptureThreadFunction(void* obj);
-  bool ViECaptureProcess();
+  void IncomingCapturedFrame(const VideoFrame& video_frame) override;
 
  private:
+  // Thread functions for deliver captured frames to receivers.
+  static bool CaptureThreadFunction(void* obj);
+  bool CaptureProcess();
+
   void DeliverI420Frame(VideoFrame* video_frame);
 
   rtc::scoped_ptr<CriticalSectionWrapper> capture_cs_;
   ProcessThread* const module_process_thread_;
 
-  ViEFrameCallback* const frame_callback_;
+  VideoCaptureCallback* const frame_callback_;
+  VideoRenderer* const local_renderer_;
+  SendStatisticsProxy* const stats_proxy_;
 
   // Frame used in IncomingFrameI420.
   rtc::scoped_ptr<CriticalSectionWrapper> incoming_frame_cs_;
@@ -94,12 +92,10 @@
   // Delta used for translating between NTP and internal timestamps.
   const int64_t delta_ntp_internal_ms_;
 
-  // Must be declared before overuse_detector_ where it's registered.
-  const rtc::scoped_ptr<RegistrableCpuOveruseMetricsObserver>
-      cpu_overuse_metrics_observer_;
   rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
 };
 
+}  // namespace internal
 }  // namespace webrtc
 
-#endif  // WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+#endif  // WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
diff --git a/webrtc/video_engine/vie_capturer_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
similarity index 82%
rename from webrtc/video_engine/vie_capturer_unittest.cc
rename to webrtc/video/video_capture_input_unittest.cc
index 3733b3d..cc4b5b0 100644
--- a/webrtc/video_engine/vie_capturer_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -7,10 +7,7 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
-
-// This file includes unit tests for ViECapturer.
-
-#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/video/video_capture_input.h"
 
 #include <vector>
 
@@ -25,6 +22,7 @@
 #include "webrtc/system_wrappers/interface/ref_count.h"
 #include "webrtc/system_wrappers/interface/scoped_vector.h"
 #include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/video/send_statistics_proxy.h"
 
 using ::testing::_;
 using ::testing::Invoke;
@@ -37,7 +35,7 @@
 
 namespace webrtc {
 
-class MockViEFrameCallback : public ViEFrameCallback {
+class MockVideoCaptureCallback : public VideoCaptureCallback {
  public:
   MOCK_METHOD1(DeliverFrame, void(VideoFrame video_frame));
 };
@@ -49,31 +47,35 @@
                        const ScopedVector<VideoFrame>& frames2);
 VideoFrame* CreateVideoFrame(uint8_t length);
 
-class ViECapturerTest : public ::testing::Test {
+class VideoCaptureInputTest : public ::testing::Test {
  protected:
-  ViECapturerTest()
+  VideoCaptureInputTest()
       : mock_process_thread_(new NiceMock<MockProcessThread>),
-        mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
-        output_frame_event_(EventWrapper::Create()) {}
+        mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
+        output_frame_event_(EventWrapper::Create()),
+        stats_proxy_(Clock::GetRealTimeClock(),
+                     webrtc::VideoSendStream::Config()) {}
 
   virtual void SetUp() {
     EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
         .WillRepeatedly(
-            WithArg<0>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+            WithArg<0>(Invoke(this, &VideoCaptureInputTest::AddOutputFrame)));
 
     Config config;
-    vie_capturer_.reset(new ViECapturer(mock_process_thread_.get(),
-                                        mock_frame_callback_.get()));
+    input_.reset(new internal::VideoCaptureInput(
+        mock_process_thread_.get(), mock_frame_callback_.get(), nullptr,
+        &stats_proxy_, nullptr));
   }
 
   virtual void TearDown() {
-    // ViECapturer accesses |mock_process_thread_| in destructor and should
+    // VideoCaptureInput accesses |mock_process_thread_| in destructor and
+    // should
     // be deleted first.
-    vie_capturer_.reset();
+    input_.reset();
   }
 
   void AddInputFrame(VideoFrame* frame) {
-    vie_capturer_->IncomingFrame(*frame);
+    input_->IncomingCapturedFrame(*frame);
   }
 
   void AddOutputFrame(const VideoFrame& frame) {
@@ -88,31 +90,33 @@
   }
 
   rtc::scoped_ptr<MockProcessThread> mock_process_thread_;
-  rtc::scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+  rtc::scoped_ptr<MockVideoCaptureCallback> mock_frame_callback_;
 
-  // Used to send input capture frames to ViECapturer.
-  rtc::scoped_ptr<ViECapturer> vie_capturer_;
+  // Used to send input capture frames to VideoCaptureInput.
+  rtc::scoped_ptr<internal::VideoCaptureInput> input_;
 
-  // Input capture frames of ViECapturer.
+  // Input capture frames of VideoCaptureInput.
   ScopedVector<VideoFrame> input_frames_;
 
   // Indicate an output frame has arrived.
   rtc::scoped_ptr<EventWrapper> output_frame_event_;
 
-  // Output delivered frames of ViECaptuer.
+  // Output delivered frames of VideoCaptureInput.
   ScopedVector<VideoFrame> output_frames_;
 
   // The pointers of Y plane buffers of output frames. This is used to verify
   // the frame are swapped and not copied.
   std::vector<const uint8_t*> output_frame_ybuffers_;
+  SendStatisticsProxy stats_proxy_;
 };
 
-TEST_F(ViECapturerTest, DoesNotRetainHandleNorCopyBuffer) {
+TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
   // Indicate an output frame has arrived.
   rtc::scoped_ptr<EventWrapper> frame_destroyed_event(EventWrapper::Create());
   class TestBuffer : public webrtc::I420Buffer {
    public:
-    TestBuffer(EventWrapper* event) : I420Buffer(5, 5), event_(event) {}
+    explicit TestBuffer(EventWrapper* event)
+        : I420Buffer(5, 5), event_(event) {}
 
    private:
     friend class rtc::RefCountedObject<TestBuffer>;
@@ -134,7 +138,7 @@
   EXPECT_EQ(kEventSignaled, frame_destroyed_event->Wait(FRAME_TIMEOUT_MS));
 }
 
-TEST_F(ViECapturerTest, TestNtpTimeStampSetIfRenderTimeSet) {
+TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) {
   input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(0)));
   input_frames_[0]->set_render_time_ms(5);
   input_frames_[0]->set_ntp_time_ms(0);
@@ -145,7 +149,7 @@
             input_frames_[0]->render_time_ms());
 }
 
-TEST_F(ViECapturerTest, TestRtpTimeStampSet) {
+TEST_F(VideoCaptureInputTest, TestRtpTimeStampSet) {
   input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(0)));
   input_frames_[0]->set_render_time_ms(0);
   input_frames_[0]->set_ntp_time_ms(1);
@@ -157,7 +161,7 @@
             input_frames_[0]->ntp_time_ms() * 90);
 }
 
-TEST_F(ViECapturerTest, TestTextureFrames) {
+TEST_F(VideoCaptureInputTest, TestTextureFrames) {
   const int kNumFrame = 3;
   for (int i = 0 ; i < kNumFrame; ++i) {
     test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
@@ -172,7 +176,7 @@
   EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
 }
 
-TEST_F(ViECapturerTest, TestI420Frames) {
+TEST_F(VideoCaptureInputTest, TestI420Frames) {
   const int kNumFrame = 4;
   std::vector<const uint8_t*> ybuffer_pointers;
   for (int i = 0; i < kNumFrame; ++i) {
@@ -189,7 +193,7 @@
     EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
 }
 
-TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
   test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
   input_frames_.push_back(new VideoFrame(test::CreateFakeNativeHandleFrame(
       dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)));
@@ -204,7 +208,7 @@
   EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
 }
 
-TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
   input_frames_.push_back(CreateVideoFrame(1));
   AddInputFrame(input_frames_[0]);
   WaitOutputFrame();
@@ -267,9 +271,8 @@
   const int kSizeY = width * height * 2;
   uint8_t buffer[kSizeY];
   memset(buffer, data, kSizeY);
-  frame->CreateFrame(
-      buffer, buffer, buffer, width, height, width,
-      width / 2, width / 2);
+  frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2,
+                     width / 2);
   frame->set_render_time_ms(data);
   return frame;
 }
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 45ea466..4ef90dd 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -19,8 +19,8 @@
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/video/video_capture_input.h"
 #include "webrtc/video_engine/encoder_state_feedback.h"
-#include "webrtc/video_engine/vie_capturer.h"
 #include "webrtc/video_engine/vie_channel.h"
 #include "webrtc/video_engine/vie_channel_group.h"
 #include "webrtc/video_engine/vie_defines.h"
@@ -174,7 +174,9 @@
 
   vie_channel_->SetRTCPCName(config_.rtp.c_name.c_str());
 
-  vie_capturer_ = new ViECapturer(module_process_thread_, vie_encoder_);
+  input_.reset(new internal::VideoCaptureInput(
+      module_process_thread_, vie_encoder_, config_.local_renderer,
+      &stats_proxy_, overuse_observer));
 
   // 28 to match packet overhead in ModuleRtpRtcpImpl.
   DCHECK_LE(config_.rtp.max_packet_size, static_cast<size_t>(0xFFFF - 28));
@@ -190,12 +192,6 @@
 
   CHECK(ReconfigureVideoEncoder(encoder_config));
 
-  if (overuse_observer) {
-    vie_capturer_->RegisterCpuOveruseObserver(overuse_observer);
-  }
-  // Registered regardless of monitoring, used for stats.
-  vie_capturer_->RegisterCpuOveruseMetricsObserver(&stats_proxy_);
-
   vie_channel_->RegisterSendSideDelayObserver(&stats_proxy_);
   vie_encoder_->RegisterSendStatisticsProxy(&stats_proxy_);
 
@@ -233,8 +229,9 @@
   vie_encoder_->RegisterPreEncodeCallback(nullptr);
   vie_encoder_->RegisterPostEncodeImageCallback(nullptr);
 
-  vie_capturer_->RegisterCpuOveruseObserver(nullptr);
-  delete vie_capturer_;
+  // Remove capture input (thread) so that it's not running after the current
+  // channel is deleted.
+  input_.reset();
 
   vie_encoder_->DeRegisterExternalEncoder(
       config_.encoder_settings.payload_type);
@@ -242,17 +239,10 @@
   channel_group_->DeleteChannel(channel_id_);
 }
 
-void VideoSendStream::IncomingCapturedFrame(const VideoFrame& frame) {
-  // TODO(pbos): Local rendering should not be done on the capture thread.
-  if (config_.local_renderer != nullptr)
-    config_.local_renderer->RenderFrame(frame, 0);
-
-  stats_proxy_.OnIncomingFrame();
-  vie_capturer_->IncomingFrame(frame);
+VideoCaptureInput* VideoSendStream::Input() {
+  return input_.get();
 }
 
-VideoSendStreamInput* VideoSendStream::Input() { return this; }
-
 void VideoSendStream::Start() {
   transport_adapter_.Enable();
   vie_encoder_->Pause();
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index a1e49fc..84537dd 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -20,6 +20,7 @@
 #include "webrtc/video/encoded_frame_callback_adapter.h"
 #include "webrtc/video/send_statistics_proxy.h"
 #include "webrtc/video/transport_adapter.h"
+#include "webrtc/video/video_capture_input.h"
 #include "webrtc/video_receive_stream.h"
 #include "webrtc/video_send_stream.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -29,14 +30,12 @@
 class ChannelGroup;
 class CpuOveruseObserver;
 class ProcessThread;
-class ViECapturer;
 class ViEChannel;
 class ViEEncoder;
 
 namespace internal {
 
-class VideoSendStream : public webrtc::VideoSendStream,
-                        public VideoSendStreamInput {
+class VideoSendStream : public webrtc::VideoSendStream {
  public:
   VideoSendStream(newapi::Transport* transport,
                   CpuOveruseObserver* overuse_observer,
@@ -50,6 +49,8 @@
 
   virtual ~VideoSendStream();
 
+  VideoCaptureInput* Input() override;
+
   void Start() override;
   void Stop() override;
 
@@ -59,12 +60,6 @@
 
   bool DeliverRtcp(const uint8_t* packet, size_t length);
 
-  // From VideoSendStreamInput.
-  void IncomingCapturedFrame(const VideoFrame& frame) override;
-
-  // From webrtc::VideoSendStream.
-  VideoSendStreamInput* Input() override;
-
   typedef std::map<uint32_t, RtpState> RtpStateMap;
   RtpStateMap GetRtpStates() const;
 
@@ -85,9 +80,9 @@
   ChannelGroup* const channel_group_;
   const int channel_id_;
 
+  rtc::scoped_ptr<VideoCaptureInput> input_;
   ViEChannel* vie_channel_;
   ViEEncoder* vie_encoder_;
-  ViECapturer* vie_capturer_;
 
   // Used as a workaround to indicate that we should be using the configured
   // start bitrate initially, instead of the one reported by VideoEngine (which
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index 86c2216..59b2e73 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -33,6 +33,8 @@
       'video/send_statistics_proxy.h',
       'video/transport_adapter.cc',
       'video/transport_adapter.h',
+      'video/video_capture_input.cc',
+      'video/video_capture_input.h',
       'video/video_decoder.cc',
       'video/video_encoder.cc',
       'video/video_receive_stream.cc',
@@ -51,8 +53,6 @@
       'video_engine/report_block_stats.h',
       'video_engine/stream_synchronization.cc',
       'video_engine/stream_synchronization.h',
-      'video_engine/vie_capturer.cc',
-      'video_engine/vie_capturer.h',
       'video_engine/vie_channel.cc',
       'video_engine/vie_channel.h',
       'video_engine/vie_channel_group.cc',
diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc
index 23ee59f..71f8f57 100644
--- a/webrtc/video_engine/overuse_frame_detector.cc
+++ b/webrtc/video_engine/overuse_frame_detector.cc
@@ -52,18 +52,15 @@
 }  // namespace
 
 // TODO(asapersson): Remove this class. Not used.
-Statistics::Statistics() :
-    sum_(0.0),
-    count_(0),
-    filtered_samples_(new rtc::ExpFilter(kWeightFactorMean)),
-    filtered_variance_(new rtc::ExpFilter(kWeightFactor)) {
+Statistics::Statistics(const CpuOveruseOptions& options)
+    : sum_(0.0),
+      count_(0),
+      options_(options),
+      filtered_samples_(new rtc::ExpFilter(kWeightFactorMean)),
+      filtered_variance_(new rtc::ExpFilter(kWeightFactor)) {
   Reset();
 }
 
-void Statistics::SetOptions(const CpuOveruseOptions& options) {
-  options_ = options;
-}
-
 void Statistics::Reset() {
   sum_ =  0.0;
   count_ = 0;
@@ -143,22 +140,19 @@
 // captured frames).
 class OveruseFrameDetector::SendProcessingUsage {
  public:
-  SendProcessingUsage()
+  explicit SendProcessingUsage(const CpuOveruseOptions& options)
       : kWeightFactorFrameDiff(0.998f),
         kWeightFactorProcessing(0.995f),
         kInitialSampleDiffMs(40.0f),
         kMaxSampleDiffMs(45.0f),
         count_(0),
+        options_(options),
         filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
         filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
     Reset();
   }
   ~SendProcessingUsage() {}
 
-  void SetOptions(const CpuOveruseOptions& options) {
-    options_ = options;
-  }
-
   void Reset() {
     count_ = 0;
     filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
@@ -207,7 +201,7 @@
   const float kInitialSampleDiffMs;
   const float kMaxSampleDiffMs;
   uint64_t count_;
-  CpuOveruseOptions options_;
+  const CpuOveruseOptions options_;
   rtc::scoped_ptr<rtc::ExpFilter> filtered_processing_ms_;
   rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
 };
@@ -319,12 +313,16 @@
 
 OveruseFrameDetector::OveruseFrameDetector(
     Clock* clock,
+    const CpuOveruseOptions& options,
+    CpuOveruseObserver* observer,
     CpuOveruseMetricsObserver* metrics_observer)
-    : observer_(NULL),
+    : options_(options),
+      observer_(observer),
       metrics_observer_(metrics_observer),
       clock_(clock),
       next_process_time_(clock_->TimeInMilliseconds()),
       num_process_times_(0),
+      capture_deltas_(options),
       last_capture_time_(0),
       last_overuse_time_(0),
       checks_above_threshold_(0),
@@ -335,34 +333,21 @@
       num_pixels_(0),
       last_encode_sample_ms_(0),
       encode_time_(new EncodeTimeAvg()),
-      usage_(new SendProcessingUsage()),
+      usage_(new SendProcessingUsage(options)),
       frame_queue_(new FrameQueue()),
       last_sample_time_ms_(0),
       capture_queue_delay_(new CaptureQueueDelay()) {
   DCHECK(metrics_observer != nullptr);
+  // Make sure stats are initially up-to-date. This simplifies unit testing
+  // since we don't have to trigger an update using one of the methods which
+  // would also alter the overuse state.
+  UpdateCpuOveruseMetrics();
   processing_thread_.DetachFromThread();
 }
 
 OveruseFrameDetector::~OveruseFrameDetector() {
 }
 
-void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
-  rtc::CritScope cs(&crit_);
-  observer_ = observer;
-}
-
-void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
-  assert(options.min_frame_samples > 0);
-  rtc::CritScope cs(&crit_);
-  if (options_.Equals(options)) {
-    return;
-  }
-  options_ = options;
-  capture_deltas_.SetOptions(options);
-  usage_->SetOptions(options);
-  ResetAll(num_pixels_);
-}
-
 int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
   rtc::CritScope cs(&crit_);
   return capture_queue_delay_->delay_ms();
diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h
index 122cce5..5198293 100644
--- a/webrtc/video_engine/overuse_frame_detector.h
+++ b/webrtc/video_engine/overuse_frame_detector.h
@@ -129,11 +129,10 @@
 // TODO(pbos): Move this somewhere appropriate.
 class Statistics {
  public:
-  Statistics();
+  explicit Statistics(const CpuOveruseOptions& options);
 
   void AddSample(float sample_ms);
   void Reset();
-  void SetOptions(const CpuOveruseOptions& options);
 
   float Mean() const;
   float StdDev() const;
@@ -145,7 +144,7 @@
 
   float sum_;
   uint64_t count_;
-  CpuOveruseOptions options_;
+  const CpuOveruseOptions options_;
   rtc::scoped_ptr<rtc::ExpFilter> filtered_samples_;
   rtc::scoped_ptr<rtc::ExpFilter> filtered_variance_;
 };
@@ -154,16 +153,11 @@
 class OveruseFrameDetector : public Module {
  public:
   OveruseFrameDetector(Clock* clock,
+                       const CpuOveruseOptions& options,
+                       CpuOveruseObserver* overuse_observer,
                        CpuOveruseMetricsObserver* metrics_observer);
   ~OveruseFrameDetector();
 
-  // Registers an observer receiving overuse and underuse callbacks. Set
-  // 'observer' to NULL to disable callbacks.
-  void SetObserver(CpuOveruseObserver* observer);
-
-  // Sets options for overuse detection.
-  void SetOptions(const CpuOveruseOptions& options);
-
   // Called for each captured frame.
   void FrameCaptured(int width, int height, int64_t capture_time_ms);
 
@@ -215,10 +209,10 @@
   // processing contends with reading stats and the processing thread.
   mutable rtc::CriticalSection crit_;
 
-  // Observer getting overuse reports.
-  CpuOveruseObserver* observer_ GUARDED_BY(crit_);
+  const CpuOveruseOptions options_;
 
-  CpuOveruseOptions options_ GUARDED_BY(crit_);
+  // Observer getting overuse reports.
+  CpuOveruseObserver* const observer_;
 
   // Stats metrics.
   CpuOveruseMetricsObserver* const metrics_observer_;
diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc
index 05b242b..07306ef 100644
--- a/webrtc/video_engine/overuse_frame_detector_unittest.cc
+++ b/webrtc/video_engine/overuse_frame_detector_unittest.cc
@@ -53,13 +53,15 @@
   virtual void SetUp() {
     clock_.reset(new SimulatedClock(1234));
     observer_.reset(new MockCpuOveruseObserver());
-    overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), this));
-
     options_.low_capture_jitter_threshold_ms = 10.0f;
     options_.high_capture_jitter_threshold_ms = 15.0f;
     options_.min_process_count = 0;
-    overuse_detector_->SetOptions(options_);
-    overuse_detector_->SetObserver(observer_.get());
+    ReinitializeOveruseDetector();
+  }
+
+  void ReinitializeOveruseDetector() {
+    overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+                                                     observer_.get(), this));
   }
 
   void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
@@ -149,7 +151,7 @@
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // capture_jitter > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruse(options_.high_threshold_consecutive_count);
@@ -159,7 +161,7 @@
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // capture_jitter > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruse(options_.high_threshold_consecutive_count);
@@ -172,8 +174,8 @@
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
-  overuse_detector_->SetObserver(NULL);
+  overuse_detector_.reset(
+      new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   TriggerOveruse(options_.high_threshold_consecutive_count);
   EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
@@ -184,7 +186,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   TriggerOveruse(options_.high_threshold_consecutive_count);
   EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
@@ -195,7 +197,7 @@
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(2);
   TriggerOveruse(options_.high_threshold_consecutive_count);
   TriggerOveruse(options_.high_threshold_consecutive_count);
@@ -207,23 +209,23 @@
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  CpuOveruseObserverImpl overuse_observer_;
-  overuse_detector_->SetObserver(&overuse_observer_);
   options_.min_process_count = 1;
-  overuse_detector_->SetOptions(options_);
+  CpuOveruseObserverImpl overuse_observer;
+  overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
+                                                   &overuse_observer, this));
   InsertFramesWithInterval(1200, kFrameInterval33ms, kWidth, kHeight);
   overuse_detector_->Process();
-  EXPECT_EQ(0, overuse_observer_.normaluse_);
+  EXPECT_EQ(0, overuse_observer.normaluse_);
   clock_->AdvanceTimeMilliseconds(kProcessIntervalMs);
   overuse_detector_->Process();
-  EXPECT_EQ(1, overuse_observer_.normaluse_);
+  EXPECT_EQ(1, overuse_observer.normaluse_);
 }
 
 TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(64);
   for(size_t i = 0; i < 64; ++i) {
@@ -232,22 +234,22 @@
 }
 
 TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   options_.high_threshold_consecutive_count = 2;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   TriggerOveruse(2);
 }
 
 TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
+  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   options_.enable_capture_jitter_method = true;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   options_.high_threshold_consecutive_count = 2;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   TriggerOveruse(1);
 }
 
@@ -279,19 +281,9 @@
   EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
 
-TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterChangingThreshold) {
-  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
-  options_.high_capture_jitter_threshold_ms = 90.0f;
-  overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
-  options_.low_capture_jitter_threshold_ms = 30.0f;
-  overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
-}
-
 TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) {
   options_.min_frame_samples = 40;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   InsertFramesWithInterval(40, kFrameInterval33ms, kWidth, kHeight);
   EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
@@ -351,7 +343,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrameDisabled) {
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs = 100;
   overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
   clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
@@ -361,7 +353,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs = 100;
   overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
   clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
@@ -373,7 +365,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs1 = 100;
   const int kProcessingTimeMs2 = 50;
   const int kTimeBetweenFramesMs = 200;
@@ -390,7 +382,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kMaxQueueSize = 91;
   for (int i = 0; i < kMaxQueueSize * 2; ++i) {
     overuse_detector_->FrameCaptured(kWidth, kHeight, i);
@@ -400,7 +392,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs = 100;
   overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
   clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
@@ -423,7 +415,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs = 100;
   overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
   EXPECT_EQ(1, overuse_detector_->FramesInQueue());
@@ -439,7 +431,7 @@
 
 TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   const int kProcessingTimeMs = 100;
   overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
   clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
@@ -470,16 +462,6 @@
   EXPECT_EQ(kProcessingTimeMs * 100 / kFrameInterval33ms, UsagePercent());
 }
 
-TEST_F(OveruseFrameDetectorTest, ProcessingUsageResetAfterChangingThreshold) {
-  EXPECT_EQ(InitialUsage(), UsagePercent());
-  options_.high_encode_usage_threshold_percent = 100;
-  overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialUsage(), UsagePercent());
-  options_.low_encode_usage_threshold_percent = 20;
-  overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialUsage(), UsagePercent());
-}
-
 // enable_encode_usage_method = true;
 // UsagePercent() > high_encode_usage_threshold_percent => overuse.
 // UsagePercent() < low_encode_usage_threshold_percent => underuse.
@@ -487,7 +469,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = true;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
@@ -497,7 +479,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = true;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
@@ -511,7 +493,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = false;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
@@ -528,7 +510,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = true;
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
@@ -538,7 +520,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = true;
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(1);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
@@ -552,7 +534,7 @@
   options_.enable_capture_jitter_method = false;
   options_.enable_encode_usage_method = false;
   options_.enable_extended_processing_usage = true;
-  overuse_detector_->SetOptions(options_);
+  ReinitializeOveruseDetector();
   // usage > high => overuse
   EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
   TriggerOveruseWithProcessingUsage(options_.high_threshold_consecutive_count);
diff --git a/webrtc/video_engine/video_engine_core_unittests.gyp b/webrtc/video_engine/video_engine_core_unittests.gyp
index 38bcf5f..d014344 100644
--- a/webrtc/video_engine/video_engine_core_unittests.gyp
+++ b/webrtc/video_engine/video_engine_core_unittests.gyp
@@ -29,7 +29,6 @@
         'payload_router_unittest.cc',
         'report_block_stats_unittest.cc',
         'stream_synchronization_unittest.cc',
-        'vie_capturer_unittest.cc',
         'vie_codec_unittest.cc',
         'vie_remb_unittest.cc',
       ],
diff --git a/webrtc/video_engine/vie_defines.h b/webrtc/video_engine/vie_defines.h
index 74b5e1a..59b56a5 100644
--- a/webrtc/video_engine/vie_defines.h
+++ b/webrtc/video_engine/vie_defines.h
@@ -36,13 +36,6 @@
 // ViEBase
 enum { kViEMaxNumberOfChannels = 64 };
 
-// ViECapture
-enum { kViEMaxCaptureDevices = 256 };
-enum { kViECaptureDefaultWidth = 352 };
-enum { kViECaptureDefaultHeight = 288 };
-enum { kViECaptureDefaultFramerate = 30 };
-enum { kViECaptureMaxSnapshotWaitTimeMs = 500 };
-
 // ViECodec
 enum { kViEMaxCodecWidth = 4096 };
 enum { kViEMaxCodecHeight = 3072 };
@@ -72,8 +65,6 @@
 enum {
   kViEChannelIdBase = 0x0,
   kViEChannelIdMax = 0xFF,
-  kViECaptureIdBase = 0x1001,
-  kViECaptureIdMax = 0x10FF,
   kViEDummyChannelId = 0xFFFF
 };
 
diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h
index 26dd810..44a555b 100644
--- a/webrtc/video_engine/vie_encoder.h
+++ b/webrtc/video_engine/vie_encoder.h
@@ -25,7 +25,7 @@
 #include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
 #include "webrtc/modules/video_processing/main/interface/video_processing.h"
 #include "webrtc/typedefs.h"
-#include "webrtc/video_engine/vie_capturer.h"
+#include "webrtc/video/video_capture_input.h"
 #include "webrtc/video_engine/vie_defines.h"
 
 namespace webrtc {
@@ -63,12 +63,11 @@
   virtual ~ViEEncoderObserver() {}
 };
 
-class ViEEncoder
-    : public RtcpIntraFrameObserver,
-      public VideoEncoderRateObserver,
-      public VCMPacketizationCallback,
-      public VCMSendStatisticsCallback,
-      public ViEFrameCallback {
+class ViEEncoder : public RtcpIntraFrameObserver,
+                   public VideoEncoderRateObserver,
+                   public VCMPacketizationCallback,
+                   public VCMSendStatisticsCallback,
+                   public VideoCaptureCallback {
  public:
   friend class ViEBitrateObserver;
 
@@ -122,7 +121,7 @@
   // Scale or crop/pad image.
   int32_t ScaleInputImage(bool enable);
 
-  // Implementing ViEFrameCallback.
+  // Implementing VideoCaptureCallback.
   void DeliverFrame(VideoFrame video_frame) override;
 
   int32_t SendKeyFrame();
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index 4c4960a..69df41c 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -24,7 +24,7 @@
 class VideoEncoder;
 
 // Class to deliver captured frame to the video send stream.
-class VideoSendStreamInput {
+class VideoCaptureInput {
  public:
   // These methods do not lock internally and must be called sequentially.
   // If your application switches input sources synchronization must be done
@@ -32,7 +32,7 @@
   virtual void IncomingCapturedFrame(const VideoFrame& video_frame) = 0;
 
  protected:
-  virtual ~VideoSendStreamInput() {}
+  virtual ~VideoCaptureInput() {}
 };
 
 class VideoSendStream {
@@ -138,7 +138,7 @@
 
   // Gets interface used to insert captured frames. Valid as long as the
   // VideoSendStream is valid.
-  virtual VideoSendStreamInput* Input() = 0;
+  virtual VideoCaptureInput* Input() = 0;
 
   virtual void Start() = 0;
   virtual void Stop() = 0;
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index 6a4dec9..2de51ce 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -85,7 +85,7 @@
         'webrtc',
       ],
     },
-        {
+    {
       'target_name': 'screenshare_loopback',
       'type': 'executable',
       'sources': [
@@ -150,6 +150,7 @@
         'video/bitrate_estimator_tests.cc',
         'video/end_to_end_tests.cc',
         'video/send_statistics_proxy_unittest.cc',
+        'video/video_capture_input_unittest.cc',
         'video/video_decoder_unittest.cc',
         'video/video_encoder_unittest.cc',
         'video/video_send_stream_tests.cc',