Let ViEEncoder express resolution requests as SinkWants.
This removes the VideoSendStream::LoadObserver interface and the implementation in WebrtcVideoSendStream and replaces it with VideoSinkWants through the VideoSourceInterface.

To do that, some stats for CPU adaptation are moved into VideoSendStream. Also, handling of the CVO rtp header extension is moved to VideoSendStreamImpl.

BUG=webrtc:5687
TBR=mflodman@webrtc.org

Review-Url: https://codereview.webrtc.org/2304363002
Cr-Commit-Position: refs/heads/master@{#14877}
diff --git a/webrtc/call.h b/webrtc/call.h
index d914303..0ea4229 100644
--- a/webrtc/call.h
+++ b/webrtc/call.h
@@ -55,19 +55,6 @@
   virtual ~PacketReceiver() {}
 };
 
-// Callback interface for reporting when a system overuse is detected.
-class LoadObserver {
- public:
-  enum Load { kOveruse, kUnderuse };
-
-  // Triggered when overuse is detected or when we believe the system can take
-  // more load.
-  virtual void OnLoadUpdate(Load load) = 0;
-
- protected:
-  virtual ~LoadObserver() {}
-};
-
 // A Call instance can contain several send and/or receive streams. All streams
 // are assumed to have the same remote endpoint and will share bitrate estimates
 // etc.
diff --git a/webrtc/call/bitrate_estimator_tests.cc b/webrtc/call/bitrate_estimator_tests.cc
index 80949ba..bd41e44 100644
--- a/webrtc/call/bitrate_estimator_tests.cc
+++ b/webrtc/call/bitrate_estimator_tests.cc
@@ -178,7 +178,9 @@
       frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
           kDefaultWidth, kDefaultHeight, kDefaultFramerate,
           Clock::GetRealTimeClock()));
-      send_stream_->SetSource(frame_generator_capturer_.get());
+      send_stream_->SetSource(
+          frame_generator_capturer_.get(),
+          VideoSendStream::DegradationPreference::kBalanced);
       send_stream_->Start();
       frame_generator_capturer_->Start();
 
diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc
index b57f808..4a36775 100644
--- a/webrtc/call/call_perf_tests.cc
+++ b/webrtc/call/call_perf_tests.cc
@@ -60,8 +60,6 @@
                           float video_rtp_speed,
                           float audio_rtp_speed);
 
-  void TestCpuOveruse(LoadObserver::Load tested_load, int encode_delay_ms);
-
   void TestMinTransmitBitrate(bool pad_to_min_bitrate);
 
   void TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
@@ -473,25 +471,40 @@
   TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs);
 }
 
-void CallPerfTest::TestCpuOveruse(LoadObserver::Load tested_load,
-                                  int encode_delay_ms) {
-  class LoadObserver : public test::SendTest, public webrtc::LoadObserver {
+TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
+  class LoadObserver : public test::SendTest,
+                       public test::FrameGeneratorCapturer::SinkWantsObserver {
    public:
-    LoadObserver(LoadObserver::Load tested_load, int encode_delay_ms)
+    LoadObserver()
         : SendTest(kLongTimeoutMs),
-          tested_load_(tested_load),
-          encoder_(Clock::GetRealTimeClock(), encode_delay_ms) {}
+          expect_lower_resolution_wants_(true),
+          encoder_(Clock::GetRealTimeClock(), 35 /* delay_ms */) {}
 
-    void OnLoadUpdate(Load load) override {
-      if (load == tested_load_)
+    void OnFrameGeneratorCapturerCreated(
+        test::FrameGeneratorCapturer* frame_generator_capturer) override {
+      frame_generator_capturer->SetSinkWantsObserver(this);
+    }
+
+    // OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink
+    // is called.
+    void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+                            const rtc::VideoSinkWants& wants) override {
+      // First expect CPU overuse. Then expect CPU underuse when the encoder
+      // delay has been decreased.
+      if (wants.max_pixel_count) {
+        EXPECT_TRUE(expect_lower_resolution_wants_);
+        expect_lower_resolution_wants_ = false;
+        encoder_.SetDelay(2);
+      } else if (wants.max_pixel_count_step_up) {
+        EXPECT_FALSE(expect_lower_resolution_wants_);
         observation_complete_.Set();
+      }
     }
 
     void ModifyVideoConfigs(
         VideoSendStream::Config* send_config,
         std::vector<VideoReceiveStream::Config>* receive_configs,
         VideoEncoderConfig* encoder_config) override {
-      send_config->overuse_callback = this;
       send_config->encoder_settings.encoder = &encoder_;
     }
 
@@ -499,23 +512,13 @@
       EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback.";
     }
 
-    LoadObserver::Load tested_load_;
+    bool expect_lower_resolution_wants_;
     test::DelayedEncoder encoder_;
-  } test(tested_load, encode_delay_ms);
+  } test;
 
   RunBaseTest(&test);
 }
 
-TEST_F(CallPerfTest, ReceivesCpuUnderuse) {
-  const int kEncodeDelayMs = 2;
-  TestCpuOveruse(LoadObserver::kUnderuse, kEncodeDelayMs);
-}
-
-TEST_F(CallPerfTest, ReceivesCpuOveruse) {
-  const int kEncodeDelayMs = 35;
-  TestCpuOveruse(LoadObserver::kOveruse, kEncodeDelayMs);
-}
-
 void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) {
   static const int kMaxEncodeBitrateKbps = 30;
   static const int kMinTransmitBitrateBps = 150000;
diff --git a/webrtc/media/base/mediachannel.h b/webrtc/media/base/mediachannel.h
index 863b4dc..b21919c 100644
--- a/webrtc/media/base/mediachannel.h
+++ b/webrtc/media/base/mediachannel.h
@@ -105,9 +105,7 @@
   // Video-specific config.
   struct Video {
     // Enable WebRTC CPU Overuse Detection. This flag comes from the
-    // PeerConnection constraint 'googCpuOveruseDetection' and is
-    // checked in WebRtcVideoChannel2::OnLoadUpdate, where it's passed
-    // to VideoCapturer::video_adapter()->OnCpuResolutionRequest.
+    // PeerConnection constraint 'googCpuOveruseDetection'.
     bool enable_cpu_overuse_detection = true;
 
     // Enable WebRTC suspension of video. No video frames will be sent
diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc
index df6febe..43e083f 100644
--- a/webrtc/media/engine/fakewebrtccall.cc
+++ b/webrtc/media/engine/fakewebrtccall.cc
@@ -104,6 +104,7 @@
     : sending_(false),
       config_(std::move(config)),
       codec_settings_set_(false),
+      resolution_scaling_enabled_(false),
       source_(nullptr),
       num_swapped_frames_(0) {
   RTC_DCHECK(config.encoder_settings.encoder != NULL);
@@ -233,13 +234,26 @@
 }
 
 void FakeVideoSendStream::SetSource(
-    rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
+    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+    const webrtc::VideoSendStream::DegradationPreference&
+        degradation_preference) {
   RTC_DCHECK(source != source_);
   if (source_)
     source_->RemoveSink(this);
   source_ = source;
+  resolution_scaling_enabled_ =
+      degradation_preference !=
+      webrtc::VideoSendStream::DegradationPreference::kMaintainResolution;
   if (source)
-    source->AddOrUpdateSink(this, rtc::VideoSinkWants());
+    source->AddOrUpdateSink(this, resolution_scaling_enabled_
+                                      ? sink_wants_
+                                      : rtc::VideoSinkWants());
+}
+
+void FakeVideoSendStream::InjectVideoSinkWants(
+    const rtc::VideoSinkWants& wants) {
+  sink_wants_ = wants;
+  source_->AddOrUpdateSink(this, wants);
 }
 
 FakeVideoReceiveStream::FakeVideoReceiveStream(
diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h
index aec22a3..59084fc 100644
--- a/webrtc/media/engine/fakewebrtccall.h
+++ b/webrtc/media/engine/fakewebrtccall.h
@@ -127,6 +127,15 @@
   void EnableEncodedFrameRecording(const std::vector<rtc::PlatformFile>& files,
                                    size_t byte_limit) override;
 
+  bool resolution_scaling_enabled() const {
+    return resolution_scaling_enabled_;
+  }
+  void InjectVideoSinkWants(const rtc::VideoSinkWants& wants);
+
+  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() const {
+    return source_;
+  }
+
  private:
   // rtc::VideoSinkInterface<VideoFrame> implementation.
   void OnFrame(const webrtc::VideoFrame& frame) override;
@@ -134,8 +143,9 @@
   // webrtc::VideoSendStream implementation.
   void Start() override;
   void Stop() override;
-  void SetSource(
-      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
+  void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+                 const webrtc::VideoSendStream::DegradationPreference&
+                     degradation_preference) override;
   webrtc::VideoSendStream::Stats GetStats() override;
   void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override;
 
@@ -143,11 +153,14 @@
   webrtc::VideoSendStream::Config config_;
   webrtc::VideoEncoderConfig encoder_config_;
   std::vector<webrtc::VideoStream> video_streams_;
+  rtc::VideoSinkWants sink_wants_;
+
   bool codec_settings_set_;
   union VpxSettings {
     webrtc::VideoCodecVP8 vp8;
     webrtc::VideoCodecVP9 vp9;
   } vpx_settings_;
+  bool resolution_scaling_enabled_;
   rtc::VideoSourceInterface<webrtc::VideoFrame>* source_;
   int num_swapped_frames_;
   webrtc::VideoFrame last_frame_;
diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc
index fcc9254..d3e71a3 100644
--- a/webrtc/media/engine/webrtcvideoengine2.cc
+++ b/webrtc/media/engine/webrtcvideoengine2.cc
@@ -31,7 +31,6 @@
 #include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
 #include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
 #include "webrtc/system_wrappers/include/field_trial.h"
-#include "webrtc/system_wrappers/include/metrics.h"
 #include "webrtc/video_decoder.h"
 #include "webrtc/video_encoder.h"
 
@@ -237,17 +236,6 @@
   return true;
 }
 
-inline bool ContainsHeaderExtension(
-    const std::vector<webrtc::RtpExtension>& extensions,
-    const std::string& uri) {
-  for (const auto& kv : extensions) {
-    if (kv.uri == uri) {
-      return true;
-    }
-  }
-  return false;
-}
-
 // Returns true if the given codec is disallowed from doing simulcast.
 bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
   return CodecNamesEq(codec_name, kH264CodecName) ||
@@ -395,9 +383,6 @@
 
 static const int kDefaultRtcpReceiverReportSsrc = 1;
 
-// Down grade resolution at most 2 times for CPU reasons.
-static const int kMaxCpuDowngrades = 2;
-
 // Minimum time interval for logging stats.
 static const int64_t kStatsLogIntervalMs = 10000;
 
@@ -1567,10 +1552,7 @@
       ssrcs_(sp.ssrcs),
       ssrc_groups_(sp.ssrc_groups),
       call_(call),
-      cpu_restricted_counter_(0),
-      number_of_cpu_adapt_changes_(0),
-      frame_count_(0),
-      cpu_restricted_frame_count_(0),
+      enable_cpu_overuse_detection_(enable_cpu_overuse_detection),
       source_(nullptr),
       external_encoder_factory_(external_encoder_factory),
       stream_(nullptr),
@@ -1593,38 +1575,16 @@
   parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
                                          ? webrtc::RtcpMode::kReducedSize
                                          : webrtc::RtcpMode::kCompound;
-  parameters_.config.overuse_callback =
-      enable_cpu_overuse_detection ? this : nullptr;
-
-  // Only request rotation at the source when we positively know that the remote
-  // side doesn't support the rotation extension. This allows us to prepare the
-  // encoder in the expectation that rotation is supported - which is the common
-  // case.
-  sink_wants_.rotation_applied =
-      rtp_extensions &&
-      !ContainsHeaderExtension(*rtp_extensions,
-                               webrtc::RtpExtension::kVideoRotationUri);
-
   if (codec_settings) {
     SetCodec(*codec_settings);
   }
 }
 
 WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
-  DisconnectSource();
   if (stream_ != NULL) {
     call_->DestroyVideoSendStream(stream_);
   }
   DestroyVideoEncoder(&allocated_encoder_);
-  UpdateHistograms();
-}
-
-void WebRtcVideoChannel2::WebRtcVideoSendStream::UpdateHistograms() const {
-  const int kMinRequiredFrames = 200;
-  if (frame_count_ > kMinRequiredFrames) {
-    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.CpuLimitedResolutionInPercent",
-                             cpu_restricted_frame_count_ * 100 / frame_count_);
-  }
 }
 
 void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
@@ -1658,10 +1618,6 @@
 
   last_frame_timestamp_us_ = video_frame.timestamp_us();
 
-  ++frame_count_;
-  if (cpu_restricted_counter_ > 0)
-    ++cpu_restricted_frame_count_;
-
   // Forward frame to the encoder regardless if we are sending or not. This is
   // to ensure that the encoder can be reconfigured with the correct frame size
   // as quickly as possible.
@@ -1678,9 +1634,6 @@
   // Ignore |options| pointer if |enable| is false.
   bool options_present = enable && options;
   bool source_changing = source_ != source;
-  if (source_changing) {
-    DisconnectSource();
-  }
 
   if (options_present) {
     VideoOptions old_options = parameters_.options;
@@ -1692,8 +1645,7 @@
 
   if (source_changing) {
     rtc::CritScope cs(&lock_);
-    if (source == nullptr && encoder_sink_ != nullptr &&
-        last_frame_info_.width > 0) {
+    if (source == nullptr && last_frame_info_.width > 0 && encoder_sink_) {
       LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
       // Force this black frame not to be dropped due to timestamp order
       // check. As IncomingCapturedFrame will drop the frame if this frame's
@@ -1709,34 +1661,30 @@
       encoder_sink_->OnFrame(webrtc::VideoFrame(
           black_buffer, last_frame_info_.rotation, last_frame_timestamp_us_));
     }
-    source_ = source;
   }
 
-  if (source_changing && source_) {
-    // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
-    // that might cause a lock order inversion.
-    source_->AddOrUpdateSink(this, sink_wants_);
+  // TODO(perkj, nisse): Remove |source_| and directly call
+  // |stream_|->SetSource(source) once the video frame types have been
+  // merged.
+  if (source_ && stream_) {
+    stream_->SetSource(
+        nullptr, webrtc::VideoSendStream::DegradationPreference::kBalanced);
+  }
+  // Switch to the new source.
+  source_ = source;
+  if (source && stream_) {
+    // Do not adapt resolution for screen content as this will likely
+    // result in blurry and unreadable text.
+    stream_->SetSource(
+        this, enable_cpu_overuse_detection_ &&
+                      !parameters_.options.is_screencast.value_or(false)
+                  ? webrtc::VideoSendStream::DegradationPreference::kBalanced
+                  : webrtc::VideoSendStream::DegradationPreference::
+                        kMaintainResolution);
   }
   return true;
 }
 
-void WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectSource() {
-  RTC_DCHECK_RUN_ON(&thread_checker_);
-  if (source_ == nullptr) {
-    return;
-  }
-
-  // |source_->RemoveSink| may not be called while holding |lock_| since
-  // that might cause a lock order inversion.
-  source_->RemoveSink(this);
-  source_ = nullptr;
-  // Reset |cpu_restricted_counter_| if the source is changed. It is not
-  // possible to know if the video resolution is restricted by CPU usage after
-  // the source is changed since the next source might be screen capture
-  // with another resolution and frame rate.
-  cpu_restricted_counter_ = 0;
-}
-
 const std::vector<uint32_t>&
 WebRtcVideoChannel2::WebRtcVideoSendStream::GetSsrcs() const {
   return ssrcs_;
@@ -1864,16 +1812,6 @@
     LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetSendParameters";
     RecreateWebRtcStream();
   }
-
-  // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
-  // that might cause a lock order inversion.
-  if (params.rtp_header_extensions) {
-    sink_wants_.rotation_applied = !ContainsHeaderExtension(
-        *params.rtp_header_extensions, webrtc::RtpExtension::kVideoRotationUri);
-    if (source_) {
-      source_->AddOrUpdateSink(this, sink_wants_);
-    }
-  }
 }
 
 bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpParameters(
@@ -2002,90 +1940,47 @@
   UpdateSendState();
 }
 
+void WebRtcVideoChannel2::WebRtcVideoSendStream::RemoveSink(
+    VideoSinkInterface<webrtc::VideoFrame>* sink) {
+  RTC_DCHECK_RUN_ON(&thread_checker_);
+  {
+    rtc::CritScope cs(&lock_);
+    RTC_DCHECK(encoder_sink_ == sink);
+    encoder_sink_ = nullptr;
+  }
+  source_->RemoveSink(this);
+}
+
 void WebRtcVideoChannel2::WebRtcVideoSendStream::AddOrUpdateSink(
     VideoSinkInterface<webrtc::VideoFrame>* sink,
     const rtc::VideoSinkWants& wants) {
-  // TODO(perkj): Actually consider the encoder |wants| and remove
-  // WebRtcVideoSendStream::OnLoadUpdate(Load load).
-  rtc::CritScope cs(&lock_);
-  RTC_DCHECK(!encoder_sink_ || encoder_sink_ == sink);
-  encoder_sink_ = sink;
-}
-
-void WebRtcVideoChannel2::WebRtcVideoSendStream::RemoveSink(
-    VideoSinkInterface<webrtc::VideoFrame>* sink) {
-  rtc::CritScope cs(&lock_);
-  RTC_DCHECK_EQ(encoder_sink_, sink);
-  encoder_sink_ = nullptr;
-}
-
-void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
-  if (worker_thread_ != rtc::Thread::Current()) {
-    invoker_.AsyncInvoke<void>(
-        RTC_FROM_HERE, worker_thread_,
-        rtc::Bind(&WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate,
-                  this, load));
-    return;
-  }
-  RTC_DCHECK_RUN_ON(&thread_checker_);
-  if (!source_) {
-    return;
-  }
-
-  LOG(LS_INFO) << "OnLoadUpdate " << load << ", is_screencast: "
-               << (parameters_.options.is_screencast
-                       ? (*parameters_.options.is_screencast ? "true" : "false")
-                       : "unset");
-  // Do not adapt resolution for screen content as this will likely result in
-  // blurry and unreadable text.
-  if (parameters_.options.is_screencast.value_or(false))
-    return;
-
-  rtc::Optional<int> max_pixel_count;
-  rtc::Optional<int> max_pixel_count_step_up;
-  if (load == kOveruse) {
-    rtc::CritScope cs(&lock_);
-    if (cpu_restricted_counter_ >= kMaxCpuDowngrades) {
-      return;
+  if (worker_thread_ == rtc::Thread::Current()) {
+    // AddOrUpdateSink is called on |worker_thread_| if this is the first
+    // registration of |sink|.
+    RTC_DCHECK_RUN_ON(&thread_checker_);
+    {
+      rtc::CritScope cs(&lock_);
+      encoder_sink_ = sink;
     }
-    // The input video frame size will have a resolution with less than or
-    // equal to |max_pixel_count| depending on how the source can scale the
-    // input frame size.
-    max_pixel_count = rtc::Optional<int>(
-        (last_frame_info_.height * last_frame_info_.width * 3) / 5);
-    // Increase |number_of_cpu_adapt_changes_| if
-    // sink_wants_.max_pixel_count will be changed since
-    // last time |source_->AddOrUpdateSink| was called. That is, this will
-    // result in a new request for the source to change resolution.
-    if (!sink_wants_.max_pixel_count ||
-        *sink_wants_.max_pixel_count > *max_pixel_count) {
-      ++number_of_cpu_adapt_changes_;
-      ++cpu_restricted_counter_;
-    }
+    source_->AddOrUpdateSink(this, wants);
   } else {
-    RTC_DCHECK(load == kUnderuse);
-    rtc::CritScope cs(&lock_);
-    // The input video frame size will have a resolution with "one step up"
-    // pixels than |max_pixel_count_step_up| where "one step up" depends on
-    // how the source can scale the input frame size.
-    max_pixel_count_step_up =
-        rtc::Optional<int>(last_frame_info_.height * last_frame_info_.width);
-    // Increase |number_of_cpu_adapt_changes_| if
-    // sink_wants_.max_pixel_count_step_up will be changed since
-    // last time |source_->AddOrUpdateSink| was called. That is, this will
-    // result in a new request for the source to change resolution.
-    if (sink_wants_.max_pixel_count ||
-        (sink_wants_.max_pixel_count_step_up &&
-         *sink_wants_.max_pixel_count_step_up < *max_pixel_count_step_up)) {
-      ++number_of_cpu_adapt_changes_;
-      --cpu_restricted_counter_;
-    }
+    // Subsequent calls to AddOrUpdateSink will happen on the encoder task
+    // queue.
+    invoker_.AsyncInvoke<void>(RTC_FROM_HERE, worker_thread_, [this, wants] {
+      RTC_DCHECK_RUN_ON(&thread_checker_);
+      bool encoder_sink_valid = true;
+      {
+        rtc::CritScope cs(&lock_);
+        encoder_sink_valid = encoder_sink_ != nullptr;
+      }
+      // Since |source_| is still valid after a call to RemoveSink, check if
+      // |encoder_sink_| is still valid to check if this call should be
+      // cancelled.
+      if (source_ && encoder_sink_valid) {
+        source_->AddOrUpdateSink(this, wants);
+      }
+    });
   }
-  sink_wants_.max_pixel_count = max_pixel_count;
-  sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
-  // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
-  // that might cause a lock order inversion.
-  source_->AddOrUpdateSink(this, sink_wants_);
 }
 
 VideoSenderInfo WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo(
@@ -2106,9 +2001,9 @@
   if (log_stats)
     LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
 
-  info.adapt_changes = number_of_cpu_adapt_changes_;
+  info.adapt_changes = stats.number_of_cpu_adapt_changes;
   info.adapt_reason =
-      cpu_restricted_counter_ <= 0 ? ADAPTREASON_NONE : ADAPTREASON_CPU;
+      stats.cpu_limited_resolution ? ADAPTREASON_CPU : ADAPTREASON_NONE;
 
   // Get bandwidth limitation info from stream_->GetStats().
   // Input resolution (output from video_adapter) can be further scaled down or
@@ -2201,10 +2096,24 @@
   }
   stream_ = call_->CreateVideoSendStream(std::move(config),
                                          parameters_.encoder_config.Copy());
-  stream_->SetSource(this);
 
   parameters_.encoder_config.encoder_specific_settings = NULL;
 
+  if (source_) {
+    // TODO(perkj, nisse): Remove |source_| and directly call
+    // |stream_|->SetSource(source) once the video frame types have been
+    // merged and |stream_| internally reconfigure the encoder on frame
+    // resolution change.
+    // Do not adapt resolution for screen content as this will likely result in
+    // blurry and unreadable text.
+    stream_->SetSource(
+        this, enable_cpu_overuse_detection_ &&
+                      !parameters_.options.is_screencast.value_or(false)
+                  ? webrtc::VideoSendStream::DegradationPreference::kBalanced
+                  : webrtc::VideoSendStream::DegradationPreference::
+                        kMaintainResolution);
+  }
+
   // Call stream_->Start() if necessary conditions are met.
   UpdateSendState();
 }
diff --git a/webrtc/media/engine/webrtcvideoengine2.h b/webrtc/media/engine/webrtcvideoengine2.h
index 263e834..bc11425 100644
--- a/webrtc/media/engine/webrtcvideoengine2.h
+++ b/webrtc/media/engine/webrtcvideoengine2.h
@@ -238,8 +238,7 @@
   // frames are then converted from cricket frames to webrtc frames.
   class WebRtcVideoSendStream
       : public rtc::VideoSinkInterface<cricket::VideoFrame>,
-        public rtc::VideoSourceInterface<webrtc::VideoFrame>,
-        public webrtc::LoadObserver {
+        public rtc::VideoSourceInterface<webrtc::VideoFrame> {
    public:
     WebRtcVideoSendStream(
         webrtc::Call* call,
@@ -272,13 +271,9 @@
     bool SetVideoSend(bool mute,
                       const VideoOptions* options,
                       rtc::VideoSourceInterface<cricket::VideoFrame>* source);
-    void DisconnectSource();
 
     void SetSend(bool send);
 
-    // Implements webrtc::LoadObserver.
-    void OnLoadUpdate(Load load) override;
-
     const std::vector<uint32_t>& GetSsrcs() const;
     VideoSenderInfo GetVideoSenderInfo(bool log_stats);
     void FillBandwidthEstimationInfo(BandwidthEstimationInfo* bwe_info);
@@ -315,9 +310,9 @@
       bool external;
     };
 
-    // TODO(perkj): VideoFrameInfo is currently used for CPU adaptation since
-    // we currently do not express CPU overuse using SinkWants in lower
-    // layers. This will be fixed in an upcoming cl.
+    // TODO(perkj): VideoFrameInfo is currently used for sending a black frame
+    // when the video source is removed. Consider moving that logic to
+    // VieEncoder or remove it.
     struct VideoFrameInfo {
       VideoFrameInfo()
           : width(0),
@@ -345,25 +340,13 @@
     // and whether or not the encoding in |rtp_parameters_| is active.
     void UpdateSendState();
 
-    void UpdateHistograms() const EXCLUSIVE_LOCKS_REQUIRED(lock_);
-
     rtc::ThreadChecker thread_checker_;
     rtc::AsyncInvoker invoker_;
     rtc::Thread* worker_thread_;
     const std::vector<uint32_t> ssrcs_ ACCESS_ON(&thread_checker_);
     const std::vector<SsrcGroup> ssrc_groups_ ACCESS_ON(&thread_checker_);
     webrtc::Call* const call_;
-    rtc::VideoSinkWants sink_wants_ ACCESS_ON(&thread_checker_);
-    // Counter used for deciding if the video resolution is currently
-    // restricted by CPU usage. It is reset if |source_| is changed.
-    int cpu_restricted_counter_;
-    // Total number of times resolution as been requested to be changed due to
-    // CPU adaptation.
-    int number_of_cpu_adapt_changes_ ACCESS_ON(&thread_checker_);
-    // Total number of frames sent to |stream_|.
-    int frame_count_ GUARDED_BY(lock_);
-    // Total number of cpu restricted frames sent to |stream_|.
-    int cpu_restricted_frame_count_ GUARDED_BY(lock_);
+    const bool enable_cpu_overuse_detection_;
     rtc::VideoSourceInterface<cricket::VideoFrame>* source_
         ACCESS_ON(&thread_checker_);
     WebRtcVideoEncoderFactory* const external_encoder_factory_
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index d3a6047..71156a6 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -1988,9 +1988,6 @@
 
   ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
   FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
-  webrtc::LoadObserver* overuse_callback =
-      send_stream->GetConfig().overuse_callback;
-  ASSERT_TRUE(overuse_callback != NULL);
 
   EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
   EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
@@ -1998,44 +1995,47 @@
   EXPECT_EQ(720, send_stream->GetLastHeight());
 
   // Trigger overuse.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
+  send_stream->InjectVideoSinkWants(wants);
   EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
   EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
   EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
 
   // Trigger overuse again.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
+  wants.max_pixel_count = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
+  send_stream->InjectVideoSinkWants(wants);
   EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
   EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1280 * 2 / 4, send_stream->GetLastWidth());
   EXPECT_EQ(720 * 2 / 4, send_stream->GetLastHeight());
 
-  // Trigger overuse again. This should not decrease the resolution since we
-  // should only adapt the resolution down max two steps.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
-  EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
-  EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
-  EXPECT_EQ(1280 * 2 / 4, send_stream->GetLastWidth());
-  EXPECT_EQ(720 * 2 / 4, send_stream->GetLastHeight());
-
   // Change input resolution.
   EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
-  EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1284 / 2, send_stream->GetLastWidth());
   EXPECT_EQ(724 / 2, send_stream->GetLastHeight());
 
   // Trigger underuse which should go back up in resolution.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
+  wants.max_pixel_count = rtc::Optional<int>();
+  wants.max_pixel_count_step_up = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight());
+  send_stream->InjectVideoSinkWants(wants);
   EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
-  EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1284 * 3 / 4, send_stream->GetLastWidth());
   EXPECT_EQ(724 * 3 / 4, send_stream->GetLastHeight());
 
   // Trigger underuse which should go back up in resolution.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
+  wants.max_pixel_count = rtc::Optional<int>();
+  wants.max_pixel_count_step_up = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight());
+  send_stream->InjectVideoSinkWants(wants);
   EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
-  EXPECT_EQ(7, send_stream->GetNumberOfSwappedFrames());
+  EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1284, send_stream->GetLastWidth());
   EXPECT_EQ(724, send_stream->GetLastHeight());
 
@@ -2059,14 +2059,12 @@
             capturer.Start(capturer.GetSupportedFormats()->front()));
   ASSERT_TRUE(channel_->SetSend(true));
   cricket::VideoOptions camera_options;
+  camera_options.is_screencast = rtc::Optional<bool>(false);
   channel_->SetVideoSend(last_ssrc_, true /* enable */, &camera_options,
                          &capturer);
 
   ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
   FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
-  webrtc::LoadObserver* overuse_callback =
-      send_stream->GetConfig().overuse_callback;
-  ASSERT_TRUE(overuse_callback != NULL);
 
   EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
   EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
@@ -2074,7 +2072,10 @@
   EXPECT_EQ(720, send_stream->GetLastHeight());
 
   // Trigger overuse.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight() - 1);
+  send_stream->InjectVideoSinkWants(wants);
   EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
   EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
   EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
@@ -2130,14 +2131,10 @@
 
   EXPECT_TRUE(channel_->SetSend(true));
 
-  // Trigger overuse.
-  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
   FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front();
-  webrtc::LoadObserver* overuse_callback =
-      send_stream->GetConfig().overuse_callback;
 
-  if (!enable_overuse) {
-    ASSERT_TRUE(overuse_callback == NULL);
+  if (!enable_overuse || is_screenshare) {
+    EXPECT_FALSE(send_stream->resolution_scaling_enabled());
 
     EXPECT_TRUE(capturer.CaptureFrame());
     EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
@@ -2149,25 +2146,30 @@
     return;
   }
 
-  ASSERT_TRUE(overuse_callback != NULL);
+  EXPECT_TRUE(send_stream->resolution_scaling_enabled());
+  // Trigger overuse.
+  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
+
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count =
+      rtc::Optional<int>(capture_format.width * capture_format.height - 1);
+  send_stream->InjectVideoSinkWants(wants);
+
   EXPECT_TRUE(capturer.CaptureFrame());
   EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames());
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
 
   EXPECT_TRUE(capturer.CaptureFrame());
   EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
 
-  if (is_screenshare) {
-    // Do not adapt screen share.
-    EXPECT_EQ(capture_format.width, send_stream->GetLastWidth());
-    EXPECT_EQ(capture_format.height, send_stream->GetLastHeight());
-  } else {
-    EXPECT_LT(send_stream->GetLastWidth(), capture_format.width);
-    EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
-  }
+  EXPECT_LT(send_stream->GetLastWidth(), capture_format.width);
+  EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
 
   // Trigger underuse which should go back to normal resolution.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
+  wants.max_pixel_count = rtc::Optional<int>();
+  wants.max_pixel_count_step_up = rtc::Optional<int>(
+      send_stream->GetLastWidth() * send_stream->GetLastHeight());
+  send_stream->InjectVideoSinkWants(wants);
+
   EXPECT_TRUE(capturer.CaptureFrame());
   EXPECT_EQ(3, send_stream->GetNumberOfSwappedFrames());
 
@@ -2916,144 +2918,35 @@
   EXPECT_EQ(5, info.senders[0].preferred_bitrate);
 }
 
-TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
-  AddSendStream(cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
+TEST_F(WebRtcVideoChannel2Test, GetStatsReportsCpuAdaptationStats) {
+  FakeVideoSendStream* stream = AddSendStream();
+  webrtc::VideoSendStream::Stats stats;
+  stats.number_of_cpu_adapt_changes = 2;
+  stats.cpu_limited_resolution = true;
+  stream->SetStats(stats);
 
-  // Capture format VGA.
-  cricket::FakeVideoCapturer video_capturer_vga;
-  const std::vector<cricket::VideoFormat>* formats =
-      video_capturer_vga.GetSupportedFormats();
-  cricket::VideoFormat capture_format_vga = (*formats)[1];
-  EXPECT_EQ(cricket::CS_RUNNING, video_capturer_vga.Start(capture_format_vga));
-  EXPECT_TRUE(
-      channel_->SetVideoSend(kSsrcs3[0], true, nullptr, &video_capturer_vga));
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
-
-  cricket::VideoCodec send_codec(100, "VP8");
-  cricket::VideoSendParameters parameters;
-  parameters.codecs.push_back(send_codec);
-  EXPECT_TRUE(channel_->SetSendParameters(parameters));
-  EXPECT_TRUE(channel_->SetSend(true));
-
-  // Verify that the CpuOveruseObserver is registered and trigger downgrade.
-
-  // Trigger overuse.
-  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
-  webrtc::LoadObserver* overuse_callback =
-      fake_call_->GetVideoSendStreams().front()->GetConfig().overuse_callback;
-  ASSERT_TRUE(overuse_callback != NULL);
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
-
-  // Capture format VGA -> adapt (OnCpuResolutionRequest downgrade) -> VGA/2.
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
   cricket::VideoMediaInfo info;
   EXPECT_TRUE(channel_->GetStats(&info));
   ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(1, info.senders[0].adapt_changes);
   EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_CPU, info.senders[0].adapt_reason);
-
-  // Trigger upgrade and verify that we adapt back up to VGA.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
-  info.Clear();
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(2, info.senders[0].adapt_changes);
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_NONE,
-            info.senders[0].adapt_reason);
-
-  // No capturer (no adapter). Adapt changes from old adapter should be kept.
-  EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], true, nullptr, nullptr));
-  info.Clear();
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(2, info.senders[0].adapt_changes);
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_NONE,
-            info.senders[0].adapt_reason);
-
-  // Set new capturer, capture format HD.
-  cricket::FakeVideoCapturer video_capturer_hd;
-  cricket::VideoFormat capture_format_hd = (*formats)[0];
-  EXPECT_EQ(cricket::CS_RUNNING, video_capturer_hd.Start(capture_format_hd));
-  EXPECT_TRUE(
-      channel_->SetVideoSend(kSsrcs3[0], true, nullptr, &video_capturer_hd));
-  EXPECT_TRUE(video_capturer_hd.CaptureFrame());
-
-  // Trigger overuse, HD -> adapt (OnCpuResolutionRequest downgrade) -> HD/2.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
-  EXPECT_TRUE(video_capturer_hd.CaptureFrame());
-  info.Clear();
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(3, info.senders[0].adapt_changes);
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_CPU, info.senders[0].adapt_reason);
-
-  EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], true, nullptr, nullptr));
+  EXPECT_EQ(stats.number_of_cpu_adapt_changes, info.senders[0].adapt_changes);
 }
 
-TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationAndBandwidthStats) {
-  AddSendStream(cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
-
-  // Capture format VGA.
-  cricket::FakeVideoCapturer video_capturer_vga;
-  const std::vector<cricket::VideoFormat>* formats =
-      video_capturer_vga.GetSupportedFormats();
-  cricket::VideoFormat capture_format_vga = (*formats)[1];
-  EXPECT_EQ(cricket::CS_RUNNING, video_capturer_vga.Start(capture_format_vga));
-  EXPECT_TRUE(
-      channel_->SetVideoSend(kSsrcs3[0], true, nullptr, &video_capturer_vga));
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
-
-  cricket::VideoCodec send_codec(100, "VP8");
-  cricket::VideoSendParameters parameters;
-  parameters.codecs.push_back(send_codec);
-  EXPECT_TRUE(channel_->SetSendParameters(parameters));
-  EXPECT_TRUE(channel_->SetSend(true));
-
-  // Verify that the CpuOveruseObserver is registered and trigger downgrade.
-
-  // Trigger overuse -> adapt CPU.
-  ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size());
-  webrtc::LoadObserver* overuse_callback =
-      fake_call_->GetVideoSendStreams().front()->GetConfig().overuse_callback;
-  ASSERT_TRUE(overuse_callback != NULL);
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kOveruse);
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
-  cricket::VideoMediaInfo info;
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_CPU, info.senders[0].adapt_reason);
-
-  // Set bandwidth limitation stats for the stream -> adapt CPU + BW.
+TEST_F(WebRtcVideoChannel2Test, GetStatsReportsAdaptationAndBandwidthStats) {
+  FakeVideoSendStream* stream = AddSendStream();
   webrtc::VideoSendStream::Stats stats;
+  stats.number_of_cpu_adapt_changes = 2;
+  stats.cpu_limited_resolution = true;
   stats.bw_limited_resolution = true;
-  fake_call_->GetVideoSendStreams().front()->SetStats(stats);
-  info.Clear();
+  stream->SetStats(stats);
+
+  cricket::VideoMediaInfo info;
   EXPECT_TRUE(channel_->GetStats(&info));
   ASSERT_EQ(1U, info.senders.size());
   EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_CPU |
                 WebRtcVideoChannel2::ADAPTREASON_BANDWIDTH,
             info.senders[0].adapt_reason);
-
-  // Trigger upgrade -> adapt BW.
-  overuse_callback->OnLoadUpdate(webrtc::LoadObserver::kUnderuse);
-  EXPECT_TRUE(video_capturer_vga.CaptureFrame());
-  info.Clear();
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_BANDWIDTH,
-            info.senders[0].adapt_reason);
-
-  // Reset bandwidth limitation state -> adapt NONE.
-  stats.bw_limited_resolution = false;
-  fake_call_->GetVideoSendStreams().front()->SetStats(stats);
-  info.Clear();
-  EXPECT_TRUE(channel_->GetStats(&info));
-  ASSERT_EQ(1U, info.senders.size());
-  EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_NONE,
-            info.senders[0].adapt_reason);
-
-  EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], true, nullptr, nullptr));
+  EXPECT_EQ(stats.number_of_cpu_adapt_changes, info.senders[0].adapt_changes);
 }
 
 TEST_F(WebRtcVideoChannel2Test,
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index a4c21f1..be43743 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -246,7 +246,9 @@
                                                      int height) {
   frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
       width, height, framerate * speed, clock));
-  video_send_stream_->SetSource(frame_generator_capturer_.get());
+  video_send_stream_->SetSource(
+      frame_generator_capturer_.get(),
+      VideoSendStream::DegradationPreference::kBalanced);
 }
 
 void CallTest::CreateFrameGeneratorCapturer(int framerate,
@@ -254,7 +256,9 @@
                                             int height) {
   frame_generator_capturer_.reset(
       test::FrameGeneratorCapturer::Create(width, height, framerate, clock_));
-  video_send_stream_->SetSource(frame_generator_capturer_.get());
+  video_send_stream_->SetSource(
+      frame_generator_capturer_.get(),
+      VideoSendStream::DegradationPreference::kBalanced);
 }
 
 void CallTest::CreateFakeAudioDevices() {
diff --git a/webrtc/test/constants.cc b/webrtc/test/constants.cc
index 7e94fe5..43f9adc 100644
--- a/webrtc/test/constants.cc
+++ b/webrtc/test/constants.cc
@@ -16,5 +16,6 @@
 const int kTOffsetExtensionId = 6;
 const int kAbsSendTimeExtensionId = 7;
 const int kTransportSequenceNumberExtensionId = 8;
+const int kVideoRotationExtensionId = 9;
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/constants.h b/webrtc/test/constants.h
index 14b2ba6..1b5b0cb 100644
--- a/webrtc/test/constants.h
+++ b/webrtc/test/constants.h
@@ -14,5 +14,6 @@
 extern const int kTOffsetExtensionId;
 extern const int kAbsSendTimeExtensionId;
 extern const int kTransportSequenceNumberExtensionId;
+extern const int kVideoRotationExtensionId;
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index ac1c6de..065b529 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -109,6 +109,7 @@
     encoded._frameType = (*frame_types)[i];
     encoded._encodedWidth = config_.simulcastStream[i].width;
     encoded._encodedHeight = config_.simulcastStream[i].height;
+    encoded.rotation_ = input_image.rotation();
     RTC_DCHECK(callback_ != NULL);
     specifics.codec_name = ImplementationName();
     if (callback_->Encoded(encoded, &specifics, NULL) != 0)
@@ -204,10 +205,20 @@
     : test::FakeEncoder(clock),
       delay_ms_(delay_ms) {}
 
+void DelayedEncoder::SetDelay(int delay_ms) {
+  rtc::CritScope lock(&lock_);
+  delay_ms_ = delay_ms;
+}
+
 int32_t DelayedEncoder::Encode(const VideoFrame& input_image,
                                const CodecSpecificInfo* codec_specific_info,
                                const std::vector<FrameType>* frame_types) {
-  SleepMs(delay_ms_);
+  int delay_ms = 0;
+  {
+    rtc::CritScope lock(&lock_);
+    delay_ms = delay_ms_;
+  }
+  SleepMs(delay_ms);
   return FakeEncoder::Encode(input_image, codec_specific_info, frame_types);
 }
 }  // namespace test
diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h
index ae869ff..d5e7b48 100644
--- a/webrtc/test/fake_encoder.h
+++ b/webrtc/test/fake_encoder.h
@@ -13,6 +13,7 @@
 
 #include <vector>
 
+#include "webrtc/base/criticalsection.h"
 #include "webrtc/common_types.h"
 #include "webrtc/system_wrappers/include/clock.h"
 #include "webrtc/video_encoder.h"
@@ -75,12 +76,14 @@
   DelayedEncoder(Clock* clock, int delay_ms);
   virtual ~DelayedEncoder() {}
 
+  void SetDelay(int delay_ms);
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
                  const std::vector<FrameType>* frame_types) override;
 
  private:
-  const int delay_ms_;
+  rtc::CriticalSection lock_;
+  int delay_ms_ GUARDED_BY(&lock_);
 };
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/frame_generator.cc b/webrtc/test/frame_generator.cc
index 2ae739a..eeb0b0f 100644
--- a/webrtc/test/frame_generator.cc
+++ b/webrtc/test/frame_generator.cc
@@ -258,6 +258,7 @@
   rtc::CritScope lock(&crit_);
   RTC_DCHECK(!sink_ || sink_ == sink);
   sink_ = sink;
+  sink_wants_ = wants;
 }
 
 void FrameForwarder::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
@@ -266,6 +267,16 @@
   sink_ = nullptr;
 }
 
+rtc::VideoSinkWants FrameForwarder::sink_wants() const {
+  rtc::CritScope lock(&crit_);
+  return sink_wants_;
+}
+
+bool FrameForwarder::has_sinks() const {
+  rtc::CritScope lock(&crit_);
+  return sink_ != nullptr;
+}
+
 FrameGenerator* FrameGenerator::CreateChromaGenerator(size_t width,
                                                       size_t height) {
   return new ChromaGenerator(width, height);
diff --git a/webrtc/test/frame_generator.h b/webrtc/test/frame_generator.h
index e7cba1c..9a643f8 100644
--- a/webrtc/test/frame_generator.h
+++ b/webrtc/test/frame_generator.h
@@ -31,6 +31,8 @@
   FrameForwarder();
   // Forwards |video_frame| to the registered |sink_|.
   void IncomingCapturedFrame(const VideoFrame& video_frame);
+  rtc::VideoSinkWants sink_wants() const;
+  bool has_sinks() const;
 
  private:
   void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
@@ -39,6 +41,7 @@
 
   rtc::CriticalSection crit_;
   rtc::VideoSinkInterface<VideoFrame>* sink_ GUARDED_BY(crit_);
+  rtc::VideoSinkWants sink_wants_ GUARDED_BY(crit_);
 };
 
 class FrameGenerator {
diff --git a/webrtc/test/frame_generator_capturer.cc b/webrtc/test/frame_generator_capturer.cc
index d80154a..65be956 100644
--- a/webrtc/test/frame_generator_capturer.cc
+++ b/webrtc/test/frame_generator_capturer.cc
@@ -59,6 +59,7 @@
     : clock_(clock),
       sending_(false),
       sink_(nullptr),
+      sink_wants_observer_(nullptr),
       tick_(EventTimerWrapper::Create()),
       thread_(FrameGeneratorCapturer::Run, this, "FrameGeneratorCapturer"),
       frame_generator_(frame_generator),
@@ -129,12 +130,20 @@
   frame_generator_->ChangeResolution(width, height);
 }
 
+void FrameGeneratorCapturer::SetSinkWantsObserver(SinkWantsObserver* observer) {
+  rtc::CritScope cs(&lock_);
+  RTC_DCHECK(!sink_wants_observer_);
+  sink_wants_observer_ = observer;
+}
+
 void FrameGeneratorCapturer::AddOrUpdateSink(
     rtc::VideoSinkInterface<VideoFrame>* sink,
     const rtc::VideoSinkWants& wants) {
   rtc::CritScope cs(&lock_);
-  RTC_CHECK(!sink_);
+  RTC_CHECK(!sink_ || sink_ == sink);
   sink_ = sink;
+  if (sink_wants_observer_)
+    sink_wants_observer_->OnSinkWantsChanged(sink, wants);
 }
 
 void FrameGeneratorCapturer::RemoveSink(
diff --git a/webrtc/test/frame_generator_capturer.h b/webrtc/test/frame_generator_capturer.h
index fa001c4..01bc04b 100644
--- a/webrtc/test/frame_generator_capturer.h
+++ b/webrtc/test/frame_generator_capturer.h
@@ -31,6 +31,17 @@
 
 class FrameGeneratorCapturer : public VideoCapturer {
  public:
+  class SinkWantsObserver {
+   public:
+    // OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink
+    // is called.
+    virtual void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+                                    const rtc::VideoSinkWants& wants) = 0;
+
+   protected:
+    virtual ~SinkWantsObserver() {}
+  };
+
   static FrameGeneratorCapturer* Create(size_t width,
                                         size_t height,
                                         int target_fps,
@@ -47,6 +58,8 @@
   void Stop() override;
   void ChangeResolution(size_t width, size_t height);
 
+  void SetSinkWantsObserver(SinkWantsObserver* observer);
+
   void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
                        const rtc::VideoSinkWants& wants) override;
   void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
@@ -68,6 +81,7 @@
   Clock* const clock_;
   bool sending_;
   rtc::VideoSinkInterface<VideoFrame>* sink_ GUARDED_BY(&lock_);
+  SinkWantsObserver* sink_wants_observer_ GUARDED_BY(&lock_);
 
   std::unique_ptr<EventTimerWrapper> tick_;
   rtc::CriticalSection lock_;
diff --git a/webrtc/video/encoder_rtcp_feedback_unittest.cc b/webrtc/video/encoder_rtcp_feedback_unittest.cc
index 189153d..ca4aa66 100644
--- a/webrtc/video/encoder_rtcp_feedback_unittest.cc
+++ b/webrtc/video/encoder_rtcp_feedback_unittest.cc
@@ -26,7 +26,6 @@
                    nullptr,
                    VideoSendStream::Config::EncoderSettings("fake", 0, nullptr),
                    nullptr,
-                   nullptr,
                    nullptr) {}
   ~MockVieEncoder() { Stop(); }
 
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index 8b923a7..7b796c0 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -233,7 +233,8 @@
   std::unique_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
   test::FrameForwarder frame_forwarder;
-  video_send_stream_->SetSource(&frame_forwarder);
+  video_send_stream_->SetSource(
+      &frame_forwarder, VideoSendStream::DegradationPreference::kBalanced);
 
   frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_TRUE(pre_render_callback.Wait())
@@ -279,7 +280,8 @@
       test::FrameGenerator::CreateChromaGenerator(kDefaultWidth,
                                                   kDefaultHeight));
   test::FrameForwarder frame_forwarder;
-  video_send_stream_->SetSource(&frame_forwarder);
+  video_send_stream_->SetSource(
+      &frame_forwarder, VideoSendStream::DegradationPreference::kBalanced);
   frame_forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
 
   EXPECT_TRUE(renderer.Wait())
@@ -1317,7 +1319,9 @@
 
       frame_generators[i] = test::FrameGeneratorCapturer::Create(
           width, height, 30, Clock::GetRealTimeClock());
-      send_streams[i]->SetSource(frame_generators[i]);
+      send_streams[i]->SetSource(
+          frame_generators[i],
+          VideoSendStream::DegradationPreference::kBalanced);
       frame_generators[i]->Start();
     }
 
@@ -1777,7 +1781,8 @@
       test::FrameGenerator::CreateChromaGenerator(kDefaultWidth,
                                                   kDefaultHeight));
   test::FrameForwarder forwarder;
-  video_send_stream_->SetSource(&forwarder);
+  video_send_stream_->SetSource(
+      &forwarder, VideoSendStream::DegradationPreference::kBalanced);
   forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
 
   EXPECT_TRUE(post_encode_observer.Wait())
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 370f119..2da71eb 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -132,9 +132,8 @@
     const VideoSendStream::Stats& current_stats) {
   RTC_DCHECK(uma_prefix_ == kRealtimePrefix || uma_prefix_ == kScreenPrefix);
   const int kIndex = uma_prefix_ == kScreenPrefix ? 1 : 0;
-  const int kMinRequiredSamples = 200;
-  int in_width = input_width_counter_.Avg(kMinRequiredSamples);
-  int in_height = input_height_counter_.Avg(kMinRequiredSamples);
+  int in_width = input_width_counter_.Avg(kMinRequiredMetricsSamples);
+  int in_height = input_height_counter_.Avg(kMinRequiredMetricsSamples);
   int in_fps = round(input_frame_rate_tracker_.ComputeTotalRate());
   if (in_width != -1) {
     RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "InputWidthInPixels",
@@ -144,8 +143,8 @@
     RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "InputFramesPerSecond",
                               in_fps);
   }
-  int sent_width = sent_width_counter_.Avg(kMinRequiredSamples);
-  int sent_height = sent_height_counter_.Avg(kMinRequiredSamples);
+  int sent_width = sent_width_counter_.Avg(kMinRequiredMetricsSamples);
+  int sent_height = sent_height_counter_.Avg(kMinRequiredMetricsSamples);
   int sent_fps = round(sent_frame_rate_tracker_.ComputeTotalRate());
   if (sent_width != -1) {
     RTC_HISTOGRAMS_COUNTS_10000(kIndex, uma_prefix_ + "SentWidthInPixels",
@@ -155,54 +154,63 @@
     RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "SentFramesPerSecond",
                               sent_fps);
   }
-  int encode_ms = encode_time_counter_.Avg(kMinRequiredSamples);
+  int encode_ms = encode_time_counter_.Avg(kMinRequiredMetricsSamples);
   if (encode_ms != -1) {
     RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "EncodeTimeInMs",
                                encode_ms);
   }
-  int key_frames_permille = key_frame_counter_.Permille(kMinRequiredSamples);
+  int key_frames_permille =
+      key_frame_counter_.Permille(kMinRequiredMetricsSamples);
   if (key_frames_permille != -1) {
     RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "KeyFramesSentInPermille",
                                key_frames_permille);
   }
   int quality_limited =
-      quality_limited_frame_counter_.Percent(kMinRequiredSamples);
+      quality_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
   if (quality_limited != -1) {
     RTC_HISTOGRAMS_PERCENTAGE(kIndex,
                               uma_prefix_ + "QualityLimitedResolutionInPercent",
                               quality_limited);
   }
-  int downscales = quality_downscales_counter_.Avg(kMinRequiredSamples);
+  int downscales = quality_downscales_counter_.Avg(kMinRequiredMetricsSamples);
   if (downscales != -1) {
     RTC_HISTOGRAMS_ENUMERATION(
         kIndex, uma_prefix_ + "QualityLimitedResolutionDownscales", downscales,
         20);
   }
-  int bw_limited = bw_limited_frame_counter_.Percent(kMinRequiredSamples);
+  int cpu_limited =
+      cpu_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
+  if (cpu_limited != -1) {
+    RTC_HISTOGRAMS_PERCENTAGE(
+        kIndex, uma_prefix_ + "CpuLimitedResolutionInPercent", cpu_limited);
+  }
+  int bw_limited =
+      bw_limited_frame_counter_.Percent(kMinRequiredMetricsSamples);
   if (bw_limited != -1) {
     RTC_HISTOGRAMS_PERCENTAGE(
         kIndex, uma_prefix_ + "BandwidthLimitedResolutionInPercent",
         bw_limited);
   }
-  int num_disabled = bw_resolutions_disabled_counter_.Avg(kMinRequiredSamples);
+  int num_disabled =
+      bw_resolutions_disabled_counter_.Avg(kMinRequiredMetricsSamples);
   if (num_disabled != -1) {
     RTC_HISTOGRAMS_ENUMERATION(
         kIndex, uma_prefix_ + "BandwidthLimitedResolutionsDisabled",
         num_disabled, 10);
   }
-  int delay_ms = delay_counter_.Avg(kMinRequiredSamples);
+  int delay_ms = delay_counter_.Avg(kMinRequiredMetricsSamples);
   if (delay_ms != -1)
     RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayInMs",
                                  delay_ms);
 
-  int max_delay_ms = max_delay_counter_.Avg(kMinRequiredSamples);
+  int max_delay_ms = max_delay_counter_.Avg(kMinRequiredMetricsSamples);
   if (max_delay_ms != -1) {
     RTC_HISTOGRAMS_COUNTS_100000(kIndex, uma_prefix_ + "SendSideDelayMaxInMs",
                                  max_delay_ms);
   }
 
   for (const auto& it : qp_counters_) {
-    int qp_vp8 = it.second.vp8.Avg(kMinRequiredSamples);
+    int qp_vp8 = it.second.vp8.Avg(kMinRequiredMetricsSamples);
     if (qp_vp8 != -1) {
       int spatial_idx = it.first;
       if (spatial_idx == -1) {
@@ -222,7 +230,7 @@
                         << spatial_idx;
       }
     }
-    int qp_vp9 = it.second.vp9.Avg(kMinRequiredSamples);
+    int qp_vp9 = it.second.vp9.Avg(kMinRequiredMetricsSamples);
     if (qp_vp9 != -1) {
       int spatial_idx = it.first;
       if (spatial_idx == -1) {
@@ -548,6 +556,20 @@
   uma_container_->input_frame_rate_tracker_.AddSamples(1);
   uma_container_->input_width_counter_.Add(width);
   uma_container_->input_height_counter_.Add(height);
+  uma_container_->cpu_limited_frame_counter_.Add(stats_.cpu_limited_resolution);
+}
+
+void SendStatisticsProxy::SetCpuRestrictedResolution(
+    bool cpu_restricted_resolution) {
+  rtc::CritScope lock(&crit_);
+  stats_.cpu_limited_resolution = cpu_restricted_resolution;
+}
+
+void SendStatisticsProxy::OnCpuRestrictedResolutionChanged(
+    bool cpu_restricted_resolution) {
+  rtc::CritScope lock(&crit_);
+  stats_.cpu_limited_resolution = cpu_restricted_resolution;
+  ++stats_.number_of_cpu_adapt_changes;
 }
 
 void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index 49f0bf3..89c4065 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -40,6 +40,9 @@
                             public SendSideDelayObserver {
  public:
   static const int kStatsTimeoutMs;
+  // Minimum number of samples that must be collected before a metric is
+  // added to an RTC histogram.
+  static const int kMinRequiredMetricsSamples = 200;
 
   SendStatisticsProxy(Clock* clock,
                       const VideoSendStream::Config& config,
@@ -53,6 +56,13 @@
   // Used to update incoming frame rate.
   void OnIncomingFrame(int width, int height);
 
+  // Used to indicate that the current input frame resolution is restricted
+  // due to CPU usage.
+  void SetCpuRestrictedResolution(bool cpu_restricted);
+  // Used to update the number of times the input frame resolution has
+  // changed due to CPU adaptation.
+  void OnCpuRestrictedResolutionChanged(bool cpu_restricted_resolution);
+
   void OnEncoderStatsUpdate(uint32_t framerate, uint32_t bitrate);
   void OnSuspendChange(bool is_suspended);
   void OnInactiveSsrc(uint32_t ssrc);
@@ -170,6 +180,7 @@
     BoolSampleCounter key_frame_counter_;
     BoolSampleCounter quality_limited_frame_counter_;
     SampleCounter quality_downscales_counter_;
+    BoolSampleCounter cpu_limited_frame_counter_;
     BoolSampleCounter bw_limited_frame_counter_;
     SampleCounter bw_resolutions_disabled_counter_;
     SampleCounter delay_counter_;
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index e661e0e..fcab7d1 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -26,7 +26,6 @@
 const uint32_t kFirstRtxSsrc = 18;
 const uint32_t kSecondRtxSsrc = 43;
 
-const int kMinRequiredSamples = 200;
 const int kQpIdx0 = 21;
 const int kQpIdx1 = 39;
 }  // namespace
@@ -337,7 +336,7 @@
   const int kWidth = 640;
   const int kHeight = 480;
 
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnIncomingFrame(kWidth, kHeight);
 
   // No switch, stats should not be updated.
@@ -352,6 +351,25 @@
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.InputWidthInPixels"));
 }
 
+TEST_F(SendStatisticsProxyTest, CpuLimitedResolutionUpdated) {
+  const int kWidth = 640;
+  const int kHeight = 480;
+
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
+    statistics_proxy_->OnIncomingFrame(kWidth, kHeight);
+
+  statistics_proxy_->OnCpuRestrictedResolutionChanged(true);
+
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
+    statistics_proxy_->OnIncomingFrame(kWidth, kHeight);
+
+  statistics_proxy_.reset();
+  EXPECT_EQ(1,
+            metrics::NumSamples("WebRTC.Video.CpuLimitedResolutionInPercent"));
+  EXPECT_EQ(
+      1, metrics::NumEvents("WebRTC.Video.CpuLimitedResolutionInPercent", 50));
+}
+
 TEST_F(SendStatisticsProxyTest, LifetimeHistogramIsUpdated) {
   const int64_t kTimeSec = 3;
   fake_clock_.AdvanceTimeMilliseconds(kTimeSec * 1000);
@@ -372,7 +390,7 @@
   CodecSpecificInfo codec_info;
   codec_info.codecType = kVideoCodecVP8;
 
-  for (int i = 0; i < kMinRequiredSamples; ++i) {
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     codec_info.codecSpecific.VP8.simulcastIdx = 0;
     encoded_image.qp_ = kQpIdx0;
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
@@ -397,7 +415,7 @@
   CodecSpecificInfo codec_info;
   codec_info.codecType = kVideoCodecVP8;
 
-  for (int i = 0; i < kMinRequiredSamples; ++i) {
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     codec_info.codecSpecific.VP8.simulcastIdx = 0;
     encoded_image.qp_ = kQpIdx0;
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
@@ -413,7 +431,7 @@
   codec_info.codecType = kVideoCodecVP9;
   codec_info.codecSpecific.VP9.num_spatial_layers = 2;
 
-  for (int i = 0; i < kMinRequiredSamples; ++i) {
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     encoded_image.qp_ = kQpIdx0;
     codec_info.codecSpecific.VP9.spatial_idx = 0;
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
@@ -439,7 +457,7 @@
   codec_info.codecType = kVideoCodecVP9;
   codec_info.codecSpecific.VP9.num_spatial_layers = 1;
 
-  for (int i = 0; i < kMinRequiredSamples; ++i) {
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     encoded_image.qp_ = kQpIdx0;
     codec_info.codecSpecific.VP9.spatial_idx = 0;
     statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
@@ -453,7 +471,7 @@
        BandwidthLimitedHistogramsNotUpdatedWhenDisabled) {
   EncodedImage encoded_image;
   // encoded_image.adapt_reason_.bw_resolutions_disabled by default: -1
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
@@ -469,7 +487,7 @@
   const int kResolutionsDisabled = 0;
   EncodedImage encoded_image;
   encoded_image.adapt_reason_.bw_resolutions_disabled = kResolutionsDisabled;
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
@@ -488,7 +506,7 @@
   const int kResolutionsDisabled = 1;
   EncodedImage encoded_image;
   encoded_image.adapt_reason_.bw_resolutions_disabled = kResolutionsDisabled;
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
@@ -510,7 +528,7 @@
   EncodedImage encoded_image;
   // encoded_image.adapt_reason_.quality_resolution_downscales disabled by
   // default: -1
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
@@ -526,7 +544,7 @@
   const int kDownscales = 0;
   EncodedImage encoded_image;
   encoded_image.adapt_reason_.quality_resolution_downscales = kDownscales;
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
@@ -545,7 +563,7 @@
   const int kDownscales = 2;
   EncodedImage encoded_image;
   encoded_image.adapt_reason_.quality_resolution_downscales = kDownscales;
-  for (int i = 0; i < kMinRequiredSamples; ++i)
+  for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i)
     statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr);
 
   // Histograms are updated when the statistics_proxy_ is deleted.
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 8e89544..c773e43 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -1217,7 +1217,9 @@
 
   CreateVideoStreams();
   analyzer.SetSendStream(video_send_stream_);
-  video_send_stream_->SetSource(analyzer.OutputInterface());
+  video_send_stream_->SetSource(
+      analyzer.OutputInterface(),
+      VideoSendStream::DegradationPreference::kBalanced);
 
   CreateCapturer();
   rtc::VideoSinkWants wants;
@@ -1365,7 +1367,9 @@
     video_receive_stream = call->CreateVideoReceiveStream(
         video_receive_configs_[stream_id].Copy());
     CreateCapturer();
-    video_send_stream_->SetSource(video_capturer_.get());
+    video_send_stream_->SetSource(
+        video_capturer_.get(),
+        VideoSendStream::DegradationPreference::kBalanced);
   }
 
   AudioReceiveStream* audio_receive_stream = nullptr;
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index cb6689e..fdb8ccc 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -527,10 +527,9 @@
                    config,
                    encoder_config.content_type),
       config_(std::move(config)) {
-  vie_encoder_.reset(
-      new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings,
-                     config_.pre_encode_callback, config_.overuse_callback,
-                     config_.post_encode_callback));
+  vie_encoder_.reset(new ViEEncoder(
+      num_cpu_cores, &stats_proxy_, config_.encoder_settings,
+      config_.pre_encode_callback, config_.post_encode_callback));
 
   worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask(
       &send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(),
@@ -577,9 +576,10 @@
 }
 
 void VideoSendStream::SetSource(
-    rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
+    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+    const DegradationPreference& degradation_preference) {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  vie_encoder_->SetSource(source);
+  vie_encoder_->SetSource(source, degradation_preference);
 }
 
 void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
@@ -733,7 +733,19 @@
   RTC_DCHECK_LE(config_->encoder_settings.payload_type, 127);
 
   vie_encoder_->SetStartBitrate(bitrate_allocator_->GetStartBitrate(this));
-  vie_encoder_->SetSink(this);
+
+  // Only request rotation at the source when we positively know that the remote
+  // side doesn't support the rotation extension. This allows us to prepare the
+  // encoder in the expectation that rotation is supported - which is the common
+  // case.
+  bool rotation_applied =
+      std::find_if(config_->rtp.extensions.begin(),
+                   config_->rtp.extensions.end(),
+                   [](const RtpExtension& extension) {
+                     return extension.uri == RtpExtension::kVideoRotationUri;
+                   }) == config_->rtp.extensions.end();
+
+  vie_encoder_->SetSink(this, rotation_applied);
 }
 
 void VideoSendStreamImpl::RegisterProcessThread(
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index cd1cfd3..ff52c03 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -72,8 +72,8 @@
   void Start() override;
   void Stop() override;
 
-  void SetSource(
-      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
+  void SetSource(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+                 const DegradationPreference& degradation_preference) override;
 
   void ReconfigureVideoEncoder(VideoEncoderConfig) override;
   Stats GetStats() override;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index f729654..5c836e0 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -57,6 +57,8 @@
 
   void TestVp9NonFlexMode(uint8_t num_temporal_layers,
                           uint8_t num_spatial_layers);
+
+  void TestRequestSourceRotateVideo(bool support_orientation_ext);
 };
 
 TEST_F(VideoSendStreamTest, CanStartStartedStream) {
@@ -255,6 +257,45 @@
   RunBaseTest(&test);
 }
 
+TEST_F(VideoSendStreamTest, SupportsVideoRotation) {
+  class VideoRotationObserver : public test::SendTest {
+   public:
+    VideoRotationObserver() : SendTest(kDefaultTimeoutMs) {
+      EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
+          kRtpExtensionVideoRotation, test::kVideoRotationExtensionId));
+    }
+
+    Action OnSendRtp(const uint8_t* packet, size_t length) override {
+      RTPHeader header;
+      EXPECT_TRUE(parser_->Parse(packet, length, &header));
+      EXPECT_TRUE(header.extension.hasVideoRotation);
+      EXPECT_EQ(kVideoRotation_90, header.extension.videoRotation);
+      observation_complete_.Set();
+      return SEND_PACKET;
+    }
+
+    void ModifyVideoConfigs(
+        VideoSendStream::Config* send_config,
+        std::vector<VideoReceiveStream::Config>* receive_configs,
+        VideoEncoderConfig* encoder_config) override {
+      send_config->rtp.extensions.clear();
+      send_config->rtp.extensions.push_back(RtpExtension(
+          RtpExtension::kVideoRotationUri, test::kVideoRotationExtensionId));
+    }
+
+    void OnFrameGeneratorCapturerCreated(
+        test::FrameGeneratorCapturer* frame_generator_capturer) override {
+      frame_generator_capturer->SetFakeRotation(kVideoRotation_90);
+    }
+
+    void PerformTest() override {
+      EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
+    }
+  } test;
+
+  RunBaseTest(&test);
+}
+
 class FakeReceiveStatistics : public NullReceiveStatistics {
  public:
   FakeReceiveStatistics(uint32_t send_ssrc,
@@ -1656,7 +1697,8 @@
 
   video_send_stream_->Start();
   test::FrameForwarder forwarder;
-  video_send_stream_->SetSource(&forwarder);
+  video_send_stream_->SetSource(
+      &forwarder, VideoSendStream::DegradationPreference::kBalanced);
   for (size_t i = 0; i < input_frames.size(); i++) {
     forwarder.IncomingCapturedFrame(input_frames[i]);
     // Wait until the output frame is received before sending the next input
@@ -1664,7 +1706,8 @@
     observer.WaitOutputFrame();
   }
   video_send_stream_->Stop();
-  video_send_stream_->SetSource(nullptr);
+  video_send_stream_->SetSource(
+      nullptr, VideoSendStream::DegradationPreference::kBalanced);
 
   // Test if the input and output frames are the same. render_time_ms and
   // timestamp are not compared because capturer sets those values.
@@ -2879,4 +2922,37 @@
 }
 #endif  // !defined(RTC_DISABLE_VP9)
 
+void VideoSendStreamTest::TestRequestSourceRotateVideo(
+    bool support_orientation_ext) {
+  CreateSenderCall(Call::Config(&event_log_));
+
+  test::NullTransport transport;
+  CreateSendConfig(1, 0, &transport);
+  video_send_config_.rtp.extensions.clear();
+  if (support_orientation_ext) {
+    video_send_config_.rtp.extensions.push_back(
+        RtpExtension(RtpExtension::kVideoRotationUri, 1));
+  }
+
+  CreateVideoStreams();
+  test::FrameForwarder forwarder;
+  video_send_stream_->SetSource(
+      &forwarder, VideoSendStream::DegradationPreference::kBalanced);
+
+  EXPECT_TRUE(forwarder.sink_wants().rotation_applied !=
+              support_orientation_ext);
+
+  DestroyStreams();
+}
+
+TEST_F(VideoSendStreamTest,
+       RequestSourceRotateIfVideoOrientationExtensionNotSupported) {
+  TestRequestSourceRotateVideo(false);
+}
+
+TEST_F(VideoSendStreamTest,
+       DoNotRequestsRotationIfVideoOrientationExtensionSupported) {
+  TestRequestSourceRotateVideo(true);
+}
+
 }  // namespace webrtc
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index e1cd40d..7cc799a 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -226,6 +226,8 @@
   bool Run() override {
     RTC_DCHECK_RUN_ON(&vie_encoder_->encoder_queue_);
     RTC_DCHECK_GT(vie_encoder_->posted_frames_waiting_for_encode_.Value(), 0);
+    vie_encoder_->stats_proxy_->OnIncomingFrame(frame_.width(),
+                                                frame_.height());
     ++vie_encoder_->captured_frame_count_;
     if (--vie_encoder_->posted_frames_waiting_for_encode_ == 0) {
       vie_encoder_->EncodeVideoFrame(frame_, time_when_posted_ms_);
@@ -253,21 +255,30 @@
 };
 
 // VideoSourceProxy is responsible ensuring thread safety between calls to
-// ViEEncoder::SetSource that will happen on libjingles worker thread when a
+// ViEEncoder::SetSource that will happen on libjingle's worker thread when a
 // video capturer is connected to the encoder and the encoder task queue
 // (encoder_queue_) where the encoder reports its VideoSinkWants.
 class ViEEncoder::VideoSourceProxy {
  public:
   explicit VideoSourceProxy(ViEEncoder* vie_encoder)
-      : vie_encoder_(vie_encoder), source_(nullptr) {}
+      : vie_encoder_(vie_encoder),
+        degradation_preference_(
+            VideoSendStream::DegradationPreference::kMaintainResolution),
+        source_(nullptr) {}
 
-  void SetSource(rtc::VideoSourceInterface<VideoFrame>* source) {
+  void SetSource(
+      rtc::VideoSourceInterface<VideoFrame>* source,
+      const VideoSendStream::DegradationPreference& degradation_preference) {
+    // Called on libjingle's worker thread.
     RTC_DCHECK_CALLED_SEQUENTIALLY(&main_checker_);
     rtc::VideoSourceInterface<VideoFrame>* old_source = nullptr;
+    rtc::VideoSinkWants wants;
     {
       rtc::CritScope lock(&crit_);
       old_source = source_;
       source_ = source;
+      degradation_preference_ = degradation_preference;
+      wants = current_wants();
     }
 
     if (old_source != source && old_source != nullptr) {
@@ -278,16 +289,74 @@
       return;
     }
 
-    // TODO(perkj): Let VideoSourceProxy implement LoadObserver and truly send
-    // CPU load as sink wants.
-    rtc::VideoSinkWants wants;
     source->AddOrUpdateSink(vie_encoder_, wants);
   }
 
+  void SetWantsRotationApplied(bool rotation_applied) {
+    rtc::CritScope lock(&crit_);
+    sink_wants_.rotation_applied = rotation_applied;
+    disabled_scaling_sink_wants_.rotation_applied = rotation_applied;
+    if (source_) {
+      source_->AddOrUpdateSink(vie_encoder_, current_wants());
+    }
+  }
+
+  void RequestResolutionLowerThan(int pixel_count) {
+    // Called on the encoder task queue.
+    rtc::CritScope lock(&crit_);
+    if (!IsResolutionScalingEnabledLocked()) {
+      // This can happen since |degradation_preference_| is set on
+      // libjingle's worker thread but the adaptation is done on the encoder
+      // task queue.
+      return;
+    }
+    // The input video frame size will have a resolution less than or
+    // equal to |max_pixel_count| depending on how the source can scale the
+    // input frame size.
+    sink_wants_.max_pixel_count = rtc::Optional<int>((pixel_count * 3) / 5);
+    sink_wants_.max_pixel_count_step_up = rtc::Optional<int>();
+    if (source_)
+      source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
+  }
+
+  void RequestHigherResolutionThan(int pixel_count) {
+    rtc::CritScope lock(&crit_);
+    if (!IsResolutionScalingEnabledLocked()) {
+      // This can happen since |degradation_preference_| is set on
+      // libjingle's worker thread but the adaptation is done on the encoder
+      // task queue, so the preference may not yet have taken effect when an
+      // adaptation request arrives.
+      return;
+    }
+    // The input video frame size will have a resolution "one step up" from
+    // |max_pixel_count_step_up|, where "one step up" depends on
+    // how the source can scale the input frame size.
+    sink_wants_.max_pixel_count = rtc::Optional<int>();
+    sink_wants_.max_pixel_count_step_up = rtc::Optional<int>(pixel_count);
+    if (source_)
+      source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
+  }
+
  private:
+  bool IsResolutionScalingEnabledLocked() const
+      EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+    return degradation_preference_ !=
+           VideoSendStream::DegradationPreference::kMaintainResolution;
+  }
+
+  const rtc::VideoSinkWants& current_wants() const
+      EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+    return IsResolutionScalingEnabledLocked() ? sink_wants_
+                                              : disabled_scaling_sink_wants_;
+  }
+
   rtc::CriticalSection crit_;
   rtc::SequencedTaskChecker main_checker_;
-  ViEEncoder* vie_encoder_;
+  ViEEncoder* const vie_encoder_;
+  rtc::VideoSinkWants sink_wants_ GUARDED_BY(&crit_);
+  rtc::VideoSinkWants disabled_scaling_sink_wants_ GUARDED_BY(&crit_);
+  VideoSendStream::DegradationPreference degradation_preference_
+      GUARDED_BY(&crit_);
   rtc::VideoSourceInterface<VideoFrame>* source_ GUARDED_BY(&crit_);
 
   RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
@@ -297,7 +366,6 @@
                        SendStatisticsProxy* stats_proxy,
                        const VideoSendStream::Config::EncoderSettings& settings,
                        rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
-                       LoadObserver* overuse_callback,
                        EncodedFrameObserver* encoder_timing)
     : shutdown_event_(true /* manual_reset */, false),
       number_of_cores_(number_of_cores),
@@ -311,7 +379,6 @@
                         this,
                         encoder_timing,
                         stats_proxy),
-      load_observer_(overuse_callback),
       stats_proxy_(stats_proxy),
       pre_encode_callback_(pre_encode_callback),
       module_process_thread_(nullptr),
@@ -325,6 +392,11 @@
       has_received_rpsi_(false),
       picture_id_rpsi_(0),
       clock_(Clock::GetRealTimeClock()),
+      degradation_preference_(
+          VideoSendStream::DegradationPreference::kBalanced),
+      cpu_restricted_counter_(0),
+      last_frame_width_(0),
+      last_frame_height_(0),
       last_captured_timestamp_(0),
       delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
                              clock_->TimeInMilliseconds()),
@@ -332,11 +404,11 @@
       captured_frame_count_(0),
       dropped_frame_count_(0),
       encoder_queue_("EncoderQueue") {
-  encoder_queue_.PostTask([this, encoder_timing] {
+  encoder_queue_.PostTask([this] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
+    overuse_detector_.StartCheckForOveruse();
     video_sender_.RegisterExternalEncoder(
         settings_.encoder, settings_.payload_type, settings_.internal_source);
-    overuse_detector_.StartCheckForOveruse();
   });
 }
 
@@ -348,12 +420,12 @@
 
 void ViEEncoder::Stop() {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  source_proxy_->SetSource(nullptr);
+  source_proxy_->SetSource(nullptr, VideoSendStream::DegradationPreference());
   encoder_queue_.PostTask([this] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
+    overuse_detector_.StopCheckForOveruse();
     video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type,
                                           false);
-    overuse_detector_.StopCheckForOveruse();
     shutdown_event_.Set();
   });
 
@@ -373,12 +445,29 @@
   module_process_thread_->DeRegisterModule(&video_sender_);
 }
 
-void ViEEncoder::SetSource(rtc::VideoSourceInterface<VideoFrame>* source) {
+void ViEEncoder::SetSource(
+    rtc::VideoSourceInterface<VideoFrame>* source,
+    const VideoSendStream::DegradationPreference& degradation_preference) {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  source_proxy_->SetSource(source);
+  source_proxy_->SetSource(source, degradation_preference);
+  encoder_queue_.PostTask([this, degradation_preference] {
+    RTC_DCHECK_RUN_ON(&encoder_queue_);
+    degradation_preference_ = degradation_preference;
+    // Set the stats for if we are currently CPU restricted. We are CPU
+    // restricted depending on degradation preference and
+    // if the overuse detector has currently detected overuse which is counted
+    // in |cpu_restricted_counter_|.
+    // We do this on the encoder task queue to avoid a race with the stats set
+    // in ViEEncoder::NormalUsage and ViEEncoder::OveruseDetected.
+    stats_proxy_->SetCpuRestrictedResolution(
+        degradation_preference_ !=
+            VideoSendStream::DegradationPreference::kMaintainResolution &&
+        cpu_restricted_counter_ != 0);
+  });
 }
 
-void ViEEncoder::SetSink(EncoderSink* sink) {
+void ViEEncoder::SetSink(EncoderSink* sink, bool rotation_applied) {
+  source_proxy_->SetWantsRotationApplied(rotation_applied);
   encoder_queue_.PostTask([this, sink] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
     sink_ = sink;
@@ -462,9 +551,6 @@
 
 void ViEEncoder::OnFrame(const VideoFrame& video_frame) {
   RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
-  if (stats_proxy_) {
-    stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());
-  }
   VideoFrame incoming_frame = video_frame;
 
   // Local time in webrtc time base.
@@ -565,6 +651,9 @@
   }
   TraceFrameDropEnd();
 
+  last_frame_height_ = video_frame.height();
+  last_frame_width_ = video_frame.width();
+
   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
                           "Encode");
 
@@ -616,10 +705,12 @@
 
   int64_t time_sent = clock_->TimeInMilliseconds();
   uint32_t timestamp = encoded_image._timeStamp;
+
   encoder_queue_.PostTask([this, timestamp, time_sent] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
     overuse_detector_.FrameSent(timestamp, time_sent);
   });
+
   return result;
 }
 
@@ -697,17 +788,46 @@
 
 void ViEEncoder::OveruseDetected() {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
-  // TODO(perkj): When ViEEncoder inherit rtc::VideoSink instead of
-  // VideoCaptureInput |load_observer_| should be removed and overuse be
-  // expressed as rtc::VideoSinkWants instead.
-  if (load_observer_)
-    load_observer_->OnLoadUpdate(LoadObserver::kOveruse);
+  if (degradation_preference_ ==
+          VideoSendStream::DegradationPreference::kMaintainResolution ||
+      cpu_restricted_counter_ >= kMaxCpuDowngrades) {
+    return;
+  }
+  LOG(LS_INFO) << "CPU overuse detected. Requesting lower resolution.";
+  // Request lower resolution if the current resolution is lower than last time
+  // we asked for the resolution to be lowered.
+  // Update stats accordingly.
+  int current_pixel_count = last_frame_height_ * last_frame_width_;
+  if (!max_pixel_count_ || current_pixel_count < *max_pixel_count_) {
+    max_pixel_count_ = rtc::Optional<int>(current_pixel_count);
+    max_pixel_count_step_up_ = rtc::Optional<int>();
+    stats_proxy_->OnCpuRestrictedResolutionChanged(true);
+    ++cpu_restricted_counter_;
+    source_proxy_->RequestResolutionLowerThan(current_pixel_count);
+  }
 }
 
 void ViEEncoder::NormalUsage() {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
-  if (load_observer_)
-    load_observer_->OnLoadUpdate(LoadObserver::kUnderuse);
+  if (degradation_preference_ ==
+          VideoSendStream::DegradationPreference::kMaintainResolution ||
+      cpu_restricted_counter_ == 0) {
+    return;
+  }
+
+  LOG(LS_INFO) << "CPU underuse detected. Requesting higher resolution.";
+  int current_pixel_count = last_frame_height_ * last_frame_width_;
+  // Request higher resolution if we are CPU restricted and the current
+  // resolution is higher than last time we requested higher resolution.
+  // Update stats accordingly.
+  if (!max_pixel_count_step_up_ ||
+      current_pixel_count > *max_pixel_count_step_up_) {
+    max_pixel_count_ = rtc::Optional<int>();
+    max_pixel_count_step_up_ = rtc::Optional<int>(current_pixel_count);
+    --cpu_restricted_counter_;
+    stats_proxy_->OnCpuRestrictedResolutionChanged(cpu_restricted_counter_ > 0);
+    source_proxy_->RequestHigherResolutionThan(current_pixel_count);
+  }
 }
 
 }  // namespace webrtc
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
index 33caec4..b89f66e 100644
--- a/webrtc/video/vie_encoder.h
+++ b/webrtc/video/vie_encoder.h
@@ -60,11 +60,13 @@
         int min_transmit_bitrate_bps) = 0;
   };
 
+  // Downgrade resolution at most 2 times for CPU reasons.
+  static const int kMaxCpuDowngrades = 2;
+
   ViEEncoder(uint32_t number_of_cores,
              SendStatisticsProxy* stats_proxy,
-             const webrtc::VideoSendStream::Config::EncoderSettings& settings,
+             const VideoSendStream::Config::EncoderSettings& settings,
              rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
-             LoadObserver* overuse_callback,
              EncodedFrameObserver* encoder_timing);
   ~ViEEncoder();
   // RegisterProcessThread register |module_process_thread| with those objects
@@ -74,8 +76,17 @@
   void RegisterProcessThread(ProcessThread* module_process_thread);
   void DeRegisterProcessThread();
 
-  void SetSource(rtc::VideoSourceInterface<VideoFrame>* source);
-  void SetSink(EncoderSink* sink);
+  // Sets the source that will provide I420 video frames.
+  // |degradation_preference| controls whether or not resolution or frame rate
+  // may be reduced.
+  void SetSource(
+      rtc::VideoSourceInterface<VideoFrame>* source,
+      const VideoSendStream::DegradationPreference& degradation_preference);
+
+  // Sets the |sink| that gets the encoded frames. |rotation_applied| means
+  // that the source must support rotation. Only set |rotation_applied| if the
+  // remote side does not support the rotation extension.
+  void SetSink(EncoderSink* sink, bool rotation_applied);
 
   // TODO(perkj): Can we remove VideoCodec.startBitrate ?
   void SetStartBitrate(int start_bitrate_bps);
@@ -98,6 +109,16 @@
                         uint8_t fraction_lost,
                         int64_t round_trip_time_ms);
 
+ protected:
+  // Used for testing. For example the |CpuOveruseObserver| methods must be
+  // called on |encoder_queue_|.
+  rtc::TaskQueue* encoder_queue() { return &encoder_queue_; }
+
+  // webrtc::CpuOveruseObserver implementation.
+  // These methods are protected for easier testing.
+  void OveruseDetected() override;
+  void NormalUsage() override;
+
  private:
   class ConfigureEncoderTask;
   class EncodeTask;
@@ -138,10 +159,6 @@
       const CodecSpecificInfo* codec_specific_info,
       const RTPFragmentationHeader* fragmentation) override;
 
-  // webrtc::CpuOveruseObserver implementation.
-  void OveruseDetected() override;
-  void NormalUsage() override;
-
   bool EncoderPaused() const;
   void TraceFrameDropStart();
   void TraceFrameDropEnd();
@@ -156,9 +173,7 @@
   const VideoCodecType codec_type_;
 
   vcm::VideoSender video_sender_ ACCESS_ON(&encoder_queue_);
-
   OveruseFrameDetector overuse_detector_ ACCESS_ON(&encoder_queue_);
-  LoadObserver* const load_observer_ ACCESS_ON(&encoder_queue_);
 
   SendStatisticsProxy* const stats_proxy_;
   rtc::VideoSinkInterface<VideoFrame>* const pre_encode_callback_;
@@ -189,12 +204,26 @@
   uint64_t picture_id_rpsi_ ACCESS_ON(&encoder_queue_);
   Clock* const clock_;
 
-  rtc::RaceChecker incoming_frame_race_checker_;
+  VideoSendStream::DegradationPreference degradation_preference_
+      ACCESS_ON(&encoder_queue_);
+  // Counter used for deciding if the video resolution is currently
+  // restricted by CPU usage.
+  int cpu_restricted_counter_ ACCESS_ON(&encoder_queue_);
+
+  int last_frame_width_ ACCESS_ON(&encoder_queue_);
+  int last_frame_height_ ACCESS_ON(&encoder_queue_);
+  // Pixel count last time the resolution was requested to be changed down.
+  rtc::Optional<int> max_pixel_count_ ACCESS_ON(&encoder_queue_);
+  // Pixel count last time the resolution was requested to be changed up.
+  rtc::Optional<int> max_pixel_count_step_up_ ACCESS_ON(&encoder_queue_);
+
+  rtc::RaceChecker incoming_frame_race_checker_
+      GUARDED_BY(incoming_frame_race_checker_);
   Atomic32 posted_frames_waiting_for_encode_;
   // Used to make sure incoming time stamp is increasing for every frame.
   int64_t last_captured_timestamp_ GUARDED_BY(incoming_frame_race_checker_);
   // Delta used for translating between NTP and internal timestamps.
-  const int64_t delta_ntp_internal_ms_;
+  const int64_t delta_ntp_internal_ms_ GUARDED_BY(incoming_frame_race_checker_);
 
   int64_t last_frame_log_ms_ GUARDED_BY(incoming_frame_race_checker_);
   int captured_frame_count_ ACCESS_ON(&encoder_queue_);
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
index 78eb62f..8951097 100644
--- a/webrtc/video/vie_encoder_unittest.cc
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -8,9 +8,11 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#include <limits>
 #include <utility>
 
 #include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/metrics_default.h"
 #include "webrtc/test/encoder_settings.h"
 #include "webrtc/test/fake_encoder.h"
 #include "webrtc/test/frame_generator.h"
@@ -20,6 +22,53 @@
 
 namespace webrtc {
 
+namespace {
+class TestBuffer : public webrtc::I420Buffer {
+ public:
+  TestBuffer(rtc::Event* event, int width, int height)
+      : I420Buffer(width, height), event_(event) {}
+
+ private:
+  friend class rtc::RefCountedObject<TestBuffer>;
+  ~TestBuffer() override {
+    if (event_)
+      event_->Set();
+  }
+  rtc::Event* const event_;
+};
+
+class ViEEncoderUnderTest : public ViEEncoder {
+ public:
+  ViEEncoderUnderTest(
+      SendStatisticsProxy* stats_proxy,
+      const webrtc::VideoSendStream::Config::EncoderSettings& settings)
+      : ViEEncoder(1 /* number_of_cores */,
+                   stats_proxy,
+                   settings,
+                   nullptr /* pre_encode_callback */,
+                   nullptr /* encoder_timing */) {}
+
+  void TriggerCpuOveruse() {
+    rtc::Event event(false, false);
+    encoder_queue()->PostTask([this, &event] {
+      OveruseDetected();
+      event.Set();
+    });
+    event.Wait(rtc::Event::kForever);
+  }
+
+  void TriggerCpuNormalUsage() {
+    rtc::Event event(false, false);
+    encoder_queue()->PostTask([this, &event] {
+      NormalUsage();
+      event.Set();
+    });
+    event.Wait(rtc::Event::kForever);
+  }
+};
+
+}  // namespace
+
 class ViEEncoderTest : public ::testing::Test {
  public:
   static const int kDefaultTimeoutMs = 30 * 1000;
@@ -29,12 +78,14 @@
         codec_width_(320),
         codec_height_(240),
         fake_encoder_(),
-        stats_proxy_(Clock::GetRealTimeClock(),
-                     video_send_config_,
-                     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo),
+        stats_proxy_(new SendStatisticsProxy(
+            Clock::GetRealTimeClock(),
+            video_send_config_,
+            webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo)),
         sink_(&fake_encoder_) {}
 
   void SetUp() override {
+    metrics::Reset();
     video_send_config_ = VideoSendStream::Config(nullptr);
     video_send_config_.encoder_settings.encoder = &fake_encoder_;
     video_send_config_.encoder_settings.payload_name = "FAKE";
@@ -42,31 +93,16 @@
 
     VideoEncoderConfig video_encoder_config;
     test::FillEncoderConfiguration(1, &video_encoder_config);
-    vie_encoder_.reset(new ViEEncoder(
-        1 /* number_of_cores */, &stats_proxy_,
-        video_send_config_.encoder_settings, nullptr /* pre_encode_callback */,
-        nullptr /* overuse_callback */, nullptr /* encoder_timing */));
-    vie_encoder_->SetSink(&sink_);
-    vie_encoder_->SetSource(&video_source_);
+    vie_encoder_.reset(new ViEEncoderUnderTest(
+        stats_proxy_.get(), video_send_config_.encoder_settings));
+    vie_encoder_->SetSink(&sink_, false /* rotation_applied */);
+    vie_encoder_->SetSource(&video_source_,
+                            VideoSendStream::DegradationPreference::kBalanced);
     vie_encoder_->SetStartBitrate(10000);
     vie_encoder_->ConfigureEncoder(std::move(video_encoder_config), 1440);
   }
 
   VideoFrame CreateFrame(int64_t ntp_ts, rtc::Event* destruction_event) const {
-    class TestBuffer : public webrtc::I420Buffer {
-     public:
-      TestBuffer(rtc::Event* event, int width, int height)
-          : I420Buffer(width, height), event_(event) {}
-
-     private:
-      friend class rtc::RefCountedObject<TestBuffer>;
-      ~TestBuffer() override {
-        if (event_)
-          event_->Set();
-      }
-      rtc::Event* const event_;
-    };
-
     VideoFrame frame(new rtc::RefCountedObject<TestBuffer>(
                          destruction_event, codec_width_, codec_height_),
                      99, 99, kVideoRotation_0);
@@ -74,6 +110,14 @@
     return frame;
   }
 
+  VideoFrame CreateFrame(int64_t ntp_ts, int width, int height) const {
+    VideoFrame frame(
+        new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99,
+        kVideoRotation_0);
+    frame.set_ntp_time_ms(ntp_ts);
+    return frame;
+  }
+
   class TestEncoder : public test::FakeEncoder {
    public:
     TestEncoder()
@@ -112,6 +156,8 @@
 
         timestamp_ = input_image.timestamp();
         ntp_time_ms_ = input_image.ntp_time_ms();
+        last_input_width_ = input_image.width();
+        last_input_height_ = input_image.height();
         block_encode = block_next_encode_;
         block_next_encode_ = false;
       }
@@ -122,13 +168,13 @@
       return result;
     }
 
-
-
     rtc::CriticalSection crit_;
     bool block_next_encode_ = false;
     rtc::Event continue_encode_event_;
     uint32_t timestamp_ = 0;
     int64_t ntp_time_ms_ = 0;
+    int last_input_width_ = 0;
+    int last_input_height_ = 0;
   };
 
   class TestSink : public ViEEncoder::EncoderSink {
@@ -192,10 +238,10 @@
   int codec_width_;
   int codec_height_;
   TestEncoder fake_encoder_;
-  SendStatisticsProxy stats_proxy_;
+  std::unique_ptr<SendStatisticsProxy> stats_proxy_;
   TestSink sink_;
   test::FrameForwarder video_source_;
-  std::unique_ptr<ViEEncoder> vie_encoder_;
+  std::unique_ptr<ViEEncoderUnderTest> vie_encoder_;
 };
 
 TEST_F(ViEEncoderTest, EncodeOneFrame) {
@@ -334,4 +380,249 @@
   vie_encoder_->Stop();
 }
 
+TEST_F(ViEEncoderTest, SwitchSourceDeregisterEncoderAsSink) {
+  EXPECT_TRUE(video_source_.has_sinks());
+  test::FrameForwarder new_video_source;
+  vie_encoder_->SetSource(&new_video_source,
+                          VideoSendStream::DegradationPreference::kBalanced);
+  EXPECT_FALSE(video_source_.has_sinks());
+  EXPECT_TRUE(new_video_source.has_sinks());
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, SinkWantsRotationApplied) {
+  EXPECT_FALSE(video_source_.sink_wants().rotation_applied);
+  vie_encoder_->SetSink(&sink_, true /*rotation_applied*/);
+  EXPECT_TRUE(video_source_.sink_wants().rotation_applied);
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
+
+  int frame_width = 1280;
+  int frame_height = 720;
+
+  // Trigger CPU overuse kMaxCpuDowngrades times. Every time, ViEEncoder should
+  // request lower resolution.
+  for (int i = 1; i <= ViEEncoder::kMaxCpuDowngrades; ++i) {
+    video_source_.IncomingCapturedFrame(
+        CreateFrame(i, frame_width, frame_height));
+    sink_.WaitForEncodedFrame(i);
+
+    vie_encoder_->TriggerCpuOveruse();
+
+    EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
+                  std::numeric_limits<int>::max()),
+              frame_width * frame_height);
+    EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
+
+    frame_width /= 2;
+    frame_height /= 2;
+  }
+
+  // Trigger CPU overuse one more time. This should not trigger a request for
+  // lower resolution.
+  rtc::VideoSinkWants current_wants = video_source_.sink_wants();
+  video_source_.IncomingCapturedFrame(CreateFrame(
+      ViEEncoder::kMaxCpuDowngrades + 1, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuDowngrades + 1);
+  vie_encoder_->TriggerCpuOveruse();
+  EXPECT_EQ(video_source_.sink_wants().max_pixel_count,
+            current_wants.max_pixel_count);
+  EXPECT_EQ(video_source_.sink_wants().max_pixel_count_step_up,
+            current_wants.max_pixel_count_step_up);
+
+  // Trigger CPU normal use.
+  vie_encoder_->TriggerCpuNormalUsage();
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
+  EXPECT_EQ(video_source_.sink_wants().max_pixel_count_step_up.value_or(0),
+            frame_width * frame_height);
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest,
+       ResolutionSinkWantsResetOnSetSourceWithDisabledResolutionScaling) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
+
+  int frame_width = 1280;
+  int frame_height = 720;
+
+  // Trigger CPU overuse.
+  vie_encoder_->TriggerCpuOveruse();
+
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(1, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(1);
+  EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
+                std::numeric_limits<int>::max()),
+            frame_width * frame_height);
+  EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
+
+  // Set new source.
+  test::FrameForwarder new_video_source;
+  vie_encoder_->SetSource(
+      &new_video_source,
+      VideoSendStream::DegradationPreference::kMaintainResolution);
+
+  EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
+  EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
+
+  new_video_source.IncomingCapturedFrame(
+      CreateFrame(2, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(2);
+  EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
+  EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
+
+  // Calling SetSource with resolution scaling enabled applies the old SinkWants.
+  vie_encoder_->SetSource(&new_video_source,
+                          VideoSendStream::DegradationPreference::kBalanced);
+  EXPECT_LT(new_video_source.sink_wants().max_pixel_count.value_or(
+                std::numeric_limits<int>::max()),
+            frame_width * frame_height);
+  EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, StatsTracksAdaptationStats) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  int frame_width = 1280;
+  int frame_height = 720;
+
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(1, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(1);
+  VideoSendStream::Stats stats = stats_proxy_->GetStats();
+  EXPECT_FALSE(stats.cpu_limited_resolution);
+  EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
+
+  // Trigger CPU overuse.
+  vie_encoder_->TriggerCpuOveruse();
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(2, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(2);
+
+  stats = stats_proxy_->GetStats();
+  EXPECT_TRUE(stats.cpu_limited_resolution);
+  EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
+
+  // Trigger CPU normal use.
+  vie_encoder_->TriggerCpuNormalUsage();
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(3, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(3);
+
+  stats = stats_proxy_->GetStats();
+  EXPECT_FALSE(stats.cpu_limited_resolution);
+  EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  // Trigger CPU overuse.
+  vie_encoder_->TriggerCpuOveruse();
+  int frame_width = 1280;
+  int frame_height = 720;
+
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(1, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(1);
+
+  VideoSendStream::Stats stats = stats_proxy_->GetStats();
+  EXPECT_TRUE(stats.cpu_limited_resolution);
+  EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
+
+  // Set new source with adaptation still enabled.
+  test::FrameForwarder new_video_source;
+  vie_encoder_->SetSource(&new_video_source,
+                          VideoSendStream::DegradationPreference::kBalanced);
+
+  new_video_source.IncomingCapturedFrame(
+      CreateFrame(2, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(2);
+  stats = stats_proxy_->GetStats();
+  EXPECT_TRUE(stats.cpu_limited_resolution);
+  EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
+
+  // Set adaptation disabled.
+  vie_encoder_->SetSource(
+      &new_video_source,
+      VideoSendStream::DegradationPreference::kMaintainResolution);
+  new_video_source.IncomingCapturedFrame(
+      CreateFrame(3, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(3);
+  stats = stats_proxy_->GetStats();
+  EXPECT_FALSE(stats.cpu_limited_resolution);
+  EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
+
+  // Switch back the source with adaptation enabled.
+  vie_encoder_->SetSource(&video_source_,
+                          VideoSendStream::DegradationPreference::kBalanced);
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(4, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(4);
+  stats = stats_proxy_->GetStats();
+  EXPECT_TRUE(stats.cpu_limited_resolution);
+  EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
+
+  // Trigger CPU normal usage.
+  vie_encoder_->TriggerCpuNormalUsage();
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(5, frame_width, frame_height));
+  sink_.WaitForEncodedFrame(5);
+  stats = stats_proxy_->GetStats();
+  EXPECT_FALSE(stats.cpu_limited_resolution);
+  EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
+
+  vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, UMACpuLimitedResolutionInPercent) {
+  const int kTargetBitrateBps = 100000;
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  int frame_width = 640;
+  int frame_height = 360;
+
+  for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
+    video_source_.IncomingCapturedFrame(
+        CreateFrame(i, frame_width, frame_height));
+    sink_.WaitForEncodedFrame(i);
+  }
+
+  vie_encoder_->TriggerCpuOveruse();
+  for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
+    video_source_.IncomingCapturedFrame(
+        CreateFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i,
+                    frame_width, frame_height));
+    sink_.WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples +
+                              i);
+  }
+
+  vie_encoder_->Stop();
+
+  stats_proxy_.reset();
+  EXPECT_EQ(1,
+            metrics::NumSamples("WebRTC.Video.CpuLimitedResolutionInPercent"));
+  EXPECT_EQ(
+      1, metrics::NumEvents("WebRTC.Video.CpuLimitedResolutionInPercent", 50));
+}
+
 }  // namespace webrtc
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index 4ca9fa7..4e727e5 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -27,7 +27,6 @@
 
 namespace webrtc {
 
-class LoadObserver;
 class VideoEncoder;
 
 class VideoSendStream {
@@ -68,6 +67,10 @@
     int preferred_media_bitrate_bps = 0;
     bool suspended = false;
     bool bw_limited_resolution = false;
+    bool cpu_limited_resolution = false;
+    // Total number of times resolution has been requested to be changed due to
+    // CPU adaptation.
+    int number_of_cpu_adapt_changes = 0;
     std::map<uint32_t, StreamStats> substreams;
   };
 
@@ -152,10 +155,6 @@
     // Transport for outgoing packets.
     Transport* send_transport = nullptr;
 
-    // Callback for overuse and normal usage based on the jitter of incoming
-    // captured frames. 'nullptr' disables the callback.
-    LoadObserver* overuse_callback = nullptr;
-
     // Called for each I420 frame before encoding the frame. Can be used for
     // effects, snapshots etc. 'nullptr' disables the callback.
     rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback = nullptr;
@@ -193,8 +192,17 @@
   // When a stream is stopped, it can't receive, process or deliver packets.
   virtual void Stop() = 0;
 
+  // Based on the spec in
+  // https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference.
+  enum class DegradationPreference {
+    kMaintainResolution,
+    // TODO(perkj): Implement kMaintainFrameRate. kBalanced will drop frames
+    // if the encoder overshoots or the encoder cannot encode fast enough.
+    kBalanced,
+  };
   virtual void SetSource(
-      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
+      rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
+      const DegradationPreference& degradation_preference) = 0;
 
   // Set which streams to send. Must have at least as many SSRCs as configured
   // in the config. Encoder settings are passed on to the encoder instance along