Set overuse detector max frame interval based on target frame rate.
Currently, the estimated captured frame interval is hard-capped at 45ms.
Since the encoder utilization is calculated as
(encode time) / (captured frame interval), overuse signals can be
triggered even though there is plenty of headroom when the frame rate
is low. However, to avoid falsely estimating low encode usage when the
capturer has a dynamic frame rate, the cap is kept but is now based on
the actual current max framerate.
BUG=webrtc:4172
Review-Url: https://codereview.webrtc.org/2918143003
Cr-Commit-Position: refs/heads/master@{#18610}
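
For illustration only (not part of this CL): a minimal sketch of how the capped
frame interval feeds the encode usage estimate. The constant names mirror those
added in overuse_frame_detector.cc, but the exponential filters are simplified
to plain values, so the numbers are only indicative.

#include <algorithm>

// Constants mirroring those added in overuse_frame_detector.cc.
constexpr float kMaxSampleDiffMarginFactor = 1.35f;
constexpr int kMinFramerate = 7;
constexpr int kMaxFramerate = 30;

// Capped frame interval for a given target frame rate,
// e.g. 30 fps -> (1000 / 30) * 1.35 = 45 ms.
float MaxSampleDiffMs(int target_fps) {
  int fps = std::max(kMinFramerate, std::min(kMaxFramerate, target_fps));
  return (1000.0f / fps) * kMaxSampleDiffMarginFactor;
}

// Encode usage in percent: (encode time) / (capped frame interval).
// The real detector feeds exponentially filtered averages into this ratio.
int EncodeUsagePercent(float encode_time_ms,
                       float frame_interval_ms,
                       int target_fps) {
  float capped_interval_ms =
      std::min(frame_interval_ms, MaxSampleDiffMs(target_fps));
  return static_cast<int>(100.0f * encode_time_ms / capped_interval_ms + 0.5f);
}

With a 20 fps target, for example, the interval is capped at roughly 67 ms
instead of 45 ms, so a capturer legitimately running at 20 fps no longer looks
permanently overused.
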
diff --git a/webrtc/video/encoder_rtcp_feedback_unittest.cc b/webrtc/video/encoder_rtcp_feedback_unittest.cc
index 1606a59..12c0951 100644
--- a/webrtc/video/encoder_rtcp_feedback_unittest.cc
+++ b/webrtc/video/encoder_rtcp_feedback_unittest.cc
@@ -10,6 +10,8 @@
#include "webrtc/video/encoder_rtcp_feedback.h"
+#include <memory>
+
#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/test/gmock.h"
#include "webrtc/test/gtest.h"
@@ -27,7 +29,8 @@
send_stats_proxy,
VideoSendStream::Config::EncoderSettings("fake", 0, nullptr),
nullptr,
- nullptr) {}
+ nullptr,
+ std::unique_ptr<OveruseFrameDetector>()) {}
~MockVieEncoder() { Stop(); }
MOCK_METHOD1(OnReceivedIntraFrameRequest, void(size_t));
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
index 9f65057..9711ddb 100644
--- a/webrtc/video/overuse_frame_detector.cc
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -49,8 +49,20 @@
const int kMaxOverusesBeforeApplyRampupDelay = 4;
// The maximum exponent to use in VCMExpFilter.
-const float kSampleDiffMs = 33.0f;
const float kMaxExp = 7.0f;
+// Default value used before first reconfiguration.
+const int kDefaultFrameRate = 30;
+// Default sample diff, i.e. the frame interval at the default frame rate.
+const float kDefaultSampleDiffMs = 1000.0f / kDefaultFrameRate;
+// A factor applied to the sample diff on OnTargetFramerateUpdated to determine
+// a max limit for the sample diff. For instance, with a framerate of 30fps,
+// the sample diff is capped to (1000 / 30) * 1.35 = 45ms. This prevents
+// triggering too soon if there are individual very large outliers.
+const float kMaxSampleDiffMarginFactor = 1.35f;
+// Minimum framerate allowed for usage calculation. This prevents crazy long
+// encode times from being accepted if the frame rate happens to be low.
+const int kMinFramerate = 7;
+const int kMaxFramerate = 30;
const auto kScaleReasonCpu = AdaptationObserverInterface::AdaptReason::kCpu;
} // namespace
@@ -113,9 +125,9 @@
: kWeightFactorFrameDiff(0.998f),
kWeightFactorProcessing(0.995f),
kInitialSampleDiffMs(40.0f),
- kMaxSampleDiffMs(45.0f),
count_(0),
options_(options),
+ max_sample_diff_ms_(kDefaultSampleDiffMs * kMaxSampleDiffMarginFactor),
filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
Reset();
@@ -124,21 +136,24 @@
void Reset() {
count_ = 0;
+ max_sample_diff_ms_ = kDefaultSampleDiffMs * kMaxSampleDiffMarginFactor;
filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
filtered_processing_ms_->Reset(kWeightFactorProcessing);
filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
}
+ void SetMaxSampleDiffMs(float diff_ms) { max_sample_diff_ms_ = diff_ms; }
+
void AddCaptureSample(float sample_ms) {
- float exp = sample_ms / kSampleDiffMs;
+ float exp = sample_ms / kDefaultSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_frame_diff_ms_->Apply(exp, sample_ms);
}
void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
++count_;
- float exp = diff_last_sample_ms / kSampleDiffMs;
+ float exp = diff_last_sample_ms / kDefaultSampleDiffMs;
exp = std::min(exp, kMaxExp);
filtered_processing_ms_->Apply(exp, processing_ms);
}
@@ -148,7 +163,7 @@
return static_cast<int>(InitialUsageInPercent() + 0.5f);
}
float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
- frame_diff_ms = std::min(frame_diff_ms, kMaxSampleDiffMs);
+ frame_diff_ms = std::min(frame_diff_ms, max_sample_diff_ms_);
float encode_usage_percent =
100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
return static_cast<int>(encode_usage_percent + 0.5);
@@ -168,9 +183,9 @@
const float kWeightFactorFrameDiff;
const float kWeightFactorProcessing;
const float kInitialSampleDiffMs;
- const float kMaxSampleDiffMs;
uint64_t count_;
const CpuOveruseOptions options_;
+ float max_sample_diff_ms_;
std::unique_ptr<rtc::ExpFilter> filtered_processing_ms_;
std::unique_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
};
@@ -331,6 +346,7 @@
last_capture_time_us_(-1),
last_processed_capture_time_us_(-1),
num_pixels_(0),
+ max_framerate_(kDefaultFrameRate),
last_overuse_time_ms_(-1),
checks_above_threshold_(0),
num_overuse_detections_(0),
@@ -382,6 +398,8 @@
}
void OveruseFrameDetector::ResetAll(int num_pixels) {
+ // Reset state, as a result of the resolution being changed. Do not, however,
+ // change the current frame rate back to the default.
RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
num_pixels_ = num_pixels;
usage_->Reset();
@@ -390,6 +408,15 @@
last_processed_capture_time_us_ = -1;
num_process_times_ = 0;
metrics_ = rtc::Optional<CpuOveruseMetrics>();
+ OnTargetFramerateUpdated(max_framerate_);
+}
+
+void OveruseFrameDetector::OnTargetFramerateUpdated(int framerate_fps) {
+ RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
+ RTC_DCHECK_GE(framerate_fps, 0);
+ max_framerate_ = std::min(kMaxFramerate, framerate_fps);
+ usage_->SetMaxSampleDiffMs((1000 / std::max(kMinFramerate, max_framerate_)) *
+ kMaxSampleDiffMarginFactor);
}
void OveruseFrameDetector::FrameCaptured(const VideoFrame& frame,
diff --git a/webrtc/video/overuse_frame_detector.h b/webrtc/video/overuse_frame_detector.h
index 2fb3104..29efa16 100644
--- a/webrtc/video/overuse_frame_detector.h
+++ b/webrtc/video/overuse_frame_detector.h
@@ -68,7 +68,7 @@
AdaptationObserverInterface* overuse_observer,
EncodedFrameObserver* encoder_timing_,
CpuOveruseMetricsObserver* metrics_observer);
- ~OveruseFrameDetector();
+ virtual ~OveruseFrameDetector();
// Start to periodically check for overuse.
void StartCheckForOveruse();
@@ -77,6 +77,13 @@
// StartCheckForOveruse has been called.
void StopCheckForOveruse();
+ // Defines the current maximum framerate targeted by the capturer. This is
+ // used to make sure the encode usage percent doesn't drop unduly if the
+ // capturer has quiet periods (for instance caused by screen capturers with
+ // variable capture rate depending on content updates); otherwise we might
+ // experience adaptation toggling.
+ virtual void OnTargetFramerateUpdated(int framerate_fps);
+
// Called for each captured frame.
void FrameCaptured(const VideoFrame& frame, int64_t time_when_first_seen_us);
@@ -135,6 +142,7 @@
// Number of pixels of last captured frame.
int num_pixels_ GUARDED_BY(task_checker_);
+ int max_framerate_ GUARDED_BY(task_checker_);
int64_t last_overuse_time_ms_ GUARDED_BY(task_checker_);
int checks_above_threshold_ GUARDED_BY(task_checker_);
int num_overuse_detections_ GUARDED_BY(task_checker_);
diff --git a/webrtc/video/overuse_frame_detector_unittest.cc b/webrtc/video/overuse_frame_detector_unittest.cc
index bf1c8b4..ae3b687 100644
--- a/webrtc/video/overuse_frame_detector_unittest.cc
+++ b/webrtc/video/overuse_frame_detector_unittest.cc
@@ -139,6 +139,20 @@
int UsagePercent() { return metrics_.encode_usage_percent; }
+ int64_t OveruseProcessingTimeLimitForFramerate(int fps) const {
+ int64_t frame_interval = rtc::kNumMicrosecsPerSec / fps;
+ int64_t max_processing_time_us =
+ (frame_interval * options_.high_encode_usage_threshold_percent) / 100;
+ return max_processing_time_us;
+ }
+
+ int64_t UnderuseProcessingTimeLimitForFramerate(int fps) const {
+ int64_t frame_interval = rtc::kNumMicrosecsPerSec / fps;
+ int64_t max_processing_time_us =
+ (frame_interval * options_.low_encode_usage_threshold_percent) / 100;
+ return max_processing_time_us;
+ }
+
CpuOveruseOptions options_;
rtc::ScopedFakeClock clock_;
std::unique_ptr<MockCpuOveruseObserver> observer_;
@@ -355,4 +369,115 @@
EXPECT_TRUE(event.Wait(10000));
}
+TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) {
+ const int kCapturerMaxFrameRate = 30;
+ const int kEncodeMaxFrameRate = 20; // Maximum fps the encoder can sustain.
+
+ // Trigger overuse.
+ int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kCapturerMaxFrameRate;
+ // Processing time just below the overuse limit given kEncodeMaxFrameRate.
+ int64_t processing_time_us =
+ (98 * OveruseProcessingTimeLimitForFramerate(kEncodeMaxFrameRate)) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Simulate frame rate reduction and normal usage.
+ frame_interval_us = rtc::kNumMicrosecsPerSec / kEncodeMaxFrameRate;
+ overuse_detector_->OnTargetFramerateUpdated(kEncodeMaxFrameRate);
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Reduce processing time to trigger underuse.
+ processing_time_us =
+ (98 * UnderuseProcessingTimeLimitForFramerate(kEncodeMaxFrameRate)) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(1);
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+}
+
+TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) {
+ const int kMinFrameRate = 7; // Minimum fps allowed by current detector impl.
+ overuse_detector_->OnTargetFramerateUpdated(kMinFrameRate);
+
+ // Normal usage just at the limit.
+ int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kMinFrameRate;
+ // Processing time just below the overuse limit given kMinFrameRate.
+ int64_t processing_time_us =
+ (98 * OveruseProcessingTimeLimitForFramerate(kMinFrameRate)) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Over the limit to overuse.
+ processing_time_us =
+ (102 * OveruseProcessingTimeLimitForFramerate(kMinFrameRate)) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Reduce input frame rate. Should still trigger overuse.
+ overuse_detector_->OnTargetFramerateUpdated(kMinFrameRate - 1);
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, frame_interval_us, kWidth, kHeight,
+ processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+}
+
+TEST_F(OveruseFrameDetectorTest, LimitsMaxFrameInterval) {
+ const int kMaxFrameRate = 20;
+ overuse_detector_->OnTargetFramerateUpdated(kMaxFrameRate);
+ int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kMaxFrameRate;
+ // Maximum frame interval allowed is 35% above ideal.
+ int64_t max_frame_interval_us = (135 * frame_interval_us) / 100;
+ // Maximum processing time allowed, without triggering overuse, given the
+ // above frame interval.
+ int64_t max_processing_time_us =
+ (max_frame_interval_us * options_.high_encode_usage_threshold_percent) /
+ 100;
+
+ // Processing time just below overuse limit given kMaxFrameRate.
+ int64_t processing_time_us = (98 * max_processing_time_us) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth,
+ kHeight, processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Go above limit, trigger overuse.
+ processing_time_us = (102 * max_processing_time_us) / 100;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth,
+ kHeight, processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+
+ // Increase frame interval, should still trigger overuse.
+ max_frame_interval_us *= 2;
+ EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
+ for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) {
+ InsertAndSendFramesWithInterval(1200, max_frame_interval_us, kWidth,
+ kHeight, processing_time_us);
+ overuse_detector_->CheckForOveruse();
+ }
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 7bf32c6..285b7ef 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -632,9 +632,10 @@
encoder_config.content_type),
config_(std::move(config)),
content_type_(encoder_config.content_type) {
- vie_encoder_.reset(new ViEEncoder(
- num_cpu_cores, &stats_proxy_, config_.encoder_settings,
- config_.pre_encode_callback, config_.post_encode_callback));
+ vie_encoder_.reset(
+ new ViEEncoder(num_cpu_cores, &stats_proxy_, config_.encoder_settings,
+ config_.pre_encode_callback, config_.post_encode_callback,
+ std::unique_ptr<OveruseFrameDetector>()));
worker_queue_->PostTask(std::unique_ptr<rtc::QueuedTask>(new ConstructionTask(
&send_stream_, &thread_sync_event_, &stats_proxy_, vie_encoder_.get(),
module_process_thread, call_stats, transport, bitrate_allocator,
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index 63b1c80..d82e4fc 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -44,24 +44,12 @@
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=7206
const int kMinPixelsPerFrame = 320 * 180;
const int kMinFramerateFps = 2;
+const int kMaxFramerateFps = 120;
// The maximum number of frames to drop at beginning of stream
// to try and achieve desired bitrate.
const int kMaxInitialFramedrop = 4;
-// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
-// pipelining encoders better (multiple input frames before something comes
-// out). This should effectively turn off CPU adaptations for systems that
-// remotely cope with the load right now.
-CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
- CpuOveruseOptions options;
- if (full_overuse_time) {
- options.low_encode_usage_threshold_percent = 150;
- options.high_encode_usage_threshold_percent = 200;
- }
- return options;
-}
-
uint32_t MaximumFrameSizeForBitrate(uint32_t kbps) {
if (kbps > 0) {
if (kbps < 300 /* qvga */) {
@@ -207,7 +195,7 @@
degradation_preference_ = degradation_preference;
old_source = source_;
source_ = source;
- wants = GetActiveSinkWants();
+ wants = GetActiveSinkWantsInternal();
}
if (old_source != source && old_source != nullptr) {
@@ -228,26 +216,9 @@
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
}
- rtc::VideoSinkWants GetActiveSinkWants() EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
- rtc::VideoSinkWants wants = sink_wants_;
- // Clear any constraints from the current sink wants that don't apply to
- // the used degradation_preference.
- switch (degradation_preference_) {
- case VideoSendStream::DegradationPreference::kBalanced:
- break;
- case VideoSendStream::DegradationPreference::kMaintainFramerate:
- wants.max_framerate_fps = std::numeric_limits<int>::max();
- break;
- case VideoSendStream::DegradationPreference::kMaintainResolution:
- wants.max_pixel_count = std::numeric_limits<int>::max();
- wants.target_pixel_count.reset();
- break;
- case VideoSendStream::DegradationPreference::kDegradationDisabled:
- wants.max_pixel_count = std::numeric_limits<int>::max();
- wants.target_pixel_count.reset();
- wants.max_framerate_fps = std::numeric_limits<int>::max();
- }
- return wants;
+ rtc::VideoSinkWants GetActiveSinkWants() {
+ rtc::CritScope lock(&crit_);
+ return GetActiveSinkWantsInternal();
}
void ResetPixelFpsCount() {
@@ -277,14 +248,15 @@
LOG(LS_INFO) << "Scaling down resolution, max pixels: " << pixels_wanted;
sink_wants_.max_pixel_count = pixels_wanted;
sink_wants_.target_pixel_count = rtc::Optional<int>();
- source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
+ source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWantsInternal());
return true;
}
- bool RequestFramerateLowerThan(int fps) {
+ int RequestFramerateLowerThan(int fps) {
// Called on the encoder task queue.
// The input video frame rate will be scaled down to 2/3, rounding down.
- return RestrictFramerate((fps * 2) / 3);
+ int framerate_wanted = (fps * 2) / 3;
+ return RestrictFramerate(framerate_wanted) ? framerate_wanted : -1;
}
bool RequestHigherResolutionThan(int pixel_count) {
@@ -317,18 +289,22 @@
rtc::Optional<int>((pixel_count * 5) / 3);
}
LOG(LS_INFO) << "Scaling up resolution, max pixels: " << max_pixels_wanted;
- source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
+ source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWantsInternal());
return true;
}
- bool RequestHigherFramerateThan(int fps) {
+ // Request an upgrade in framerate. Returns the new requested framerate, or -1
+ // if no change was requested. Note that maxint may be returned if limits due
+ // to adaptation requests are removed completely. In that case, consider
+ // |max_framerate_| to be the current limit (assuming the capturer complies).
+ int RequestHigherFramerateThan(int fps) {
// Called on the encoder task queue.
// The input frame rate will be scaled up to the last step, with rounding.
int framerate_wanted = fps;
if (fps != std::numeric_limits<int>::max())
framerate_wanted = (fps * 3) / 2;
- return IncreaseFramerate(framerate_wanted);
+ return IncreaseFramerate(framerate_wanted) ? framerate_wanted : -1;
}
bool RestrictFramerate(int fps) {
@@ -343,7 +319,7 @@
LOG(LS_INFO) << "Scaling down framerate: " << fps_wanted;
sink_wants_.max_framerate_fps = fps_wanted;
- source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
+ source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWantsInternal());
return true;
}
@@ -359,11 +335,34 @@
LOG(LS_INFO) << "Scaling up framerate: " << fps_wanted;
sink_wants_.max_framerate_fps = fps_wanted;
- source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWants());
+ source_->AddOrUpdateSink(vie_encoder_, GetActiveSinkWantsInternal());
return true;
}
private:
+ rtc::VideoSinkWants GetActiveSinkWantsInternal()
+ EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
+ rtc::VideoSinkWants wants = sink_wants_;
+ // Clear any constraints from the current sink wants that don't apply to
+ // the used degradation_preference.
+ switch (degradation_preference_) {
+ case VideoSendStream::DegradationPreference::kBalanced:
+ break;
+ case VideoSendStream::DegradationPreference::kMaintainFramerate:
+ wants.max_framerate_fps = std::numeric_limits<int>::max();
+ break;
+ case VideoSendStream::DegradationPreference::kMaintainResolution:
+ wants.max_pixel_count = std::numeric_limits<int>::max();
+ wants.target_pixel_count.reset();
+ break;
+ case VideoSendStream::DegradationPreference::kDegradationDisabled:
+ wants.max_pixel_count = std::numeric_limits<int>::max();
+ wants.target_pixel_count.reset();
+ wants.max_framerate_fps = std::numeric_limits<int>::max();
+ }
+ return wants;
+ }
+
rtc::CriticalSection crit_;
rtc::SequencedTaskChecker main_checker_;
ViEEncoder* const vie_encoder_;
@@ -379,7 +378,8 @@
SendStatisticsProxy* stats_proxy,
const VideoSendStream::Config::EncoderSettings& settings,
rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
- EncodedFrameObserver* encoder_timing)
+ EncodedFrameObserver* encoder_timing,
+ std::unique_ptr<OveruseFrameDetector> overuse_detector)
: shutdown_event_(true /* manual_reset */, false),
number_of_cores_(number_of_cores),
initial_rampup_(0),
@@ -389,13 +389,18 @@
codec_type_(PayloadNameToCodecType(settings.payload_name)
.value_or(VideoCodecType::kVideoCodecUnknown)),
video_sender_(Clock::GetRealTimeClock(), this, this),
- overuse_detector_(GetCpuOveruseOptions(settings.full_overuse_time),
- this,
- encoder_timing,
- stats_proxy),
+ overuse_detector_(
+ overuse_detector.get()
+ ? overuse_detector.release()
+ : new OveruseFrameDetector(
+ GetCpuOveruseOptions(settings.full_overuse_time),
+ this,
+ encoder_timing,
+ stats_proxy)),
stats_proxy_(stats_proxy),
pre_encode_callback_(pre_encode_callback),
module_process_thread_(nullptr),
+ max_framerate_(-1),
pending_encoder_reconfiguration_(false),
encoder_start_bitrate_bps_(0),
max_data_payload_length_(0),
@@ -416,7 +421,7 @@
RTC_DCHECK(stats_proxy);
encoder_queue_.PostTask([this] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
- overuse_detector_.StartCheckForOveruse();
+ overuse_detector_->StartCheckForOveruse();
video_sender_.RegisterExternalEncoder(
settings_.encoder, settings_.payload_type, settings_.internal_source);
});
@@ -428,12 +433,25 @@
<< "Must call ::Stop() before destruction.";
}
+// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
+// pipelining encoders better (multiple input frames before something comes
+// out). This should effectively turn off CPU adaptations for systems that
+// remotely cope with the load right now.
+CpuOveruseOptions ViEEncoder::GetCpuOveruseOptions(bool full_overuse_time) {
+ CpuOveruseOptions options;
+ if (full_overuse_time) {
+ options.low_encode_usage_threshold_percent = 150;
+ options.high_encode_usage_threshold_percent = 200;
+ }
+ return options;
+}
+
void ViEEncoder::Stop() {
RTC_DCHECK_RUN_ON(&thread_checker_);
source_proxy_->SetSource(nullptr, VideoSendStream::DegradationPreference());
encoder_queue_.PostTask([this] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
- overuse_detector_.StopCheckForOveruse();
+ overuse_detector_->StopCheckForOveruse();
rate_allocator_.reset();
bitrate_observer_ = nullptr;
video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type,
@@ -493,6 +511,12 @@
bool allow_scaling = IsResolutionScalingEnabled(degradation_preference_);
initial_rampup_ = allow_scaling ? 0 : kMaxInitialFramedrop;
ConfigureQualityScaler();
+ if (!IsFramerateScalingEnabled(degradation_preference) &&
+ max_framerate_ != -1) {
+ // If frame rate scaling is no longer allowed, remove any potential
+ // allowance for longer frame intervals.
+ overuse_detector_->OnTargetFramerateUpdated(max_framerate_);
+ }
});
}
@@ -562,6 +586,8 @@
std::max(encoder_start_bitrate_bps_ / 1000, codec.minBitrate);
codec.startBitrate = std::min(codec.startBitrate, codec.maxBitrate);
codec.expect_encode_from_texture = last_frame_info_->is_texture;
+ max_framerate_ = codec.maxFramerate;
+ RTC_DCHECK_LE(max_framerate_, kMaxFramerateFps);
bool success = video_sender_.RegisterSendCodec(
&codec, number_of_cores_,
@@ -574,19 +600,31 @@
video_sender_.UpdateChannelParemeters(rate_allocator_.get(),
bitrate_observer_);
- int framerate = stats_proxy_->GetSendFrameRate();
- if (framerate == 0)
- framerate = codec.maxFramerate;
+ // Get the current actual framerate, as measured by the stats proxy. This is
+ // used to get the correct bitrate layer allocation.
+ int current_framerate = stats_proxy_->GetSendFrameRate();
+ if (current_framerate == 0)
+ current_framerate = codec.maxFramerate;
stats_proxy_->OnEncoderReconfigured(
- encoder_config_, rate_allocator_.get()
- ? rate_allocator_->GetPreferredBitrateBps(framerate)
- : codec.maxBitrate);
+ encoder_config_,
+ rate_allocator_.get()
+ ? rate_allocator_->GetPreferredBitrateBps(current_framerate)
+ : codec.maxBitrate);
pending_encoder_reconfiguration_ = false;
sink_->OnEncoderConfigurationChanged(
std::move(streams), encoder_config_.min_transmit_bitrate_bps);
+ // Get the current target framerate, i.e. the maximum framerate as specified
+ // by the current codec configuration or any limit imposed by CPU adaptation
+ // in maintain-resolution or balanced mode. This is used to make sure overuse
+ // detection doesn't needlessly trigger in low and/or variable framerate
+ // scenarios.
+ int target_framerate = std::min(
+ max_framerate_, source_proxy_->GetActiveSinkWants().max_framerate_fps);
+ overuse_detector_->OnTargetFramerateUpdated(target_framerate);
+
ConfigureQualityScaler();
}
@@ -735,7 +773,7 @@
TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
"Encode");
- overuse_detector_.FrameCaptured(video_frame, time_when_posted_us);
+ overuse_detector_->FrameCaptured(video_frame, time_when_posted_us);
video_sender_.AddVideoFrame(video_frame, nullptr);
}
@@ -766,7 +804,7 @@
const int qp = encoded_image.qp_;
encoder_queue_.PostTask([this, timestamp, time_sent_us, qp] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
- overuse_detector_.FrameSent(timestamp, time_sent_us);
+ overuse_detector_->FrameSent(timestamp, time_sent_us);
if (quality_scaler_ && qp >= 0)
quality_scaler_->ReportQP(qp);
});
@@ -901,14 +939,18 @@
}
GetAdaptCounter().IncrementResolution(reason);
break;
- case VideoSendStream::DegradationPreference::kMaintainResolution:
+ case VideoSendStream::DegradationPreference::kMaintainResolution: {
// Scale down framerate.
- if (!source_proxy_->RequestFramerateLowerThan(
- adaptation_request.framerate_fps_)) {
+ const int requested_framerate = source_proxy_->RequestFramerateLowerThan(
+ adaptation_request.framerate_fps_);
+ if (requested_framerate == -1)
return;
- }
+ RTC_DCHECK_NE(max_framerate_, -1);
+ overuse_detector_->OnTargetFramerateUpdated(
+ std::min(max_framerate_, requested_framerate));
GetAdaptCounter().IncrementFramerate(reason);
break;
+ }
case VideoSendStream::DegradationPreference::kDegradationDisabled:
RTC_NOTREACHED();
}
@@ -985,8 +1027,15 @@
LOG(LS_INFO) << "Removing framerate down-scaling setting.";
fps = std::numeric_limits<int>::max();
}
- if (!source_proxy_->RequestHigherFramerateThan(fps))
+
+ const int requested_framerate =
+ source_proxy_->RequestHigherFramerateThan(fps);
+ if (requested_framerate == -1) {
+ overuse_detector_->OnTargetFramerateUpdated(max_framerate_);
return;
+ }
+ overuse_detector_->OnTargetFramerateUpdated(
+ std::min(max_framerate_, requested_framerate));
GetAdaptCounter().DecrementFramerate(reason);
break;
}
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
index 460abf1..77a6bc4 100644
--- a/webrtc/video/vie_encoder.h
+++ b/webrtc/video/vie_encoder.h
@@ -77,7 +77,8 @@
SendStatisticsProxy* stats_proxy,
const VideoSendStream::Config::EncoderSettings& settings,
rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
- EncodedFrameObserver* encoder_timing);
+ EncodedFrameObserver* encoder_timing,
+ std::unique_ptr<OveruseFrameDetector> overuse_detector);
~ViEEncoder();
// RegisterProcessThread register |module_process_thread| with those objects
// that use it. Registration has to happen on the thread where
@@ -129,6 +130,7 @@
// These methods are protected for easier testing.
void AdaptUp(AdaptReason reason) override;
void AdaptDown(AdaptReason reason) override;
+ static CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time);
private:
class ConfigureEncoderTask;
@@ -232,7 +234,8 @@
const VideoCodecType codec_type_;
vcm::VideoSender video_sender_ ACCESS_ON(&encoder_queue_);
- OveruseFrameDetector overuse_detector_ ACCESS_ON(&encoder_queue_);
+ std::unique_ptr<OveruseFrameDetector> overuse_detector_
+ ACCESS_ON(&encoder_queue_);
std::unique_ptr<QualityScaler> quality_scaler_ ACCESS_ON(&encoder_queue_);
SendStatisticsProxy* const stats_proxy_;
@@ -246,6 +249,9 @@
VideoEncoderConfig encoder_config_ ACCESS_ON(&encoder_queue_);
std::unique_ptr<VideoBitrateAllocator> rate_allocator_
ACCESS_ON(&encoder_queue_);
+ // The maximum frame rate of the current codec configuration, as determined
+ // at the last ReconfigureEncoder() call.
+ int max_framerate_ ACCESS_ON(&encoder_queue_);
// Set when ConfigureEncoder has been called in order to lazy reconfigure the
// encoder on the next frame.
diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc
index a016fa9..b143e4d 100644
--- a/webrtc/video/vie_encoder_unittest.cc
+++ b/webrtc/video/vie_encoder_unittest.cc
@@ -48,6 +48,7 @@
const int kTargetBitrateBps = 1000000;
const int kLowTargetBitrateBps = kTargetBitrateBps / 10;
const int kMaxInitialFramedrop = 4;
+const int kDefaultFramerate = 30;
class TestBuffer : public webrtc::I420Buffer {
public:
@@ -63,6 +64,35 @@
rtc::Event* const event_;
};
+class CpuOveruseDetectorProxy : public OveruseFrameDetector {
+ public:
+ CpuOveruseDetectorProxy(const CpuOveruseOptions& options,
+ AdaptationObserverInterface* overuse_observer,
+ EncodedFrameObserver* encoder_timing_,
+ CpuOveruseMetricsObserver* metrics_observer)
+ : OveruseFrameDetector(options,
+ overuse_observer,
+ encoder_timing_,
+ metrics_observer),
+ last_target_framerate_fps_(-1) {}
+ virtual ~CpuOveruseDetectorProxy() {}
+
+ void OnTargetFramerateUpdated(int framerate_fps) override {
+ rtc::CritScope cs(&lock_);
+ last_target_framerate_fps_ = framerate_fps;
+ OveruseFrameDetector::OnTargetFramerateUpdated(framerate_fps);
+ }
+
+ int GetLastTargetFramerate() {
+ rtc::CritScope cs(&lock_);
+ return last_target_framerate_fps_;
+ }
+
+ private:
+ rtc::CriticalSection lock_;
+ int last_target_framerate_fps_ GUARDED_BY(lock_);
+};
+
class ViEEncoderUnderTest : public ViEEncoder {
public:
ViEEncoderUnderTest(SendStatisticsProxy* stats_proxy,
@@ -71,7 +101,13 @@
stats_proxy,
settings,
nullptr /* pre_encode_callback */,
- nullptr /* encoder_timing */) {}
+ nullptr /* encoder_timing */,
+ std::unique_ptr<OveruseFrameDetector>(
+ overuse_detector_proxy_ = new CpuOveruseDetectorProxy(
+ GetCpuOveruseOptions(settings.full_overuse_time),
+ this,
+ nullptr,
+ stats_proxy))) {}
void PostTaskAndWait(bool down, AdaptReason reason) {
rtc::Event event(false, false);
@@ -99,14 +135,17 @@
void TriggerQualityLow() { PostTaskAndWait(true, AdaptReason::kQuality); }
void TriggerQualityHigh() { PostTaskAndWait(false, AdaptReason::kQuality); }
+
+ CpuOveruseDetectorProxy* overuse_detector_proxy_;
};
class VideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
- explicit VideoStreamFactory(size_t num_temporal_layers)
- : num_temporal_layers_(num_temporal_layers) {
+ explicit VideoStreamFactory(size_t num_temporal_layers, int framerate)
+ : num_temporal_layers_(num_temporal_layers), framerate_(framerate) {
EXPECT_GT(num_temporal_layers, 0u);
+ EXPECT_GT(framerate, 0);
}
private:
@@ -118,10 +157,13 @@
test::CreateVideoStreams(width, height, encoder_config);
for (VideoStream& stream : streams) {
stream.temporal_layer_thresholds_bps.resize(num_temporal_layers_ - 1);
+ stream.max_framerate = framerate_;
}
return streams;
}
+
const size_t num_temporal_layers_;
+ const int framerate_;
};
class AdaptingFrameForwarder : public test::FrameForwarder {
@@ -264,7 +306,8 @@
video_encoder_config.number_of_streams = num_streams;
video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
video_encoder_config.video_stream_factory =
- new rtc::RefCountedObject<VideoStreamFactory>(num_temporal_layers);
+ new rtc::RefCountedObject<VideoStreamFactory>(num_temporal_layers,
+ kDefaultFramerate);
ConfigureEncoder(std::move(video_encoder_config), nack_enabled);
}
@@ -2027,6 +2070,164 @@
vie_encoder_->Stop();
}
+TEST_F(ViEEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) {
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ const int kFramerate = 24;
+
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ test::FrameForwarder source;
+ vie_encoder_->SetSource(
+ &source, VideoSendStream::DegradationPreference::kMaintainResolution);
+
+ // Insert a single frame, triggering initial configuration.
+ source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kDefaultFramerate);
+
+ // Trigger reconfigure encoder (without resetting the entire instance).
+ VideoEncoderConfig video_encoder_config;
+ video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
+ video_encoder_config.number_of_streams = 1;
+ video_encoder_config.video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(1, kFramerate);
+ vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
+ kMaxPayloadLength, false);
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+
+ // Detector should be updated with fps limit from codec config.
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kFramerate);
+
+ // Trigger overuse, max framerate should be reduced.
+ VideoSendStream::Stats stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = kFramerate;
+ stats_proxy_->SetMockStats(stats);
+ vie_encoder_->TriggerCpuOveruse();
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ int adapted_framerate =
+ vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
+ EXPECT_LT(adapted_framerate, kFramerate);
+
+ // Trigger underuse, max framerate should go back to codec configured fps.
+ // Set extra low fps, to make sure it's actually reset, not just incremented.
+ stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = adapted_framerate / 2;
+ stats_proxy_->SetMockStats(stats);
+ vie_encoder_->TriggerCpuNormalUsage();
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kFramerate);
+
+ vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, OveruseDetectorUpdatedRespectsFramerateAfterUnderuse) {
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ const int kLowFramerate = 15;
+ const int kHighFramerate = 25;
+
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ test::FrameForwarder source;
+ vie_encoder_->SetSource(
+ &source, VideoSendStream::DegradationPreference::kMaintainResolution);
+
+ // Trigger initial configuration.
+ VideoEncoderConfig video_encoder_config;
+ video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
+ video_encoder_config.number_of_streams = 1;
+ video_encoder_config.video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(1, kLowFramerate);
+ source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
+ vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
+ kMaxPayloadLength, false);
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kLowFramerate);
+
+ // Trigger overuse, max framerate should be reduced.
+ VideoSendStream::Stats stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = kLowFramerate;
+ stats_proxy_->SetMockStats(stats);
+ vie_encoder_->TriggerCpuOveruse();
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ int adapted_framerate =
+ vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
+ EXPECT_LT(adapted_framerate, kLowFramerate);
+
+ // Reconfigure the encoder with a new (higher) max framerate; max fps should
+ // still respect the adaptation.
+ video_encoder_config.video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(1, kHighFramerate);
+ source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
+ vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
+ kMaxPayloadLength, false);
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ adapted_framerate);
+
+ // Trigger underuse, max framerate should go back to codec configured fps.
+ stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = adapted_framerate;
+ stats_proxy_->SetMockStats(stats);
+ vie_encoder_->TriggerCpuNormalUsage();
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kHighFramerate);
+
+ vie_encoder_->Stop();
+}
+
+TEST_F(ViEEncoderTest, OveruseDetectorUpdatedOnDegradationPreferenceChange) {
+ const int kFrameWidth = 1280;
+ const int kFrameHeight = 720;
+ const int kFramerate = 24;
+
+ vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+ test::FrameForwarder source;
+ vie_encoder_->SetSource(
+ &source, VideoSendStream::DegradationPreference::kMaintainResolution);
+
+ // Trigger initial configuration.
+ VideoEncoderConfig video_encoder_config;
+ video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
+ video_encoder_config.number_of_streams = 1;
+ video_encoder_config.video_stream_factory =
+ new rtc::RefCountedObject<VideoStreamFactory>(1, kFramerate);
+ source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight));
+ vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
+ kMaxPayloadLength, false);
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kFramerate);
+
+ // Trigger overuse, max framerate should be reduced.
+ VideoSendStream::Stats stats = stats_proxy_->GetStats();
+ stats.input_frame_rate = kFramerate;
+ stats_proxy_->SetMockStats(stats);
+ vie_encoder_->TriggerCpuOveruse();
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ int adapted_framerate =
+ vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate();
+ EXPECT_LT(adapted_framerate, kFramerate);
+
+ // Change degradation preference to one that disables framerate scaling. The
+ // target framerate should be changed back to the codec-defined limit.
+ vie_encoder_->SetSource(
+ &source, VideoSendStream::DegradationPreference::kMaintainFramerate);
+ vie_encoder_->WaitUntilTaskQueueIsIdle();
+ EXPECT_EQ(vie_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
+ kFramerate);
+
+ vie_encoder_->Stop();
+}
+
TEST_F(ViEEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
const int kTooLowBitrateForFrameSizeBps = 10000;
vie_encoder_->OnBitrateUpdated(kTooLowBitrateForFrameSizeBps, 0, 0);