Measure encoding time on encode callbacks.
Permits measuring encoding time even when performed on another thread,
typically for hardware encoding, instead of assuming that encoding is
blocking the calling thread.
Permitted encoding time is increased for hardware encoders since they
can be timed to keep 30fps, for instance, without indicating overload.
Merges EncodingTimeObserver into EncodedFrameObserver to have one post-encode
callback.
BUG=webrtc:5042, webrtc:5132
R=asapersson@webrtc.org, mflodman@webrtc.org
Review URL: https://codereview.webrtc.org/1569853002 .
Cr-Commit-Position: refs/heads/master@{#11499}
diff --git a/webrtc/frame_callback.h b/webrtc/frame_callback.h
index b7f2210..2bae250 100644
--- a/webrtc/frame_callback.h
+++ b/webrtc/frame_callback.h
@@ -12,6 +12,7 @@
#define WEBRTC_FRAME_CALLBACK_H_
#include <stddef.h>
+#include <stdint.h>
#include "webrtc/common_types.h"
@@ -43,6 +44,7 @@
class EncodedFrameObserver {
public:
virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) = 0;
+ virtual void OnEncodeTiming(int64_t capture_ntp_ms, int encode_duration_ms) {}
protected:
virtual ~EncodedFrameObserver() {}
diff --git a/webrtc/media/webrtc/webrtcvideoengine2.cc b/webrtc/media/webrtc/webrtcvideoengine2.cc
index 265aa12..0489198 100644
--- a/webrtc/media/webrtc/webrtcvideoengine2.cc
+++ b/webrtc/media/webrtc/webrtcvideoengine2.cc
@@ -1768,6 +1768,7 @@
AllocatedEncoder new_encoder = CreateVideoEncoder(codec_settings.codec);
parameters_.config.encoder_settings.encoder = new_encoder.encoder;
+ parameters_.config.encoder_settings.full_overuse_time = new_encoder.external;
parameters_.config.encoder_settings.payload_name = codec_settings.codec.name;
parameters_.config.encoder_settings.payload_type = codec_settings.codec.id;
if (new_encoder.external) {
diff --git a/webrtc/media/webrtc/webrtcvideoengine2_unittest.cc b/webrtc/media/webrtc/webrtcvideoengine2_unittest.cc
index 7a7c815..663f40d 100644
--- a/webrtc/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -153,6 +153,8 @@
cricket::WebRtcVideoDecoderFactory* decoder_factory,
const std::vector<VideoCodec>& codecs);
+ void TestExtendedEncoderOveruse(bool use_external_encoder);
+
webrtc::test::ScopedFieldTrials override_field_trials_;
// Used in WebRtcVideoEngine2VoiceTest, but defined here so it's properly
// initialized when the constructor is called.
@@ -356,6 +358,42 @@
EXPECT_EQ(0u, encoder_factory.encoders().size());
}
+void WebRtcVideoEngine2Test::TestExtendedEncoderOveruse(
+ bool use_external_encoder) {
+ cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
+ encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP8, "VP8");
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ rtc::scoped_ptr<VideoMediaChannel> channel;
+ FakeCall* fake_call = new FakeCall(webrtc::Call::Config());
+ call_.reset(fake_call);
+ if (use_external_encoder) {
+ channel.reset(
+ SetUpForExternalEncoderFactory(&encoder_factory, parameters.codecs));
+ } else {
+ engine_.Init();
+ channel.reset(engine_.CreateChannel(call_.get(), cricket::VideoOptions()));
+ }
+ ASSERT_TRUE(
+ channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel->SetSendParameters(parameters));
+ EXPECT_TRUE(channel->SetSend(true));
+ FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0];
+
+ EXPECT_EQ(use_external_encoder,
+ stream->GetConfig().encoder_settings.full_overuse_time);
+ // Remove stream previously added to free the external encoder instance.
+ EXPECT_TRUE(channel->RemoveSendStream(kSsrc));
+}
+
+TEST_F(WebRtcVideoEngine2Test, EnablesFullEncoderTimeForExternalEncoders) {
+ TestExtendedEncoderOveruse(true);
+}
+
+TEST_F(WebRtcVideoEngine2Test, DisablesFullEncoderTimeForNonExternalEncoders) {
+ TestExtendedEncoderOveruse(false);
+}
+
TEST_F(WebRtcVideoEngine2Test, CanConstructDecoderForVp9EncoderFactory) {
cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP9, "VP9");
diff --git a/webrtc/modules/video_coding/video_coding_impl.h b/webrtc/modules/video_coding/video_coding_impl.h
index 2786d8b..581ab0e 100644
--- a/webrtc/modules/video_coding/video_coding_impl.h
+++ b/webrtc/modules/video_coding/video_coding_impl.h
@@ -31,8 +31,6 @@
namespace webrtc {
-class EncodedFrameObserver;
-
namespace vcm {
class VCMProcessTimer {
diff --git a/webrtc/video/encoder_state_feedback_unittest.cc b/webrtc/video/encoder_state_feedback_unittest.cc
index b9199dd..8fa1c12 100644
--- a/webrtc/video/encoder_state_feedback_unittest.cc
+++ b/webrtc/video/encoder_state_feedback_unittest.cc
@@ -31,7 +31,13 @@
class MockVieEncoder : public ViEEncoder {
public:
explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
- : ViEEncoder(1, process_thread, nullptr, nullptr, pacer, nullptr) {}
+ : ViEEncoder(1,
+ process_thread,
+ nullptr,
+ nullptr,
+ nullptr,
+ pacer,
+ nullptr) {}
~MockVieEncoder() {}
MOCK_METHOD1(OnReceivedIntraFrameRequest,
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index ce3255c..5385450 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -2653,7 +2653,7 @@
stats.substreams.size() == expected_send_ssrcs_.size();
send_stats_filled_["CpuOveruseMetrics"] |=
- stats.avg_encode_time_ms != 0 || stats.encode_usage_percent != 0;
+ stats.avg_encode_time_ms != 0 && stats.encode_usage_percent != 0;
send_stats_filled_["EncoderImplementationName"] |=
stats.encoder_implementation_name ==
diff --git a/webrtc/video/overuse_frame_detector.cc b/webrtc/video/overuse_frame_detector.cc
index d971ad9..e589c19 100644
--- a/webrtc/video/overuse_frame_detector.cc
+++ b/webrtc/video/overuse_frame_detector.cc
@@ -20,7 +20,9 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/exp_filter.h"
#include "webrtc/base/logging.h"
+#include "webrtc/frame_callback.h"
#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/video_frame.h"
namespace webrtc {
@@ -115,106 +117,47 @@
rtc::scoped_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
};
-// Class for calculating the processing time of frames.
-class OveruseFrameDetector::FrameQueue {
- public:
- FrameQueue() : last_processing_time_ms_(-1) {}
- ~FrameQueue() {}
-
- // Called when a frame is captured.
- // Starts the measuring of the processing time of the frame.
- void Start(int64_t capture_time, int64_t now) {
- const size_t kMaxSize = 90; // Allows for processing time of 1.5s at 60fps.
- if (frame_times_.size() > kMaxSize) {
- LOG(LS_WARNING) << "Max size reached, removed oldest frame.";
- frame_times_.erase(frame_times_.begin());
- }
- if (frame_times_.find(capture_time) != frame_times_.end()) {
- // Frame should not exist.
- assert(false);
- return;
- }
- frame_times_[capture_time] = now;
- }
-
- // Called when the processing of a frame has finished.
- // Returns the processing time of the frame.
- int End(int64_t capture_time, int64_t now) {
- std::map<int64_t, int64_t>::iterator it = frame_times_.find(capture_time);
- if (it == frame_times_.end()) {
- return -1;
- }
- // Remove any old frames up to current.
- // Old frames have been skipped by the capture process thread.
- // TODO(asapersson): Consider measuring time from first frame in list.
- last_processing_time_ms_ = now - (*it).second;
- frame_times_.erase(frame_times_.begin(), ++it);
- return last_processing_time_ms_;
- }
-
- void Reset() { frame_times_.clear(); }
- int NumFrames() const { return static_cast<int>(frame_times_.size()); }
- int last_processing_time_ms() const { return last_processing_time_ms_; }
-
- private:
- // Captured frames mapped by the capture time.
- std::map<int64_t, int64_t> frame_times_;
- int last_processing_time_ms_;
-};
-
-
OveruseFrameDetector::OveruseFrameDetector(
Clock* clock,
const CpuOveruseOptions& options,
CpuOveruseObserver* observer,
+ EncodedFrameObserver* encoder_timing,
CpuOveruseMetricsObserver* metrics_observer)
: options_(options),
observer_(observer),
+ encoder_timing_(encoder_timing),
metrics_observer_(metrics_observer),
clock_(clock),
num_process_times_(0),
- last_capture_time_(0),
+ last_capture_time_ms_(-1),
+ last_processed_capture_time_ms_(-1),
num_pixels_(0),
- next_process_time_(clock_->TimeInMilliseconds()),
- last_overuse_time_(0),
+ next_process_time_ms_(clock_->TimeInMilliseconds()),
+ last_overuse_time_ms_(-1),
checks_above_threshold_(0),
num_overuse_detections_(0),
- last_rampup_time_(0),
+ last_rampup_time_ms_(-1),
in_quick_rampup_(false),
current_rampup_delay_ms_(kStandardRampUpDelayMs),
- last_sample_time_ms_(0),
- usage_(new SendProcessingUsage(options)),
- frame_queue_(new FrameQueue()) {
+ usage_(new SendProcessingUsage(options)) {
RTC_DCHECK(metrics_observer != nullptr);
- // Make sure stats are initially up-to-date. This simplifies unit testing
- // since we don't have to trigger an update using one of the methods which
- // would also alter the overuse state.
- UpdateCpuOveruseMetrics();
processing_thread_.DetachFromThread();
}
OveruseFrameDetector::~OveruseFrameDetector() {
}
-int OveruseFrameDetector::LastProcessingTimeMs() const {
- rtc::CritScope cs(&crit_);
- return frame_queue_->last_processing_time_ms();
-}
+void OveruseFrameDetector::EncodedFrameTimeMeasured(int encode_duration_ms) {
+ if (!metrics_)
+ metrics_ = rtc::Optional<CpuOveruseMetrics>(CpuOveruseMetrics());
+ metrics_->encode_usage_percent = usage_->Value();
-int OveruseFrameDetector::FramesInQueue() const {
- rtc::CritScope cs(&crit_);
- return frame_queue_->NumFrames();
-}
-
-void OveruseFrameDetector::UpdateCpuOveruseMetrics() {
- metrics_.encode_usage_percent = usage_->Value();
-
- metrics_observer_->CpuOveruseMetricsUpdated(metrics_);
+ metrics_observer_->OnEncodedFrameTimeMeasured(encode_duration_ms, *metrics_);
}
int64_t OveruseFrameDetector::TimeUntilNextProcess() {
RTC_DCHECK(processing_thread_.CalledOnValidThread());
- return next_process_time_ - clock_->TimeInMilliseconds();
+ return next_process_time_ms_ - clock_->TimeInMilliseconds();
}
bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
@@ -225,56 +168,80 @@
}
bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now) const {
- if (last_capture_time_ == 0) {
+ if (last_capture_time_ms_ == -1)
return false;
- }
- return (now - last_capture_time_) > options_.frame_timeout_interval_ms;
+ return (now - last_capture_time_ms_) > options_.frame_timeout_interval_ms;
}
void OveruseFrameDetector::ResetAll(int num_pixels) {
num_pixels_ = num_pixels;
usage_->Reset();
- frame_queue_->Reset();
- last_capture_time_ = 0;
+ frame_timing_.clear();
+ last_capture_time_ms_ = -1;
+ last_processed_capture_time_ms_ = -1;
num_process_times_ = 0;
- UpdateCpuOveruseMetrics();
+ metrics_ = rtc::Optional<CpuOveruseMetrics>();
}
-void OveruseFrameDetector::FrameCaptured(int width,
- int height,
- int64_t capture_time_ms) {
+void OveruseFrameDetector::FrameCaptured(const VideoFrame& frame) {
rtc::CritScope cs(&crit_);
int64_t now = clock_->TimeInMilliseconds();
- if (FrameSizeChanged(width * height) || FrameTimeoutDetected(now)) {
- ResetAll(width * height);
+ if (FrameSizeChanged(frame.width() * frame.height()) ||
+ FrameTimeoutDetected(now)) {
+ ResetAll(frame.width() * frame.height());
}
- if (last_capture_time_ != 0)
- usage_->AddCaptureSample(now - last_capture_time_);
+ if (last_capture_time_ms_ != -1)
+ usage_->AddCaptureSample(now - last_capture_time_ms_);
- last_capture_time_ = now;
+ last_capture_time_ms_ = now;
- frame_queue_->Start(capture_time_ms, now);
+ frame_timing_.push_back(
+ FrameTiming(frame.ntp_time_ms(), frame.timestamp(), now));
}
-void OveruseFrameDetector::FrameSent(int64_t capture_time_ms) {
+void OveruseFrameDetector::FrameSent(uint32_t timestamp) {
rtc::CritScope cs(&crit_);
- int delay_ms = frame_queue_->End(capture_time_ms,
- clock_->TimeInMilliseconds());
- if (delay_ms > 0) {
- AddProcessingTime(delay_ms);
- }
-}
-
-void OveruseFrameDetector::AddProcessingTime(int elapsed_ms) {
+ // Delay before reporting actual encoding time, used to have the ability to
+ // detect total encoding time when encoding more than one layer. Encoding is
+ // here assumed to finish within a second (or that we get enough long-time
+ // samples before one second to trigger an overuse even when this is not the
+ // case).
+ static const int64_t kEncodingTimeMeasureWindowMs = 1000;
int64_t now = clock_->TimeInMilliseconds();
- if (last_sample_time_ms_ != 0) {
- int64_t diff_ms = now - last_sample_time_ms_;
- usage_->AddSample(elapsed_ms, diff_ms);
+ for (auto& it : frame_timing_) {
+ if (it.timestamp == timestamp) {
+ it.last_send_ms = now;
+ break;
+ }
}
- last_sample_time_ms_ = now;
- UpdateCpuOveruseMetrics();
+ // TODO(pbos): Handle the case/log errors when not finding the corresponding
+ // frame (either very slow encoding or incorrect timestamps returned
+ // from the encoder).
+ // This is currently the case for all frames on ChromeOS, so logging them
+ // would be spammy, and triggering overuse would be wrong.
+ // https://crbug.com/350106
+ while (!frame_timing_.empty()) {
+ FrameTiming timing = frame_timing_.front();
+ if (now - timing.capture_ms < kEncodingTimeMeasureWindowMs)
+ break;
+ if (timing.last_send_ms != -1) {
+ int encode_duration_ms =
+ static_cast<int>(timing.last_send_ms - timing.capture_ms);
+ if (encoder_timing_) {
+ encoder_timing_->OnEncodeTiming(timing.capture_ntp_ms,
+ encode_duration_ms);
+ }
+ if (last_processed_capture_time_ms_ != -1) {
+ int64_t diff_ms = timing.capture_ms - last_processed_capture_time_ms_;
+ usage_->AddSample(encode_duration_ms, diff_ms);
+ }
+ last_processed_capture_time_ms_ = timing.capture_ms;
+ EncodedFrameTimeMeasured(encode_duration_ms);
+ }
+ frame_timing_.pop_front();
+ }
}
int32_t OveruseFrameDetector::Process() {
@@ -283,28 +250,28 @@
int64_t now = clock_->TimeInMilliseconds();
// Used to protect against Process() being called too often.
- if (now < next_process_time_)
+ if (now < next_process_time_ms_)
return 0;
- next_process_time_ = now + kProcessIntervalMs;
+ next_process_time_ms_ = now + kProcessIntervalMs;
CpuOveruseMetrics current_metrics;
{
rtc::CritScope cs(&crit_);
++num_process_times_;
-
- current_metrics = metrics_;
- if (num_process_times_ <= options_.min_process_count)
+ if (num_process_times_ <= options_.min_process_count || !metrics_)
return 0;
+
+ current_metrics = *metrics_;
}
if (IsOverusing(current_metrics)) {
// If the last thing we did was going up, and now have to back down, we need
// to check if this peak was short. If so we should back off to avoid going
// back and forth between this load, the system doesn't seem to handle it.
- bool check_for_backoff = last_rampup_time_ > last_overuse_time_;
+ bool check_for_backoff = last_rampup_time_ms_ > last_overuse_time_ms_;
if (check_for_backoff) {
- if (now - last_rampup_time_ < kStandardRampUpDelayMs ||
+ if (now - last_rampup_time_ms_ < kStandardRampUpDelayMs ||
num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
// Going up was not ok for very long, back off.
current_rampup_delay_ms_ *= kRampUpBackoffFactor;
@@ -316,7 +283,7 @@
}
}
- last_overuse_time_ = now;
+ last_overuse_time_ms_ = now;
in_quick_rampup_ = false;
checks_above_threshold_ = 0;
++num_overuse_detections_;
@@ -324,7 +291,7 @@
if (observer_ != NULL)
observer_->OveruseDetected();
} else if (IsUnderusing(current_metrics, now)) {
- last_rampup_time_ = now;
+ last_rampup_time_ms_ = now;
in_quick_rampup_ = true;
if (observer_ != NULL)
@@ -355,7 +322,7 @@
bool OveruseFrameDetector::IsUnderusing(const CpuOveruseMetrics& metrics,
int64_t time_now) {
int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
- if (time_now < last_rampup_time_ + delay)
+ if (time_now < last_rampup_time_ms_ + delay)
return false;
return metrics.encode_usage_percent <
diff --git a/webrtc/video/overuse_frame_detector.h b/webrtc/video/overuse_frame_detector.h
index 0ef2e43..8184aaf 100644
--- a/webrtc/video/overuse_frame_detector.h
+++ b/webrtc/video/overuse_frame_detector.h
@@ -11,8 +11,11 @@
#ifndef WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
#define WEBRTC_VIDEO_OVERUSE_FRAME_DETECTOR_H_
+#include <list>
+
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/optional.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/exp_filter.h"
#include "webrtc/base/thread_annotations.h"
@@ -22,6 +25,8 @@
namespace webrtc {
class Clock;
+class EncodedFrameObserver;
+class VideoFrame;
// CpuOveruseObserver is called when a system overuse is detected and
// VideoEngine cannot keep up the encoding frequency.
@@ -68,10 +73,10 @@
class CpuOveruseMetricsObserver {
public:
virtual ~CpuOveruseMetricsObserver() {}
- virtual void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) = 0;
+ virtual void OnEncodedFrameTimeMeasured(int encode_duration_ms,
+ const CpuOveruseMetrics& metrics) = 0;
};
-
// Use to detect system overuse based on the send-side processing time of
// incoming frames.
class OveruseFrameDetector : public Module {
@@ -79,18 +84,15 @@
OveruseFrameDetector(Clock* clock,
const CpuOveruseOptions& options,
CpuOveruseObserver* overuse_observer,
+ EncodedFrameObserver* encoder_timing_,
CpuOveruseMetricsObserver* metrics_observer);
~OveruseFrameDetector();
// Called for each captured frame.
- void FrameCaptured(int width, int height, int64_t capture_time_ms);
+ void FrameCaptured(const VideoFrame& frame);
// Called for each sent frame.
- void FrameSent(int64_t capture_time_ms);
-
- // Only public for testing.
- int LastProcessingTimeMs() const;
- int FramesInQueue() const;
+ void FrameSent(uint32_t timestamp);
// Implements Module.
int64_t TimeUntilNextProcess() override;
@@ -98,13 +100,20 @@
private:
class SendProcessingUsage;
- class FrameQueue;
+ struct FrameTiming {
+ FrameTiming(int64_t capture_ntp_ms, uint32_t timestamp, int64_t now)
+ : capture_ntp_ms(capture_ntp_ms),
+ timestamp(timestamp),
+ capture_ms(now),
+ last_send_ms(-1) {}
+ int64_t capture_ntp_ms;
+ uint32_t timestamp;
+ int64_t capture_ms;
+ int64_t last_send_ms;
+ };
- void UpdateCpuOveruseMetrics() EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- // TODO(asapersson): This method is only used on one thread, so it shouldn't
- // need a guard.
- void AddProcessingTime(int elapsed_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void EncodedFrameTimeMeasured(int encode_duration_ms)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Only called on the processing thread.
bool IsOverusing(const CpuOveruseMetrics& metrics);
@@ -125,34 +134,34 @@
// Observer getting overuse reports.
CpuOveruseObserver* const observer_;
+ EncodedFrameObserver* const encoder_timing_;
// Stats metrics.
CpuOveruseMetricsObserver* const metrics_observer_;
- CpuOveruseMetrics metrics_ GUARDED_BY(crit_);
+ rtc::Optional<CpuOveruseMetrics> metrics_ GUARDED_BY(crit_);
Clock* const clock_;
int64_t num_process_times_ GUARDED_BY(crit_);
- int64_t last_capture_time_ GUARDED_BY(crit_);
+ int64_t last_capture_time_ms_ GUARDED_BY(crit_);
+ int64_t last_processed_capture_time_ms_ GUARDED_BY(crit_);
// Number of pixels of last captured frame.
int num_pixels_ GUARDED_BY(crit_);
// These seven members are only accessed on the processing thread.
- int64_t next_process_time_;
- int64_t last_overuse_time_;
+ int64_t next_process_time_ms_;
+ int64_t last_overuse_time_ms_;
int checks_above_threshold_;
int num_overuse_detections_;
- int64_t last_rampup_time_;
+ int64_t last_rampup_time_ms_;
bool in_quick_rampup_;
int current_rampup_delay_ms_;
- int64_t last_sample_time_ms_; // Only accessed by one thread.
-
// TODO(asapersson): Can these be regular members (avoid separate heap
// allocs)?
const rtc::scoped_ptr<SendProcessingUsage> usage_ GUARDED_BY(crit_);
- const rtc::scoped_ptr<FrameQueue> frame_queue_ GUARDED_BY(crit_);
+ std::list<FrameTiming> frame_timing_ GUARDED_BY(crit_);
rtc::ThreadChecker processing_thread_;
diff --git a/webrtc/video/overuse_frame_detector_unittest.cc b/webrtc/video/overuse_frame_detector_unittest.cc
index 65e006b..1a6384c 100644
--- a/webrtc/video/overuse_frame_detector_unittest.cc
+++ b/webrtc/video/overuse_frame_detector_unittest.cc
@@ -15,6 +15,7 @@
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/video_frame.h"
namespace webrtc {
namespace {
@@ -59,11 +60,12 @@
}
void ReinitializeOveruseDetector() {
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
- observer_.get(), this));
+ overuse_detector_.reset(new OveruseFrameDetector(
+ clock_.get(), options_, observer_.get(), nullptr, this));
}
- void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override {
+ void OnEncodedFrameTimeMeasured(int encode_time_ms,
+ const CpuOveruseMetrics& metrics) override {
metrics_ = metrics;
}
@@ -72,17 +74,31 @@
options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
}
- void InsertAndSendFramesWithInterval(
- int num_frames, int interval_ms, int width, int height, int delay_ms) {
+ void InsertAndSendFramesWithInterval(int num_frames,
+ int interval_ms,
+ int width,
+ int height,
+ int delay_ms) {
+ VideoFrame frame;
+ frame.CreateEmptyFrame(width, height, width, width / 2, width / 2);
+ uint32_t timestamp = 0;
while (num_frames-- > 0) {
- int64_t capture_time_ms = clock_->TimeInMilliseconds();
- overuse_detector_->FrameCaptured(width, height, capture_time_ms);
+ frame.set_timestamp(timestamp);
+ overuse_detector_->FrameCaptured(frame);
clock_->AdvanceTimeMilliseconds(delay_ms);
- overuse_detector_->FrameSent(capture_time_ms);
+ overuse_detector_->FrameSent(timestamp);
clock_->AdvanceTimeMilliseconds(interval_ms - delay_ms);
+ timestamp += interval_ms * 90;
}
}
+ void ForceUpdate(int width, int height) {
+ // Insert one frame, wait a second and then put in another to force update
+ // the usage. From the tests where these are used, adding another sample
+ // doesn't affect the expected outcome (this is mainly to check initial
+ // values and whether the overuse detector has been reset or not).
+ InsertAndSendFramesWithInterval(2, 1000, width, height, kFrameInterval33ms);
+ }
void TriggerOveruse(int num_times) {
const int kDelayMs = 32;
for (int i = 0; i < num_times; ++i) {
@@ -131,7 +147,7 @@
TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
overuse_detector_.reset(
- new OveruseFrameDetector(clock_.get(), options_, nullptr, this));
+ new OveruseFrameDetector(clock_.get(), options_, nullptr, nullptr, this));
EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(0);
TriggerOveruse(options_.high_threshold_consecutive_count);
EXPECT_CALL(*(observer_.get()), NormalUsage()).Times(0);
@@ -149,8 +165,8 @@
TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
options_.min_process_count = 1;
CpuOveruseObserverImpl overuse_observer;
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(), options_,
- &overuse_observer, this));
+ overuse_detector_.reset(new OveruseFrameDetector(
+ clock_.get(), options_, &overuse_observer, nullptr, this));
InsertAndSendFramesWithInterval(
1200, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
overuse_detector_->Process();
@@ -189,17 +205,18 @@
}
TEST_F(OveruseFrameDetectorTest, ResetAfterResolutionChange) {
+ ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
InsertAndSendFramesWithInterval(
1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
EXPECT_NE(InitialUsage(), UsagePercent());
- // Verify reset.
- InsertAndSendFramesWithInterval(
- 1, kFrameInterval33ms, kWidth, kHeight + 1, kProcessTime5ms);
+ // Verify reset (with new width/height).
+ ForceUpdate(kWidth, kHeight + 1);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) {
+ ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
InsertAndSendFramesWithInterval(
1000, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
@@ -211,6 +228,7 @@
InsertAndSendFramesWithInterval(
2, options_.frame_timeout_interval_ms + 1, kWidth, kHeight,
kProcessTime5ms);
+ ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
@@ -220,91 +238,65 @@
InsertAndSendFramesWithInterval(
40, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
EXPECT_EQ(InitialUsage(), UsagePercent());
+ // Pass time far enough to digest all previous samples.
+ clock_->AdvanceTimeMilliseconds(1000);
+ InsertAndSendFramesWithInterval(1, kFrameInterval33ms, kWidth, kHeight,
+ kProcessTime5ms);
+ // The last sample has not been processed here.
+ EXPECT_EQ(InitialUsage(), UsagePercent());
+
+ // Pass time far enough to digest all previous samples, 41 in total.
+ clock_->AdvanceTimeMilliseconds(1000);
InsertAndSendFramesWithInterval(
1, kFrameInterval33ms, kWidth, kHeight, kProcessTime5ms);
EXPECT_NE(InitialUsage(), UsagePercent());
}
TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
+ ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
-TEST_F(OveruseFrameDetectorTest, FrameDelay_OneFrame) {
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_TwoFrames) {
- const int kProcessingTimeMs1 = 100;
- const int kProcessingTimeMs2 = 50;
- const int kTimeBetweenFramesMs = 200;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs1);
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs1, overuse_detector_->LastProcessingTimeMs());
- clock_->AdvanceTimeMilliseconds(kTimeBetweenFramesMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs2);
- overuse_detector_->FrameSent(66);
- EXPECT_EQ(kProcessingTimeMs2, overuse_detector_->LastProcessingTimeMs());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_MaxQueueSize) {
- const int kMaxQueueSize = 91;
- for (int i = 0; i < kMaxQueueSize * 2; ++i) {
- overuse_detector_->FrameCaptured(kWidth, kHeight, i);
+TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) {
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(testing::AtLeast(1));
+ static const int kIntervalMs = 33;
+ static const size_t kNumFramesEncodingDelay = 3;
+ VideoFrame frame;
+ frame.CreateEmptyFrame(kWidth, kHeight, kWidth, kWidth / 2, kWidth / 2);
+ for (size_t i = 0; i < 1000; ++i) {
+ // Unique timestamps.
+ frame.set_timestamp(static_cast<uint32_t>(i));
+ overuse_detector_->FrameCaptured(frame);
+ clock_->AdvanceTimeMilliseconds(kIntervalMs);
+ if (i > kNumFramesEncodingDelay) {
+ overuse_detector_->FrameSent(
+ static_cast<uint32_t>(i - kNumFramesEncodingDelay));
+ }
+ overuse_detector_->Process();
}
- EXPECT_EQ(kMaxQueueSize, overuse_detector_->FramesInQueue());
}
-TEST_F(OveruseFrameDetectorTest, FrameDelay_NonProcessedFramesRemoved) {
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 35);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 66);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameCaptured(kWidth, kHeight, 99);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(4, overuse_detector_->FramesInQueue());
- overuse_detector_->FrameSent(66);
- // Frame 33, 35 removed, 66 processed, 99 not processed.
- EXPECT_EQ(2 * kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- overuse_detector_->FrameSent(99);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_ResetClearsFrames) {
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- // Verify reset (resolution changed).
- overuse_detector_->FrameCaptured(kWidth, kHeight + 1, 66);
- EXPECT_EQ(1, overuse_detector_->FramesInQueue());
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameSent(66);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
- EXPECT_EQ(0, overuse_detector_->FramesInQueue());
-}
-
-TEST_F(OveruseFrameDetectorTest, FrameDelay_NonMatchingSendFrameIgnored) {
- const int kProcessingTimeMs = 100;
- overuse_detector_->FrameCaptured(kWidth, kHeight, 33);
- clock_->AdvanceTimeMilliseconds(kProcessingTimeMs);
- overuse_detector_->FrameSent(34);
- EXPECT_EQ(-1, overuse_detector_->LastProcessingTimeMs());
- overuse_detector_->FrameSent(33);
- EXPECT_EQ(kProcessingTimeMs, overuse_detector_->LastProcessingTimeMs());
+TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) {
+ // >85% encoding time should trigger overuse.
+ EXPECT_CALL(*(observer_.get()), OveruseDetected()).Times(testing::AtLeast(1));
+ static const int kIntervalMs = 33;
+ static const int kDelayMs = 30;
+ VideoFrame frame;
+ frame.CreateEmptyFrame(kWidth, kHeight, kWidth, kWidth / 2, kWidth / 2);
+ uint32_t timestamp = 0;
+ for (size_t i = 0; i < 1000; ++i) {
+ frame.set_timestamp(timestamp);
+ overuse_detector_->FrameCaptured(frame);
+ // Encode and send first parts almost instantly.
+ clock_->AdvanceTimeMilliseconds(1);
+ overuse_detector_->FrameSent(timestamp);
+ // Encode heavier part, resulting in >85% usage total.
+ clock_->AdvanceTimeMilliseconds(kDelayMs - 1);
+ overuse_detector_->FrameSent(timestamp);
+ clock_->AdvanceTimeMilliseconds(kIntervalMs - kDelayMs);
+ timestamp += kIntervalMs * 90;
+ overuse_detector_->Process();
+ }
}
} // namespace webrtc
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 7371111..8102b93 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -189,9 +189,13 @@
stats_.media_bitrate_bps = bitrate;
}
-void SendStatisticsProxy::CpuOveruseMetricsUpdated(
+void SendStatisticsProxy::OnEncodedFrameTimeMeasured(
+ int encode_time_ms,
const CpuOveruseMetrics& metrics) {
rtc::CritScope lock(&crit_);
+ uma_container_->encode_time_counter_.Add(encode_time_ms);
+ encode_time_.Apply(1.0f, encode_time_ms);
+ stats_.avg_encode_time_ms = round(encode_time_.filtered());
stats_.encode_usage_percent = metrics.encode_usage_percent;
}
@@ -331,13 +335,6 @@
uma_container_->input_height_counter_.Add(height);
}
-void SendStatisticsProxy::OnEncodedFrame(int encode_time_ms) {
- rtc::CritScope lock(&crit_);
- uma_container_->encode_time_counter_.Add(encode_time_ms);
- encode_time_.Apply(1.0f, encode_time_ms);
- stats_.avg_encode_time_ms = round(encode_time_.filtered());
-}
-
void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
uint32_t ssrc,
const RtcpPacketTypeCounter& packet_counter) {
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index f4c3f5a..d2b9b56 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -52,12 +52,6 @@
// Used to update incoming frame rate.
void OnIncomingFrame(int width, int height);
- // Used to update encode time of frames.
- void OnEncodedFrame(int encode_time_ms);
-
- // From VideoEncoderRateObserver.
- void OnSetRates(uint32_t bitrate_bps, int framerate) override;
-
void OnEncoderImplementationName(const char* implementation_name);
void OnOutgoingRate(uint32_t framerate, uint32_t bitrate);
void OnSuspendChange(bool is_suspended);
@@ -67,9 +61,14 @@
// how stats are collected.
void SetContentType(VideoEncoderConfig::ContentType content_type);
+ // Implements VideoEncoderRateObserver.
+ void OnSetRates(uint32_t bitrate_bps, int framerate) override;
+
+ // Implements CpuOveruseMetricsObserver.
+ void OnEncodedFrameTimeMeasured(int encode_time_ms,
+ const CpuOveruseMetrics& metrics) override;
+
protected:
- // From CpuOveruseMetricsObserver.
- void CpuOveruseMetricsUpdated(const CpuOveruseMetrics& metrics) override;
// From RtcpStatisticsCallback.
void StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) override;
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index fc1f3fd..cf54190 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -289,12 +289,15 @@
ExpectEqual(expected_, stats);
}
-TEST_F(SendStatisticsProxyTest, OnEncodedFrame) {
+TEST_F(SendStatisticsProxyTest, OnEncodedFrameTimeMeasured) {
const int kEncodeTimeMs = 11;
- statistics_proxy_->OnEncodedFrame(kEncodeTimeMs);
+ CpuOveruseMetrics metrics;
+ metrics.encode_usage_percent = 80;
+ statistics_proxy_->OnEncodedFrameTimeMeasured(kEncodeTimeMs, metrics);
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(kEncodeTimeMs, stats.avg_encode_time_ms);
+ EXPECT_EQ(metrics.encode_usage_percent, stats.encode_usage_percent);
}
TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index 2b5d638..dfdf5ae 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -14,7 +14,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_render/video_render_defines.h"
@@ -27,15 +26,11 @@
namespace webrtc {
namespace internal {
-VideoCaptureInput::VideoCaptureInput(
- ProcessThread* module_process_thread,
- VideoCaptureCallback* frame_callback,
- VideoRenderer* local_renderer,
- SendStatisticsProxy* stats_proxy,
- CpuOveruseObserver* overuse_observer,
- EncodingTimeObserver* encoding_time_observer)
- : module_process_thread_(module_process_thread),
- frame_callback_(frame_callback),
+VideoCaptureInput::VideoCaptureInput(VideoCaptureCallback* frame_callback,
+ VideoRenderer* local_renderer,
+ SendStatisticsProxy* stats_proxy,
+ OveruseFrameDetector* overuse_detector)
+ : frame_callback_(frame_callback),
local_renderer_(local_renderer),
stats_proxy_(stats_proxy),
encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
@@ -45,19 +40,12 @@
delta_ntp_internal_ms_(
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
TickTime::MillisecondTimestamp()),
- overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
- CpuOveruseOptions(),
- overuse_observer,
- stats_proxy)),
- encoding_time_observer_(encoding_time_observer) {
+ overuse_detector_(overuse_detector) {
encoder_thread_.Start();
encoder_thread_.SetPriority(rtc::kHighPriority);
- module_process_thread_->RegisterModule(overuse_detector_.get());
}
VideoCaptureInput::~VideoCaptureInput() {
- module_process_thread_->DeRegisterModule(overuse_detector_.get());
-
// Stop the thread.
rtc::AtomicOps::ReleaseStore(&stop_, 1);
capture_event_.Set();
@@ -105,9 +93,7 @@
captured_frame_.ShallowCopy(incoming_frame);
last_captured_timestamp_ = incoming_frame.ntp_time_ms();
- overuse_detector_->FrameCaptured(captured_frame_.width(),
- captured_frame_.height(),
- captured_frame_.render_time_ms());
+ overuse_detector_->FrameCaptured(captured_frame_);
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
@@ -121,12 +107,10 @@
bool VideoCaptureInput::EncoderProcess() {
static const int kThreadWaitTimeMs = 100;
- int64_t capture_time = -1;
if (capture_event_.Wait(kThreadWaitTimeMs)) {
if (rtc::AtomicOps::AcquireLoad(&stop_))
return false;
- int64_t encode_start_time = -1;
VideoFrame deliver_frame;
{
rtc::CritScope lock(&crit_);
@@ -136,24 +120,8 @@
}
}
if (!deliver_frame.IsZeroSize()) {
- capture_time = deliver_frame.render_time_ms();
- encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
frame_callback_->DeliverFrame(deliver_frame);
}
- // Update the overuse detector with the duration.
- if (encode_start_time != -1) {
- int encode_time_ms = static_cast<int>(
- Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
- stats_proxy_->OnEncodedFrame(encode_time_ms);
- if (encoding_time_observer_) {
- encoding_time_observer_->OnReportEncodedTime(
- deliver_frame.ntp_time_ms(), encode_time_ms);
- }
- }
- }
- // We're done!
- if (capture_time != -1) {
- overuse_detector_->FrameSent(capture_time);
}
return true;
}
diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h
index 1c27cb0..87b6452 100644
--- a/webrtc/video/video_capture_input.h
+++ b/webrtc/video/video_capture_input.h
@@ -16,7 +16,6 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/event.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
@@ -30,11 +29,7 @@
namespace webrtc {
class Config;
-class CpuOveruseMetricsObserver;
-class CpuOveruseObserver;
class OveruseFrameDetector;
-class ProcessThread;
-class RegistrableCpuOveruseMetricsObserver;
class SendStatisticsProxy;
class VideoRenderer;
@@ -48,12 +43,10 @@
namespace internal {
class VideoCaptureInput : public webrtc::VideoCaptureInput {
public:
- VideoCaptureInput(ProcessThread* module_process_thread,
- VideoCaptureCallback* frame_callback,
+ VideoCaptureInput(VideoCaptureCallback* frame_callback,
VideoRenderer* local_renderer,
SendStatisticsProxy* send_stats_proxy,
- CpuOveruseObserver* overuse_observer,
- EncodingTimeObserver* encoding_time_observer);
+ OveruseFrameDetector* overuse_detector);
~VideoCaptureInput();
void IncomingCapturedFrame(const VideoFrame& video_frame) override;
@@ -64,7 +57,6 @@
bool EncoderProcess();
rtc::CriticalSection crit_;
- ProcessThread* const module_process_thread_;
VideoCaptureCallback* const frame_callback_;
VideoRenderer* const local_renderer_;
@@ -81,8 +73,7 @@
// Delta used for translating between NTP and internal timestamps.
const int64_t delta_ntp_internal_ms_;
- rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
- EncodingTimeObserver* const encoding_time_observer_;
+ OveruseFrameDetector* const overuse_detector_;
};
} // namespace internal
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
index 3fe6e3a..ff1194b 100644
--- a/webrtc/video/video_capture_input_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -15,7 +15,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/event.h"
#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
#include "webrtc/test/fake_texture_frame.h"
@@ -47,28 +46,23 @@
class VideoCaptureInputTest : public ::testing::Test {
protected:
VideoCaptureInputTest()
- : mock_process_thread_(new NiceMock<MockProcessThread>),
- mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
- output_frame_event_(false, false),
- stats_proxy_(Clock::GetRealTimeClock(),
+ : stats_proxy_(Clock::GetRealTimeClock(),
webrtc::VideoSendStream::Config(nullptr),
- webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo) {}
+ webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo),
+ mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
+ output_frame_event_(false, false) {}
virtual void SetUp() {
EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
.WillRepeatedly(
WithArg<0>(Invoke(this, &VideoCaptureInputTest::AddOutputFrame)));
- input_.reset(new internal::VideoCaptureInput(
- mock_process_thread_.get(), mock_frame_callback_.get(), nullptr,
- &stats_proxy_, nullptr, nullptr));
- }
-
- virtual void TearDown() {
- // VideoCaptureInput accesses |mock_process_thread_| in destructor and
- // should
- // be deleted first.
- input_.reset();
+ overuse_detector_.reset(
+ new OveruseFrameDetector(Clock::GetRealTimeClock(), CpuOveruseOptions(),
+ nullptr, nullptr, &stats_proxy_));
+ input_.reset(new internal::VideoCaptureInput(mock_frame_callback_.get(),
+ nullptr, &stats_proxy_,
+ overuse_detector_.get()));
}
void AddInputFrame(VideoFrame* frame) {
@@ -86,9 +80,12 @@
EXPECT_TRUE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
}
- rtc::scoped_ptr<MockProcessThread> mock_process_thread_;
+ SendStatisticsProxy stats_proxy_;
+
rtc::scoped_ptr<MockVideoCaptureCallback> mock_frame_callback_;
+ rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
+
// Used to send input capture frames to VideoCaptureInput.
rtc::scoped_ptr<internal::VideoCaptureInput> input_;
@@ -104,7 +101,6 @@
// The pointers of Y plane buffers of output frames. This is used to verify
// the frame are swapped and not copied.
std::vector<const uint8_t*> output_frame_ybuffers_;
- SendStatisticsProxy stats_proxy_;
};
TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 73cc1a64..2ebbde9 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -46,8 +46,7 @@
public Transport,
public VideoRenderer,
public VideoCaptureInput,
- public EncodedFrameObserver,
- public EncodingTimeObserver {
+ public EncodedFrameObserver {
public:
VideoAnalyzer(test::LayerFilteringTransport* transport,
const std::string& test_label,
@@ -65,6 +64,7 @@
graph_data_output_file_(graph_data_output_file),
graph_title_(graph_title),
ssrc_to_analyze_(ssrc_to_analyze),
+ encode_timing_proxy_(this),
frames_to_process_(duration_frames),
frames_recorded_(0),
frames_processed_(0),
@@ -129,8 +129,7 @@
return receiver_->DeliverPacket(media_type, packet, length, packet_time);
}
- // EncodingTimeObserver.
- void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) override {
+ void MeasuredEncodeTiming(int64_t ntp_time_ms, int encode_time_ms) {
rtc::CritScope crit(&comparison_lock_);
samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
}
@@ -208,7 +207,7 @@
assert(!reference_frame.IsZeroSize());
if (send_timestamp == reference_frame.timestamp() - 1) {
// TODO(ivica): Make this work for > 2 streams.
- // Look at rtp_sender.c:RTPSender::BuildRTPHeader.
+ // Look at RTPSender::BuildRTPHeader.
++send_timestamp;
}
EXPECT_EQ(reference_frame.timestamp(), send_timestamp);
@@ -263,6 +262,8 @@
stats_polling_thread_.Stop();
}
+ EncodedFrameObserver* encode_timing_proxy() { return &encode_timing_proxy_; }
+
VideoCaptureInput* input_;
test::LayerFilteringTransport* const transport_;
PacketReceiver* receiver_;
@@ -329,6 +330,21 @@
double ssim;
};
+ // Forwards the send-side OnEncodeTiming callbacks. A separate class is used
+ // so it does not conflict with the receiver-side pre_decode_callback.
+ class OnEncodeTimingProxy : public EncodedFrameObserver {
+ public:
+ explicit OnEncodeTimingProxy(VideoAnalyzer* parent) : parent_(parent) {}
+
+ void OnEncodeTiming(int64_t ntp_time_ms, int encode_time_ms) override {
+ parent_->MeasuredEncodeTiming(ntp_time_ms, encode_time_ms);
+ }
+ void EncodedFrameCallback(const EncodedFrame& frame) override {}
+
+ private:
+ VideoAnalyzer* const parent_;
+ };
+
void AddFrameComparison(const VideoFrame& reference,
const VideoFrame& render,
bool dropped,
@@ -566,6 +582,7 @@
FILE* const graph_data_output_file_;
const std::string graph_title_;
const uint32_t ssrc_to_analyze_;
+ OnEncodeTimingProxy encode_timing_proxy_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
@@ -976,10 +993,11 @@
recv_transport.SetReceiver(sender_call_->Receiver());
SetupCommon(&analyzer, &recv_transport);
- video_send_config_.encoding_time_observer = &analyzer;
video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
for (auto& config : video_receive_configs_)
config.pre_decode_callback = &analyzer;
+ RTC_DCHECK(!video_send_config_.post_encode_callback);
+ video_send_config_.post_encode_callback = analyzer.encode_timing_proxy();
if (params_.screenshare.enabled)
SetupScreenshare();
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 08dcd77..254805f 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -22,6 +22,7 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/encoder_state_feedback.h"
#include "webrtc/video/payload_router.h"
@@ -107,6 +108,18 @@
return ss.str();
}
+namespace {
+
+CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
+ CpuOveruseOptions options;
+ if (full_overuse_time) {
+ options.low_encode_usage_threshold_percent = 100;
+ options.high_encode_usage_threshold_percent = 120;
+ }
+ return options;
+}
+} // namespace
+
namespace internal {
VideoSendStream::VideoSendStream(
int num_cpu_cores,
@@ -127,6 +140,12 @@
module_process_thread_(module_process_thread),
call_stats_(call_stats),
congestion_controller_(congestion_controller),
+ overuse_detector_(
+ Clock::GetRealTimeClock(),
+ GetCpuOveruseOptions(config.encoder_settings.full_overuse_time),
+ this,
+ config.post_encode_callback,
+ &stats_proxy_),
encoder_feedback_(new EncoderStateFeedback()),
use_config_bitrate_(true) {
LOG(LS_INFO) << "VideoSendStream: " << config_.ToString();
@@ -144,10 +163,10 @@
const std::vector<uint32_t>& ssrcs = config.rtp.ssrcs;
- vie_encoder_.reset(new ViEEncoder(
- num_cpu_cores, module_process_thread_, &stats_proxy_,
- config.pre_encode_callback, congestion_controller_->pacer(),
- bitrate_allocator));
+ vie_encoder_.reset(
+ new ViEEncoder(num_cpu_cores, module_process_thread_, &stats_proxy_,
+ config.pre_encode_callback, &overuse_detector_,
+ congestion_controller_->pacer(), bitrate_allocator));
RTC_CHECK(vie_encoder_->Init());
vie_channel_.reset(new ViEChannel(
@@ -207,8 +226,8 @@
vie_channel_->SetRTCPCName(config_.rtp.c_name.c_str());
input_.reset(new internal::VideoCaptureInput(
- module_process_thread_, vie_encoder_.get(), config_.local_renderer,
- &stats_proxy_, this, config_.encoding_time_observer));
+ vie_encoder_.get(), config_.local_renderer, &stats_proxy_,
+ &overuse_detector_));
// 28 to match packet overhead in ModuleRtpRtcpImpl.
RTC_DCHECK_LE(config_.rtp.max_packet_size, static_cast<size_t>(0xFFFF - 28));
@@ -240,10 +259,13 @@
vie_channel_->RegisterRtcpPacketTypeCounterObserver(&stats_proxy_);
vie_channel_->RegisterSendBitrateObserver(&stats_proxy_);
vie_channel_->RegisterSendFrameCountObserver(&stats_proxy_);
+
+ module_process_thread_->RegisterModule(&overuse_detector_);
}
VideoSendStream::~VideoSendStream() {
LOG(LS_INFO) << "~VideoSendStream: " << config_.ToString();
+ module_process_thread_->DeRegisterModule(&overuse_detector_);
vie_channel_->RegisterSendFrameCountObserver(nullptr);
vie_channel_->RegisterSendBitrateObserver(nullptr);
vie_channel_->RegisterRtcpPacketTypeCounterObserver(nullptr);
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index e2911cc..acd36ec 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -86,6 +86,7 @@
CallStats* const call_stats_;
CongestionController* const congestion_controller_;
+ OveruseFrameDetector overuse_detector_;
rtc::scoped_ptr<VideoCaptureInput> input_;
rtc::scoped_ptr<ViEChannel> vie_channel_;
rtc::scoped_ptr<ViEEncoder> vie_encoder_;
diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc
index 8adf4d0..a9b2abb 100644
--- a/webrtc/video/vie_encoder.cc
+++ b/webrtc/video/vie_encoder.cc
@@ -27,12 +27,13 @@
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-#include "webrtc/modules/video_coding/encoded_frame.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/video/overuse_frame_detector.h"
#include "webrtc/video/payload_router.h"
#include "webrtc/video/send_statistics_proxy.h"
+#include "webrtc/video_frame.h"
namespace webrtc {
@@ -107,6 +108,7 @@
ProcessThread* module_process_thread,
SendStatisticsProxy* stats_proxy,
I420FrameCallback* pre_encode_callback,
+ OveruseFrameDetector* overuse_detector,
PacedSender* pacer,
BitrateAllocator* bitrate_allocator)
: number_of_cores_(number_of_cores),
@@ -118,6 +120,7 @@
send_payload_router_(NULL),
stats_proxy_(stats_proxy),
pre_encode_callback_(pre_encode_callback),
+ overuse_detector_(overuse_detector),
pacer_(pacer),
bitrate_allocator_(bitrate_allocator),
time_of_last_frame_activity_ms_(0),
@@ -461,12 +464,12 @@
if (stats_proxy_ != NULL)
stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
- return send_payload_router_->RoutePayload(
- encoded_image._frameType, payload_type, encoded_image._timeStamp,
- encoded_image.capture_time_ms_, encoded_image._buffer,
- encoded_image._length, fragmentation_header, rtp_video_hdr)
- ? 0
- : -1;
+ bool success = send_payload_router_->RoutePayload(
+ encoded_image._frameType, payload_type, encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, encoded_image._buffer,
+ encoded_image._length, fragmentation_header, rtp_video_hdr);
+ overuse_detector_->FrameSent(encoded_image._timeStamp);
+ return success ? 0 : -1;
}
void ViEEncoder::OnEncoderImplementationName(
diff --git a/webrtc/video/vie_encoder.h b/webrtc/video/vie_encoder.h
index e36cb30..b8944dc 100644
--- a/webrtc/video/vie_encoder.h
+++ b/webrtc/video/vie_encoder.h
@@ -33,6 +33,7 @@
class BitrateObserver;
class Config;
class EncodedImageCallback;
+class OveruseFrameDetector;
class PacedSender;
class PayloadRouter;
class ProcessThread;
@@ -54,6 +55,7 @@
ProcessThread* module_process_thread,
SendStatisticsProxy* stats_proxy,
I420FrameCallback* pre_encode_callback,
+ OveruseFrameDetector* overuse_detector,
PacedSender* pacer,
BitrateAllocator* bitrate_allocator);
~ViEEncoder();
@@ -163,6 +165,7 @@
SendStatisticsProxy* const stats_proxy_;
I420FrameCallback* const pre_encode_callback_;
+ OveruseFrameDetector* const overuse_detector_;
PacedSender* const pacer_;
BitrateAllocator* const bitrate_allocator_;
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index 83a96d3..63acb09 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -26,13 +26,6 @@
class LoadObserver;
class VideoEncoder;
-class EncodingTimeObserver {
- public:
- virtual ~EncodingTimeObserver() {}
-
- virtual void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) = 0;
-};
-
// Class to deliver captured frame to the video send stream.
class VideoCaptureInput {
public:
@@ -91,6 +84,11 @@
// sources anymore.
bool internal_source = false;
+ // Allow 100% encoder utilization. Used for HW encoders where CPU isn't
+ // expected to be the limiting factor, but a chip could be running at
+ // 30fps (for example) exactly.
+ bool full_overuse_time = false;
+
// Uninitialized VideoEncoder instance to be used for encoding. Will be
// initialized from inside the VideoSendStream.
VideoEncoder* encoder = nullptr;
@@ -144,7 +142,9 @@
I420FrameCallback* pre_encode_callback = nullptr;
// Called for each encoded frame, e.g. used for file storage. 'nullptr'
- // disables the callback.
+ // disables the callback. Also measures and reports, per frame, the time
+ // spent on encoding. No timing is reported for a frame whose encoding
+ // outlasts the measuring window, since its sample will have been dropped.
EncodedFrameObserver* post_encode_callback = nullptr;
// Renderer for local preview. The local renderer will be called even if
@@ -164,11 +164,6 @@
// below the minimum configured bitrate. If this variable is false, the
// stream may send at a rate higher than the estimated available bitrate.
bool suspend_below_min_bitrate = false;
-
- // Called for each encoded frame. Passes the total time spent on encoding.
- // TODO(ivica): Consolidate with post_encode_callback:
- // https://code.google.com/p/webrtc/issues/detail?id=5042
- EncodingTimeObserver* encoding_time_observer = nullptr;
};
// Gets interface used to insert captured frames. Valid as long as the