Move processing of frame meta data for OnFrame/OnRenderedFrame to the worker thread
Bug: webrtc:11489
Change-Id: I9f88fec0aef449fd8923c5eec81cddf9ee42316b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/174220
Commit-Queue: Tommi <tommi@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31199}
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 1cb61f5..4f84b02 100644
--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -774,8 +774,10 @@
}
void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
- rtc::CritScope cs(&critical_section_rtt_);
- rtt_ms_ = rtt_ms;
+ {
+ rtc::CritScope cs(&critical_section_rtt_);
+ rtt_ms_ = rtt_ms;
+ }
if (rtp_sender_) {
rtp_sender_->packet_history.SetRtt(rtt_ms);
}
diff --git a/video/BUILD.gn b/video/BUILD.gn
index d708504..2404281 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -74,6 +74,7 @@
"../api/rtc_event_log",
"../api/task_queue",
"../api/transport/media:media_transport_interface",
+ "../api/units:timestamp",
"../api/video:encoded_image",
"../api/video:recordable_encoded_frame",
"../api/video:video_bitrate_allocation",
diff --git a/video/receive_statistics_proxy2.cc b/video/receive_statistics_proxy2.cc
index bf2eda0..c16dd8a 100644
--- a/video/receive_statistics_proxy2.cc
+++ b/video/receive_statistics_proxy2.cc
@@ -23,6 +23,7 @@
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
+#include "video/video_receive_stream2.h"
namespace webrtc {
namespace internal {
@@ -500,15 +501,14 @@
videocontenttypehelpers::IsScreenshare(last_content_type_));
}
-void ReceiveStatisticsProxy::QualitySample() {
- RTC_DCHECK_RUN_ON(&incoming_render_queue_);
+void ReceiveStatisticsProxy::QualitySample(Timestamp now) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
- int64_t now = clock_->TimeInMilliseconds();
- if (last_sample_time_ + kMinSampleLengthMs > now)
+ if (last_sample_time_ + kMinSampleLengthMs > now.ms())
return;
double fps =
- render_fps_tracker_.ComputeRateForInterval(now - last_sample_time_);
+ render_fps_tracker_.ComputeRateForInterval(now.ms() - last_sample_time_);
absl::optional<int> qp = qp_sample_.Avg(1);
bool prev_fps_bad = !fps_threshold_.IsHigh().value_or(true);
@@ -531,36 +531,37 @@
bool any_bad = fps_bad || qp_bad || variance_bad;
if (!prev_any_bad && any_bad) {
- RTC_LOG(LS_INFO) << "Bad call (any) start: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (any) start: " << now.ms();
} else if (prev_any_bad && !any_bad) {
- RTC_LOG(LS_INFO) << "Bad call (any) end: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (any) end: " << now.ms();
}
if (!prev_fps_bad && fps_bad) {
- RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (fps) start: " << now.ms();
} else if (prev_fps_bad && !fps_bad) {
- RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (fps) end: " << now.ms();
}
if (!prev_qp_bad && qp_bad) {
- RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (qp) start: " << now.ms();
} else if (prev_qp_bad && !qp_bad) {
- RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (qp) end: " << now.ms();
}
if (!prev_variance_bad && variance_bad) {
- RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (variance) start: " << now.ms();
} else if (prev_variance_bad && !variance_bad) {
- RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now;
+ RTC_LOG(LS_INFO) << "Bad call (variance) end: " << now.ms();
}
- RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: " << (now - last_sample_time_)
- << " fps: " << fps << " fps_bad: " << fps_bad
- << " qp: " << qp.value_or(-1) << " qp_bad: " << qp_bad
+ RTC_LOG(LS_VERBOSE) << "SAMPLE: sample_length: "
+ << (now.ms() - last_sample_time_) << " fps: " << fps
+ << " fps_bad: " << fps_bad << " qp: " << qp.value_or(-1)
+ << " qp_bad: " << qp_bad
<< " variance_bad: " << variance_bad
<< " fps_variance: " << fps_variance;
- last_sample_time_ = now;
+ last_sample_time_ = now.ms();
qp_sample_.Reset();
if (fps_threshold_.IsHigh() || variance_threshold_.IsHigh() ||
@@ -807,8 +808,6 @@
// See VCMDecodedFrameCallback::Decoded for info on what thread/queue we may
// be on.
// RTC_DCHECK_RUN_ON(&decode_queue_);
- // TODO(bugs.webrtc.org/11489): - Same as OnRenderedFrame. Both called from
- // within VideoStreamDecoder::FrameToRender
rtc::CritScope lock(&crit_);
@@ -826,7 +825,8 @@
video_quality_observer_.reset(new VideoQualityObserver());
}
- video_quality_observer_->OnDecodedFrame(frame, qp, last_codec_type_);
+ video_quality_observer_->OnDecodedFrame(frame.timestamp(), qp,
+ last_codec_type_);
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[content_type];
@@ -874,49 +874,46 @@
last_decoded_frame_time_ms_.emplace(now_ms);
}
-void ReceiveStatisticsProxy::OnRenderedFrame(const VideoFrame& frame) {
- // See information in OnDecodedFrame for calling context.
- // TODO(bugs.webrtc.org/11489): Consider posting the work to the worker
- // thread.
- // - Called from VideoReceiveStream::OnFrame.
+void ReceiveStatisticsProxy::OnRenderedFrame(
+ const VideoFrameMetaData& frame_meta) {
+ RTC_DCHECK_RUN_ON(&main_thread_);
+ // Called from VideoReceiveStream2::OnFrame.
- int width = frame.width();
- int height = frame.height();
- RTC_DCHECK_GT(width, 0);
- RTC_DCHECK_GT(height, 0);
- int64_t now_ms = clock_->TimeInMilliseconds();
+ RTC_DCHECK_GT(frame_meta.width, 0);
+ RTC_DCHECK_GT(frame_meta.height, 0);
+
+ // TODO(bugs.webrtc.org/11489): Remove lock once sync isn't needed.
rtc::CritScope lock(&crit_);
- // TODO(bugs.webrtc.org/11489): Lose the dependency on |frame| here, just
- // include the frame metadata so that this can be done asynchronously without
- // blocking the decoder thread.
- video_quality_observer_->OnRenderedFrame(frame, now_ms);
+ video_quality_observer_->OnRenderedFrame(frame_meta);
ContentSpecificStats* content_specific_stats =
&content_specific_stats_[last_content_type_];
- renders_fps_estimator_.Update(1, now_ms);
+ renders_fps_estimator_.Update(1, frame_meta.decode_timestamp.ms());
++stats_.frames_rendered;
- stats_.width = width;
- stats_.height = height;
+ stats_.width = frame_meta.width;
+ stats_.height = frame_meta.height;
render_fps_tracker_.AddSamples(1);
- render_pixel_tracker_.AddSamples(sqrt(width * height));
- content_specific_stats->received_width.Add(width);
- content_specific_stats->received_height.Add(height);
+ render_pixel_tracker_.AddSamples(sqrt(frame_meta.width * frame_meta.height));
+ content_specific_stats->received_width.Add(frame_meta.width);
+ content_specific_stats->received_height.Add(frame_meta.height);
// Consider taking stats_.render_delay_ms into account.
- const int64_t time_until_rendering_ms = frame.render_time_ms() - now_ms;
+ const int64_t time_until_rendering_ms =
+ frame_meta.render_time_ms() - frame_meta.decode_timestamp.ms();
if (time_until_rendering_ms < 0) {
sum_missed_render_deadline_ms_ += -time_until_rendering_ms;
++num_delayed_frames_rendered_;
}
- if (frame.ntp_time_ms() > 0) {
- int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
+ if (frame_meta.ntp_time_ms > 0) {
+ int64_t delay_ms =
+ clock_->CurrentNtpInMilliseconds() - frame_meta.ntp_time_ms;
if (delay_ms >= 0) {
content_specific_stats->e2e_delay_counter.Add(delay_ms);
}
}
- QualitySample();
+ QualitySample(frame_meta.decode_timestamp);
}
void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
diff --git a/video/receive_statistics_proxy2.h b/video/receive_statistics_proxy2.h
index 86a015e..a8b3824 100644
--- a/video/receive_statistics_proxy2.h
+++ b/video/receive_statistics_proxy2.h
@@ -18,6 +18,7 @@
#include "absl/types/optional.h"
#include "api/task_queue/task_queue_base.h"
+#include "api/units/timestamp.h"
#include "call/video_receive_stream.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/include/video_coding_defines.h"
@@ -41,6 +42,8 @@
struct CodecSpecificInfo;
namespace internal {
+// Declared in video_receive_stream2.h.
+struct VideoFrameMetaData;
class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
public RtcpCnameCallback,
@@ -61,7 +64,7 @@
void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
int64_t sync_offset_ms,
double estimated_freq_khz);
- void OnRenderedFrame(const VideoFrame& frame);
+ void OnRenderedFrame(const VideoFrameMetaData& frame_meta);
void OnIncomingPayloadType(int payload_type);
void OnDecoderImplementationName(const char* implementation_name);
@@ -130,7 +133,7 @@
rtc::HistogramPercentileCounter interframe_delay_percentiles;
};
- void QualitySample() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void QualitySample(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Removes info about old frames and then updates the framerate.
void UpdateFramerate(int64_t now_ms) const
diff --git a/video/receive_statistics_proxy2_unittest.cc b/video/receive_statistics_proxy2_unittest.cc
index bcc96cd..7ad71dc 100644
--- a/video/receive_statistics_proxy2_unittest.cc
+++ b/video/receive_statistics_proxy2_unittest.cc
@@ -28,6 +28,7 @@
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/run_loop.h"
+#include "video/video_receive_stream2.h"
namespace webrtc {
namespace internal {
@@ -63,6 +64,10 @@
return CreateVideoFrame(width, height, 0);
}
+ VideoFrame CreateFrameWithRenderTime(Timestamp render_time) {
+ return CreateFrameWithRenderTimeMs(render_time.ms());
+ }
+
VideoFrame CreateFrameWithRenderTimeMs(int64_t render_time_ms) {
return CreateVideoFrame(kWidth, kHeight, render_time_ms);
}
@@ -79,6 +84,19 @@
return frame;
}
+ // Return the current fake time as a Timestamp.
+ Timestamp Now() { return fake_clock_.CurrentTime(); }
+
+ // Creates a VideoFrameMetaData instance with a timestamp.
+ VideoFrameMetaData MetaData(const VideoFrame& frame, Timestamp ts) {
+ return VideoFrameMetaData(frame, ts);
+ }
+
+ // Creates a VideoFrameMetaData instance with the current fake time.
+ VideoFrameMetaData MetaData(const VideoFrame& frame) {
+ return VideoFrameMetaData(frame, Now());
+ }
+
SimulatedClock fake_clock_;
const VideoReceiveStream::Config config_;
std::unique_ptr<ReceiveStatisticsProxy> statistics_proxy_;
@@ -321,12 +339,12 @@
for (size_t i = 0; i < VideoQualityObserver::kMinFrameSamplesToDetectFreeze;
++i) {
fake_clock_.AdvanceTimeMilliseconds(30);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
// Freeze.
fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
stats = statistics_proxy_->GetStats();
EXPECT_EQ(1u, stats.freeze_count);
@@ -339,12 +357,12 @@
ASSERT_EQ(0u, stats.total_pauses_duration_ms);
webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
// Pause.
fake_clock_.AdvanceTimeMilliseconds(5432);
statistics_proxy_->OnStreamInactive();
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
stats = statistics_proxy_->GetStats();
EXPECT_EQ(1u, stats.pause_count);
@@ -361,10 +379,10 @@
// Pause -> Frame -> Pause
fake_clock_.AdvanceTimeMilliseconds(5000);
statistics_proxy_->OnStreamInactive();
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(30);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(5000);
statistics_proxy_->OnStreamInactive();
@@ -387,7 +405,7 @@
for (int i = 0; i <= 10; ++i) {
fake_clock_.AdvanceTimeMilliseconds(30);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
stats = statistics_proxy_->GetStats();
@@ -401,7 +419,7 @@
webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
for (int i = 0; i <= 10; ++i) {
fake_clock_.AdvanceTimeMilliseconds(30);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
stats = statistics_proxy_->GetStats();
@@ -434,7 +452,7 @@
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight);
for (uint32_t i = 1; i <= 3; ++i) {
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);
}
}
@@ -633,7 +651,7 @@
for (int i = 0; i < kNumBadSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kBadFameIntervalMs);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
statistics_proxy_->UpdateHistograms(absl::nullopt, counters, nullptr);
EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.BadCall.Any"));
@@ -898,7 +916,7 @@
frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds());
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0,
VideoContentType::UNSPECIFIED);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(1000 / kDefaultFps);
}
@@ -916,7 +934,7 @@
EXPECT_EQ(0, statistics_proxy_->GetStats().height);
EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
- statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight));
+ statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight)));
EXPECT_EQ(kWidth, statistics_proxy_->GetStats().width);
EXPECT_EQ(kHeight, statistics_proxy_->GetStats().height);
@@ -925,8 +943,9 @@
TEST_F(ReceiveStatisticsProxy2Test,
ReceivedFrameHistogramsAreNotUpdatedForTooFewSamples) {
- for (int i = 0; i < kMinRequiredSamples - 1; ++i)
- statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight));
+ for (int i = 0; i < kMinRequiredSamples - 1; ++i) {
+ statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight)));
+ }
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -941,8 +960,9 @@
}
TEST_F(ReceiveStatisticsProxy2Test, ReceivedFrameHistogramsAreUpdated) {
- for (int i = 0; i < kMinRequiredSamples; ++i)
- statistics_proxy_->OnRenderedFrame(CreateFrame(kWidth, kHeight));
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight)));
+ }
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -966,8 +986,8 @@
VideoContentType::UNSPECIFIED);
// Frame not delayed, delayed frames to render: 0%.
- const int64_t kNowMs = fake_clock_.TimeInMilliseconds();
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTime(Now())));
// Min run time has passed.
fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000));
@@ -988,8 +1008,8 @@
VideoContentType::UNSPECIFIED);
// Frame not delayed, delayed frames to render: 0%.
- const int64_t kNowMs = fake_clock_.TimeInMilliseconds();
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTime(Now())));
// Min run time has not passed.
fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000) -
@@ -1024,8 +1044,8 @@
VideoContentType::UNSPECIFIED);
// Frame delayed 1 ms, delayed frames to render: 100%.
- const int64_t kNowMs = fake_clock_.TimeInMilliseconds();
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 1));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTimeMs(Now().ms() - 1)));
// Min run time has passed.
fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000));
@@ -1048,11 +1068,16 @@
VideoContentType::UNSPECIFIED);
// Two frames delayed (6 ms, 10 ms), delayed frames to render: 50%.
- const int64_t kNowMs = fake_clock_.TimeInMilliseconds();
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 10));
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs - 6));
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs));
- statistics_proxy_->OnRenderedFrame(CreateFrameWithRenderTimeMs(kNowMs + 1));
+ const int64_t kNowMs = Now().ms();
+
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTimeMs(kNowMs - 10)));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTimeMs(kNowMs - 6)));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTimeMs(kNowMs)));
+ statistics_proxy_->OnRenderedFrame(
+ MetaData(CreateFrameWithRenderTimeMs(kNowMs + 1)));
// Min run time has passed.
fake_clock_.AdvanceTimeMilliseconds((metrics::kMinRunTimeInSeconds * 1000));
@@ -1159,17 +1184,17 @@
// Add a very long frame. This is need to verify that average frame
// duration, which is supposed to be calculated as mean of durations of
// last 30 frames, is calculated correctly.
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(2000);
for (size_t i = 0;
i <= VideoQualityObserver::kAvgInterframeDelaysWindowSizeFrames; ++i) {
fake_clock_.AdvanceTimeMilliseconds(frame_duration_ms_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
fake_clock_.AdvanceTimeMilliseconds(freeze_duration_ms_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
stats = statistics_proxy_->GetStats();
EXPECT_EQ(stats.freeze_count, expected_freeze_count_);
@@ -1292,8 +1317,8 @@
statistics_proxy_->OnStreamInactive();
fake_clock_.AdvanceTimeMilliseconds(5000);
- // Insert two more frames. The interval during the pause should be disregarded
- // in the stats.
+ // Insert two more frames. The interval during the pause should be
+ // disregarded in the stats.
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
@@ -1332,13 +1357,13 @@
for (int i = 0; i < kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// Add extra freeze.
fake_clock_.AdvanceTimeMilliseconds(kFreezeDelayMs);
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -1377,20 +1402,20 @@
for (int i = 0; i < kMinRequiredSamples; ++i) {
fake_clock_.AdvanceTimeMilliseconds(kFrameDurationMs);
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
}
// Freezes and pauses should be included into harmonic frame rate.
// Add freeze.
fake_clock_.AdvanceTimeMilliseconds(kFreezeDurationMs);
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
// Add pause.
fake_clock_.AdvanceTimeMilliseconds(kPauseDurationMs);
statistics_proxy_->OnStreamInactive();
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -1420,7 +1445,7 @@
for (int i = 0; i <= kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// Add a pause.
@@ -1430,7 +1455,7 @@
// Second playback interval with triple the length.
for (int i = 0; i <= kMinRequiredSamples * 3; ++i) {
statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
@@ -1472,7 +1497,8 @@
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
- // No freezes should be detected, as all long inter-frame delays were pauses.
+ // No freezes should be detected, as all long inter-frame delays were
+ // pauses.
if (videocontenttypehelpers::IsScreenshare(content_type_)) {
EXPECT_METRIC_EQ(-1, metrics::MinSample(
"WebRTC.Video.Screenshare.MeanFreezeDurationMs"));
@@ -1491,18 +1517,18 @@
for (int i = 0; i < kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, 0,
content_type_);
- statistics_proxy_->OnRenderedFrame(frame_hd);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_hd));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// SD frames.
for (int i = 0; i < 2 * kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame_sd, absl::nullopt, 0,
content_type_);
- statistics_proxy_->OnRenderedFrame(frame_sd);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_sd));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// Extra last frame.
- statistics_proxy_->OnRenderedFrame(frame_sd);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_sd));
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -1526,18 +1552,18 @@
// High quality frames.
for (int i = 0; i < kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame, kLowQp, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// Blocky frames.
for (int i = 0; i < 2 * kMinRequiredSamples; ++i) {
statistics_proxy_->OnDecodedFrame(frame, kHighQp, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
}
// Extra last frame.
statistics_proxy_->OnDecodedFrame(frame, kHighQp, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame));
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
@@ -1564,15 +1590,15 @@
// Call once to pass content type.
statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, 0, content_type_);
- statistics_proxy_->OnRenderedFrame(frame_hd);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_hd));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
// Downscale.
- statistics_proxy_->OnRenderedFrame(frame_sd);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_sd));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
// Downscale.
- statistics_proxy_->OnRenderedFrame(frame_ld);
+ statistics_proxy_->OnRenderedFrame(MetaData(frame_ld));
fake_clock_.AdvanceTimeMilliseconds(kInterFrameDelayMs);
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
@@ -1581,8 +1607,8 @@
if (videocontenttypehelpers::IsScreenshare(content_type_)) {
EXPECT_METRIC_EQ(
kExpectedDownscales,
- metrics::MinSample(
- "WebRTC.Video.Screenshare.NumberResolutionDownswitchesPerMinute"));
+ metrics::MinSample("WebRTC.Video.Screenshare."
+ "NumberResolutionDownswitchesPerMinute"));
} else {
EXPECT_METRIC_EQ(kExpectedDownscales,
metrics::MinSample(
diff --git a/video/rtp_streams_synchronizer.h b/video/rtp_streams_synchronizer.h
index 60e2c8e..00ef526 100644
--- a/video/rtp_streams_synchronizer.h
+++ b/video/rtp_streams_synchronizer.h
@@ -25,6 +25,11 @@
class Syncable;
+// TODO(bugs.webrtc.org/11489): Remove dependency on ProcessThread/Module.
+// Instead make this a single threaded class, constructed on a TQ and
+// post a 1 sec timer there. There shouldn't be a need for locking internally
+// and the callback from this class should occur on the construction TQ
+// which in turn means that the callback doesn't need locking either.
class RtpStreamsSynchronizer : public Module {
public:
explicit RtpStreamsSynchronizer(Syncable* syncable_video);
diff --git a/video/video_quality_observer2.cc b/video/video_quality_observer2.cc
index b1282c1..0751d3f 100644
--- a/video/video_quality_observer2.cc
+++ b/video/video_quality_observer2.cc
@@ -18,6 +18,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/metrics.h"
+#include "video/video_receive_stream2.h"
namespace webrtc {
namespace internal {
@@ -133,20 +134,22 @@
RTC_LOG(LS_INFO) << log_stream.str();
}
-void VideoQualityObserver::OnRenderedFrame(const VideoFrame& frame,
- int64_t now_ms) {
- RTC_DCHECK_LE(last_frame_rendered_ms_, now_ms);
- RTC_DCHECK_LE(last_unfreeze_time_ms_, now_ms);
+void VideoQualityObserver::OnRenderedFrame(
+ const VideoFrameMetaData& frame_meta) {
+ RTC_DCHECK_LE(last_frame_rendered_ms_, frame_meta.decode_timestamp.ms());
+ RTC_DCHECK_LE(last_unfreeze_time_ms_, frame_meta.decode_timestamp.ms());
if (num_frames_rendered_ == 0) {
- first_frame_rendered_ms_ = last_unfreeze_time_ms_ = now_ms;
+ first_frame_rendered_ms_ = last_unfreeze_time_ms_ =
+ frame_meta.decode_timestamp.ms();
}
- auto blocky_frame_it = blocky_frames_.find(frame.timestamp());
+ auto blocky_frame_it = blocky_frames_.find(frame_meta.rtp_timestamp);
if (num_frames_rendered_ > 0) {
// Process inter-frame delay.
- const int64_t interframe_delay_ms = now_ms - last_frame_rendered_ms_;
+ const int64_t interframe_delay_ms =
+ frame_meta.decode_timestamp.ms() - last_frame_rendered_ms_;
const double interframe_delays_secs = interframe_delay_ms / 1000.0;
// Sum of squared inter frame intervals is used to calculate the harmonic
@@ -172,7 +175,7 @@
freezes_durations_.Add(interframe_delay_ms);
smooth_playback_durations_.Add(last_frame_rendered_ms_ -
last_unfreeze_time_ms_);
- last_unfreeze_time_ms_ = now_ms;
+ last_unfreeze_time_ms_ = frame_meta.decode_timestamp.ms();
} else {
// Count spatial metrics if there were no freeze.
time_in_resolution_ms_[current_resolution_] += interframe_delay_ms;
@@ -193,14 +196,15 @@
smooth_playback_durations_.Add(last_frame_rendered_ms_ -
last_unfreeze_time_ms_);
}
- last_unfreeze_time_ms_ = now_ms;
+ last_unfreeze_time_ms_ = frame_meta.decode_timestamp.ms();
if (num_frames_rendered_ > 0) {
- pauses_durations_.Add(now_ms - last_frame_rendered_ms_);
+ pauses_durations_.Add(frame_meta.decode_timestamp.ms() -
+ last_frame_rendered_ms_);
}
}
- int64_t pixels = frame.width() * frame.height();
+ int64_t pixels = frame_meta.width * frame_meta.height;
if (pixels >= kPixelsInHighResolution) {
current_resolution_ = Resolution::High;
} else if (pixels >= kPixelsInMediumResolution) {
@@ -214,7 +218,7 @@
}
last_frame_pixels_ = pixels;
- last_frame_rendered_ms_ = now_ms;
+ last_frame_rendered_ms_ = frame_meta.decode_timestamp.ms();
is_last_frame_blocky_ = blocky_frame_it != blocky_frames_.end();
if (is_last_frame_blocky_) {
@@ -224,36 +228,37 @@
++num_frames_rendered_;
}
-void VideoQualityObserver::OnDecodedFrame(const VideoFrame& frame,
+void VideoQualityObserver::OnDecodedFrame(uint32_t rtp_frame_timestamp,
absl::optional<uint8_t> qp,
VideoCodecType codec) {
- if (qp) {
- absl::optional<int> qp_blocky_threshold;
- // TODO(ilnik): add other codec types when we have QP for them.
- switch (codec) {
- case kVideoCodecVP8:
- qp_blocky_threshold = kBlockyQpThresholdVp8;
- break;
- case kVideoCodecVP9:
- qp_blocky_threshold = kBlockyQpThresholdVp9;
- break;
- default:
- qp_blocky_threshold = absl::nullopt;
+ if (!qp)
+ return;
+
+ absl::optional<int> qp_blocky_threshold;
+ // TODO(ilnik): add other codec types when we have QP for them.
+ switch (codec) {
+ case kVideoCodecVP8:
+ qp_blocky_threshold = kBlockyQpThresholdVp8;
+ break;
+ case kVideoCodecVP9:
+ qp_blocky_threshold = kBlockyQpThresholdVp9;
+ break;
+ default:
+ qp_blocky_threshold = absl::nullopt;
+ }
+
+ RTC_DCHECK(blocky_frames_.find(rtp_frame_timestamp) == blocky_frames_.end());
+
+ if (qp_blocky_threshold && *qp > *qp_blocky_threshold) {
+ // Cache blocky frame. Its duration will be calculated in render callback.
+ if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) {
+ RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache.";
+ blocky_frames_.erase(
+ blocky_frames_.begin(),
+ std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2));
}
- RTC_DCHECK(blocky_frames_.find(frame.timestamp()) == blocky_frames_.end());
-
- if (qp_blocky_threshold && *qp > *qp_blocky_threshold) {
- // Cache blocky frame. Its duration will be calculated in render callback.
- if (blocky_frames_.size() > kMaxNumCachedBlockyFrames) {
- RTC_LOG(LS_WARNING) << "Overflow of blocky frames cache.";
- blocky_frames_.erase(
- blocky_frames_.begin(),
- std::next(blocky_frames_.begin(), kMaxNumCachedBlockyFrames / 2));
- }
-
- blocky_frames_.insert(frame.timestamp());
- }
+ blocky_frames_.insert(rtp_frame_timestamp);
}
}
diff --git a/video/video_quality_observer2.h b/video/video_quality_observer2.h
index 615e0d3..ed5a0b9 100644
--- a/video/video_quality_observer2.h
+++ b/video/video_quality_observer2.h
@@ -19,12 +19,13 @@
#include "absl/types/optional.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_content_type.h"
-#include "api/video/video_frame.h"
#include "rtc_base/numerics/moving_average.h"
#include "rtc_base/numerics/sample_counter.h"
namespace webrtc {
namespace internal {
+// Declared in video_receive_stream2.h.
+struct VideoFrameMetaData;
// Calculates spatial and temporal quality metrics and reports them to UMA
// stats.
@@ -35,11 +36,11 @@
VideoQualityObserver();
~VideoQualityObserver() = default;
- void OnDecodedFrame(const VideoFrame& frame,
+ void OnDecodedFrame(uint32_t rtp_frame_timestamp,
absl::optional<uint8_t> qp,
VideoCodecType codec);
- void OnRenderedFrame(const VideoFrame& frame, int64_t now_ms);
+ void OnRenderedFrame(const VideoFrameMetaData& frame_meta);
void OnStreamInactive();
diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc
index 0af17d5..9f40c45 100644
--- a/video/video_receive_stream2.cc
+++ b/video/video_receive_stream2.cc
@@ -274,6 +274,7 @@
RTC_LOG(LS_INFO) << "~VideoReceiveStream2: " << config_.ToString();
Stop();
process_thread_->DeRegisterModule(&rtp_stream_sync_);
+ task_safety_flag_->SetNotAlive();
}
void VideoReceiveStream2::SignalNetworkState(NetworkState state) {
@@ -491,28 +492,31 @@
return base_minimum_playout_delay_ms_;
}
-// TODO(bugs.webrtc.org/11489): This method grabs a lock 6 times.
void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) {
- int64_t video_playout_ntp_ms;
- int64_t sync_offset_ms;
- double estimated_freq_khz;
- // TODO(bugs.webrtc.org/11489): GetStreamSyncOffsetInMs grabs three locks. One
- // inside the function itself, another in GetChannel() and a third in
- // GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function
- // succeeds most of the time, which leads to grabbing a fourth lock.
- if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
- video_frame.timestamp(), video_frame.render_time_ms(),
- &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
- // TODO(bugs.webrtc.org/11489): OnSyncOffsetUpdated grabs a lock.
- stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
- estimated_freq_khz);
- }
+ VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime());
+
+ worker_thread_->PostTask(
+ ToQueuedTask(task_safety_flag_, [frame_meta, this]() {
+ RTC_DCHECK_RUN_ON(&worker_sequence_checker_);
+ int64_t video_playout_ntp_ms;
+ int64_t sync_offset_ms;
+ double estimated_freq_khz;
+ // TODO(bugs.webrtc.org/11489): GetStreamSyncOffsetInMs grabs three
+ // locks. One inside the function itself, another in GetChannel() and a
+ // third in GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming
+ // the function succeeds most of the time, which leads to grabbing a
+ // fourth lock.
+ if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
+ frame_meta.rtp_timestamp, frame_meta.render_time_ms(),
+ &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
+ stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
+ estimated_freq_khz);
+ }
+ stats_proxy_.OnRenderedFrame(frame_meta);
+ }));
+
source_tracker_.OnFrameDelivered(video_frame.packet_infos());
-
config_.renderer->OnFrame(video_frame);
-
- // TODO(bugs.webrtc.org/11489): OnRenderFrame grabs a lock too.
- stats_proxy_.OnRenderedFrame(video_frame);
}
void VideoReceiveStream2::SetFrameDecryptor(
diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h
index 2a0c07c..66fbc05 100644
--- a/video/video_receive_stream2.h
+++ b/video/video_receive_stream2.h
@@ -16,6 +16,7 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/media/media_transport_interface.h"
+#include "api/units/timestamp.h"
#include "api/video/recordable_encoded_frame.h"
#include "call/rtp_packet_sink_interface.h"
#include "call/syncable.h"
@@ -45,6 +46,33 @@
namespace internal {
+// Utility struct for grabbing metadata from a VideoFrame and processing it
+// asynchronously without needing the actual frame data.
+// Additionally the caller can bundle information from the current clock
+// when the metadata is captured, for accurate reporting and not needing
+// multiple calls to clock->Now().
+struct VideoFrameMetaData {
+ VideoFrameMetaData(const webrtc::VideoFrame& frame, Timestamp now)
+ : rtp_timestamp(frame.timestamp()),
+ timestamp_us(frame.timestamp_us()),
+ ntp_time_ms(frame.ntp_time_ms()),
+ width(frame.width()),
+ height(frame.height()),
+ decode_timestamp(now) {}
+
+ int64_t render_time_ms() const {
+ return timestamp_us / rtc::kNumMicrosecsPerMillisec;
+ }
+
+ const uint32_t rtp_timestamp;
+ const int64_t timestamp_us;
+ const int64_t ntp_time_ms;
+ const int width;
+ const int height;
+
+ const Timestamp decode_timestamp;
+};
+
class VideoReceiveStream2 : public webrtc::VideoReceiveStream,
public rtc::VideoSinkInterface<VideoFrame>,
public NackSender,
@@ -225,6 +253,10 @@
// Defined last so they are destroyed before all other members.
rtc::TaskQueue decode_queue_;
+
+ // Used to signal destruction to potentially pending tasks.
+ PendingTaskSafetyFlag::Pointer task_safety_flag_ =
+ PendingTaskSafetyFlag::Create();
};
} // namespace internal
} // namespace webrtc