Move RTP for synchronization and rename classes, files and variables.
This CL removes (almost) the last RTP references in VideoReceiveStream.
There are still references to RTPFragmentationHeader and SSRCs, which
will be dealt with later.
There are also new GUARDED_BY annotations and a thread checker added
to the synchronization class.
When there are other transports than RTP, there will instead be an
interface + inheritance for RtpStreamReceiver and
RtpStreamsSynchronizer in VideoReceiveStream. This work will be done
when we actually know how we want to make the transport interface.
BUG=webrtc:5838
Review-Url: https://codereview.webrtc.org/2216533002
Cr-Commit-Position: refs/heads/master@{#13655}
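Note on the thread-safety annotations mentioned above: state shared between the
configuration path and Process() is placed behind an rtc::CriticalSection and
tagged GUARDED_BY, while state only touched on the module process thread is
tied to an rtc::ThreadChecker that is detached in the constructor (the class is
constructed on a different thread) and checked at the top of Process(). The CL
itself uses RTC_DCHECK_RUN_ON and ACCESS_ON; the minimal sketch below instead
uses the plain ThreadChecker::CalledOnValidThread() check to stay
self-contained. Class and member names are illustrative, not part of this CL,
and the header paths assume this revision of webrtc/base.

// Illustrative sketch of the annotation pattern; not part of this CL.
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"

namespace webrtc {

class ExampleSynchronizer {
 public:
  ExampleSynchronizer() : channel_id_(-1), last_process_time_ns_(0) {
    // Constructed on one thread, but Process() runs on the module process
    // thread; detach so the first Process() call binds the checker there.
    process_thread_checker_.DetachFromThread();
  }

  // May be called from any thread; takes the lock to update shared state.
  void Configure(int channel_id) {
    rtc::CritScope lock(&crit_);
    channel_id_ = channel_id;
  }

  // Called on the module process thread only.
  void Process() {
    RTC_DCHECK(process_thread_checker_.CalledOnValidThread());
    last_process_time_ns_ = rtc::TimeNanos();  // Process-thread-only state.
    rtc::CritScope lock(&crit_);
    if (channel_id_ == -1)
      return;
    // ... do work that reads channel_id_ under the lock ...
  }

 private:
  rtc::CriticalSection crit_;
  int channel_id_ GUARDED_BY(crit_);

  rtc::ThreadChecker process_thread_checker_;
  int64_t last_process_time_ns_;  // Only accessed on the process thread.
};

}  // namespace webrtc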
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index 1f116bf..84cab26 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -25,6 +25,8 @@
"report_block_stats.h",
"rtp_stream_receiver.cc",
"rtp_stream_receiver.h",
+ "rtp_streams_synchronizer.cc",
+ "rtp_streams_synchronizer.h",
"send_delay_stats.cc",
"send_delay_stats.h",
"send_statistics_proxy.cc",
@@ -47,8 +49,6 @@
"vie_encoder.h",
"vie_remb.cc",
"vie_remb.h",
- "vie_sync_module.cc",
- "vie_sync_module.h",
]
configs += [ "..:common_config" ]
diff --git a/webrtc/video/rtp_stream_receiver.cc b/webrtc/video/rtp_stream_receiver.cc
index e7367d8..d5b547f 100644
--- a/webrtc/video/rtp_stream_receiver.cc
+++ b/webrtc/video/rtp_stream_receiver.cc
@@ -12,6 +12,7 @@
#include <vector>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/common_types.h"
#include "webrtc/config.h"
diff --git a/webrtc/video/vie_sync_module.cc b/webrtc/video/rtp_streams_synchronizer.cc
similarity index 65%
rename from webrtc/video/vie_sync_module.cc
rename to webrtc/video/rtp_streams_synchronizer.cc
index 2e62ff8..75dd8cd 100644
--- a/webrtc/video/vie_sync_module.cc
+++ b/webrtc/video/rtp_streams_synchronizer.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/video/vie_sync_module.h"
+#include "webrtc/video/rtp_streams_synchronizer.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -25,17 +25,17 @@
namespace webrtc {
namespace {
int UpdateMeasurements(StreamSynchronization::Measurements* stream,
- const RtpRtcp& rtp_rtcp, const RtpReceiver& receiver) {
- if (!receiver.Timestamp(&stream->latest_timestamp))
+ RtpRtcp* rtp_rtcp, RtpReceiver* receiver) {
+ if (!receiver->Timestamp(&stream->latest_timestamp))
return -1;
- if (!receiver.LastReceivedTimeMs(&stream->latest_receive_time_ms))
+ if (!receiver->LastReceivedTimeMs(&stream->latest_receive_time_ms))
return -1;
uint32_t ntp_secs = 0;
uint32_t ntp_frac = 0;
uint32_t rtp_timestamp = 0;
- if (rtp_rtcp.RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr,
- &rtp_timestamp) != 0) {
+ if (rtp_rtcp->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr,
+ &rtp_timestamp) != 0) {
return -1;
}
@@ -49,57 +49,69 @@
}
} // namespace
-ViESyncModule::ViESyncModule(vcm::VideoReceiver* video_receiver)
- : video_receiver_(video_receiver),
- clock_(Clock::GetRealTimeClock()),
- rtp_receiver_(nullptr),
- video_rtp_rtcp_(nullptr),
+RtpStreamsSynchronizer::RtpStreamsSynchronizer(
+ vcm::VideoReceiver* video_receiver,
+ RtpStreamReceiver* rtp_stream_receiver)
+ : clock_(Clock::GetRealTimeClock()),
+ video_receiver_(video_receiver),
+ video_rtp_receiver_(rtp_stream_receiver->GetRtpReceiver()),
+ video_rtp_rtcp_(rtp_stream_receiver->rtp_rtcp()),
voe_channel_id_(-1),
voe_sync_interface_(nullptr),
- last_sync_time_(rtc::TimeNanos()),
- sync_() {}
-
-ViESyncModule::~ViESyncModule() {
+ audio_rtp_receiver_(nullptr),
+ audio_rtp_rtcp_(nullptr),
+ sync_(),
+ last_sync_time_(rtc::TimeNanos()) {
+ process_thread_checker_.DetachFromThread();
}
-void ViESyncModule::ConfigureSync(int voe_channel_id,
- VoEVideoSync* voe_sync_interface,
- RtpRtcp* video_rtcp_module,
- RtpReceiver* rtp_receiver) {
+void RtpStreamsSynchronizer::ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface) {
if (voe_channel_id != -1)
RTC_DCHECK(voe_sync_interface);
- rtc::CritScope lock(&data_cs_);
- // Prevent expensive no-ops.
+
+ rtc::CritScope lock(&crit_);
if (voe_channel_id_ == voe_channel_id &&
- voe_sync_interface_ == voe_sync_interface &&
- rtp_receiver_ == rtp_receiver && video_rtp_rtcp_ == video_rtcp_module) {
+ voe_sync_interface_ == voe_sync_interface) {
+ // This prevents expensive no-ops.
return;
}
voe_channel_id_ = voe_channel_id;
voe_sync_interface_ = voe_sync_interface;
- rtp_receiver_ = rtp_receiver;
- video_rtp_rtcp_ = video_rtcp_module;
- sync_.reset(
- new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
+
+ audio_rtp_rtcp_ = nullptr;
+ audio_rtp_receiver_ = nullptr;
+ sync_.reset(nullptr);
+
+ if (voe_channel_id_ != -1) {
+ voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &audio_rtp_rtcp_,
+ &audio_rtp_receiver_);
+ RTC_DCHECK(audio_rtp_rtcp_);
+ RTC_DCHECK(audio_rtp_receiver_);
+ sync_.reset(new StreamSynchronization(video_rtp_rtcp_->SSRC(),
+ voe_channel_id_));
+ }
}
-int64_t ViESyncModule::TimeUntilNextProcess() {
+int64_t RtpStreamsSynchronizer::TimeUntilNextProcess() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
const int64_t kSyncIntervalMs = 1000;
return kSyncIntervalMs -
(rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec;
}
-void ViESyncModule::Process() {
- rtc::CritScope lock(&data_cs_);
- last_sync_time_ = rtc::TimeNanos();
+void RtpStreamsSynchronizer::Process() {
+ RTC_DCHECK_RUN_ON(&process_thread_checker_);
const int current_video_delay_ms = video_receiver_->Delay();
+ last_sync_time_ = rtc::TimeNanos();
+ rtc::CritScope lock(&crit_);
if (voe_channel_id_ == -1) {
return;
}
- assert(video_rtp_rtcp_ && voe_sync_interface_);
- assert(sync_.get());
+ RTC_DCHECK(voe_sync_interface_);
+ RTC_DCHECK(sync_.get());
int audio_jitter_buffer_delay_ms = 0;
int playout_buffer_delay_ms = 0;
@@ -111,22 +123,13 @@
const int current_audio_delay_ms = audio_jitter_buffer_delay_ms +
playout_buffer_delay_ms;
- RtpRtcp* voice_rtp_rtcp = nullptr;
- RtpReceiver* voice_receiver = nullptr;
- if (voe_sync_interface_->GetRtpRtcp(voe_channel_id_, &voice_rtp_rtcp,
- &voice_receiver) != 0) {
- return;
- }
- assert(voice_rtp_rtcp);
- assert(voice_receiver);
-
- if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
- *rtp_receiver_) != 0) {
+ if (UpdateMeasurements(&video_measurement_, video_rtp_rtcp_,
+ video_rtp_receiver_) != 0) {
return;
}
- if (UpdateMeasurements(&audio_measurement_, *voice_rtp_rtcp,
- *voice_receiver) != 0) {
+ if (UpdateMeasurements(&audio_measurement_, audio_rtp_rtcp_,
+ audio_rtp_receiver_) != 0) {
return;
}
@@ -158,9 +161,9 @@
video_receiver_->SetMinimumPlayoutDelay(target_video_delay_ms);
}
-bool ViESyncModule::GetStreamSyncOffsetInMs(const VideoFrame& frame,
- int64_t* stream_offset_ms) const {
- rtc::CritScope lock(&data_cs_);
+bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs(
+ const VideoFrame& frame, int64_t* stream_offset_ms) const {
+ rtc::CritScope lock(&crit_);
if (voe_channel_id_ == -1)
return false;
diff --git a/webrtc/video/rtp_streams_synchronizer.h b/webrtc/video/rtp_streams_synchronizer.h
new file mode 100644
index 0000000..082bec7
--- /dev/null
+++ b/webrtc/video/rtp_streams_synchronizer.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// RtpStreamsSynchronizer is responsible for synchronizing audio and video for
+// a given voice engine channel and video receive stream.
+
+#ifndef WEBRTC_VIDEO_RTP_STREAMS_SYNCHRONIZER_H_
+#define WEBRTC_VIDEO_RTP_STREAMS_SYNCHRONIZER_H_
+
+#include <memory>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/modules/include/module.h"
+#include "webrtc/video/rtp_stream_receiver.h"
+#include "webrtc/video/stream_synchronization.h"
+
+namespace webrtc {
+
+class Clock;
+class VideoFrame;
+class VoEVideoSync;
+
+namespace vcm {
+class VideoReceiver;
+} // namespace vcm
+
+class RtpStreamsSynchronizer : public Module {
+ public:
+ RtpStreamsSynchronizer(vcm::VideoReceiver* vcm,
+ RtpStreamReceiver* rtp_stream_receiver);
+
+ void ConfigureSync(int voe_channel_id,
+ VoEVideoSync* voe_sync_interface);
+
+ // Implements Module.
+ int64_t TimeUntilNextProcess() override;
+ void Process() override;
+
+ // Gets the sync offset between the current played out audio frame and the
+ // video |frame|. Returns true on success, false otherwise.
+ bool GetStreamSyncOffsetInMs(const VideoFrame& frame,
+ int64_t* stream_offset_ms) const;
+
+ private:
+ Clock* const clock_;
+ vcm::VideoReceiver* const video_receiver_;
+ RtpReceiver* const video_rtp_receiver_;
+ RtpRtcp* const video_rtp_rtcp_;
+
+ rtc::CriticalSection crit_;
+ int voe_channel_id_ GUARDED_BY(crit_);
+ VoEVideoSync* voe_sync_interface_ GUARDED_BY(crit_);
+ RtpReceiver* audio_rtp_receiver_ GUARDED_BY(crit_);
+ RtpRtcp* audio_rtp_rtcp_ GUARDED_BY(crit_);
+ std::unique_ptr<StreamSynchronization> sync_ GUARDED_BY(crit_);
+ StreamSynchronization::Measurements audio_measurement_ GUARDED_BY(crit_);
+ StreamSynchronization::Measurements video_measurement_ GUARDED_BY(crit_);
+
+ rtc::ThreadChecker process_thread_checker_;
+ int64_t last_sync_time_ ACCESS_ON(&process_thread_checker_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_RTP_STREAMS_SYNCHRONIZER_H_
diff --git a/webrtc/video/stream_synchronization.cc b/webrtc/video/stream_synchronization.cc
index 3727f8f..ff38973 100644
--- a/webrtc/video/stream_synchronization.cc
+++ b/webrtc/video/stream_synchronization.cc
@@ -26,35 +26,14 @@
// Minimum difference between audio and video to warrant a change.
static const int kMinDeltaMs = 30;
-struct ViESyncDelay {
- ViESyncDelay() {
- extra_video_delay_ms = 0;
- last_video_delay_ms = 0;
- extra_audio_delay_ms = 0;
- last_audio_delay_ms = 0;
- network_delay = 120;
- }
-
- int extra_video_delay_ms;
- int last_video_delay_ms;
- int extra_audio_delay_ms;
- int last_audio_delay_ms;
- int network_delay;
-};
-
StreamSynchronization::StreamSynchronization(uint32_t video_primary_ssrc,
int audio_channel_id)
- : channel_delay_(new ViESyncDelay),
- video_primary_ssrc_(video_primary_ssrc),
+ : video_primary_ssrc_(video_primary_ssrc),
audio_channel_id_(audio_channel_id),
base_target_delay_ms_(0),
avg_diff_ms_(0) {
}
-StreamSynchronization::~StreamSynchronization() {
- delete channel_delay_;
-}
-
bool StreamSynchronization::ComputeRelativeDelay(
const Measurements& audio_measurement,
const Measurements& video_measurement,
@@ -94,7 +73,6 @@
int current_video_delay_ms = *total_video_delay_target_ms;
LOG(LS_VERBOSE) << "Audio delay: " << current_audio_delay_ms
- << ", network delay diff: " << channel_delay_->network_delay
<< " current diff: " << relative_delay_ms
<< " for channel " << audio_channel_id_;
// Calculate the difference between the lowest possible video delay and
@@ -120,78 +98,78 @@
if (diff_ms > 0) {
// The minimum video delay is longer than the current audio delay.
// We need to decrease extra video delay, or add extra audio delay.
- if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
+ if (channel_delay_.extra_video_delay_ms > base_target_delay_ms_) {
// We have extra delay added to ViE. Reduce this delay before adding
// extra delay to VoE.
- channel_delay_->extra_video_delay_ms -= diff_ms;
- channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
- } else { // channel_delay_->extra_video_delay_ms > 0
+ channel_delay_.extra_video_delay_ms -= diff_ms;
+ channel_delay_.extra_audio_delay_ms = base_target_delay_ms_;
+ } else { // channel_delay_.extra_video_delay_ms > 0
// We have no extra video delay to remove, increase the audio delay.
- channel_delay_->extra_audio_delay_ms += diff_ms;
- channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
+ channel_delay_.extra_audio_delay_ms += diff_ms;
+ channel_delay_.extra_video_delay_ms = base_target_delay_ms_;
}
} else { // if (diff_ms > 0)
// The video delay is lower than the current audio delay.
// We need to decrease extra audio delay, or add extra video delay.
- if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
+ if (channel_delay_.extra_audio_delay_ms > base_target_delay_ms_) {
// We have extra delay in VoiceEngine.
// Start with decreasing the voice delay.
// Note: diff_ms is negative; add the negative difference.
- channel_delay_->extra_audio_delay_ms += diff_ms;
- channel_delay_->extra_video_delay_ms = base_target_delay_ms_;
- } else { // channel_delay_->extra_audio_delay_ms > base_target_delay_ms_
+ channel_delay_.extra_audio_delay_ms += diff_ms;
+ channel_delay_.extra_video_delay_ms = base_target_delay_ms_;
+ } else { // channel_delay_.extra_audio_delay_ms > base_target_delay_ms_
// We have no extra delay in VoiceEngine, increase the video delay.
// Note: diff_ms is negative; subtract the negative difference.
- channel_delay_->extra_video_delay_ms -= diff_ms; // X - (-Y) = X + Y.
- channel_delay_->extra_audio_delay_ms = base_target_delay_ms_;
+ channel_delay_.extra_video_delay_ms -= diff_ms; // X - (-Y) = X + Y.
+ channel_delay_.extra_audio_delay_ms = base_target_delay_ms_;
}
}
// Make sure that video is never below our target.
- channel_delay_->extra_video_delay_ms = std::max(
- channel_delay_->extra_video_delay_ms, base_target_delay_ms_);
+ channel_delay_.extra_video_delay_ms = std::max(
+ channel_delay_.extra_video_delay_ms, base_target_delay_ms_);
int new_video_delay_ms;
- if (channel_delay_->extra_video_delay_ms > base_target_delay_ms_) {
- new_video_delay_ms = channel_delay_->extra_video_delay_ms;
+ if (channel_delay_.extra_video_delay_ms > base_target_delay_ms_) {
+ new_video_delay_ms = channel_delay_.extra_video_delay_ms;
} else {
// No change to the extra video delay. We are changing audio and we only
// allow to change one at the time.
- new_video_delay_ms = channel_delay_->last_video_delay_ms;
+ new_video_delay_ms = channel_delay_.last_video_delay_ms;
}
// Make sure that we don't go below the extra video delay.
new_video_delay_ms = std::max(
- new_video_delay_ms, channel_delay_->extra_video_delay_ms);
+ new_video_delay_ms, channel_delay_.extra_video_delay_ms);
// Verify we don't go above the maximum allowed video delay.
new_video_delay_ms =
std::min(new_video_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
int new_audio_delay_ms;
- if (channel_delay_->extra_audio_delay_ms > base_target_delay_ms_) {
- new_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
+ if (channel_delay_.extra_audio_delay_ms > base_target_delay_ms_) {
+ new_audio_delay_ms = channel_delay_.extra_audio_delay_ms;
} else {
// No change to the audio delay. We are changing video and we only
// allow to change one at the time.
- new_audio_delay_ms = channel_delay_->last_audio_delay_ms;
+ new_audio_delay_ms = channel_delay_.last_audio_delay_ms;
}
// Make sure that we don't go below the extra audio delay.
new_audio_delay_ms = std::max(
- new_audio_delay_ms, channel_delay_->extra_audio_delay_ms);
+ new_audio_delay_ms, channel_delay_.extra_audio_delay_ms);
// Verify we don't go above the maximum allowed audio delay.
new_audio_delay_ms =
std::min(new_audio_delay_ms, base_target_delay_ms_ + kMaxDeltaDelayMs);
// Remember our last audio and video delays.
- channel_delay_->last_video_delay_ms = new_video_delay_ms;
- channel_delay_->last_audio_delay_ms = new_audio_delay_ms;
+ channel_delay_.last_video_delay_ms = new_video_delay_ms;
+ channel_delay_.last_audio_delay_ms = new_audio_delay_ms;
LOG(LS_VERBOSE) << "Sync video delay " << new_video_delay_ms
<< " for video primary SSRC " << video_primary_ssrc_
- << " and audio delay " << channel_delay_->extra_audio_delay_ms
+ << " and audio delay " << channel_delay_.extra_audio_delay_ms
<< " for audio channel " << audio_channel_id_;
// Return values.
@@ -202,17 +180,17 @@
void StreamSynchronization::SetTargetBufferingDelay(int target_delay_ms) {
// Initial extra delay for audio (accounting for existing extra delay).
- channel_delay_->extra_audio_delay_ms +=
+ channel_delay_.extra_audio_delay_ms +=
target_delay_ms - base_target_delay_ms_;
- channel_delay_->last_audio_delay_ms +=
+ channel_delay_.last_audio_delay_ms +=
target_delay_ms - base_target_delay_ms_;
// The video delay is compared to the last value (and how much we can update
// is limited by that as well).
- channel_delay_->last_video_delay_ms +=
+ channel_delay_.last_video_delay_ms +=
target_delay_ms - base_target_delay_ms_;
- channel_delay_->extra_video_delay_ms +=
+ channel_delay_.extra_video_delay_ms +=
target_delay_ms - base_target_delay_ms_;
// Video is already delayed by the desired amount.
diff --git a/webrtc/video/stream_synchronization.h b/webrtc/video/stream_synchronization.h
index cb7c110..f231cfb 100644
--- a/webrtc/video/stream_synchronization.h
+++ b/webrtc/video/stream_synchronization.h
@@ -18,8 +18,6 @@
namespace webrtc {
-struct ViESyncDelay;
-
class StreamSynchronization {
public:
struct Measurements {
@@ -30,7 +28,6 @@
};
StreamSynchronization(uint32_t video_primary_ssrc, int audio_channel_id);
- ~StreamSynchronization();
bool ComputeDelays(int relative_delay_ms,
int current_audio_delay_ms,
@@ -48,7 +45,14 @@
void SetTargetBufferingDelay(int target_delay_ms);
private:
- ViESyncDelay* channel_delay_;
+ struct SynchronizationDelays {
+ int extra_video_delay_ms = 0;
+ int last_video_delay_ms = 0;
+ int extra_audio_delay_ms = 0;
+ int last_audio_delay_ms = 0;
+ };
+
+ SynchronizationDelays channel_delay_;
const uint32_t video_primary_ssrc_;
const int audio_channel_id_;
int base_target_delay_ms_;
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index 3c2139c..b07759b 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -27,6 +27,7 @@
#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video_receive_stream.h"
+#include "webrtc/voice_engine/include/voe_video_sync.h"
namespace webrtc {
@@ -176,7 +177,7 @@
&stats_proxy_,
process_thread_,
congestion_controller_->GetRetransmissionRateLimiter()),
- vie_sync_(&video_receiver_) {
+ rtp_stream_sync_(&video_receiver_, &rtp_stream_receiver_) {
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();
RTC_DCHECK(process_thread_);
@@ -204,14 +205,14 @@
video_receiver_.SetRenderDelay(config.render_delay_ms);
process_thread_->RegisterModule(&video_receiver_);
- process_thread_->RegisterModule(&vie_sync_);
+ process_thread_->RegisterModule(&rtp_stream_sync_);
}
VideoReceiveStream::~VideoReceiveStream() {
LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString();
Stop();
- process_thread_->DeRegisterModule(&vie_sync_);
+ process_thread_->DeRegisterModule(&rtp_stream_sync_);
process_thread_->DeRegisterModule(&video_receiver_);
// Deregister external decoders so they are no longer running during
@@ -285,13 +286,10 @@
int audio_channel_id) {
if (voice_engine && audio_channel_id != -1) {
VoEVideoSync* voe_sync_interface = VoEVideoSync::GetInterface(voice_engine);
- vie_sync_.ConfigureSync(audio_channel_id, voe_sync_interface,
- rtp_stream_receiver_.rtp_rtcp(),
- rtp_stream_receiver_.GetRtpReceiver());
+ rtp_stream_sync_.ConfigureSync(audio_channel_id, voe_sync_interface);
voe_sync_interface->Release();
} else {
- vie_sync_.ConfigureSync(-1, nullptr, rtp_stream_receiver_.rtp_rtcp(),
- rtp_stream_receiver_.GetRtpReceiver());
+ rtp_stream_sync_.ConfigureSync(-1, nullptr);
}
}
@@ -310,7 +308,7 @@
// function itself, another in GetChannel() and a third in
// GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function
// succeeds most of the time, which leads to grabbing a fourth lock.
- if (vie_sync_.GetStreamSyncOffsetInMs(video_frame, &sync_offset_ms)) {
+ if (rtp_stream_sync_.GetStreamSyncOffsetInMs(video_frame, &sync_offset_ms)) {
// TODO(tommi): OnSyncOffsetUpdated grabs a lock.
stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms);
}
diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h
index ba5e4e9..3cffb4b 100644
--- a/webrtc/video/video_receive_stream.h
+++ b/webrtc/video/video_receive_stream.h
@@ -22,6 +22,7 @@
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video/rtp_stream_receiver.h"
+#include "webrtc/video/rtp_streams_synchronizer.h"
#include "webrtc/video/video_stream_decoder.h"
#include "webrtc/video_receive_stream.h"
@@ -31,6 +32,7 @@
class CongestionController;
class IvfFileWriter;
class ProcessThread;
+class RTPFragmentationHeader;
class VoiceEngine;
class VieRemb;
@@ -101,7 +103,7 @@
ReceiveStatisticsProxy stats_proxy_;
RtpStreamReceiver rtp_stream_receiver_;
std::unique_ptr<VideoStreamDecoder> video_stream_decoder_;
- ViESyncModule vie_sync_;
+ RtpStreamsSynchronizer rtp_stream_sync_;
std::unique_ptr<IvfFileWriter> ivf_writer_;
};
diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h
index 11f0b03..465c8c4 100644
--- a/webrtc/video/video_stream_decoder.h
+++ b/webrtc/video/video_stream_decoder.h
@@ -23,7 +23,6 @@
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/typedefs.h"
-#include "webrtc/video/vie_sync_module.h"
namespace webrtc {
@@ -34,7 +33,6 @@
class I420FrameCallback;
class ReceiveStatisticsProxy;
class VideoRenderCallback;
-class VoEVideoSync;
namespace vcm {
class VideoReceiver;
diff --git a/webrtc/video/vie_sync_module.h b/webrtc/video/vie_sync_module.h
deleted file mode 100644
index 18b6c5d..0000000
--- a/webrtc/video/vie_sync_module.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// ViESyncModule is responsible for synchronization audio and video for a given
-// VoE and ViE channel couple.
-
-#ifndef WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
-#define WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
-
-#include <memory>
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/modules/include/module.h"
-#include "webrtc/video/stream_synchronization.h"
-#include "webrtc/voice_engine/include/voe_video_sync.h"
-
-namespace webrtc {
-
-class Clock;
-class RtpRtcp;
-class VideoFrame;
-class ViEChannel;
-class VoEVideoSync;
-
-namespace vcm {
-class VideoReceiver;
-} // namespace vcm
-
-class ViESyncModule : public Module {
- public:
- explicit ViESyncModule(vcm::VideoReceiver* vcm);
- ~ViESyncModule();
-
- void ConfigureSync(int voe_channel_id,
- VoEVideoSync* voe_sync_interface,
- RtpRtcp* video_rtcp_module,
- RtpReceiver* rtp_receiver);
-
- // Implements Module.
- int64_t TimeUntilNextProcess() override;
- void Process() override;
-
- // Gets the sync offset between the current played out audio frame and the
- // video |frame|. Returns true on success, false otherwise.
- bool GetStreamSyncOffsetInMs(const VideoFrame& frame,
- int64_t* stream_offset_ms) const;
-
- private:
- rtc::CriticalSection data_cs_;
- vcm::VideoReceiver* const video_receiver_;
- Clock* const clock_;
- RtpReceiver* rtp_receiver_;
- RtpRtcp* video_rtp_rtcp_;
- int voe_channel_id_;
- VoEVideoSync* voe_sync_interface_;
- int64_t last_sync_time_;
- std::unique_ptr<StreamSynchronization> sync_;
- StreamSynchronization::Measurements audio_measurement_;
- StreamSynchronization::Measurements video_measurement_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_VIE_SYNC_MODULE_H_
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index 5ae8b5f..6b00e69 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -38,6 +38,8 @@
'video/report_block_stats.h',
'video/rtp_stream_receiver.cc',
'video/rtp_stream_receiver.h',
+ 'video/rtp_streams_synchronizer.cc',
+ 'video/rtp_streams_synchronizer.h',
'video/send_delay_stats.cc',
'video/send_delay_stats.h',
'video/send_statistics_proxy.cc',
@@ -60,8 +62,6 @@
'video/vie_encoder.h',
'video/vie_remb.cc',
'video/vie_remb.h',
- 'video/vie_sync_module.cc',
- 'video/vie_sync_module.h',
],
},
}