Add send frame rate statistics callback
BUG=2235
R=mflodman@webrtc.org, pbos@webrtc.org, stefan@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/4479005
git-svn-id: http://webrtc.googlecode.com/svn/trunk@5213 4adac7df-926f-26a2-2b94-8c16560cd09d
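
For readers of this CL, a minimal application-side sketch of how the new callback might be consumed to derive a send frame rate. It is not part of the patch: the class name, the use of std::mutex/std::lock_guard in place of WebRTC's CriticalSectionWrapper, and the rate-by-sampling approach are illustrative assumptions; only the FrameCountObserver interface and its FrameCountUpdated() signature come from the diff below.

  // Illustrative sketch, not part of this CL.
  #include <stdint.h>
  #include <map>
  #include <mutex>
  #include "webrtc/common_types.h"

  class SendFrameRateObserver : public webrtc::FrameCountObserver {
   public:
    virtual ~SendFrameRateObserver() {}

    // Invoked by the RTP sender after each sent frame; |frame_count| is the
    // cumulative number of frames of |frame_type| sent on |ssrc| so far.
    virtual void FrameCountUpdated(webrtc::FrameType frame_type,
                                   uint32_t frame_count,
                                   const unsigned int ssrc) {
      std::lock_guard<std::mutex> lock(lock_);
      counts_[ssrc][frame_type] = frame_count;
    }

    // Total frames sent on |ssrc|. Sampling this periodically and dividing
    // the difference by the elapsed time gives the send frame rate.
    uint32_t TotalFramesSent(unsigned int ssrc) {
      std::lock_guard<std::mutex> lock(lock_);
      uint32_t total = 0;
      const std::map<webrtc::FrameType, uint32_t>& per_type = counts_[ssrc];
      for (std::map<webrtc::FrameType, uint32_t>::const_iterator it =
               per_type.begin(); it != per_type.end(); ++it) {
        total += it->second;
      }
      return total;
    }

   private:
    std::mutex lock_;
    std::map<unsigned int, std::map<webrtc::FrameType, uint32_t> > counts_;
  };
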
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 7138b05..a45cf06 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -138,7 +138,6 @@
kFileFormatPcm32kHzFile = 9
};
-
enum ProcessingTypes
{
kPlaybackPerChannel = 0,
@@ -148,6 +147,15 @@
kRecordingPreprocessing
};
+enum FrameType
+{
+ kFrameEmpty = 0,
+ kAudioFrameSpeech = 1,
+ kAudioFrameCN = 2,
+ kVideoFrameKey = 3, // independent frame
+ kVideoFrameDelta = 4, // depends on the previous frame
+};
+
// Interface for encrypting and decrypting regular data and rtp/rtcp packets.
// Implement this interface if you wish to provide an encryption scheme to
// the voice or video engines.
@@ -302,9 +310,9 @@
class FrameCountObserver {
public:
virtual ~FrameCountObserver() {}
- virtual void Notify(const unsigned int key_frames,
- const unsigned int delta_frames,
- const unsigned int ssrc) = 0;
+ virtual void FrameCountUpdated(FrameType frame_type,
+ uint32_t frame_count,
+ const unsigned int ssrc) = 0;
};
// ==================================================================
@@ -322,17 +330,6 @@
int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
};
-enum FrameType
-{
- kFrameEmpty = 0,
- kAudioFrameSpeech = 1,
- kAudioFrameCN = 2,
- kVideoFrameKey = 3, // independent frame
- kVideoFrameDelta = 4, // depends on the previus frame
- kVideoFrameGolden = 5, // depends on a old known previus frame
- kVideoFrameAltRef = 6
-};
-
// RTP
enum {kRtpCsrcSize = 15}; // RFC 3550 page 13
diff --git a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h b/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
index 28ee0f5..9fb8199 100644
--- a/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ b/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -331,6 +331,10 @@
virtual int TimeToSendPadding(int bytes) = 0;
+ virtual void RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) = 0;
+ virtual FrameCountObserver* GetSendFrameCountObserver() const = 0;
+
/**************************************************************************
*
* RTCP
diff --git a/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 472683e..93230da 100644
--- a/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -240,7 +240,10 @@
int32_t());
MOCK_METHOD0(Process,
int32_t());
-
+ MOCK_METHOD1(RegisterSendFrameCountObserver,
+ void(FrameCountObserver*));
+ MOCK_CONST_METHOD0(GetSendFrameCountObserver,
+ FrameCountObserver*(void));
// Members.
unsigned int remote_ssrc_;
};
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 44dcb86..027d17d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -1645,4 +1645,13 @@
return rtt_ms_;
}
+void ModuleRtpRtcpImpl::RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) {
+ rtp_sender_.RegisterFrameCountObserver(observer);
+}
+
+FrameCountObserver* ModuleRtpRtcpImpl::GetSendFrameCountObserver() const {
+ return rtp_sender_.GetFrameCountObserver();
+}
+
} // Namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 1fdec28..c8ab063 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -363,6 +363,10 @@
void OnRequestSendReport();
+ virtual void RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) OVERRIDE;
+ virtual FrameCountObserver* GetSendFrameCountObserver() const OVERRIDE;
+
protected:
void RegisterChildModule(RtpRtcp* module);
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index 0f08573..c717964 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -32,8 +32,6 @@
case kAudioFrameCN: return "audio_cn";
case kVideoFrameKey: return "video_key";
case kVideoFrameDelta: return "video_delta";
- case kVideoFrameGolden: return "video_golden";
- case kVideoFrameAltRef: return "video_altref";
}
return "";
}
@@ -61,7 +59,8 @@
remote_ssrc_(0), sequence_number_forced_(false), ssrc_forced_(false),
timestamp_(0), capture_time_ms_(0), last_timestamp_time_ms_(0),
last_packet_marker_bit_(false), num_csrcs_(0), csrcs_(),
- include_csrcs_(true), rtx_(kRtxOff), payload_type_rtx_(-1) {
+ include_csrcs_(true), rtx_(kRtxOff), payload_type_rtx_(-1),
+ frame_counts_(), frame_count_observer_(NULL) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
memset(csrcs_, 0, sizeof(csrcs_));
@@ -359,14 +358,15 @@
return -1;
}
+ int32_t ret_val;
if (audio_configured_) {
TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp,
"Send", "type", FrameTypeToString(frame_type));
assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN ||
frame_type == kFrameEmpty);
- return audio_->SendAudio(frame_type, payload_type, capture_timestamp,
- payload_data, payload_size, fragmentation);
+ ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp,
+ payload_data, payload_size, fragmentation);
} else {
TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms,
"Send", "type", FrameTypeToString(frame_type));
@@ -380,11 +380,23 @@
return SendPaddingAccordingToBitrate(payload_type, capture_timestamp,
capture_time_ms) ? 0 : -1;
}
- return video_->SendVideo(video_type, frame_type, payload_type,
- capture_timestamp, capture_time_ms, payload_data,
- payload_size, fragmentation, codec_info,
- rtp_type_hdr);
+ ret_val = video_->SendVideo(video_type, frame_type, payload_type,
+ capture_timestamp, capture_time_ms,
+ payload_data, payload_size,
+ fragmentation, codec_info,
+ rtp_type_hdr);
}
+
+ CriticalSectionScoped cs(statistics_crit_.get());
+ uint32_t frame_count = ++frame_counts_[frame_type];
+ if (frame_count_observer_) {
+ frame_count_observer_->FrameCountUpdated(frame_type,
+ frame_count,
+ ssrc_);
+ }
+
+ return ret_val;
}
int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
@@ -1478,4 +1490,16 @@
*length += 2;
}
+void RTPSender::RegisterFrameCountObserver(FrameCountObserver* observer) {
+ CriticalSectionScoped cs(statistics_crit_.get());
+ if (observer != NULL)
+ assert(frame_count_observer_ == NULL);
+ frame_count_observer_ = observer;
+}
+
+FrameCountObserver* RTPSender::GetFrameCountObserver() const {
+ CriticalSectionScoped cs(statistics_crit_.get());
+ return frame_count_observer_;
+}
+
} // namespace webrtc
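
A note on the new counting path in RTPSender::SendOutgoingData(): the per-type count is incremented and the observer is notified while statistics_crit_ is held, so FrameCountUpdated() implementations should return quickly and must not call back into the sender. The standalone model below illustrates the pattern; it assumes std::mutex in place of CriticalSectionWrapper, and the FrameCounter class is hypothetical.

  // Simplified model of the counting added to RTPSender in this CL; the
  // class and method names here are illustrative, not WebRTC APIs.
  #include <stdint.h>
  #include <cstddef>
  #include <map>
  #include <mutex>
  #include "webrtc/common_types.h"

  class FrameCounter {
   public:
    FrameCounter() : observer_(NULL) {}

    void set_observer(webrtc::FrameCountObserver* observer) {
      std::lock_guard<std::mutex> lock(lock_);
      observer_ = observer;
    }

    // Mirrors the tail of RTPSender::SendOutgoingData(): count, then notify.
    void OnFrameSent(webrtc::FrameType type, unsigned int ssrc) {
      std::lock_guard<std::mutex> lock(lock_);
      uint32_t count = ++counts_[type];  // operator[] starts new entries at 0.
      if (observer_)
        observer_->FrameCountUpdated(type, count, ssrc);  // Runs under lock.
    }

   private:
    std::mutex lock_;
    std::map<webrtc::FrameType, uint32_t> counts_;
    webrtc::FrameCountObserver* observer_;
  };
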
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index c78af28..7b7967e 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -260,6 +260,9 @@
int32_t SetFecParameters(const FecProtectionParams *delta_params,
const FecProtectionParams *key_params);
+ virtual void RegisterFrameCountObserver(FrameCountObserver* observer);
+ virtual FrameCountObserver* GetFrameCountObserver() const;
+
protected:
int32_t CheckPayloadType(const int8_t payload_type,
RtpVideoCodecTypes *video_type);
@@ -346,6 +349,8 @@
int rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
+ std::map<FrameType, uint32_t> frame_counts_;
+ FrameCountObserver* frame_count_observer_;
};
} // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index ed4aa82..0bb752f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -717,6 +717,68 @@
EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload)));
}
+TEST_F(RtpSenderTest, FrameCountCallbacks) {
+ class TestCallback : public FrameCountObserver {
+ public:
+ TestCallback()
+ : FrameCountObserver(), num_calls_(0), ssrc_(0),
+ key_frames_(0), delta_frames_(0) {}
+ virtual ~TestCallback() {}
+
+ virtual void FrameCountUpdated(FrameType frame_type,
+ uint32_t frame_count,
+ const unsigned int ssrc) {
+ ++num_calls_;
+ ssrc_ = ssrc;
+ switch (frame_type) {
+ case kVideoFrameDelta:
+ delta_frames_ = frame_count;
+ break;
+ case kVideoFrameKey:
+ key_frames_ = frame_count;
+ break;
+ default:
+ break;
+ }
+ }
+
+ uint32_t num_calls_;
+ uint32_t ssrc_;
+ uint32_t key_frames_;
+ uint32_t delta_frames_;
+ } callback;
+
+ char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
+ const uint8_t payload_type = 127;
+ ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
+ 0, 1500));
+ uint8_t payload[] = {47, 11, 32, 93, 89};
+ rtp_sender_->SetStorePacketsStatus(true, 1);
+ uint32_t ssrc = rtp_sender_->SSRC();
+
+ rtp_sender_->RegisterFrameCountObserver(&callback);
+
+ ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234,
+ 4321, payload, sizeof(payload),
+ NULL));
+
+ EXPECT_EQ(1U, callback.num_calls_);
+ EXPECT_EQ(ssrc, callback.ssrc_);
+ EXPECT_EQ(1U, callback.key_frames_);
+ EXPECT_EQ(0U, callback.delta_frames_);
+
+ ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta,
+ payload_type, 1234, 4321, payload,
+ sizeof(payload), NULL));
+
+ EXPECT_EQ(2U, callback.num_calls_);
+ EXPECT_EQ(ssrc, callback.ssrc_);
+ EXPECT_EQ(1U, callback.key_frames_);
+ EXPECT_EQ(1U, callback.delta_frames_);
+
+ rtp_sender_->RegisterFrameCountObserver(NULL);
+}
+
class RtpSenderAudioTest : public RtpSenderTest {
protected:
RtpSenderAudioTest() {}
diff --git a/webrtc/modules/video_coding/main/source/encoded_frame.cc b/webrtc/modules/video_coding/main/source/encoded_frame.cc
index 6274450..6760762 100644
--- a/webrtc/modules/video_coding/main/source/encoded_frame.cc
+++ b/webrtc/modules/video_coding/main/source/encoded_frame.cc
@@ -174,33 +174,16 @@
webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
{
- switch(frameType)
- {
+ switch(frameType) {
case kKeyFrame:
- {
- return kVideoFrameKey;
- }
+ return kVideoFrameKey;
case kDeltaFrame:
- {
- return kVideoFrameDelta;
- }
- case kGoldenFrame:
- {
- return kVideoFrameGolden;
- }
- case kAltRefFrame:
- {
- return kVideoFrameAltRef;
- }
+ return kVideoFrameDelta;
case kSkipFrame:
- {
- return kFrameEmpty;
- }
+ return kFrameEmpty;
default:
- {
- return kVideoFrameDelta;
- }
- }
+ return kVideoFrameDelta;
+ }
}
VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) {
@@ -209,10 +192,6 @@
return kKeyFrame;
case kVideoFrameDelta:
return kDeltaFrame;
- case kVideoFrameGolden:
- return kGoldenFrame;
- case kVideoFrameAltRef:
- return kAltRefFrame;
default:
assert(false);
return kDeltaFrame;
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/webrtc/modules/video_coding/main/source/jitter_buffer.cc
index 5628200..f11f81b 100644
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.cc
+++ b/webrtc/modules/video_coding/main/source/jitter_buffer.cc
@@ -147,7 +147,6 @@
incomplete_frames_(),
last_decoded_state_(),
first_packet_since_reset_(true),
- receive_statistics_(),
incoming_frame_rate_(0),
incoming_frame_count_(0),
time_last_incoming_frame_count_(0),
@@ -172,7 +171,6 @@
average_packets_per_frame_(0.0f),
frame_counter_(0) {
memset(frame_buffers_, 0, sizeof(frame_buffers_));
- memset(receive_statistics_, 0, sizeof(receive_statistics_));
for (int i = 0; i < kStartNumberOfFrames; i++) {
frame_buffers_[i] = new VCMFrameBuffer();
@@ -218,8 +216,7 @@
assert(max_nack_list_size_ == rhs.max_nack_list_size_);
assert(max_packet_age_to_nack_ == rhs.max_packet_age_to_nack_);
assert(max_incomplete_time_ms_ == rhs.max_incomplete_time_ms_);
- memcpy(receive_statistics_, rhs.receive_statistics_,
- sizeof(receive_statistics_));
+ receive_statistics_ = rhs.receive_statistics_;
nack_seq_nums_.resize(rhs.nack_seq_nums_.size());
missing_sequence_numbers_ = rhs.missing_sequence_numbers_;
latest_received_sequence_number_ = rhs.latest_received_sequence_number_;
@@ -264,7 +261,7 @@
incoming_bit_count_ = 0;
incoming_bit_rate_ = 0;
time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
- memset(receive_statistics_, 0, sizeof(receive_statistics_));
+ receive_statistics_.clear();
num_consecutive_old_frames_ = 0;
num_consecutive_old_packets_ = 0;
@@ -336,13 +333,9 @@
}
// Get received key and delta frames
-void VCMJitterBuffer::FrameStatistics(uint32_t* received_delta_frames,
- uint32_t* received_key_frames) const {
- assert(received_delta_frames);
- assert(received_key_frames);
+std::map<FrameType, uint32_t> VCMJitterBuffer::FrameStatistics() const {
CriticalSectionScoped cs(crit_sect_);
- *received_delta_frames = receive_statistics_[1] + receive_statistics_[3];
- *received_key_frames = receive_statistics_[0] + receive_statistics_[2];
+ return receive_statistics_;
}
int VCMJitterBuffer::num_discarded_packets() const {
@@ -1206,26 +1199,7 @@
// Update receive statistics. We count all layers, thus when you use layers
// adding all key and delta frames might differ from frame count.
if (frame.IsSessionComplete()) {
- switch (frame.FrameType()) {
- case kVideoFrameKey: {
- receive_statistics_[0]++;
- break;
- }
- case kVideoFrameDelta: {
- receive_statistics_[1]++;
- break;
- }
- case kVideoFrameGolden: {
- receive_statistics_[2]++;
- break;
- }
- case kVideoFrameAltRef: {
- receive_statistics_[3]++;
- break;
- }
- default:
- assert(false);
- }
+ ++receive_statistics_[frame.FrameType()];
}
}
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer.h b/webrtc/modules/video_coding/main/source/jitter_buffer.h
index 3716a41..8586f11 100644
--- a/webrtc/modules/video_coding/main/source/jitter_buffer.h
+++ b/webrtc/modules/video_coding/main/source/jitter_buffer.h
@@ -98,10 +98,9 @@
// Empty the jitter buffer of all its data.
void Flush();
- // Get the number of received key and delta frames since the jitter buffer
+ // Get the number of received frames, by type, since the jitter buffer
// was started.
- void FrameStatistics(uint32_t* received_delta_frames,
- uint32_t* received_key_frames) const;
+ std::map<FrameType, uint32_t> FrameStatistics() const;
// The number of packets discarded by the jitter buffer because the decoder
// won't be able to decode them.
@@ -297,8 +296,8 @@
bool first_packet_since_reset_;
// Statistics.
- // Frame counter for each type (key, delta, golden, key-delta).
- unsigned int receive_statistics_[4];
+ // Frame counts for each type (key, delta, ...)
+ std::map<FrameType, uint32_t> receive_statistics_;
// Latest calculated frame rates of incoming stream.
unsigned int incoming_frame_rate_;
unsigned int incoming_frame_count_;
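
Replacing the fixed receive_statistics_[4] array with std::map<FrameType, uint32_t> needs no explicit zero-initialization because std::map::operator[] value-initializes missing entries. A tiny standalone check of that property (assumes nothing beyond the standard library and the FrameType enum from common_types.h):

  #include <cassert>
  #include <stdint.h>
  #include <map>
  #include "webrtc/common_types.h"

  int main() {
    std::map<webrtc::FrameType, uint32_t> counts;
    // A lookup of a type that was never counted creates it with value 0,
    // which is what the jitter buffer unit test relies on before any frames
    // have been inserted.
    assert(counts[webrtc::kVideoFrameKey] == 0);
    // The first ++counts[type] therefore goes 0 -> 1 with no prior insert.
    ++counts[webrtc::kVideoFrameDelta];
    assert(counts[webrtc::kVideoFrameDelta] == 1);
    return 0;
  }
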
diff --git a/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc b/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
index f596d68..e535a8a 100644
--- a/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
+++ b/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
@@ -1632,11 +1632,9 @@
}
TEST_F(TestRunningJitterBuffer, StatisticsTest) {
- uint32_t num_delta_frames = 0;
- uint32_t num_key_frames = 0;
- jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames);
- EXPECT_EQ(0u, num_delta_frames);
- EXPECT_EQ(0u, num_key_frames);
+ std::map<FrameType, uint32_t> frame_stats(jitter_buffer_->FrameStatistics());
+ EXPECT_EQ(0u, frame_stats[kVideoFrameDelta]);
+ EXPECT_EQ(0u, frame_stats[kVideoFrameKey]);
uint32_t framerate = 0;
uint32_t bitrate = 0;
@@ -1654,9 +1652,9 @@
// being decoded.
EXPECT_TRUE(DecodeCompleteFrame());
EXPECT_TRUE(DecodeCompleteFrame());
- jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames);
- EXPECT_EQ(3u, num_delta_frames);
- EXPECT_EQ(2u, num_key_frames);
+ frame_stats = jitter_buffer_->FrameStatistics();
+ EXPECT_EQ(3u, frame_stats[kVideoFrameDelta]);
+ EXPECT_EQ(2u, frame_stats[kVideoFrameKey]);
// Insert 20 more frames to get estimates of bitrate and framerate over
// 1 second.
diff --git a/webrtc/modules/video_coding/main/source/media_optimization.cc b/webrtc/modules/video_coding/main/source/media_optimization.cc
index 37dff6c..27fa681 100644
--- a/webrtc/modules/video_coding/main/source/media_optimization.cc
+++ b/webrtc/modules/video_coding/main/source/media_optimization.cc
@@ -291,8 +291,7 @@
UpdateSentBitrate(now_ms);
UpdateSentFramerate();
if (encoded_length > 0) {
- const bool delta_frame = (encoded_frame_type != kVideoFrameKey &&
- encoded_frame_type != kVideoFrameGolden);
+ const bool delta_frame = (encoded_frame_type != kVideoFrameKey);
frame_dropper_->Fill(encoded_length, delta_frame);
if (max_payload_size_ > 0 && encoded_length > 0) {
diff --git a/webrtc/modules/video_coding/main/source/receiver.cc b/webrtc/modules/video_coding/main/source/receiver.cc
index e0969ef..ae13ddd 100644
--- a/webrtc/modules/video_coding/main/source/receiver.cc
+++ b/webrtc/modules/video_coding/main/source/receiver.cc
@@ -238,8 +238,9 @@
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
assert(frame_count);
- jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
- &frame_count->numKeyFrames);
+ std::map<FrameType, uint32_t> counts(jitter_buffer_.FrameStatistics());
+ frame_count->numDeltaFrames = counts[kVideoFrameDelta];
+ frame_count->numKeyFrames = counts[kVideoFrameKey];
}
uint32_t VCMReceiver::DiscardedPackets() const {
diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc
index 3a0cb74..59c628e 100644
--- a/webrtc/video_engine/vie_channel.cc
+++ b/webrtc/video_engine/vie_channel.cc
@@ -356,6 +356,7 @@
module_process_thread_.DeRegisterModule(rtp_rtcp);
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->RegisterSendFrameCountObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@@ -410,6 +411,8 @@
kRtpExtensionAbsoluteSendTime);
}
rtp_rtcp->SetRtcpXrRrtrStatus(rtp_rtcp_->RtcpXrRrtrStatus());
+ rtp_rtcp->RegisterSendFrameCountObserver(
+ rtp_rtcp_->GetSendFrameCountObserver());
}
// |RegisterSimulcastRtpRtcpModules| resets all old weak pointers and old
// modules can be deleted after this step.
@@ -420,6 +423,7 @@
module_process_thread_.DeRegisterModule(rtp_rtcp);
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->RegisterSendFrameCountObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@@ -1971,4 +1975,15 @@
statistician->ResetStatistics();
}
+void ViEChannel::RegisterSendFrameCountObserver(
+ FrameCountObserver* observer) {
+ rtp_rtcp_->RegisterSendFrameCountObserver(observer);
+ CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+ for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+ it != simulcast_rtp_rtcp_.end();
+ it++) {
+ (*it)->RegisterSendFrameCountObserver(observer);
+ }
+}
+
} // namespace webrtc
diff --git a/webrtc/video_engine/vie_channel.h b/webrtc/video_engine/vie_channel.h
index c9536f1..a16a65d 100644
--- a/webrtc/video_engine/vie_channel.h
+++ b/webrtc/video_engine/vie_channel.h
@@ -326,6 +326,8 @@
void RegisterPreDecodeImageCallback(
EncodedImageCallback* pre_decode_callback);
+ void RegisterSendFrameCountObserver(FrameCountObserver* observer);
+
protected:
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();
diff --git a/webrtc/video_engine/vie_rtp_rtcp_impl.cc b/webrtc/video_engine/vie_rtp_rtcp_impl.cc
index 7d3c8ad..edad270 100644
--- a/webrtc/video_engine/vie_rtp_rtcp_impl.cc
+++ b/webrtc/video_engine/vie_rtp_rtcp_impl.cc
@@ -1178,16 +1178,39 @@
}
int ViERTP_RTCPImpl::RegisterSendFrameCountObserver(
- int channel, FrameCountObserver* callback) {
- // TODO(sprang): Implement
- return -1;
+ int video_channel, FrameCountObserver* callback) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+ ViEId(shared_data_->instance_id(), video_channel),
+ "%s(channel: %d)", __FUNCTION__, video_channel);
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo,
+ ViEId(shared_data_->instance_id(), video_channel),
+ "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+ shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ return -1;
+ }
+ vie_channel->RegisterSendFrameCountObserver(callback);
+ return 0;
}
int ViERTP_RTCPImpl::DeregisterSendFrameCountObserver(
- int channel, FrameCountObserver* callback) {
- // TODO(sprang): Implement
- return -1;
+ int video_channel, FrameCountObserver* callback) {
+ WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+ ViEId(shared_data_->instance_id(), video_channel),
+ "%s(channel: %d)", __FUNCTION__, video_channel);
+ ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ WEBRTC_TRACE(kTraceError, kTraceVideo,
+ ViEId(shared_data_->instance_id(), video_channel),
+ "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+ shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+ return -1;
+ }
+ vie_channel->RegisterSendFrameCountObserver(NULL);
+ return 0;
}
-
} // namespace webrtc
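
Finally, a hedged usage sketch of the public entry points implemented above. The helper function names are hypothetical, and the include paths assume this tree's video_engine/include layout; ViERTP_RTCP::GetInterface()/Release() and the Register/DeregisterSendFrameCountObserver() methods are the API touched by this CL. Deregister before the observer is destroyed, since the channel otherwise keeps a raw pointer to it.

  #include <cstddef>
  #include "webrtc/common_types.h"
  #include "webrtc/video_engine/include/vie_base.h"
  #include "webrtc/video_engine/include/vie_rtp_rtcp.h"

  // Hypothetical helpers showing the intended call pattern.
  void AttachFrameCountObserver(webrtc::VideoEngine* vie,
                                int video_channel,
                                webrtc::FrameCountObserver* observer) {
    webrtc::ViERTP_RTCP* rtp_rtcp = webrtc::ViERTP_RTCP::GetInterface(vie);
    if (rtp_rtcp == NULL)
      return;
    // Returns 0 on success, -1 if |video_channel| does not exist.
    rtp_rtcp->RegisterSendFrameCountObserver(video_channel, observer);
    rtp_rtcp->Release();
  }

  void DetachFrameCountObserver(webrtc::VideoEngine* vie,
                                int video_channel,
                                webrtc::FrameCountObserver* observer) {
    webrtc::ViERTP_RTCP* rtp_rtcp = webrtc::ViERTP_RTCP::GetInterface(vie);
    if (rtp_rtcp == NULL)
      return;
    // Internally registers a NULL observer on the channel and its simulcast
    // modules; call this before |observer| goes away.
    rtp_rtcp->DeregisterSendFrameCountObserver(video_channel, observer);
    rtp_rtcp->Release();
  }
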