Report encoded frame size in VideoSendStream.
Reports the encoded width/height per simulcast substream in VideoSendStream stats (cleared after a timeout in SendStatisticsProxy) and implements reporting the transmitted frame size in WebRtcVideoEngine2 as the largest resolution across substreams.
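
For context (not part of the patch): a minimal sketch, using stand-in struct
definitions, of the aggregation policy the WebRtcVideoEngine2 change implements.
Each simulcast substream reports the resolution of its last encoded frame, and
the sender info exposes the largest width and height seen across substreams.
The structs and the AggregateSendResolution() helper below are illustrative
only and mirror the loop added to webrtcvideoengine2.cc.

    #include <stdint.h>
    #include <map>

    // Illustrative stand-ins for webrtc::SsrcStats and cricket::VideoSenderInfo.
    struct SsrcStats { int sent_width; int sent_height; };
    struct SenderInfo { int send_frame_width; int send_frame_height; };

    // Hypothetical helper: report the largest encoded resolution seen across
    // all simulcast substreams, as the per-sender stats in this change do.
    SenderInfo AggregateSendResolution(
        const std::map<uint32_t, SsrcStats>& substreams) {
      SenderInfo info = {0, 0};
      for (std::map<uint32_t, SsrcStats>::const_iterator it = substreams.begin();
           it != substreams.end(); ++it) {
        if (it->second.sent_width > info.send_frame_width)
          info.send_frame_width = it->second.sent_width;
        if (it->second.sent_height > info.send_frame_height)
          info.send_frame_height = it->second.sent_height;
      }
      return info;
    }
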
R=mflodman@webrtc.org, stefan@webrtc.org
BUG=4033
Review URL: https://webrtc-codereview.appspot.com/33399004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@7772 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/webrtc/fakewebrtcvideoengine.h b/talk/media/webrtc/fakewebrtcvideoengine.h
index f729ddd..914b885 100644
--- a/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -724,6 +724,8 @@
}
WEBRTC_STUB(GetVersion, (char version[1024]));
WEBRTC_STUB(LastError, ());
+ WEBRTC_VOID_STUB(RegisterSendStatisticsProxy,
+ (int, webrtc::SendStatisticsProxy*));
// webrtc::ViECodec
WEBRTC_FUNC_CONST(NumberOfCodecs, ()) {
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index 06dee05..17adba7 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -1836,6 +1836,8 @@
info.framerate_input = stats.input_frame_rate;
info.framerate_sent = stats.encode_frame_rate;
+ info.send_frame_width = 0;
+ info.send_frame_height = 0;
for (std::map<uint32_t, webrtc::SsrcStats>::iterator it =
stats.substreams.begin();
it != stats.substreams.end();
@@ -1847,6 +1849,10 @@
stream_stats.rtp_stats.padding_bytes;
info.packets_sent += stream_stats.rtp_stats.packets;
info.packets_lost += stream_stats.rtcp_stats.cumulative_lost;
+ if (stream_stats.sent_width > info.send_frame_width)
+ info.send_frame_width = stream_stats.sent_width;
+ if (stream_stats.sent_height > info.send_frame_height)
+ info.send_frame_height = stream_stats.sent_height;
}
if (!stats.substreams.empty()) {
@@ -1865,10 +1871,6 @@
&last_captured_frame_format);
info.input_frame_width = last_captured_frame_format.width;
info.input_frame_height = last_captured_frame_format.height;
- info.send_frame_width =
- static_cast<int>(parameters_.encoder_config.streams.front().width);
- info.send_frame_height =
- static_cast<int>(parameters_.encoder_config.streams.front().height);
}
// TODO(pbos): Support or remove the following stats.
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 6af5540..db0b044 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -122,8 +122,14 @@
++num_swapped_frames_;
last_frame_.SwapFrame(frame);
}
-webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() const {
- return webrtc::VideoSendStream::Stats();
+
+void FakeVideoSendStream::SetStats(
+ const webrtc::VideoSendStream::Stats& stats) {
+ stats_ = stats;
+}
+
+webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() {
+ return stats_;
}
bool FakeVideoSendStream::ReconfigureVideoEncoder(
@@ -1873,4 +1879,22 @@
EXPECT_EQ(webrtc::Call::kNetworkUp, fake_call_->GetNetworkState());
}
+TEST_F(WebRtcVideoChannel2Test, GetStatsReportsUpperResolution) {
+ FakeVideoSendStream* stream = AddSendStream();
+ webrtc::VideoSendStream::Stats stats;
+ stats.substreams[17].sent_width = 123;
+ stats.substreams[17].sent_height = 40;
+ stats.substreams[42].sent_width = 80;
+ stats.substreams[42].sent_height = 31;
+ stats.substreams[11].sent_width = 20;
+ stats.substreams[11].sent_height = 90;
+ stream->SetStats(stats);
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(cricket::StatsOptions(), &info));
+ ASSERT_EQ(1u, info.senders.size());
+ EXPECT_EQ(123, info.senders[0].send_frame_width);
+ EXPECT_EQ(90, info.senders[0].send_frame_height);
+}
+
} // namespace cricket
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.h b/talk/media/webrtc/webrtcvideoengine2_unittest.h
index 1826e9c..dcfaf86 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.h
@@ -51,10 +51,11 @@
int GetNumberOfSwappedFrames() const;
int GetLastWidth() const;
int GetLastHeight() const;
+ void SetStats(const webrtc::VideoSendStream::Stats& stats);
private:
virtual void SwapFrame(webrtc::I420VideoFrame* frame) OVERRIDE;
- virtual webrtc::VideoSendStream::Stats GetStats() const OVERRIDE;
+ virtual webrtc::VideoSendStream::Stats GetStats() OVERRIDE;
virtual bool ReconfigureVideoEncoder(
const webrtc::VideoEncoderConfig& config) OVERRIDE;
@@ -71,6 +72,7 @@
webrtc::VideoCodecVP8 vp8_settings_;
int num_swapped_frames_;
webrtc::I420VideoFrame last_frame_;
+ webrtc::VideoSendStream::Stats stats_;
};
class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
diff --git a/webrtc/config.h b/webrtc/config.h
index 7da7093..cf41ae6 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -25,12 +25,16 @@
SsrcStats()
: key_frames(0),
delta_frames(0),
+ sent_width(0),
+ sent_height(0),
total_bitrate_bps(0),
retransmit_bitrate_bps(0),
avg_delay_ms(0),
max_delay_ms(0) {}
uint32_t key_frames;
uint32_t delta_frames;
+ int sent_width;
+ int sent_height;
// TODO(holmer): Move bitrate_bps out to the webrtc::Call layer.
int total_bitrate_bps;
int retransmit_bitrate_bps;
diff --git a/webrtc/modules/utility/source/video_coder.cc b/webrtc/modules/utility/source/video_coder.cc
index e0d969d..5471e80 100644
--- a/webrtc/modules/utility/source/video_coder.cc
+++ b/webrtc/modules/utility/source/video_coder.cc
@@ -11,6 +11,7 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "webrtc/modules/utility/source/video_coder.h"
+#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
namespace webrtc {
VideoCoder::VideoCoder() : _vcm(VideoCodingModule::Create()), _decodedVideo(0) {
@@ -108,25 +109,22 @@
}
int32_t VideoCoder::SendData(
- const FrameType frameType,
- const uint8_t payloadType,
- const uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ const uint8_t payloadType,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* /*rtpVideoHdr*/)
{
// Store the data in _videoEncodedData which is a pointer to videoFrame in
// Encode(..)
- _videoEncodedData->VerifyAndAllocate(payloadSize);
- _videoEncodedData->frameType = frameType;
+ _videoEncodedData->VerifyAndAllocate(encoded_image._length);
+ _videoEncodedData->frameType =
+ VCMEncodedFrame::ConvertFrameType(encoded_image._frameType);
_videoEncodedData->payloadType = payloadType;
- _videoEncodedData->timeStamp = timeStamp;
+ _videoEncodedData->timeStamp = encoded_image._timeStamp;
_videoEncodedData->fragmentationHeader.CopyFrom(fragmentationHeader);
- memcpy(_videoEncodedData->payloadData, payloadData,
- sizeof(uint8_t) * payloadSize);
- _videoEncodedData->payloadSize = payloadSize;
+ memcpy(_videoEncodedData->payloadData, encoded_image._buffer,
+ sizeof(uint8_t) * encoded_image._length);
+ _videoEncodedData->payloadSize = encoded_image._length;
return 0;
}
} // namespace webrtc
diff --git a/webrtc/modules/utility/source/video_coder.h b/webrtc/modules/utility/source/video_coder.h
index a1d1a17..5695f5e 100644
--- a/webrtc/modules/utility/source/video_coder.h
+++ b/webrtc/modules/utility/source/video_coder.h
@@ -48,12 +48,8 @@
// VCMPacketizationCallback function.
// Note: called by VideoCodingModule when encoding finished.
virtual int32_t SendData(
- FrameType /*frameType*/,
uint8_t /*payloadType*/,
- uint32_t /*timeStamp*/,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& /* fragmentationHeader*/,
const RTPVideoHeader* rtpTypeHdr) OVERRIDE;
diff --git a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
index 18bf5b8..4758aa1 100644
--- a/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
+++ b/webrtc/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -21,7 +21,7 @@
class MockEncodedImageCallback : public EncodedImageCallback {
public:
- MOCK_METHOD3(Encoded, int32_t(EncodedImage& encodedImage,
+ MOCK_METHOD3(Encoded, int32_t(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation));
};
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
index 6e7139e..36ba0e8 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
@@ -36,7 +36,6 @@
int PacketManipulatorImpl::ManipulatePackets(
webrtc::EncodedImage* encoded_image) {
- assert(encoded_image);
int nbr_packets_dropped = 0;
// There's no need to build a copy of the image data since viewing an
// EncodedImage object, setting the length to a new lower value represents
diff --git a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
index 69bc35b..3cf7233 100644
--- a/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
+++ b/webrtc/modules/video_coding/codecs/test/packet_manipulator.h
@@ -82,8 +82,7 @@
// If packets are dropped from frame data, the completedFrame field will be
// set to false.
// Returns the number of packets being dropped.
- virtual int
- ManipulatePackets(webrtc::EncodedImage* encoded_image) = 0;
+ virtual int ManipulatePackets(webrtc::EncodedImage* encoded_image) = 0;
};
class PacketManipulatorImpl : public PacketManipulator {
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index 412ec10..df04ba5 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -223,12 +223,12 @@
}
}
-void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
+void VideoProcessorImpl::FrameEncoded(const EncodedImage& encoded_image) {
// Timestamp is frame number, so this gives us #dropped frames.
- int num_dropped_from_prev_encode = encoded_image->_timeStamp -
+ int num_dropped_from_prev_encode = encoded_image._timeStamp -
prev_time_stamp_ - 1;
num_dropped_frames_ += num_dropped_from_prev_encode;
- prev_time_stamp_ = encoded_image->_timeStamp;
+ prev_time_stamp_ = encoded_image._timeStamp;
if (num_dropped_from_prev_encode > 0) {
// For dropped frames, we write out the last decoded frame to avoid getting
// out of sync for the computation of PSNR and SSIM.
@@ -238,25 +238,25 @@
}
// Frame is not dropped, so update the encoded frame size
// (encoder callback is only called for non-zero length frames).
- encoded_frame_size_ = encoded_image->_length;
+ encoded_frame_size_ = encoded_image._length;
TickTime encode_stop = TickTime::Now();
- int frame_number = encoded_image->_timeStamp;
+ int frame_number = encoded_image._timeStamp;
FrameStatistic& stat = stats_->stats_[frame_number];
stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
encode_stop);
stat.encoding_successful = true;
- stat.encoded_frame_length_in_bytes = encoded_image->_length;
- stat.frame_number = encoded_image->_timeStamp;
- stat.frame_type = encoded_image->_frameType;
- stat.bit_rate_in_kbps = encoded_image->_length * bit_rate_factor_;
- stat.total_packets = encoded_image->_length /
+ stat.encoded_frame_length_in_bytes = encoded_image._length;
+ stat.frame_number = encoded_image._timeStamp;
+ stat.frame_type = encoded_image._frameType;
+ stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_;
+ stat.total_packets = encoded_image._length /
config_.networking_config.packet_size_in_bytes + 1;
// Perform packet loss if criteria is fullfilled:
bool exclude_this_frame = false;
// Only keyframes can be excluded
- if (encoded_image->_frameType == kKeyFrame) {
+ if (encoded_image._frameType == kKeyFrame) {
switch (config_.exclude_frame_types) {
case kExcludeOnlyFirstKeyFrame:
if (!first_key_frame_has_been_excluded_) {
@@ -271,9 +271,15 @@
assert(false);
}
}
+ scoped_ptr<uint8_t[]> copied_buffer(new uint8_t[encoded_image._length]);
+ memcpy(copied_buffer.get(), encoded_image._buffer, encoded_image._length);
+ EncodedImage copied_image;
+ memcpy(&copied_image, &encoded_image, sizeof(copied_image));
+ copied_image._size = copied_image._length;
+ copied_image._buffer = copied_buffer.get();
if (!exclude_this_frame) {
stat.packets_dropped =
- packet_manipulator_->ManipulatePackets(encoded_image);
+ packet_manipulator_->ManipulatePackets(&copied_image);
}
// Keep track of if frames are lost due to packet loss so we can tell
@@ -281,8 +287,8 @@
decode_start_ = TickTime::Now();
// TODO(kjellander): Pass fragmentation header to the decoder when
// CL 172001 has been submitted and PacketManipulator supports this.
- int32_t decode_result = decoder_->Decode(*encoded_image, last_frame_missing_,
- NULL);
+ int32_t decode_result =
+ decoder_->Decode(copied_image, last_frame_missing_, NULL);
stat.decode_return_code = decode_result;
if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
// Write the last successful frame the output file to avoid getting it out
@@ -290,7 +296,7 @@
frame_writer_->WriteFrame(last_successful_frame_buffer_);
}
// save status for losses so we can inform the decoder for the next frame:
- last_frame_missing_ = encoded_image->_length == 0;
+ last_frame_missing_ = copied_image._length == 0;
}
void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
@@ -399,10 +405,10 @@
// Callbacks
int32_t
VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
- EncodedImage& encoded_image,
+ const EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) {
- video_processor_->FrameEncoded(&encoded_image); // Forward to parent class.
+ video_processor_->FrameEncoded(encoded_image); // Forward to parent class.
return 0;
}
int32_t
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index 2cfde52..b092363 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -168,7 +168,7 @@
private:
// Invoked by the callback when a frame has completed encoding.
- void FrameEncoded(webrtc::EncodedImage* encodedImage);
+ void FrameEncoded(const webrtc::EncodedImage& encodedImage);
// Invoked by the callback when a frame has completed decoding.
void FrameDecoded(const webrtc::I420VideoFrame& image);
// Used for getting a 32-bit integer representing time
@@ -226,9 +226,9 @@
explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {}
virtual int32_t Encoded(
- webrtc::EncodedImage& encoded_image,
- const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
- const webrtc::RTPFragmentationHeader* fragmentation = NULL) OVERRIDE;
+ const webrtc::EncodedImage& encoded_image,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ const webrtc::RTPFragmentationHeader* fragmentation) OVERRIDE;
private:
VideoProcessorImpl* video_processor_;
diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
index 3ad6ed7..fa02f88 100644
--- a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
+++ b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -223,12 +223,10 @@
return _encodedBytes;
}
-int32_t
-VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
- const webrtc::CodecSpecificInfo* codecSpecificInfo,
- const webrtc::RTPFragmentationHeader*
- fragmentation)
-{
+int32_t VideoEncodeCompleteCallback::Encoded(
+ const EncodedImage& encodedImage,
+ const webrtc::CodecSpecificInfo* codecSpecificInfo,
+ const webrtc::RTPFragmentationHeader* fragmentation) {
_test.Encoded(encodedImage);
VideoFrame *newBuffer = new VideoFrame();
newBuffer->VerifyAndAllocate(encodedImage._size);
@@ -564,7 +562,7 @@
}
void NormalAsyncTest::CopyEncodedImage(VideoFrame& dest,
- EncodedImage& src,
+ const EncodedImage& src,
void* /*codecSpecificInfo*/) const
{
dest.CopyFrame(src._length, src._buffer);
diff --git a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
index 63ac0bf..3bfe5d4 100644
--- a/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
+++ b/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -85,7 +85,7 @@
CopyCodecSpecificInfo(
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
virtual void CopyEncodedImage(webrtc::VideoFrame& dest,
- webrtc::EncodedImage& src,
+ const webrtc::EncodedImage& src,
void* /*codecSpecificInfo*/) const;
virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
{
@@ -149,10 +149,9 @@
_encodedBytes(0)
{}
- int32_t
- Encoded(webrtc::EncodedImage& encodedImage,
- const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
- const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+ int32_t Encoded(const webrtc::EncodedImage& encodedImage,
+ const webrtc::CodecSpecificInfo* codecSpecificInfo,
+ const webrtc::RTPFragmentationHeader* fragmentation);
size_t EncodedBytes();
private:
FILE* _encodedFile;
diff --git a/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc b/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
index 1af462c..a71ab68 100644
--- a/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
+++ b/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -91,7 +91,7 @@
}
int32_t
-UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+UnitTestEncodeCompleteCallback::Encoded(const EncodedImage& encodedImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader*
fragmentation)
diff --git a/webrtc/modules/video_coding/codecs/test_framework/unit_test.h b/webrtc/modules/video_coding/codecs/test_framework/unit_test.h
index 7e55a90..5e680ae 100644
--- a/webrtc/modules/video_coding/codecs/test_framework/unit_test.h
+++ b/webrtc/modules/video_coding/codecs/test_framework/unit_test.h
@@ -79,9 +79,9 @@
void* decoderSpecificInfo = NULL) :
_encodedVideoBuffer(buffer),
_encodeComplete(false) {}
- int32_t Encoded(webrtc::EncodedImage& encodedImage,
+ int32_t Encoded(const webrtc::EncodedImage& encodedImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
- const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+ const webrtc::RTPFragmentationHeader* fragmentation);
bool EncodeComplete();
// Note that this only makes sense if an encode has been completed
webrtc::VideoFrameType EncodedFrameType() const;
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 6666bab..759f5ba 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -33,7 +33,7 @@
void* decoderSpecificInfo)
: encoded_video_frame_(frame),
encode_complete_(false) {}
- int Encoded(EncodedImage& encodedImage,
+ int Encoded(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader*);
bool EncodeComplete();
@@ -46,7 +46,7 @@
VideoFrameType encoded_frame_type_;
};
-int Vp8UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+int Vp8UnitTestEncodeCompleteCallback::Encoded(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation) {
encoded_video_frame_->VerifyAndAllocate(encodedImage._size);
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 992f089..39da34b 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -25,7 +25,7 @@
: encoded_file_(encoded_file),
encoded_bytes_(0) {}
~Vp8SequenceCoderEncodeCallback();
- int Encoded(webrtc::EncodedImage& encoded_image,
+ int Encoded(const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader*);
// Returns the encoded image.
@@ -42,7 +42,7 @@
encoded_image_._buffer = NULL;
}
int Vp8SequenceCoderEncodeCallback::Encoded(
- webrtc::EncodedImage& encoded_image,
+ const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader* fragmentation) {
if (encoded_image_._size < encoded_image._size) {
diff --git a/webrtc/modules/video_coding/main/interface/video_coding_defines.h b/webrtc/modules/video_coding/main/interface/video_coding_defines.h
index a99105d..1bf3d6f 100644
--- a/webrtc/modules/video_coding/main/interface/video_coding_defines.h
+++ b/webrtc/modules/video_coding/main/interface/video_coding_defines.h
@@ -68,15 +68,11 @@
// Callback class used for sending data ready to be packetized
class VCMPacketizationCallback {
public:
- virtual int32_t SendData(
- FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader& fragmentationHeader,
- const RTPVideoHeader* rtpVideoHdr) = 0;
+ virtual int32_t SendData(uint8_t payloadType,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentationHeader,
+ const RTPVideoHeader* rtpVideoHdr) = 0;
+
protected:
virtual ~VCMPacketizationCallback() {
}
diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.cc b/webrtc/modules/video_coding/main/source/generic_encoder.cc
index d6a7bbb..096287f 100644
--- a/webrtc/modules/video_coding/main/source/generic_encoder.cc
+++ b/webrtc/modules/video_coding/main/source/generic_encoder.cc
@@ -210,19 +210,14 @@
int32_t
VCMEncodedFrameCallback::Encoded(
- EncodedImage &encodedImage,
+ const EncodedImage &encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentationHeader)
{
post_encode_callback_->Encoded(encodedImage);
- FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
-
- size_t encodedBytes = 0;
if (_sendCallback != NULL)
{
- encodedBytes = encodedImage._length;
-
#ifdef DEBUG_ENCODER_BIT_STREAM
if (_bitStreamAfterEncoder != NULL)
{
@@ -235,12 +230,8 @@
CopyCodecSpecific(codecSpecificInfo, &rtpVideoHeaderPtr);
int32_t callbackReturn = _sendCallback->SendData(
- frameType,
_payloadType,
- encodedImage._timeStamp,
- encodedImage.capture_time_ms_,
- encodedImage._buffer,
- encodedBytes,
+ encodedImage,
*fragmentationHeader,
rtpVideoHeaderPtr);
if (callbackReturn < 0)
@@ -253,12 +244,9 @@
return VCM_UNINITIALIZED;
}
if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(encodedBytes, encodedImage._timeStamp,
- frameType);
+ _mediaOpt->UpdateWithEncodedData(encodedImage);
if (_internalSource)
- {
- return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame
- }
+ return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame.
}
return VCM_OK;
}
diff --git a/webrtc/modules/video_coding/main/source/generic_encoder.h b/webrtc/modules/video_coding/main/source/generic_encoder.h
index 8eb1480..a986ada 100644
--- a/webrtc/modules/video_coding/main/source/generic_encoder.h
+++ b/webrtc/modules/video_coding/main/source/generic_encoder.h
@@ -37,7 +37,7 @@
* Callback implementation - codec encode complete
*/
int32_t Encoded(
- EncodedImage& encodedImage,
+ const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo = NULL,
const RTPFragmentationHeader* fragmentationHeader = NULL);
/*
diff --git a/webrtc/modules/video_coding/main/source/media_optimization.cc b/webrtc/modules/video_coding/main/source/media_optimization.cc
index 630f013..85cce8f 100644
--- a/webrtc/modules/video_coding/main/source/media_optimization.cc
+++ b/webrtc/modules/video_coding/main/source/media_optimization.cc
@@ -369,9 +369,10 @@
return count;
}
-int32_t MediaOptimization::UpdateWithEncodedData(size_t encoded_length,
- uint32_t timestamp,
- FrameType encoded_frame_type) {
+int32_t MediaOptimization::UpdateWithEncodedData(
+ const EncodedImage& encoded_image) {
+ size_t encoded_length = encoded_image._length;
+ uint32_t timestamp = encoded_image._timeStamp;
CriticalSectionScoped lock(crit_sect_.get());
const int64_t now_ms = clock_->TimeInMilliseconds();
PurgeOldFrameSamples(now_ms);
@@ -389,7 +390,7 @@
UpdateSentBitrate(now_ms);
UpdateSentFramerate();
if (encoded_length > 0) {
- const bool delta_frame = (encoded_frame_type != kVideoFrameKey);
+ const bool delta_frame = encoded_image._frameType != kKeyFrame;
frame_dropper_->Fill(encoded_length, delta_frame);
if (max_payload_size_ > 0 && encoded_length > 0) {
@@ -405,7 +406,7 @@
if (enable_qm_) {
// Update quality select with encoded length.
- qm_resolution_->UpdateEncodedSize(encoded_length, encoded_frame_type);
+ qm_resolution_->UpdateEncodedSize(encoded_length);
}
}
if (!delta_frame && encoded_length > 0) {
diff --git a/webrtc/modules/video_coding/main/source/media_optimization.h b/webrtc/modules/video_coding/main/source/media_optimization.h
index af35f01..675d64e 100644
--- a/webrtc/modules/video_coding/main/source/media_optimization.h
+++ b/webrtc/modules/video_coding/main/source/media_optimization.h
@@ -76,10 +76,8 @@
void UpdateContentData(const VideoContentMetrics* content_metrics);
- // Informs Media Optimization of encoding output: Length and frame type.
- int32_t UpdateWithEncodedData(size_t encoded_length,
- uint32_t timestamp,
- FrameType encoded_frame_type);
+ // Informs Media Optimization of encoded output.
+ int32_t UpdateWithEncodedData(const EncodedImage& encoded_image);
uint32_t InputFrameRate();
uint32_t SentFrameRate();
diff --git a/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc b/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
index df79fb7..5031015 100644
--- a/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
+++ b/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
@@ -35,8 +35,11 @@
EXPECT_EQ(expect_frame_drop, frame_dropped);
if (!frame_dropped) {
size_t bytes_per_frame = bitrate_bps * frame_time_ms_ / (8 * 1000);
- ASSERT_EQ(VCM_OK, media_opt_.UpdateWithEncodedData(
- bytes_per_frame, next_timestamp_, kVideoFrameDelta));
+ EncodedImage encoded_image;
+ encoded_image._length = bytes_per_frame;
+ encoded_image._timeStamp = next_timestamp_;
+ encoded_image._frameType = kDeltaFrame;
+ ASSERT_EQ(VCM_OK, media_opt_.UpdateWithEncodedData(encoded_image));
}
next_timestamp_ += frame_time_ms_ * kSampleRate / 1000;
clock_.AdvanceTimeMilliseconds(frame_time_ms_);
diff --git a/webrtc/modules/video_coding/main/source/qm_select.cc b/webrtc/modules/video_coding/main/source/qm_select.cc
index 0df61b5..9255aed 100644
--- a/webrtc/modules/video_coding/main/source/qm_select.cc
+++ b/webrtc/modules/video_coding/main/source/qm_select.cc
@@ -239,8 +239,7 @@
}
// Update rate data after every encoded frame.
-void VCMQmResolution::UpdateEncodedSize(size_t encoded_size,
- FrameType encoded_frame_type) {
+void VCMQmResolution::UpdateEncodedSize(size_t encoded_size) {
frame_cnt_++;
// Convert to Kbps.
float encoded_size_kbits = 8.0f * static_cast<float>(encoded_size) / 1000.0f;
diff --git a/webrtc/modules/video_coding/main/source/qm_select.h b/webrtc/modules/video_coding/main/source/qm_select.h
index a87d502..654c078 100644
--- a/webrtc/modules/video_coding/main/source/qm_select.h
+++ b/webrtc/modules/video_coding/main/source/qm_select.h
@@ -216,8 +216,7 @@
// Update with actual bit rate (size of the latest encoded frame)
// and frame type, after every encoded frame.
- void UpdateEncodedSize(size_t encoded_size,
- FrameType encoded_frame_type);
+ void UpdateEncodedSize(size_t encoded_size);
// Update with new target bitrate, actual encoder sent rate, frame_rate,
// loss rate: every ~1 sec from SetTargetRates in media_opt.
diff --git a/webrtc/modules/video_coding/main/source/qm_select_unittest.cc b/webrtc/modules/video_coding/main/source/qm_select_unittest.cc
index 0120f20..6abc0d3 100644
--- a/webrtc/modules/video_coding/main/source/qm_select_unittest.cc
+++ b/webrtc/modules/video_coding/main/source/qm_select_unittest.cc
@@ -1264,11 +1264,10 @@
void QmSelectTest::UpdateQmEncodedFrame(size_t* encoded_size,
size_t num_updates) {
- FrameType frame_type = kVideoFrameDelta;
for (size_t i = 0; i < num_updates; ++i) {
// Convert to bytes.
size_t encoded_size_update = 1000 * encoded_size[i] / 8;
- qm_resolution_->UpdateEncodedSize(encoded_size_update, frame_type);
+ qm_resolution_->UpdateEncodedSize(encoded_size_update);
}
}
diff --git a/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/webrtc/modules/video_coding/main/source/video_coding_impl.cc
index c3ecd83..80258be 100644
--- a/webrtc/modules/video_coding/main/source/video_coding_impl.cc
+++ b/webrtc/modules/video_coding/main/source/video_coding_impl.cc
@@ -59,7 +59,7 @@
}
// TODO(andresp): Change to void as return value is ignored.
- virtual int32_t Encoded(EncodedImage& encoded_image,
+ virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
CriticalSectionScoped cs(cs_.get());
diff --git a/webrtc/modules/video_coding/main/source/video_sender_unittest.cc b/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
index f689809..43a3ebc 100644
--- a/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
+++ b/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
@@ -86,16 +86,12 @@
virtual ~PacketizationCallback() {}
- virtual int32_t SendData(FrameType frame_type,
- uint8_t payload_type,
- uint32_t timestamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_size,
+ virtual int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentation_header,
const RTPVideoHeader* rtp_video_header) OVERRIDE {
assert(rtp_video_header);
- frame_data_.push_back(FrameData(payload_size, *rtp_video_header));
+ frame_data_.push_back(FrameData(encoded_image._length, *rtp_video_header));
return 0;
}
diff --git a/webrtc/modules/video_coding/main/test/generic_codec_test.cc b/webrtc/modules/video_coding/main/test/generic_codec_test.cc
index 2848212..48c65d7 100644
--- a/webrtc/modules/video_coding/main/test/generic_codec_test.cc
+++ b/webrtc/modules/video_coding/main/test/generic_codec_test.cc
@@ -532,17 +532,11 @@
return 0;
}
-int32_t
-VCMEncComplete_KeyReqTest::SendData(
- FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader& /*fragmentationHeader*/,
- const webrtc::RTPVideoHeader* /*videoHdr*/)
-{
+int32_t VCMEncComplete_KeyReqTest::SendData(
+ uint8_t payloadType,
+ const webrtc::EncodedImage& encoded_image,
+ const RTPFragmentationHeader& /*fragmentationHeader*/,
+ const webrtc::RTPVideoHeader* /*videoHdr*/) {
WebRtcRTPHeader rtpInfo;
rtpInfo.header.markerBit = true; // end of frame
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
@@ -555,5 +549,6 @@
_timeStamp += 3000;
rtpInfo.type.Video.isFirstPacket = false;
rtpInfo.frameType = kVideoFrameKey;
- return _vcm.IncomingPacket(payloadData, payloadSize, rtpInfo);
+ return _vcm.IncomingPacket(encoded_image._buffer, encoded_image._length,
+ rtpInfo);
}
diff --git a/webrtc/modules/video_coding/main/test/generic_codec_test.h b/webrtc/modules/video_coding/main/test/generic_codec_test.h
index 9a450de..3b3ddce 100644
--- a/webrtc/modules/video_coding/main/test/generic_codec_test.h
+++ b/webrtc/modules/video_coding/main/test/generic_codec_test.h
@@ -95,14 +95,11 @@
public:
VCMEncComplete_KeyReqTest(webrtc::VideoCodingModule &vcm) : _vcm(vcm), _seqNo(0), _timeStamp(0) {}
virtual int32_t SendData(
- webrtc::FrameType frameType,
uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ const webrtc::EncodedImage& encoded_image,
const webrtc::RTPFragmentationHeader& fragmentationHeader,
const webrtc::RTPVideoHeader* videoHdr) OVERRIDE;
+
private:
webrtc::VideoCodingModule& _vcm;
uint16_t _seqNo;
diff --git a/webrtc/modules/video_coding/main/test/normal_test.cc b/webrtc/modules/video_coding/main/test/normal_test.cc
index 4ab97a1..0a803d3b 100644
--- a/webrtc/modules/video_coding/main/test/normal_test.cc
+++ b/webrtc/modules/video_coding/main/test/normal_test.cc
@@ -18,6 +18,7 @@
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
+#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/test/test_callbacks.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/modules/video_coding/main/test/test_util.h"
@@ -69,22 +70,17 @@
{
}
-int32_t
-VCMNTEncodeCompleteCallback::SendData(
- FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader& /*fragmentationHeader*/,
- const webrtc::RTPVideoHeader* videoHdr)
-
+int32_t VCMNTEncodeCompleteCallback::SendData(
+ uint8_t payloadType,
+ const webrtc::EncodedImage& encoded_image,
+ const RTPFragmentationHeader& /*fragmentationHeader*/,
+ const webrtc::RTPVideoHeader* videoHdr)
{
// will call the VCMReceiver input packet
- _frameType = frameType;
+ _frameType = VCMEncodedFrame::ConvertFrameType(encoded_image._frameType);
// writing encodedData into file
- if (fwrite(payloadData, 1, payloadSize, _encodedFile) != payloadSize) {
+ if (fwrite(encoded_image._buffer, 1, encoded_image._length, _encodedFile) !=
+ encoded_image._length) {
return -1;
}
WebRtcRTPHeader rtpInfo;
@@ -111,18 +107,19 @@
rtpInfo.header.payloadType = payloadType;
rtpInfo.header.sequenceNumber = _seqNo++;
rtpInfo.header.ssrc = 0;
- rtpInfo.header.timestamp = timeStamp;
- rtpInfo.frameType = frameType;
+ rtpInfo.header.timestamp = encoded_image._timeStamp;
+ rtpInfo.frameType = _frameType;
rtpInfo.type.Video.isFirstPacket = true;
// Size should also be received from that table, since the payload type
// defines the size.
- _encodedBytes += payloadSize;
- if (payloadSize < 20)
+ _encodedBytes += encoded_image._length;
+ if (encoded_image._length < 20)
{
_skipCnt++;
}
- _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+ _VCMReceiver->IncomingPacket(
+ encoded_image._buffer, encoded_image._length, rtpInfo);
return 0;
}
void
diff --git a/webrtc/modules/video_coding/main/test/normal_test.h b/webrtc/modules/video_coding/main/test/normal_test.h
index 4d33f3c..91862c9 100644
--- a/webrtc/modules/video_coding/main/test/normal_test.h
+++ b/webrtc/modules/video_coding/main/test/normal_test.h
@@ -33,12 +33,8 @@
// process encoded data received from the encoder,
// pass stream to the VCMReceiver module
virtual int32_t SendData(
- webrtc::FrameType frameType,
uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ const webrtc::EncodedImage& encoded_image,
const webrtc::RTPFragmentationHeader& fragmentationHeader,
const webrtc::RTPVideoHeader* videoHdr) OVERRIDE;
diff --git a/webrtc/modules/video_coding/main/test/test_callbacks.cc b/webrtc/modules/video_coding/main/test/test_callbacks.cc
index 35aaae1..58468b2 100644
--- a/webrtc/modules/video_coding/main/test/test_callbacks.cc
+++ b/webrtc/modules/video_coding/main/test/test_callbacks.cc
@@ -17,6 +17,7 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
#include "webrtc/modules/utility/interface/rtp_dump.h"
+#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/test/test_macros.h"
#include "webrtc/system_wrappers/interface/clock.h"
@@ -44,27 +45,22 @@
{
}
-void
-VCMEncodeCompleteCallback::RegisterTransportCallback(
- VCMPacketizationCallback* transport)
-{
+void VCMEncodeCompleteCallback::RegisterTransportCallback(
+ VCMPacketizationCallback* transport) {
}
int32_t
VCMEncodeCompleteCallback::SendData(
- const FrameType frameType,
- const uint8_t payloadType,
- const uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- const size_t payloadSize,
+ const uint8_t payloadType,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr)
{
// will call the VCMReceiver input packet
- _frameType = frameType;
+ _frameType = VCMEncodedFrame::ConvertFrameType(encoded_image._frameType);
// writing encodedData into file
- if (fwrite(payloadData, 1, payloadSize, _encodedFile) != payloadSize) {
+ if (fwrite(encoded_image._buffer, 1, encoded_image._length, _encodedFile) !=
+ encoded_image._length) {
return -1;
}
WebRtcRTPHeader rtpInfo;
@@ -93,14 +89,15 @@
rtpInfo.header.payloadType = payloadType;
rtpInfo.header.sequenceNumber = _seqNo++;
rtpInfo.header.ssrc = 0;
- rtpInfo.header.timestamp = timeStamp;
- rtpInfo.frameType = frameType;
+ rtpInfo.header.timestamp = encoded_image._timeStamp;
+ rtpInfo.frameType = _frameType;
// Size should also be received from that table, since the payload type
// defines the size.
- _encodedBytes += payloadSize;
+ _encodedBytes += encoded_image._length;
// directly to receiver
- int ret = _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+ int ret = _VCMReceiver->IncomingPacket(encoded_image._buffer,
+ encoded_image._length, rtpInfo);
_encodeComplete = true;
return ret;
@@ -147,24 +144,20 @@
int32_t
VCMRTPEncodeCompleteCallback::SendData(
- FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ uint8_t payloadType,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr)
{
- _frameType = frameType;
- _encodedBytes+= payloadSize;
+ _frameType = VCMEncodedFrame::ConvertFrameType(encoded_image._frameType);
+ _encodedBytes+= encoded_image._length;
_encodeComplete = true;
- return _RTPModule->SendOutgoingData(frameType,
+ return _RTPModule->SendOutgoingData(_frameType,
payloadType,
- timeStamp,
- capture_time_ms,
- payloadData,
- payloadSize,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_,
+ encoded_image._buffer,
+ encoded_image._length,
&fragmentationHeader,
videoHdr);
}
diff --git a/webrtc/modules/video_coding/main/test/test_callbacks.h b/webrtc/modules/video_coding/main/test/test_callbacks.h
index fb08e9c..3fe991a 100644
--- a/webrtc/modules/video_coding/main/test/test_callbacks.h
+++ b/webrtc/modules/video_coding/main/test/test_callbacks.h
@@ -44,12 +44,8 @@
void RegisterTransportCallback(VCMPacketizationCallback* transport);
// Process encoded data received from the encoder, pass stream to the
// VCMReceiver module
- virtual int32_t SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ virtual int32_t SendData(uint8_t payloadType,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr) OVERRIDE;
// Register exisitng VCM. Currently - encode and decode under same module.
@@ -101,12 +97,8 @@
virtual ~VCMRTPEncodeCompleteCallback() {}
// Process encoded data received from the encoder, pass stream to the
// RTP module
- virtual int32_t SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- int64_t capture_time_ms,
- const uint8_t* payloadData,
- size_t payloadSize,
+ virtual int32_t SendData(uint8_t payloadType,
+ const EncodedImage& encoded_image,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr) OVERRIDE;
// Return size of last encoded frame. Value good for one call
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index d0b6402..42b6e9f 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -139,7 +139,7 @@
return 0;
}
-int32_t FakeH264Encoder::Encoded(EncodedImage& encoded_image,
+int32_t FakeH264Encoder::Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragments) {
const size_t kSpsSize = 8;
diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h
index b77cb3e..4d31e1c 100644
--- a/webrtc/test/fake_encoder.h
+++ b/webrtc/test/fake_encoder.h
@@ -42,7 +42,7 @@
virtual int32_t SetRates(uint32_t new_target_bitrate,
uint32_t framerate) OVERRIDE;
- private:
+ protected:
Clock* const clock_;
VideoCodec config_;
EncodedImageCallback* callback_;
@@ -61,9 +61,9 @@
EncodedImageCallback* callback) OVERRIDE;
virtual int32_t Encoded(
- EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- const RTPFragmentationHeader* fragments = NULL) OVERRIDE;
+ const EncodedImage& encodedImage,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const RTPFragmentationHeader* fragments) OVERRIDE;
private:
EncodedImageCallback* callback_;
diff --git a/webrtc/video/encoded_frame_callback_adapter.cc b/webrtc/video/encoded_frame_callback_adapter.cc
index f5eca7c..3afa108 100644
--- a/webrtc/video/encoded_frame_callback_adapter.cc
+++ b/webrtc/video/encoded_frame_callback_adapter.cc
@@ -22,7 +22,7 @@
EncodedFrameCallbackAdapter::~EncodedFrameCallbackAdapter() {}
int32_t EncodedFrameCallbackAdapter::Encoded(
- EncodedImage& encodedImage,
+ const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation) {
assert(observer_ != NULL);
diff --git a/webrtc/video/encoded_frame_callback_adapter.h b/webrtc/video/encoded_frame_callback_adapter.h
index d381479..b39a8e2 100644
--- a/webrtc/video/encoded_frame_callback_adapter.h
+++ b/webrtc/video/encoded_frame_callback_adapter.h
@@ -22,7 +22,7 @@
explicit EncodedFrameCallbackAdapter(EncodedFrameObserver* observer);
virtual ~EncodedFrameCallbackAdapter();
- virtual int32_t Encoded(EncodedImage& encodedImage,
+ virtual int32_t Encoded(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation);
diff --git a/webrtc/video/rampup_tests.cc b/webrtc/video/rampup_tests.cc
index 5e73abf..b7d699f 100644
--- a/webrtc/video/rampup_tests.cc
+++ b/webrtc/video/rampup_tests.cc
@@ -226,7 +226,7 @@
test::DirectTransport::SetReceiver(this);
}
-void LowRateStreamObserver::SetSendStream(const VideoSendStream* send_stream) {
+void LowRateStreamObserver::SetSendStream(VideoSendStream* send_stream) {
CriticalSectionScoped lock(crit_.get());
send_stream_ = send_stream;
}
diff --git a/webrtc/video/rampup_tests.h b/webrtc/video/rampup_tests.h
index 69399b4..e506cd4 100644
--- a/webrtc/video/rampup_tests.h
+++ b/webrtc/video/rampup_tests.h
@@ -96,7 +96,7 @@
size_t number_of_streams,
bool rtx_used);
- virtual void SetSendStream(const VideoSendStream* send_stream);
+ virtual void SetSendStream(VideoSendStream* send_stream);
virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
unsigned int bitrate);
@@ -135,7 +135,7 @@
scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
scoped_ptr<CriticalSectionWrapper> crit_;
- const VideoSendStream* send_stream_ GUARDED_BY(crit_);
+ VideoSendStream* send_stream_ GUARDED_BY(crit_);
FakeNetworkPipe::Config forward_transport_config_ GUARDED_BY(crit_);
TestStates test_state_ GUARDED_BY(crit_);
int64_t state_start_ms_ GUARDED_BY(crit_);
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index f2df0ed..d590be1 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -13,12 +13,16 @@
#include <map>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
-SendStatisticsProxy::SendStatisticsProxy(
- const VideoSendStream::Config& config)
- : config_(config),
+const int SendStatisticsProxy::kStatsTimeoutMs = 5000;
+
+SendStatisticsProxy::SendStatisticsProxy(Clock* clock,
+ const VideoSendStream::Config& config)
+ : clock_(clock),
+ config_(config),
crit_(CriticalSectionWrapper::CreateCriticalSection()) {
}
@@ -43,11 +47,26 @@
stats_.input_frame_rate = frame_rate;
}
-VideoSendStream::Stats SendStatisticsProxy::GetStats() const {
+VideoSendStream::Stats SendStatisticsProxy::GetStats() {
CriticalSectionScoped lock(crit_.get());
+ PurgeOldStats();
return stats_;
}
+void SendStatisticsProxy::PurgeOldStats() {
+ int64_t current_time_ms = clock_->TimeInMilliseconds();
+ for (std::map<uint32_t, SsrcStats>::iterator it = stats_.substreams.begin();
+ it != stats_.substreams.end(); ++it) {
+ uint32_t ssrc = it->first;
+ if (update_times_[ssrc].resolution_update_ms + kStatsTimeoutMs >
+ current_time_ms)
+ continue;
+
+ it->second.sent_width = 0;
+ it->second.sent_height = 0;
+ }
+}
+
SsrcStats* SendStatisticsProxy::GetStatsEntry(uint32_t ssrc) {
std::map<uint32_t, SsrcStats>::iterator it = stats_.substreams.find(ssrc);
if (it != stats_.substreams.end())
@@ -64,6 +83,28 @@
return &stats_.substreams[ssrc]; // Insert new entry and return ptr.
}
+void SendStatisticsProxy::OnSendEncodedImage(
+ const EncodedImage& encoded_image,
+ const RTPVideoHeader* rtp_video_header) {
+ size_t simulcast_idx =
+ rtp_video_header != NULL ? rtp_video_header->simulcastIdx : 0;
+ if (simulcast_idx >= config_.rtp.ssrcs.size()) {
+ LOG(LS_ERROR) << "Encoded image outside simulcast range (" << simulcast_idx
+ << " >= " << config_.rtp.ssrcs.size() << ").";
+ return;
+ }
+ uint32_t ssrc = config_.rtp.ssrcs[simulcast_idx];
+
+ CriticalSectionScoped lock(crit_.get());
+ SsrcStats* stats = GetStatsEntry(ssrc);
+ if (stats == NULL)
+ return;
+
+ stats->sent_width = encoded_image._encodedWidth;
+ stats->sent_height = encoded_image._encodedHeight;
+ update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
+}
+
void SendStatisticsProxy::StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) {
CriticalSectionScoped lock(crit_.get());
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index 2f645b1..5e7d208 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -15,10 +15,12 @@
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/video_engine/include/vie_codec.h"
-#include "webrtc/video_engine/include/vie_capture.h"
-#include "webrtc/video_send_stream.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_send_stream.h"
namespace webrtc {
@@ -32,10 +34,15 @@
public ViECaptureObserver,
public SendSideDelayObserver {
public:
- explicit SendStatisticsProxy(const VideoSendStream::Config& config);
+ static const int kStatsTimeoutMs;
+
+ SendStatisticsProxy(Clock* clock, const VideoSendStream::Config& config);
virtual ~SendStatisticsProxy();
- VideoSendStream::Stats GetStats() const;
+ VideoSendStream::Stats GetStats();
+
+ virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
+ const RTPVideoHeader* rtp_video_header);
protected:
// From RtcpStatisticsCallback.
@@ -77,11 +84,18 @@
uint32_t ssrc) OVERRIDE;
private:
+ struct StatsUpdateTimes {
+ StatsUpdateTimes() : resolution_update_ms(0) {}
+ int64_t resolution_update_ms;
+ };
+ void PurgeOldStats() EXCLUSIVE_LOCKS_REQUIRED(crit_);
SsrcStats* GetStatsEntry(uint32_t ssrc) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ Clock* const clock_;
const VideoSendStream::Config config_;
scoped_ptr<CriticalSectionWrapper> crit_;
VideoSendStream::Stats stats_ GUARDED_BY(crit_);
+ std::map<uint32_t, StatsUpdateTimes> update_times_ GUARDED_BY(crit_);
};
} // namespace webrtc
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index 06abb9e..4ee2cc6 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -21,13 +21,14 @@
class SendStatisticsProxyTest : public ::testing::Test {
public:
- SendStatisticsProxyTest() : avg_delay_ms_(0), max_delay_ms_(0) {}
+ SendStatisticsProxyTest()
+ : fake_clock_(1234), avg_delay_ms_(0), max_delay_ms_(0) {}
virtual ~SendStatisticsProxyTest() {}
protected:
virtual void SetUp() {
statistics_proxy_.reset(
- new SendStatisticsProxy(GetTestConfig()));
+ new SendStatisticsProxy(&fake_clock_, GetTestConfig()));
config_ = GetTestConfig();
expected_ = VideoSendStream::Stats();
}
@@ -81,6 +82,7 @@
}
scoped_ptr<SendStatisticsProxy> statistics_proxy_;
+ SimulatedClock fake_clock_;
VideoSendStream::Config config_;
int avg_delay_ms_;
int max_delay_ms_;
@@ -322,4 +324,51 @@
EXPECT_TRUE(stats.substreams.empty());
}
+TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
+ static const int kEncodedWidth = 123;
+ static const int kEncodedHeight = 81;
+ EncodedImage encoded_image;
+ encoded_image._encodedWidth = kEncodedWidth;
+ encoded_image._encodedHeight = kEncodedHeight;
+
+ RTPVideoHeader rtp_video_header;
+
+ rtp_video_header.simulcastIdx = 0;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ rtp_video_header.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+
+ VideoSendStream::Stats stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[0]].sent_width);
+ EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[0]].sent_height);
+ EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[1]].sent_width);
+ EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[1]].sent_height);
+
+ // Forward almost to timeout, this should not have removed stats.
+ fake_clock_.AdvanceTimeMilliseconds(SendStatisticsProxy::kStatsTimeoutMs - 1);
+ stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[0]].sent_width);
+ EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[0]].sent_height);
+
+ // Update the first SSRC with bogus RTCP stats to make sure that encoded
+ // resolution still times out (no global timeout for all stats).
+ RtcpStatistics rtcp_statistics;
+ RtcpStatisticsCallback* rtcp_stats = statistics_proxy_.get();
+ rtcp_stats->StatisticsUpdated(rtcp_statistics, config_.rtp.ssrcs[0]);
+
+ // Report stats for second SSRC to make sure it's not outdated along with the
+ // first SSRC.
+ rtp_video_header.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+
+ // Forward 1 ms, reach timeout, substream 0 should have no resolution
+ // reported, but substream 1 should.
+ fake_clock_.AdvanceTimeMilliseconds(1);
+ stats = statistics_proxy_->GetStats();
+ EXPECT_EQ(0, stats.substreams[config_.rtp.ssrcs[0]].sent_width);
+ EXPECT_EQ(0, stats.substreams[config_.rtp.ssrcs[0]].sent_height);
+ EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[1]].sent_width);
+ EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[1]].sent_height);
+}
+
} // namespace webrtc
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 5da2669..01e2bdd 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -124,7 +124,7 @@
external_codec_(NULL),
channel_(-1),
use_config_bitrate_(true),
- stats_proxy_(config) {
+ stats_proxy_(Clock::GetRealTimeClock(), config) {
// Duplicate assert checking of bitrate config. These should be checked in
// Call but are added here for verbosity.
assert(bitrate_config.min_bitrate_bps >= 0);
@@ -218,6 +218,7 @@
video_engine_base_->RegisterCpuOveruseObserver(channel_, overuse_observer);
video_engine_base_->RegisterSendSideDelayObserver(channel_, &stats_proxy_);
+ video_engine_base_->RegisterSendStatisticsProxy(channel_, &stats_proxy_);
image_process_ = ViEImageProcess::GetInterface(video_engine);
image_process_->RegisterPreEncodeCallback(channel_,
@@ -442,7 +443,7 @@
return network_->ReceivedRTCPPacket(channel_, packet, length) == 0;
}
-VideoSendStream::Stats VideoSendStream::GetStats() const {
+VideoSendStream::Stats VideoSendStream::GetStats() {
return stats_proxy_.GetStats();
}
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index 56d0d36..2e07914 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -59,7 +59,7 @@
virtual bool ReconfigureVideoEncoder(
const VideoEncoderConfig& config) OVERRIDE;
- virtual Stats GetStats() const OVERRIDE;
+ virtual Stats GetStats() OVERRIDE;
bool DeliverRtcp(const uint8_t* packet, size_t length);
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index cec65f5..a94a379 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -32,6 +32,7 @@
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/testsupport/perf_test.h"
+#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/transport_adapter.h"
#include "webrtc/video_send_stream.h"
@@ -1683,4 +1684,84 @@
RunBaseTest(&test);
}
+
+TEST_F(VideoSendStreamTest, ReportsSentResolution) {
+ static const size_t kNumStreams = 3;
+ // Unusual resolutions to make sure that they are the ones being reported.
+ static const struct {
+ int width;
+ int height;
+ } kEncodedResolution[kNumStreams] = {
+ {241, 181}, {300, 121}, {121, 221}};
+ class ScreencastTargetBitrateTest : public test::SendTest,
+ public test::FakeEncoder {
+ public:
+ ScreencastTargetBitrateTest()
+ : SendTest(kDefaultTimeoutMs),
+ test::FakeEncoder(Clock::GetRealTimeClock()) {}
+
+ private:
+ virtual int32_t Encode(
+ const I420VideoFrame& input_image,
+ const CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<VideoFrameType>* frame_types) OVERRIDE {
+ CodecSpecificInfo specifics;
+ memset(&specifics, 0, sizeof(specifics));
+ specifics.codecType = kVideoCodecGeneric;
+
+ uint8_t buffer[16] = {0};
+ EncodedImage encoded(buffer, sizeof(buffer), sizeof(buffer));
+ encoded._timeStamp = input_image.timestamp();
+ encoded.capture_time_ms_ = input_image.render_time_ms();
+
+ for (size_t i = 0; i < kNumStreams; ++i) {
+ specifics.codecSpecific.generic.simulcast_idx = static_cast<uint8_t>(i);
+ encoded._frameType = (*frame_types)[i];
+ encoded._encodedWidth = kEncodedResolution[i].width;
+ encoded._encodedHeight = kEncodedResolution[i].height;
+ assert(callback_ != NULL);
+ if (callback_->Encoded(encoded, &specifics, NULL) != 0)
+ return -1;
+ }
+
+ observation_complete_->Set();
+ return 0;
+ }
+ virtual void ModifyConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) OVERRIDE {
+ send_config->encoder_settings.encoder = this;
+ EXPECT_EQ(kNumStreams, encoder_config->streams.size());
+ }
+
+ virtual size_t GetNumStreams() const OVERRIDE { return kNumStreams; }
+
+ virtual void PerformTest() OVERRIDE {
+ EXPECT_EQ(kEventSignaled, Wait())
+ << "Timed out while waiting for the encoder to send one frame.";
+ VideoSendStream::Stats stats = send_stream_->GetStats();
+
+ for (size_t i = 0; i < kNumStreams; ++i) {
+ ASSERT_TRUE(stats.substreams.find(kSendSsrcs[i]) !=
+ stats.substreams.end())
+ << "No stats for SSRC: " << kSendSsrcs[i]
+ << ", stats should exist as soon as frames have been encoded.";
+ SsrcStats ssrc_stats = stats.substreams[kSendSsrcs[i]];
+ EXPECT_EQ(kEncodedResolution[i].width, ssrc_stats.sent_width);
+ EXPECT_EQ(kEncodedResolution[i].height, ssrc_stats.sent_height);
+ }
+ }
+
+ virtual void OnStreamsCreated(
+ VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStream*>& receive_streams) OVERRIDE {
+ send_stream_ = send_stream;
+ }
+
+ VideoSendStream* send_stream_;
+ } test;
+
+ RunBaseTest(&test);
+}
} // namespace webrtc
diff --git a/webrtc/video_encoder.h b/webrtc/video_encoder.h
index 649051c..a66a51a 100644
--- a/webrtc/video_encoder.h
+++ b/webrtc/video_encoder.h
@@ -29,9 +29,9 @@
virtual ~EncodedImageCallback() {}
// Callback function which is called when an image has been encoded.
- // TODO(pbos): Make encoded_image const or pointer. Remove default arguments.
+ // TODO(pbos): Remove default arguments.
virtual int32_t Encoded(
- EncodedImage& encoded_image,
+ const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info = NULL,
const RTPFragmentationHeader* fragmentation = NULL) = 0;
};
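
With the image now passed by const reference, an implementer no longer needs a mutable EncodedImage just to hand frames on. A minimal callback conforming to the new signature could look like the following sketch (hypothetical class, not part of this change):

#include "webrtc/video_encoder.h"

class SizeRecordingCallback : public webrtc::EncodedImageCallback {
 public:
  SizeRecordingCallback() : last_width_(0), last_height_(0) {}

  // Records the dimensions of every encoded image delivered by the encoder.
  virtual int32_t Encoded(
      const webrtc::EncodedImage& encoded_image,
      const webrtc::CodecSpecificInfo* codec_specific_info,
      const webrtc::RTPFragmentationHeader* fragmentation) {
    last_width_ = encoded_image._encodedWidth;
    last_height_ = encoded_image._encodedHeight;
    return 0;
  }

 private:
  uint32_t last_width_;
  uint32_t last_height_;
};
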
diff --git a/webrtc/video_engine/include/vie_base.h b/webrtc/video_engine/include/vie_base.h
index 04e23de..afddf38 100644
--- a/webrtc/video_engine/include/vie_base.h
+++ b/webrtc/video_engine/include/vie_base.h
@@ -25,6 +25,7 @@
class Config;
class VoiceEngine;
+class SendStatisticsProxy;
// CpuOveruseObserver is called when a system overuse is detected and
// VideoEngine cannot keep up the encoding frequency.
@@ -238,6 +239,10 @@
// Returns the last VideoEngine error code.
virtual int LastError() = 0;
+ virtual void RegisterSendStatisticsProxy(
+ int channel,
+ SendStatisticsProxy* send_statistics_proxy) = 0;
+
protected:
ViEBase() {}
virtual ~ViEBase() {}
diff --git a/webrtc/video_engine/vie_base_impl.cc b/webrtc/video_engine/vie_base_impl.cc
index 4e41611..64c96d2 100644
--- a/webrtc/video_engine/vie_base_impl.cc
+++ b/webrtc/video_engine/vie_base_impl.cc
@@ -358,4 +358,19 @@
return 0;
}
+void ViEBaseImpl::RegisterSendStatisticsProxy(
+ int channel,
+ SendStatisticsProxy* send_statistics_proxy) {
+ LOG_F(LS_VERBOSE) << "RegisterSendStatisticsProxy on channel " << channel;
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(channel);
+ if (!vie_channel) {
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(channel);
+ assert(vie_encoder);
+
+ vie_encoder->RegisterSendStatisticsProxy(send_statistics_proxy);
+}
} // namespace webrtc
diff --git a/webrtc/video_engine/vie_base_impl.h b/webrtc/video_engine/vie_base_impl.h
index 0ae7818..f03cd41 100644
--- a/webrtc/video_engine/vie_base_impl.h
+++ b/webrtc/video_engine/vie_base_impl.h
@@ -67,6 +67,9 @@
int CreateChannel(int& video_channel, int original_channel, // NOLINT
bool sender);
+ virtual void RegisterSendStatisticsProxy(
+ int channel,
+ SendStatisticsProxy* send_statistics_proxy) OVERRIDE;
// ViEBaseImpl owns ViESharedData used by all interface implementations.
ViESharedData shared_data_;
};
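
The new entry point follows the usual ViE interface pattern, so a caller that already owns a send channel and a statistics proxy would wire them together roughly as below (sketch only; the helper function and its surrounding setup are assumptions, not part of this change):

#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video_engine/include/vie_base.h"

void RegisterProxyForChannel(webrtc::VideoEngine* video_engine,
                             int channel,
                             webrtc::SendStatisticsProxy* stats_proxy) {
  webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(video_engine);
  if (base == NULL)
    return;
  // Hands the proxy to the channel's encoder; the proxy must outlive the
  // channel since ViEEncoder keeps a raw pointer to it.
  base->RegisterSendStatisticsProxy(channel, stats_proxy);
  base->Release();
}
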
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
index 1d6b816..6c2f201 100644
--- a/webrtc/video_engine/vie_encoder.cc
+++ b/webrtc/video_engine/vie_encoder.cc
@@ -16,6 +16,7 @@
#include "webrtc/common_video/interface/video_image.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/frame_callback.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "webrtc/modules/utility/interface/process_thread.h"
@@ -29,9 +30,9 @@
#include "webrtc/system_wrappers/interface/metrics.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_image_process.h"
-#include "webrtc/frame_callback.h"
#include "webrtc/video_engine/vie_defines.h"
namespace webrtc {
@@ -158,7 +159,8 @@
qm_callback_(NULL),
video_suspended_(false),
pre_encode_callback_(NULL),
- start_ms_(Clock::GetRealTimeClock()->TimeInMilliseconds()) {
+ start_ms_(Clock::GetRealTimeClock()->TimeInMilliseconds()),
+ send_statistics_proxy_(NULL) {
RtpRtcp::Configuration configuration;
configuration.id = ViEModuleId(engine_id_, channel_id_);
configuration.audio = false; // Video.
@@ -724,23 +726,19 @@
}
int32_t ViEEncoder::SendData(
- const FrameType frame_type,
const uint8_t payload_type,
- const uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- const size_t payload_size,
+ const EncodedImage& encoded_image,
const webrtc::RTPFragmentationHeader& fragmentation_header,
const RTPVideoHeader* rtp_video_hdr) {
+ if (send_statistics_proxy_ != NULL) {
+ send_statistics_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
+ }
// New encoded data, hand over to the rtp module.
- return default_rtp_rtcp_->SendOutgoingData(frame_type,
- payload_type,
- time_stamp,
- capture_time_ms,
- payload_data,
- payload_size,
- &fragmentation_header,
- rtp_video_hdr);
+ return default_rtp_rtcp_->SendOutgoingData(
+ VCMEncodedFrame::ConvertFrameType(encoded_image._frameType), payload_type,
+ encoded_image._timeStamp, encoded_image.capture_time_ms_,
+ encoded_image._buffer, encoded_image._length, &fragmentation_header,
+ rtp_video_hdr);
}
int32_t ViEEncoder::ProtectionRequest(
@@ -987,6 +985,11 @@
vcm_.RegisterPostEncodeImageCallback(NULL);
}
+void ViEEncoder::RegisterSendStatisticsProxy(
+ SendStatisticsProxy* send_statistics_proxy) {
+ send_statistics_proxy_ = send_statistics_proxy;
+}
+
QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
: vpm_(vpm) {
}
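
On the receiving end of this call, the proxy can attribute the encoded dimensions to the right substream because the RTP video header carries the simulcast index. Conceptually the handler does something along these lines (rough sketch; member names and locking are assumptions, see send_statistics_proxy.cc for the actual implementation):

void SendStatisticsProxy::OnSendEncodedImage(
    const EncodedImage& encoded_image,
    const RTPVideoHeader* rtp_video_header) {
  // Map the simulcast index in the RTP header to the SSRC configured for
  // that stream, then store the encoded frame dimensions for it.
  size_t simulcast_idx =
      rtp_video_header != NULL ? rtp_video_header->simulcastIdx : 0;
  if (simulcast_idx >= config_.rtp.ssrcs.size())
    return;
  uint32_t ssrc = config_.rtp.ssrcs[simulcast_idx];

  CriticalSectionScoped lock(crit_.get());
  SsrcStats* stats = &stats_.substreams[ssrc];
  stats->sent_width = encoded_image._encodedWidth;
  stats->sent_height = encoded_image._encodedHeight;
}
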
diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h
index 0084722..896806c 100644
--- a/webrtc/video_engine/vie_encoder.h
+++ b/webrtc/video_engine/vie_encoder.h
@@ -35,6 +35,7 @@
class ProcessThread;
class QMVideoSettingsCallback;
class RtpRtcp;
+class SendStatisticsProxy;
class ViEBitrateObserver;
class ViEEffectFilter;
class ViEEncoderObserver;
@@ -120,15 +121,10 @@
void SetSenderBufferingMode(int target_delay_ms);
// Implements VCMPacketizationCallback.
- virtual int32_t SendData(
- FrameType frame_type,
- uint8_t payload_type,
- uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader& fragmentation_header,
- const RTPVideoHeader* rtp_video_hdr) OVERRIDE;
+ virtual int32_t SendData(uint8_t payload_type,
+ const EncodedImage& encoded_image,
+ const RTPFragmentationHeader& fragmentation_header,
+ const RTPVideoHeader* rtp_video_hdr) OVERRIDE;
// Implements VideoProtectionCallback.
virtual int ProtectionRequest(
@@ -177,6 +173,8 @@
EncodedImageCallback* post_encode_callback);
void DeRegisterPostEncodeImageCallback();
+ void RegisterSendStatisticsProxy(SendStatisticsProxy* send_statistics_proxy);
+
int channel_id() const { return channel_id_; }
protected:
@@ -239,6 +237,8 @@
bool video_suspended_ GUARDED_BY(data_cs_);
I420FrameCallback* pre_encode_callback_ GUARDED_BY(callback_cs_);
const int64_t start_ms_;
+
+ SendStatisticsProxy* send_statistics_proxy_;
};
} // namespace webrtc
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 2c753b9..95bcca0 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -207,6 +207,7 @@
// NTP time of the capture time in local timebase in milliseconds.
int64_t ntp_time_ms_;
int64_t capture_time_ms_;
+ // TODO(pbos): Use webrtc::FrameType directly (and remove VideoFrameType).
VideoFrameType _frameType;
uint8_t* _buffer;
size_t _length;
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index a9aba94..712b16d 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -151,7 +151,7 @@
// with the VideoStream settings.
virtual bool ReconfigureVideoEncoder(const VideoEncoderConfig& config) = 0;
- virtual Stats GetStats() const = 0;
+ virtual Stats GetStats() = 0;
protected:
virtual ~VideoSendStream() {}