Update libjingle to 55618622.
Update libyuv to r826.
TEST=try bots
R=niklas.enbom@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/2889004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@5038 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/base/cpuid.cc b/talk/media/base/cpuid.cc
index 9fd7865..bd87d2e 100644
--- a/talk/media/base/cpuid.cc
+++ b/talk/media/base/cpuid.cc
@@ -51,8 +51,8 @@
bool IsCoreIOrBetter() {
#if !defined(DISABLE_YUV) && (defined(__i386__) || defined(__x86_64__) || \
defined(_M_IX86) || defined(_M_X64))
- int cpu_info[4];
- libyuv::CpuId(cpu_info, 0); // Function 0: Vendor ID
+ uint32 cpu_info[4];
+ libyuv::CpuId(0, 0, &cpu_info[0]); // Function 0: Vendor ID
if (cpu_info[1] == 0x756e6547 && cpu_info[3] == 0x49656e69 &&
cpu_info[2] == 0x6c65746e) { // GenuineIntel
// Detect CPU Family and Model
@@ -62,7 +62,7 @@
// 13:12 - Processor Type
// 19:16 - Extended Model
// 27:20 - Extended Family
- libyuv::CpuId(cpu_info, 1); // Function 1: Family and Model
+ libyuv::CpuId(1, 0, &cpu_info[0]); // Function 1: Family and Model
int family = ((cpu_info[0] >> 8) & 0x0f) | ((cpu_info[0] >> 16) & 0xff0);
int model = ((cpu_info[0] >> 4) & 0x0f) | ((cpu_info[0] >> 12) & 0xf0);
// CpuFamily | CpuModel | Name
diff --git a/talk/media/base/fakevideocapturer.h b/talk/media/base/fakevideocapturer.h
index 5a33265..8dc69c3 100644
--- a/talk/media/base/fakevideocapturer.h
+++ b/talk/media/base/fakevideocapturer.h
@@ -101,7 +101,7 @@
frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
next_timestamp_ += 33333333; // 30 fps
- talk_base::scoped_array<char> data(new char[size]);
+ talk_base::scoped_ptr<char[]> data(new char[size]);
frame.data = data.get();
// Copy something non-zero into the buffer so Validate wont complain that
// the frame is all duplicate.
diff --git a/talk/media/base/mediachannel.h b/talk/media/base/mediachannel.h
index f90265c..3dc9c56 100644
--- a/talk/media/base/mediachannel.h
+++ b/talk/media/base/mediachannel.h
@@ -162,29 +162,51 @@
void SetAll(const AudioOptions& change) {
echo_cancellation.SetFrom(change.echo_cancellation);
auto_gain_control.SetFrom(change.auto_gain_control);
+ rx_auto_gain_control.SetFrom(change.rx_auto_gain_control);
noise_suppression.SetFrom(change.noise_suppression);
highpass_filter.SetFrom(change.highpass_filter);
stereo_swapping.SetFrom(change.stereo_swapping);
typing_detection.SetFrom(change.typing_detection);
+ aecm_generate_comfort_noise.SetFrom(change.aecm_generate_comfort_noise);
conference_mode.SetFrom(change.conference_mode);
adjust_agc_delta.SetFrom(change.adjust_agc_delta);
experimental_agc.SetFrom(change.experimental_agc);
experimental_aec.SetFrom(change.experimental_aec);
aec_dump.SetFrom(change.aec_dump);
+ tx_agc_target_dbov.SetFrom(change.tx_agc_target_dbov);
+ tx_agc_digital_compression_gain.SetFrom(
+ change.tx_agc_digital_compression_gain);
+ tx_agc_limiter.SetFrom(change.tx_agc_limiter);
+ rx_agc_target_dbov.SetFrom(change.rx_agc_target_dbov);
+ rx_agc_digital_compression_gain.SetFrom(
+ change.rx_agc_digital_compression_gain);
+ rx_agc_limiter.SetFrom(change.rx_agc_limiter);
+ recording_sample_rate.SetFrom(change.recording_sample_rate);
+ playout_sample_rate.SetFrom(change.playout_sample_rate);
}
bool operator==(const AudioOptions& o) const {
return echo_cancellation == o.echo_cancellation &&
auto_gain_control == o.auto_gain_control &&
+ rx_auto_gain_control == o.rx_auto_gain_control &&
noise_suppression == o.noise_suppression &&
highpass_filter == o.highpass_filter &&
stereo_swapping == o.stereo_swapping &&
typing_detection == o.typing_detection &&
+ aecm_generate_comfort_noise == o.aecm_generate_comfort_noise &&
conference_mode == o.conference_mode &&
experimental_agc == o.experimental_agc &&
experimental_aec == o.experimental_aec &&
adjust_agc_delta == o.adjust_agc_delta &&
- aec_dump == o.aec_dump;
+ aec_dump == o.aec_dump &&
+ tx_agc_target_dbov == o.tx_agc_target_dbov &&
+ tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain &&
+ tx_agc_limiter == o.tx_agc_limiter &&
+ rx_agc_target_dbov == o.rx_agc_target_dbov &&
+ rx_agc_digital_compression_gain == o.rx_agc_digital_compression_gain &&
+ rx_agc_limiter == o.rx_agc_limiter &&
+ recording_sample_rate == o.recording_sample_rate &&
+ playout_sample_rate == o.playout_sample_rate;
}
std::string ToString() const {
@@ -192,15 +214,27 @@
ost << "AudioOptions {";
ost << ToStringIfSet("aec", echo_cancellation);
ost << ToStringIfSet("agc", auto_gain_control);
+ ost << ToStringIfSet("rx_agc", rx_auto_gain_control);
ost << ToStringIfSet("ns", noise_suppression);
ost << ToStringIfSet("hf", highpass_filter);
ost << ToStringIfSet("swap", stereo_swapping);
ost << ToStringIfSet("typing", typing_detection);
+ ost << ToStringIfSet("comfort_noise", aecm_generate_comfort_noise);
ost << ToStringIfSet("conference", conference_mode);
ost << ToStringIfSet("agc_delta", adjust_agc_delta);
ost << ToStringIfSet("experimental_agc", experimental_agc);
ost << ToStringIfSet("experimental_aec", experimental_aec);
ost << ToStringIfSet("aec_dump", aec_dump);
+ ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
+ ost << ToStringIfSet("tx_agc_digital_compression_gain",
+ tx_agc_digital_compression_gain);
+ ost << ToStringIfSet("tx_agc_limiter", tx_agc_limiter);
+ ost << ToStringIfSet("rx_agc_target_dbov", rx_agc_target_dbov);
+ ost << ToStringIfSet("rx_agc_digital_compression_gain",
+ rx_agc_digital_compression_gain);
+ ost << ToStringIfSet("rx_agc_limiter", rx_agc_limiter);
+ ost << ToStringIfSet("recording_sample_rate", recording_sample_rate);
+ ost << ToStringIfSet("playout_sample_rate", playout_sample_rate);
ost << "}";
return ost.str();
}
@@ -210,6 +244,8 @@
Settable<bool> echo_cancellation;
// Audio processing to adjust the sensitivity of the local mic dynamically.
Settable<bool> auto_gain_control;
+ // Audio processing to apply gain to the remote audio.
+ Settable<bool> rx_auto_gain_control;
// Audio processing to filter out background noise.
Settable<bool> noise_suppression;
// Audio processing to remove background noise of lower frequencies.
@@ -218,11 +254,21 @@
Settable<bool> stereo_swapping;
// Audio processing to detect typing.
Settable<bool> typing_detection;
+ Settable<bool> aecm_generate_comfort_noise;
Settable<bool> conference_mode;
Settable<int> adjust_agc_delta;
Settable<bool> experimental_agc;
Settable<bool> experimental_aec;
Settable<bool> aec_dump;
+ // Note that tx_agc_* only applies to non-experimental AGC.
+ Settable<uint16> tx_agc_target_dbov;
+ Settable<uint16> tx_agc_digital_compression_gain;
+ Settable<bool> tx_agc_limiter;
+ Settable<uint16> rx_agc_target_dbov;
+ Settable<uint16> rx_agc_digital_compression_gain;
+ Settable<bool> rx_agc_limiter;
+ Settable<uint32> recording_sample_rate;
+ Settable<uint32> playout_sample_rate;
};
// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
@@ -244,12 +290,13 @@
video_adapt_third.SetFrom(change.video_adapt_third);
video_noise_reduction.SetFrom(change.video_noise_reduction);
video_three_layers.SetFrom(change.video_three_layers);
- video_enable_camera_list.SetFrom(change.video_enable_camera_list);
video_one_layer_screencast.SetFrom(change.video_one_layer_screencast);
video_high_bitrate.SetFrom(change.video_high_bitrate);
video_watermark.SetFrom(change.video_watermark);
video_temporal_layer_screencast.SetFrom(
change.video_temporal_layer_screencast);
+ video_temporal_layer_realtime.SetFrom(
+ change.video_temporal_layer_realtime);
video_leaky_bucket.SetFrom(change.video_leaky_bucket);
cpu_overuse_detection.SetFrom(change.cpu_overuse_detection);
conference_mode.SetFrom(change.conference_mode);
@@ -269,11 +316,11 @@
video_adapt_third == o.video_adapt_third &&
video_noise_reduction == o.video_noise_reduction &&
video_three_layers == o.video_three_layers &&
- video_enable_camera_list == o.video_enable_camera_list &&
video_one_layer_screencast == o.video_one_layer_screencast &&
video_high_bitrate == o.video_high_bitrate &&
video_watermark == o.video_watermark &&
video_temporal_layer_screencast == o.video_temporal_layer_screencast &&
+ video_temporal_layer_realtime == o.video_temporal_layer_realtime &&
video_leaky_bucket == o.video_leaky_bucket &&
cpu_overuse_detection == o.cpu_overuse_detection &&
conference_mode == o.conference_mode &&
@@ -295,12 +342,13 @@
ost << ToStringIfSet("video adapt third", video_adapt_third);
ost << ToStringIfSet("noise reduction", video_noise_reduction);
ost << ToStringIfSet("3 layers", video_three_layers);
- ost << ToStringIfSet("camera list", video_enable_camera_list);
ost << ToStringIfSet("1 layer screencast", video_one_layer_screencast);
ost << ToStringIfSet("high bitrate", video_high_bitrate);
ost << ToStringIfSet("watermark", video_watermark);
ost << ToStringIfSet("video temporal layer screencast",
video_temporal_layer_screencast);
+ ost << ToStringIfSet("video temporal layer realtime",
+ video_temporal_layer_realtime);
ost << ToStringIfSet("leaky bucket", video_leaky_bucket);
ost << ToStringIfSet("cpu overuse detection", cpu_overuse_detection);
ost << ToStringIfSet("conference mode", conference_mode);
@@ -326,8 +374,6 @@
Settable<bool> video_noise_reduction;
// Experimental: Enable multi layer?
Settable<bool> video_three_layers;
- // Experimental: Enable camera list?
- Settable<bool> video_enable_camera_list;
// Experimental: Enable one layer screencast?
Settable<bool> video_one_layer_screencast;
// Experimental: Enable WebRtc higher bitrate?
@@ -336,6 +382,8 @@
Settable<bool> video_watermark;
// Experimental: Enable WebRTC layered screencast.
Settable<bool> video_temporal_layer_screencast;
+ // Experimental: Enable WebRTC temporal layer strategy for realtime video.
+ Settable<bool> video_temporal_layer_realtime;
// Enable WebRTC leaky bucket when sending media packets.
Settable<bool> video_leaky_bucket;
// Enable WebRTC Cpu Overuse Detection, which is a new version of the CPU
@@ -513,15 +561,68 @@
SEND_MICROPHONE
};
-struct VoiceSenderInfo {
- VoiceSenderInfo()
+// The stats information is structured as follows:
+// Media are represented by either MediaSenderInfo or MediaReceiverInfo.
+// Media contains a vector of SSRC infos that are exclusively used by this
+// media. (SSRCs shared between media streams can't be represented.)
+
+// Information about an SSRC.
+// This data may be locally recorded, or received in an RTCP SR or RR.
+struct SsrcSenderInfo {
+ SsrcSenderInfo()
: ssrc(0),
- bytes_sent(0),
+ timestamp(0) {
+ }
+ uint32 ssrc;
+ double timestamp; // NTP timestamp, represented as seconds since epoch.
+};
+
+struct SsrcReceiverInfo {
+ SsrcReceiverInfo()
+ : ssrc(0),
+ timestamp(0) {
+ }
+ uint32 ssrc;
+ double timestamp;
+};
+
+struct MediaSenderInfo {
+ MediaSenderInfo()
+ : bytes_sent(0),
packets_sent(0),
packets_lost(0),
fraction_lost(0.0),
+ rtt_ms(0) {
+ }
+ int64 bytes_sent;
+ int packets_sent;
+ int packets_lost;
+ float fraction_lost;
+ int rtt_ms;
+ std::string codec_name;
+ std::vector<SsrcSenderInfo> local_stats;
+ std::vector<SsrcReceiverInfo> remote_stats;
+};
+
+struct MediaReceiverInfo {
+ MediaReceiverInfo()
+ : bytes_rcvd(0),
+ packets_rcvd(0),
+ packets_lost(0),
+ fraction_lost(0.0) {
+ }
+ int64 bytes_rcvd;
+ int packets_rcvd;
+ int packets_lost;
+ float fraction_lost;
+ std::vector<SsrcReceiverInfo> local_stats;
+ std::vector<SsrcSenderInfo> remote_stats;
+};
+
+struct VoiceSenderInfo : public MediaSenderInfo {
+ VoiceSenderInfo()
+ : ssrc(0),
ext_seqnum(0),
- rtt_ms(0),
jitter_ms(0),
audio_level(0),
aec_quality_min(0.0),
@@ -533,13 +634,7 @@
}
uint32 ssrc;
- std::string codec_name;
- int64 bytes_sent;
- int packets_sent;
- int packets_lost;
- float fraction_lost;
int ext_seqnum;
- int rtt_ms;
int jitter_ms;
int audio_level;
float aec_quality_min;
@@ -550,13 +645,9 @@
bool typing_noise_detected;
};
-struct VoiceReceiverInfo {
+struct VoiceReceiverInfo : public MediaReceiverInfo {
VoiceReceiverInfo()
: ssrc(0),
- bytes_rcvd(0),
- packets_rcvd(0),
- packets_lost(0),
- fraction_lost(0.0),
ext_seqnum(0),
jitter_ms(0),
jitter_buffer_ms(0),
@@ -567,10 +658,6 @@
}
uint32 ssrc;
- int64 bytes_rcvd;
- int packets_rcvd;
- int packets_lost;
- float fraction_lost;
int ext_seqnum;
int jitter_ms;
int jitter_buffer_ms;
@@ -581,16 +668,11 @@
float expand_rate;
};
-struct VideoSenderInfo {
+struct VideoSenderInfo : public MediaSenderInfo {
VideoSenderInfo()
- : bytes_sent(0),
- packets_sent(0),
- packets_cached(0),
- packets_lost(0),
- fraction_lost(0.0),
+ : packets_cached(0),
firs_rcvd(0),
nacks_rcvd(0),
- rtt_ms(0),
frame_width(0),
frame_height(0),
framerate_input(0),
@@ -602,15 +684,9 @@
std::vector<uint32> ssrcs;
std::vector<SsrcGroup> ssrc_groups;
- std::string codec_name;
- int64 bytes_sent;
- int packets_sent;
int packets_cached;
- int packets_lost;
- float fraction_lost;
int firs_rcvd;
int nacks_rcvd;
- int rtt_ms;
int frame_width;
int frame_height;
int framerate_input;
@@ -620,13 +696,9 @@
int adapt_reason;
};
-struct VideoReceiverInfo {
+struct VideoReceiverInfo : public MediaReceiverInfo {
VideoReceiverInfo()
- : bytes_rcvd(0),
- packets_rcvd(0),
- packets_lost(0),
- packets_concealed(0),
- fraction_lost(0.0),
+ : packets_concealed(0),
firs_sent(0),
nacks_sent(0),
frame_width(0),
@@ -635,17 +707,19 @@
framerate_decoded(0),
framerate_output(0),
framerate_render_input(0),
- framerate_render_output(0) {
+ framerate_render_output(0),
+ decode_ms(0),
+ max_decode_ms(0),
+ jitter_buffer_ms(0),
+ min_playout_delay_ms(0),
+ render_delay_ms(0),
+ target_delay_ms(0),
+ current_delay_ms(0) {
}
std::vector<uint32> ssrcs;
std::vector<SsrcGroup> ssrc_groups;
- int64 bytes_rcvd;
- // vector<int> layer_bytes_rcvd;
- int packets_rcvd;
- int packets_lost;
int packets_concealed;
- float fraction_lost;
int firs_sent;
int nacks_sent;
int frame_width;
@@ -657,31 +731,42 @@
int framerate_render_input;
// Framerate that the renderer reports.
int framerate_render_output;
+
+ // All stats below are gathered per-VideoReceiver, but some will be correlated
+ // across MediaStreamTracks. NOTE(hta): when sinking stats into per-SSRC
+ // structures, reflect this in the new layout.
+
+ // Current frame decode latency.
+ int decode_ms;
+ // Maximum observed frame decode latency.
+ int max_decode_ms;
+ // Jitter (network-related) latency.
+ int jitter_buffer_ms;
+ // Requested minimum playout latency.
+ int min_playout_delay_ms;
+ // Requested latency to account for rendering delay.
+ int render_delay_ms;
+ // Target overall delay: network+decode+render, accounting for
+ // min_playout_delay_ms.
+ int target_delay_ms;
+ // Current overall delay, possibly ramping towards target_delay_ms.
+ int current_delay_ms;
};
-struct DataSenderInfo {
+struct DataSenderInfo : public MediaSenderInfo {
DataSenderInfo()
- : ssrc(0),
- bytes_sent(0),
- packets_sent(0) {
+ : ssrc(0) {
}
uint32 ssrc;
- std::string codec_name;
- int64 bytes_sent;
- int packets_sent;
};
-struct DataReceiverInfo {
+struct DataReceiverInfo : public MediaReceiverInfo {
DataReceiverInfo()
- : ssrc(0),
- bytes_rcvd(0),
- packets_rcvd(0) {
+ : ssrc(0) {
}
uint32 ssrc;
- int64 bytes_rcvd;
- int packets_rcvd;
};
struct BandwidthEstimationInfo {
diff --git a/talk/media/base/videocapturer.cc b/talk/media/base/videocapturer.cc
index acab19d..900a6f1 100644
--- a/talk/media/base/videocapturer.cc
+++ b/talk/media/base/videocapturer.cc
@@ -373,7 +373,7 @@
// TODO(fbarchard): Avoid scale and convert if muted.
// Temporary buffer is scoped here so it will persist until i420_frame.Init()
// makes a copy of the frame, converting to I420.
- talk_base::scoped_array<uint8> temp_buffer;
+ talk_base::scoped_ptr<uint8[]> temp_buffer;
// YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
// a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
bool can_scale =
diff --git a/talk/media/base/videocommon.cc b/talk/media/base/videocommon.cc
index 5dd45d7..b051d52 100644
--- a/talk/media/base/videocommon.cc
+++ b/talk/media/base/videocommon.cc
@@ -153,12 +153,15 @@
// Compute size to crop video frame to.
// If cropped_format_* is 0, return the frame_* size as is.
-void ComputeCrop(int cropped_format_width,
- int cropped_format_height,
+void ComputeCrop(int cropped_format_width, int cropped_format_height,
int frame_width, int frame_height,
int pixel_width, int pixel_height,
int rotation,
int* cropped_width, int* cropped_height) {
+ // Transform screen crop to camera space if rotated.
+ if (rotation == 90 || rotation == 270) {
+ std::swap(cropped_format_width, cropped_format_height);
+ }
ASSERT(cropped_format_width >= 0);
ASSERT(cropped_format_height >= 0);
ASSERT(frame_width > 0);
@@ -182,39 +185,26 @@
static_cast<float>(frame_height * pixel_height);
float crop_aspect = static_cast<float>(cropped_format_width) /
static_cast<float>(cropped_format_height);
- int new_frame_width = frame_width;
- int new_frame_height = frame_height;
- if (rotation == 90 || rotation == 270) {
- frame_aspect = 1.0f / frame_aspect;
- new_frame_width = frame_height;
- new_frame_height = frame_width;
- }
-
// kAspectThresh is the maximum aspect ratio difference that we'll accept
- // for cropping. The value 1.33 is based on 4:3 being cropped to 16:9.
+ // for cropping. The value 1.34 allows cropping from 4:3 to 16:9.
// Set to zero to disable cropping entirely.
// TODO(fbarchard): crop to multiple of 16 width for better performance.
- const float kAspectThresh = 16.f / 9.f / (4.f / 3.f) + 0.01f; // 1.33
+ const float kAspectThresh = 1.34f;
// Wide aspect - crop horizontally
if (frame_aspect > crop_aspect &&
frame_aspect < crop_aspect * kAspectThresh) {
// Round width down to multiple of 4 to avoid odd chroma width.
// Width a multiple of 4 allows a half size image to have chroma channel
- // that avoids rounding errors. lmi and webrtc have odd width limitations.
- new_frame_width = static_cast<int>((crop_aspect * frame_height *
+ // that avoids rounding errors.
+ frame_width = static_cast<int>((crop_aspect * frame_height *
pixel_height) / pixel_width + 0.5f) & ~3;
- } else if (crop_aspect > frame_aspect &&
- crop_aspect < frame_aspect * kAspectThresh) {
- new_frame_height = static_cast<int>((frame_width * pixel_width) /
+ } else if (frame_aspect < crop_aspect &&
+ frame_aspect > crop_aspect / kAspectThresh) {
+ frame_height = static_cast<int>((frame_width * pixel_width) /
(crop_aspect * pixel_height) + 0.5f) & ~1;
}
-
- *cropped_width = new_frame_width;
- *cropped_height = new_frame_height;
- if (rotation == 90 || rotation == 270) {
- *cropped_width = new_frame_height;
- *cropped_height = new_frame_width;
- }
+ *cropped_width = frame_width;
+ *cropped_height = frame_height;
}
// Compute the frame size that makes pixels square pixel aspect ratio.
diff --git a/talk/media/base/videocommon_unittest.cc b/talk/media/base/videocommon_unittest.cc
index 9122843..455a47b 100644
--- a/talk/media/base/videocommon_unittest.cc
+++ b/talk/media/base/videocommon_unittest.cc
@@ -276,6 +276,15 @@
EXPECT_EQ(640, cropped_width);
EXPECT_EQ(480, cropped_height);
+ // Request 9:16 from VGA rotated (portrait). Expect crop.
+ ComputeCrop(360, 640, // Crop size 9:16
+ 640, 480, // Frame is 3:4 portrait
+ 1, 1, // Normal 1:1 pixels
+ 90,
+ &cropped_width, &cropped_height);
+ EXPECT_EQ(640, cropped_width);
+ EXPECT_EQ(360, cropped_height);
+
// Cropped size 0x0. Expect no cropping.
// This is used when adding multiple capturers
ComputeCrop(0, 0, // Crop size 0x0
diff --git a/talk/media/base/videoframe_unittest.h b/talk/media/base/videoframe_unittest.h
index f70e567..361c195c 100644
--- a/talk/media/base/videoframe_unittest.h
+++ b/talk/media/base/videoframe_unittest.h
@@ -157,7 +157,7 @@
prefix.c_str(), frame.GetWidth(), frame.GetHeight());
size_t out_size = cricket::VideoFrame::SizeOf(frame.GetWidth(),
frame.GetHeight());
- talk_base::scoped_array<uint8> out(new uint8[out_size]);
+ talk_base::scoped_ptr<uint8[]> out(new uint8[out_size]);
frame.CopyToBuffer(out.get(), out_size);
return DumpSample(filename, out.get(), out_size);
}
@@ -514,7 +514,7 @@
T frame1, frame2;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
size_t buf_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> buf(new uint8[buf_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> buf(new uint8[buf_size + kAlignment]);
uint8* y = ALIGNP(buf.get(), kAlignment);
uint8* u = y + kWidth * kHeight;
uint8* v = u + (kWidth / 2) * kHeight;
@@ -535,7 +535,7 @@
T frame1, frame2;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
size_t buf_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> buf(new uint8[buf_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> buf(new uint8[buf_size + kAlignment]);
uint8* yuy2 = ALIGNP(buf.get(), kAlignment);
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
@@ -552,7 +552,7 @@
T frame1, frame2;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
size_t buf_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> buf(new uint8[buf_size + kAlignment + 1]);
+ talk_base::scoped_ptr<uint8[]> buf(new uint8[buf_size + kAlignment + 1]);
uint8* yuy2 = ALIGNP(buf.get(), kAlignment) + 1;
EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
frame1.GetUPlane(), frame1.GetUPitch(),
@@ -718,7 +718,7 @@
void ConstructRGB565() {
T frame1, frame2;
size_t out_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment]);
uint8 *out = ALIGNP(outbuf.get(), kAlignment);
T frame;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
@@ -734,7 +734,7 @@
void ConstructARGB1555() {
T frame1, frame2;
size_t out_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment]);
uint8 *out = ALIGNP(outbuf.get(), kAlignment);
T frame;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
@@ -750,7 +750,7 @@
void ConstructARGB4444() {
T frame1, frame2;
size_t out_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment]);
uint8 *out = ALIGNP(outbuf.get(), kAlignment);
T frame;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
@@ -769,7 +769,7 @@
#define TEST_BYR(NAME, BAYER) \
void NAME() { \
size_t bayer_size = kWidth * kHeight; \
- talk_base::scoped_array<uint8> bayerbuf(new uint8[ \
+ talk_base::scoped_ptr<uint8[]> bayerbuf(new uint8[ \
bayer_size + kAlignment]); \
uint8 *bayer = ALIGNP(bayerbuf.get(), kAlignment); \
T frame1, frame2; \
@@ -994,7 +994,7 @@
}
// Convert back to ARGB.
size_t out_size = 4;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment]);
uint8 *out = ALIGNP(outbuf.get(), kAlignment);
EXPECT_EQ(out_size, frame.ConvertToRgbBuffer(cricket::FOURCC_ARGB,
@@ -1031,7 +1031,7 @@
}
// Convert back to ARGB
size_t out_size = 10 * 4;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment]);
uint8 *out = ALIGNP(outbuf.get(), kAlignment);
EXPECT_EQ(out_size, frame.ConvertToRgbBuffer(cricket::FOURCC_ARGB,
@@ -1162,7 +1162,7 @@
// Allocate a buffer with end page aligned.
const int kPadToHeapSized = 16 * 1024 * 1024;
- talk_base::scoped_array<uint8> page_buffer(
+ talk_base::scoped_ptr<uint8[]> page_buffer(
new uint8[((data_size + kPadToHeapSized + 4095) & ~4095)]);
uint8* data_ptr = page_buffer.get();
if (!data_ptr) {
@@ -1427,7 +1427,7 @@
int astride = kWidth * bpp + rowpad;
size_t out_size = astride * kHeight;
- talk_base::scoped_array<uint8> outbuf(new uint8[out_size + kAlignment + 1]);
+ talk_base::scoped_ptr<uint8[]> outbuf(new uint8[out_size + kAlignment + 1]);
memset(outbuf.get(), 0, out_size + kAlignment + 1);
uint8 *outtop = ALIGNP(outbuf.get(), kAlignment);
uint8 *out = outtop;
@@ -1841,7 +1841,7 @@
void ConvertToI422Buffer() {
T frame1, frame2;
size_t out_size = kWidth * kHeight * 2;
- talk_base::scoped_array<uint8> buf(new uint8[out_size + kAlignment]);
+ talk_base::scoped_ptr<uint8[]> buf(new uint8[out_size + kAlignment]);
uint8* y = ALIGNP(buf.get(), kAlignment);
uint8* u = y + kWidth * kHeight;
uint8* v = u + (kWidth / 2) * kHeight;
@@ -1865,7 +1865,7 @@
#define TEST_TOBYR(NAME, BAYER) \
void NAME() { \
size_t bayer_size = kWidth * kHeight; \
- talk_base::scoped_array<uint8> bayerbuf(new uint8[ \
+ talk_base::scoped_ptr<uint8[]> bayerbuf(new uint8[ \
bayer_size + kAlignment]); \
uint8 *bayer = ALIGNP(bayerbuf.get(), kAlignment); \
T frame; \
@@ -1894,7 +1894,7 @@
} \
void NAME##Unaligned() { \
size_t bayer_size = kWidth * kHeight; \
- talk_base::scoped_array<uint8> bayerbuf(new uint8[ \
+ talk_base::scoped_ptr<uint8[]> bayerbuf(new uint8[ \
bayer_size + 1 + kAlignment]); \
uint8 *bayer = ALIGNP(bayerbuf.get(), kAlignment) + 1; \
T frame; \
@@ -1931,7 +1931,7 @@
#define TEST_BYRTORGB(NAME, BAYER) \
void NAME() { \
size_t bayer_size = kWidth * kHeight; \
- talk_base::scoped_array<uint8> bayerbuf(new uint8[ \
+ talk_base::scoped_ptr<uint8[]> bayerbuf(new uint8[ \
bayer_size + kAlignment]); \
uint8 *bayer1 = ALIGNP(bayerbuf.get(), kAlignment); \
for (int i = 0; i < kWidth * kHeight; ++i) { \
@@ -1947,7 +1947,7 @@
kWidth * 4, \
kWidth, kHeight); \
} \
- talk_base::scoped_array<uint8> bayer2buf(new uint8[ \
+ talk_base::scoped_ptr<uint8[]> bayer2buf(new uint8[ \
bayer_size + kAlignment]); \
uint8 *bayer2 = ALIGNP(bayer2buf.get(), kAlignment); \
libyuv::ARGBToBayer##BAYER(reinterpret_cast<uint8*>(ms->GetBuffer()), \
@@ -2010,7 +2010,7 @@
ASSERT_TRUE(LoadFrame(ms.get(), cricket::FOURCC_I420, kWidth, kHeight,
&frame));
size_t out_size = kWidth * kHeight * 3 / 2;
- talk_base::scoped_array<uint8> out(new uint8[out_size]);
+ talk_base::scoped_ptr<uint8[]> out(new uint8[out_size]);
for (int i = 0; i < repeat_; ++i) {
EXPECT_EQ(out_size, frame.CopyToBuffer(out.get(), out_size));
}
@@ -2056,7 +2056,7 @@
void CopyToBuffer1Pixel() {
size_t out_size = 3;
- talk_base::scoped_array<uint8> out(new uint8[out_size + 1]);
+ talk_base::scoped_ptr<uint8[]> out(new uint8[out_size + 1]);
memset(out.get(), 0xfb, out_size + 1); // Fill buffer
uint8 pixel[3] = { 1, 2, 3 };
T frame;
diff --git a/talk/media/devices/carbonvideorenderer.h b/talk/media/devices/carbonvideorenderer.h
index e091186..6c52fcf 100644
--- a/talk/media/devices/carbonvideorenderer.h
+++ b/talk/media/devices/carbonvideorenderer.h
@@ -57,7 +57,7 @@
static OSStatus DrawEventHandler(EventHandlerCallRef handler,
EventRef event,
void* data);
- talk_base::scoped_array<uint8> image_;
+ talk_base::scoped_ptr<uint8[]> image_;
talk_base::CriticalSection image_crit_;
int image_width_;
int image_height_;
diff --git a/talk/media/devices/gdivideorenderer.cc b/talk/media/devices/gdivideorenderer.cc
index c8024b7..9633eb6 100755
--- a/talk/media/devices/gdivideorenderer.cc
+++ b/talk/media/devices/gdivideorenderer.cc
@@ -98,7 +98,7 @@
void OnRenderFrame(const VideoFrame* frame);
BITMAPINFO bmi_;
- talk_base::scoped_array<uint8> image_;
+ talk_base::scoped_ptr<uint8[]> image_;
talk_base::scoped_ptr<WindowThread> window_thread_;
// The initial position of the window.
int initial_x_;
diff --git a/talk/media/devices/gtkvideorenderer.h b/talk/media/devices/gtkvideorenderer.h
index 6276b51..744c19f 100755
--- a/talk/media/devices/gtkvideorenderer.h
+++ b/talk/media/devices/gtkvideorenderer.h
@@ -56,7 +56,7 @@
// Check if the window has been closed.
bool IsClosed() const;
- talk_base::scoped_array<uint8> image_;
+ talk_base::scoped_ptr<uint8[]> image_;
GtkWidget* window_;
GtkWidget* draw_area_;
// The initial position of the window.
diff --git a/talk/media/devices/macdevicemanager.cc b/talk/media/devices/macdevicemanager.cc
index 10d85a0..e92408e 100644
--- a/talk/media/devices/macdevicemanager.cc
+++ b/talk/media/devices/macdevicemanager.cc
@@ -120,7 +120,7 @@
}
size_t num_devices = propsize / sizeof(AudioDeviceID);
- talk_base::scoped_array<AudioDeviceID> device_ids(
+ talk_base::scoped_ptr<AudioDeviceID[]> device_ids(
new AudioDeviceID[num_devices]);
err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices,
diff --git a/talk/media/sctp/sctpdataengine.h b/talk/media/sctp/sctpdataengine.h
index cadf78c..d09b152 100644
--- a/talk/media/sctp/sctpdataengine.h
+++ b/talk/media/sctp/sctpdataengine.h
@@ -56,7 +56,7 @@
namespace cricket {
// The highest stream ID (Sid) that SCTP allows, and the number of streams we
// tell SCTP we're going to use.
-const uint32 kMaxSctpSid = USHRT_MAX;
+const uint32 kMaxSctpSid = 1023;
// A DataEngine that interacts with usrsctp.
//
diff --git a/talk/media/sctp/sctputils.cc b/talk/media/sctp/sctputils.cc
index c33c64e..4073905 100644
--- a/talk/media/sctp/sctputils.cc
+++ b/talk/media/sctp/sctputils.cc
@@ -35,7 +35,7 @@
namespace cricket {
// Format defined at
-// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
static const uint8 DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
@@ -73,16 +73,17 @@
LOG(LS_WARNING) << "Could not read OPEN message channel type.";
return false;
}
- uint16 reliability_param;
- if (!buffer.ReadUInt16(&reliability_param)) {
- LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
- return false;
- }
+
uint16 priority;
if (!buffer.ReadUInt16(&priority)) {
LOG(LS_WARNING) << "Could not read OPEN message reliabilility prioirty.";
return false;
}
+ uint32 reliability_param;
+ if (!buffer.ReadUInt32(&reliability_param)) {
+    LOG(LS_WARNING) << "Could not read OPEN message reliability param.";
+ return false;
+ }
uint16 label_length;
if (!buffer.ReadUInt16(&label_length)) {
LOG(LS_WARNING) << "Could not read OPEN message label length.";
@@ -116,10 +117,11 @@
case DCOMCT_ORDERED_PARTIAL_RTXS:
case DCOMCT_UNORDERED_PARTIAL_RTXS:
config->maxRetransmits = reliability_param;
-
+ break;
case DCOMCT_ORDERED_PARTIAL_TIME:
case DCOMCT_UNORDERED_PARTIAL_TIME:
config->maxRetransmitTime = reliability_param;
+ break;
}
return true;
@@ -130,11 +132,9 @@
const webrtc::DataChannelInit& config,
talk_base::Buffer* payload) {
// Format defined at
- // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
- // TODO(pthatcher)
-
+ // http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
uint8 channel_type = 0;
- uint16 reliability_param = 0;
+ uint32 reliability_param = 0;
uint16 priority = 0;
if (config.ordered) {
if (config.maxRetransmits > -1) {
@@ -163,8 +163,8 @@
talk_base::ByteBuffer::ORDER_NETWORK);
buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE);
buffer.WriteUInt8(channel_type);
- buffer.WriteUInt16(reliability_param);
buffer.WriteUInt16(priority);
+ buffer.WriteUInt32(reliability_param);
buffer.WriteUInt16(static_cast<uint16>(label.length()));
buffer.WriteUInt16(static_cast<uint16>(config.protocol.length()));
buffer.WriteString(label);
diff --git a/talk/media/sctp/sctputils_unittest.cc b/talk/media/sctp/sctputils_unittest.cc
index 1cc9a70..70f67b8 100644
--- a/talk/media/sctp/sctputils_unittest.cc
+++ b/talk/media/sctp/sctputils_unittest.cc
@@ -37,7 +37,7 @@
const webrtc::DataChannelInit& config) {
uint8 message_type;
uint8 channel_type;
- uint16 reliability;
+ uint32 reliability;
uint16 priority;
uint16 label_length;
uint16 protocol_length;
@@ -57,15 +57,15 @@
channel_type);
}
- ASSERT_TRUE(buffer.ReadUInt16(&reliability));
+ ASSERT_TRUE(buffer.ReadUInt16(&priority));
+
+ ASSERT_TRUE(buffer.ReadUInt32(&reliability));
if (config.maxRetransmits > -1 || config.maxRetransmitTime > -1) {
EXPECT_EQ(config.maxRetransmits > -1 ?
config.maxRetransmits : config.maxRetransmitTime,
- reliability);
+ static_cast<int>(reliability));
}
- ASSERT_TRUE(buffer.ReadUInt16(&priority));
-
ASSERT_TRUE(buffer.ReadUInt16(&label_length));
ASSERT_TRUE(buffer.ReadUInt16(&protocol_length));
EXPECT_EQ(label.size(), label_length);
@@ -86,13 +86,14 @@
config.protocol = "y";
talk_base::Buffer packet;
- ASSERT(cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
+ ASSERT_TRUE(
+ cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
- ASSERT(cricket::ParseDataChannelOpenMessage(
+ ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
@@ -110,19 +111,21 @@
config.protocol = "y";
talk_base::Buffer packet;
- ASSERT(cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
+ ASSERT_TRUE(
+ cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
- ASSERT(cricket::ParseDataChannelOpenMessage(
+ ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
EXPECT_EQ(config.protocol, output_config.protocol);
EXPECT_EQ(config.ordered, output_config.ordered);
EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+ EXPECT_EQ(-1, output_config.maxRetransmits);
}
TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
@@ -132,17 +135,19 @@
config.protocol = "y";
talk_base::Buffer packet;
- ASSERT(cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
+ ASSERT_TRUE(
+ cricket::WriteDataChannelOpenMessage(input_label, config, &packet));
VerifyOpenMessageFormat(packet, input_label, config);
std::string output_label;
webrtc::DataChannelInit output_config;
- ASSERT(cricket::ParseDataChannelOpenMessage(
+ ASSERT_TRUE(cricket::ParseDataChannelOpenMessage(
packet, &output_label, &output_config));
EXPECT_EQ(input_label, output_label);
EXPECT_EQ(config.protocol, output_config.protocol);
EXPECT_EQ(config.ordered, output_config.ordered);
EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+ EXPECT_EQ(-1, output_config.maxRetransmitTime);
}
diff --git a/talk/media/webrtc/fakewebrtcvideoengine.h b/talk/media/webrtc/fakewebrtcvideoengine.h
index 31de172..0b68728 100644
--- a/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -1046,7 +1046,14 @@
return 0;
}
WEBRTC_STUB(EnableColorEnhancement, (const int, const bool));
-
+#ifdef USE_WEBRTC_DEV_BRANCH
+ WEBRTC_VOID_STUB(RegisterPreEncodeCallback,
+ (int, webrtc::I420FrameCallback*));
+ WEBRTC_VOID_STUB(DeRegisterPreEncodeCallback, (int));
+ WEBRTC_VOID_STUB(RegisterPreRenderCallback,
+ (int, webrtc::I420FrameCallback*));
+ WEBRTC_VOID_STUB(DeRegisterPreRenderCallback, (int));
+#endif
// webrtc::ViEExternalCodec
WEBRTC_FUNC(RegisterExternalSendCodec,
(const int channel, const unsigned char pl_type, webrtc::VideoEncoder*,
diff --git a/talk/media/webrtc/fakewebrtcvoiceengine.h b/talk/media/webrtc/fakewebrtcvoiceengine.h
index c3cd786..9696518 100644
--- a/talk/media/webrtc/fakewebrtcvoiceengine.h
+++ b/talk/media/webrtc/fakewebrtcvoiceengine.h
@@ -87,6 +87,8 @@
fec(false),
nack(false),
media_processor_registered(false),
+ rx_agc_enabled(false),
+ rx_agc_mode(webrtc::kAgcDefault),
cn8_type(13),
cn16_type(105),
dtmf_type(106),
@@ -95,6 +97,7 @@
send_ssrc(0),
level_header_ext_(-1) {
memset(&send_codec, 0, sizeof(send_codec));
+ memset(&rx_agc_config, 0, sizeof(rx_agc_config));
}
bool external_transport;
bool send;
@@ -107,6 +110,9 @@
bool fec;
bool nack;
bool media_processor_registered;
+ bool rx_agc_enabled;
+ webrtc::AgcModes rx_agc_mode;
+ webrtc::AgcConfig rx_agc_config;
int cn8_type;
int cn16_type;
int dtmf_type;
@@ -144,6 +150,8 @@
send_fail_channel_(-1),
fail_start_recording_microphone_(false),
recording_microphone_(false),
+ recording_sample_rate_(-1),
+ playout_sample_rate_(-1),
media_processor_(NULL) {
memset(&agc_config_, 0, sizeof(agc_config_));
}
@@ -584,10 +592,22 @@
WEBRTC_STUB(AudioDeviceControl, (unsigned int, unsigned int, unsigned int));
WEBRTC_STUB(SetLoudspeakerStatus, (bool enable));
WEBRTC_STUB(GetLoudspeakerStatus, (bool& enabled));
- WEBRTC_STUB(SetRecordingSampleRate, (unsigned int samples_per_sec));
- WEBRTC_STUB_CONST(RecordingSampleRate, (unsigned int* samples_per_sec));
- WEBRTC_STUB(SetPlayoutSampleRate, (unsigned int samples_per_sec));
- WEBRTC_STUB_CONST(PlayoutSampleRate, (unsigned int* samples_per_sec));
+ WEBRTC_FUNC(SetRecordingSampleRate, (unsigned int samples_per_sec)) {
+ recording_sample_rate_ = samples_per_sec;
+ return 0;
+ }
+ WEBRTC_FUNC_CONST(RecordingSampleRate, (unsigned int* samples_per_sec)) {
+ *samples_per_sec = recording_sample_rate_;
+ return 0;
+ }
+ WEBRTC_FUNC(SetPlayoutSampleRate, (unsigned int samples_per_sec)) {
+ playout_sample_rate_ = samples_per_sec;
+ return 0;
+ }
+ WEBRTC_FUNC_CONST(PlayoutSampleRate, (unsigned int* samples_per_sec)) {
+ *samples_per_sec = playout_sample_rate_;
+ return 0;
+ }
WEBRTC_STUB(EnableBuiltInAEC, (bool enable));
virtual bool BuiltInAECIsEnabled() const { return true; }
@@ -841,12 +861,27 @@
WEBRTC_STUB(SetRxNsStatus, (int channel, bool enable, webrtc::NsModes mode));
WEBRTC_STUB(GetRxNsStatus, (int channel, bool& enabled,
webrtc::NsModes& mode));
- WEBRTC_STUB(SetRxAgcStatus, (int channel, bool enable,
- webrtc::AgcModes mode));
- WEBRTC_STUB(GetRxAgcStatus, (int channel, bool& enabled,
- webrtc::AgcModes& mode));
- WEBRTC_STUB(SetRxAgcConfig, (int channel, webrtc::AgcConfig config));
- WEBRTC_STUB(GetRxAgcConfig, (int channel, webrtc::AgcConfig& config));
+ WEBRTC_FUNC(SetRxAgcStatus, (int channel, bool enable,
+ webrtc::AgcModes mode)) {
+ channels_[channel]->rx_agc_enabled = enable;
+ channels_[channel]->rx_agc_mode = mode;
+ return 0;
+ }
+ WEBRTC_FUNC(GetRxAgcStatus, (int channel, bool& enabled,
+ webrtc::AgcModes& mode)) {
+ enabled = channels_[channel]->rx_agc_enabled;
+ mode = channels_[channel]->rx_agc_mode;
+ return 0;
+ }
+
+ WEBRTC_FUNC(SetRxAgcConfig, (int channel, webrtc::AgcConfig config)) {
+ channels_[channel]->rx_agc_config = config;
+ return 0;
+ }
+ WEBRTC_FUNC(GetRxAgcConfig, (int channel, webrtc::AgcConfig& config)) {
+ config = channels_[channel]->rx_agc_config;
+ return 0;
+ }
WEBRTC_STUB(RegisterRxVadObserver, (int, webrtc::VoERxVadCallback&));
WEBRTC_STUB(DeRegisterRxVadObserver, (int channel));
@@ -996,6 +1031,8 @@
int send_fail_channel_;
bool fail_start_recording_microphone_;
bool recording_microphone_;
+ int recording_sample_rate_;
+ int playout_sample_rate_;
DtmfInfo dtmf_info_;
webrtc::VoEMediaProcess* media_processor_;
};
diff --git a/talk/media/webrtc/webrtcvideoengine.cc b/talk/media/webrtc/webrtcvideoengine.cc
index 3f4667d..05f8b2b 100644
--- a/talk/media/webrtc/webrtcvideoengine.cc
+++ b/talk/media/webrtc/webrtcvideoengine.cc
@@ -309,6 +309,13 @@
: video_channel_(video_channel),
framerate_(0),
bitrate_(0),
+ decode_ms_(0),
+ max_decode_ms_(0),
+ current_delay_ms_(0),
+ target_delay_ms_(0),
+ jitter_buffer_ms_(0),
+ min_playout_delay_ms_(0),
+ render_delay_ms_(0),
firs_requested_(0) {
}
@@ -323,23 +330,42 @@
framerate_ = framerate;
bitrate_ = bitrate;
}
+
+ virtual void DecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) {
+ talk_base::CritScope cs(&crit_);
+ decode_ms_ = decode_ms;
+ max_decode_ms_ = max_decode_ms;
+ current_delay_ms_ = current_delay_ms;
+ target_delay_ms_ = target_delay_ms;
+ jitter_buffer_ms_ = jitter_buffer_ms;
+ min_playout_delay_ms_ = min_playout_delay_ms;
+ render_delay_ms_ = render_delay_ms;
+ }
+
virtual void RequestNewKeyFrame(const int videoChannel) {
talk_base::CritScope cs(&crit_);
ASSERT(video_channel_ == videoChannel);
++firs_requested_;
}
- int framerate() const {
+ // Populate |rinfo| based on previously-set data in |*this|.
+ void ExportTo(VideoReceiverInfo* rinfo) {
talk_base::CritScope cs(&crit_);
- return framerate_;
- }
- int bitrate() const {
- talk_base::CritScope cs(&crit_);
- return bitrate_;
- }
- int firs_requested() const {
- talk_base::CritScope cs(&crit_);
- return firs_requested_;
+ rinfo->firs_sent = firs_requested_;
+ rinfo->framerate_rcvd = framerate_;
+ rinfo->decode_ms = decode_ms_;
+ rinfo->max_decode_ms = max_decode_ms_;
+ rinfo->current_delay_ms = current_delay_ms_;
+ rinfo->target_delay_ms = target_delay_ms_;
+ rinfo->jitter_buffer_ms = jitter_buffer_ms_;
+ rinfo->min_playout_delay_ms = min_playout_delay_ms_;
+ rinfo->render_delay_ms = render_delay_ms_;
}
private:
@@ -347,6 +373,13 @@
int video_channel_;
int framerate_;
int bitrate_;
+ int decode_ms_;
+ int max_decode_ms_;
+ int current_delay_ms_;
+ int target_delay_ms_;
+ int jitter_buffer_ms_;
+ int min_playout_delay_ms_;
+ int render_delay_ms_;
int firs_requested_;
};
@@ -2303,14 +2336,13 @@
rinfo.packets_lost = -1;
rinfo.packets_concealed = -1;
rinfo.fraction_lost = -1; // from SentRTCP
- rinfo.firs_sent = channel->decoder_observer()->firs_requested();
rinfo.nacks_sent = -1;
rinfo.frame_width = channel->render_adapter()->width();
rinfo.frame_height = channel->render_adapter()->height();
- rinfo.framerate_rcvd = channel->decoder_observer()->framerate();
int fps = channel->render_adapter()->framerate();
rinfo.framerate_decoded = fps;
rinfo.framerate_output = fps;
+ channel->decoder_observer()->ExportTo(&rinfo);
// Get sent RTCP statistics.
uint16 s_fraction_lost;
diff --git a/talk/media/webrtc/webrtcvideoengine_unittest.cc b/talk/media/webrtc/webrtcvideoengine_unittest.cc
index 9537673..9fbbbe4 100644
--- a/talk/media/webrtc/webrtcvideoengine_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine_unittest.cc
@@ -118,7 +118,7 @@
}
cricket::WebRtcVideoFrame frame;
size_t size = width * height * 3 / 2; // I420
- talk_base::scoped_array<uint8> pixel(new uint8[size]);
+ talk_base::scoped_ptr<uint8[]> pixel(new uint8[size]);
if (!frame.Init(cricket::FOURCC_I420,
width, height, width, height,
pixel.get(), size, 1, 1, 0, 0, 0)) {
@@ -138,7 +138,7 @@
}
cricket::WebRtcVideoFrame frame;
size_t size = width * height * 3 / 2; // I420
- talk_base::scoped_array<uint8> pixel(new uint8[size]);
+ talk_base::scoped_ptr<uint8[]> pixel(new uint8[size]);
if (!frame.Init(cricket::FOURCC_I420,
width, height, width, height,
pixel.get(), size, 1, 1, 0, timestamp, 0)) {
@@ -1161,7 +1161,8 @@
}
-TEST_F(WebRtcVideoEngineTestFake, SendReceiveBitratesStats) {
+// Disabled since it's flaky: b/11288120
+TEST_F(WebRtcVideoEngineTestFake, DISABLED_SendReceiveBitratesStats) {
EXPECT_TRUE(SetupEngine());
cricket::VideoOptions options;
options.conference_mode.Set(true);
diff --git a/talk/media/webrtc/webrtcvideoframe.h b/talk/media/webrtc/webrtcvideoframe.h
index 18475a6..e023234 100644
--- a/talk/media/webrtc/webrtcvideoframe.h
+++ b/talk/media/webrtc/webrtcvideoframe.h
@@ -55,7 +55,7 @@
const webrtc::VideoFrame* frame() const;
private:
- talk_base::scoped_array<char> data_;
+ talk_base::scoped_ptr<char[]> data_;
size_t length_;
webrtc::VideoFrame video_frame_;
};
diff --git a/talk/media/webrtc/webrtcvideoframe_unittest.cc b/talk/media/webrtc/webrtcvideoframe_unittest.cc
index 2f0decb..ebc345e 100644
--- a/talk/media/webrtc/webrtcvideoframe_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoframe_unittest.cc
@@ -53,7 +53,7 @@
captured_frame.height = frame_height;
captured_frame.data_size = (frame_width * frame_height) +
((frame_width + 1) / 2) * ((frame_height + 1) / 2) * 2;
- talk_base::scoped_array<uint8> captured_frame_buffer(
+ talk_base::scoped_ptr<uint8[]> captured_frame_buffer(
new uint8[captured_frame.data_size]);
captured_frame.data = captured_frame_buffer.get();
diff --git a/talk/media/webrtc/webrtcvie.h b/talk/media/webrtc/webrtcvie.h
index 9550962..50cc5d7 100644
--- a/talk/media/webrtc/webrtcvie.h
+++ b/talk/media/webrtc/webrtcvie.h
@@ -45,6 +45,7 @@
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+#include "webrtc/video_engine/new_include/frame_callback.h"
namespace cricket {
diff --git a/talk/media/webrtc/webrtcvoiceengine.cc b/talk/media/webrtc/webrtcvoiceengine.cc
index 83cbdaf..121dd46 100644
--- a/talk/media/webrtc/webrtcvoiceengine.cc
+++ b/talk/media/webrtc/webrtcvoiceengine.cc
@@ -528,7 +528,7 @@
// Save the default AGC configuration settings. This must happen before
// calling SetOptions or the default will be overwritten.
if (voe_wrapper_->processing()->GetAgcConfig(default_agc_config_) == -1) {
- LOG_RTCERR0(GetAGCConfig);
+ LOG_RTCERR0(GetAgcConfig);
return false;
}
@@ -686,6 +686,10 @@
webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
webrtc::NsModes ns_mode = webrtc::kNsHighSuppression;
bool aecm_comfort_noise = false;
+ if (options.aecm_generate_comfort_noise.Get(&aecm_comfort_noise)) {
+ LOG(LS_VERBOSE) << "Comfort noise explicitly set to "
+ << aecm_comfort_noise << " (default is false).";
+ }
#if defined(IOS)
// On iOS, VPIO provides built-in EC and AGC.
@@ -713,6 +717,9 @@
if (voep->SetEcStatus(echo_cancellation, ec_mode) == -1) {
LOG_RTCERR2(SetEcStatus, echo_cancellation, ec_mode);
return false;
+ } else {
+ LOG(LS_VERBOSE) << "Echo control set to " << echo_cancellation
+ << " with mode " << ec_mode;
}
#if !defined(ANDROID)
// TODO(ajm): Remove the error return on Android from webrtc.
@@ -734,6 +741,38 @@
if (voep->SetAgcStatus(auto_gain_control, agc_mode) == -1) {
LOG_RTCERR2(SetAgcStatus, auto_gain_control, agc_mode);
return false;
+ } else {
+ LOG(LS_VERBOSE) << "Auto gain set to " << auto_gain_control
+ << " with mode " << agc_mode;
+ }
+ }
+
+ if (options.tx_agc_target_dbov.IsSet() ||
+ options.tx_agc_digital_compression_gain.IsSet() ||
+ options.tx_agc_limiter.IsSet()) {
+ // Override default_agc_config_. Generally, an unset option means "leave
+ // the VoE bits alone" in this function, so we want whatever is set to be
+ // stored as the new "default". If we didn't, then setting e.g.
+ // tx_agc_target_dbov would reset digital compression gain and limiter
+ // settings.
+ // Also, if we don't update default_agc_config_, then adjust_agc_delta
+ // would be an offset from the original values, and not whatever was set
+ // explicitly.
+ default_agc_config_.targetLeveldBOv =
+ options.tx_agc_target_dbov.GetWithDefaultIfUnset(
+ default_agc_config_.targetLeveldBOv);
+ default_agc_config_.digitalCompressionGaindB =
+ options.tx_agc_digital_compression_gain.GetWithDefaultIfUnset(
+ default_agc_config_.digitalCompressionGaindB);
+ default_agc_config_.limiterEnable =
+ options.tx_agc_limiter.GetWithDefaultIfUnset(
+ default_agc_config_.limiterEnable);
+ if (voe_wrapper_->processing()->SetAgcConfig(default_agc_config_) == -1) {
+ LOG_RTCERR3(SetAgcConfig,
+ default_agc_config_.targetLeveldBOv,
+ default_agc_config_.digitalCompressionGaindB,
+ default_agc_config_.limiterEnable);
+ return false;
}
}
@@ -742,6 +781,9 @@
if (voep->SetNsStatus(noise_suppression, ns_mode) == -1) {
LOG_RTCERR2(SetNsStatus, noise_suppression, ns_mode);
return false;
+ } else {
+ LOG(LS_VERBOSE) << "Noise suppression set to " << noise_suppression
+ << " with mode " << ns_mode;
}
}
@@ -799,6 +841,20 @@
}
}
+ uint32 recording_sample_rate;
+ if (options.recording_sample_rate.Get(&recording_sample_rate)) {
+ if (voe_wrapper_->hw()->SetRecordingSampleRate(recording_sample_rate)) {
+ LOG_RTCERR1(SetRecordingSampleRate, recording_sample_rate);
+ }
+ }
+
+ uint32 playout_sample_rate;
+ if (options.playout_sample_rate.Get(&playout_sample_rate)) {
+ if (voe_wrapper_->hw()->SetPlayoutSampleRate(playout_sample_rate)) {
+ LOG_RTCERR1(SetPlayoutSampleRate, playout_sample_rate);
+ }
+ }
+
return true;
}
@@ -1136,6 +1192,18 @@
}
}
+ // Allow trace options to override the trace filter. We default
+ // it to log_filter_ (as a translation of libjingle log levels)
+ // elsewhere, but this allows clients to explicitly set webrtc
+ // log levels.
+ std::vector<std::string>::iterator tracefilter =
+ std::find(opts.begin(), opts.end(), "tracefilter");
+ if (tracefilter != opts.end() && ++tracefilter != opts.end()) {
+ if (!tracing_->SetTraceFilter(talk_base::FromString<int>(*tracefilter))) {
+ LOG_RTCERR1(SetTraceFilter, *tracefilter);
+ }
+ }
+
// Set AEC dump file
std::vector<std::string>::iterator recordEC =
std::find(opts.begin(), opts.end(), "recordEC");
@@ -1587,6 +1655,56 @@
// Will be interpreted when appropriate.
}
+ // Receiver-side auto gain control happens per channel, so set it here from
+ // options. Note that, like conference mode, setting it on the engine won't
+ // have the desired effect, since voice channels don't inherit options from
+ // the media engine when those options are applied per-channel.
+ bool rx_auto_gain_control;
+ if (options.rx_auto_gain_control.Get(&rx_auto_gain_control)) {
+ if (engine()->voe()->processing()->SetRxAgcStatus(
+ voe_channel(), rx_auto_gain_control,
+ webrtc::kAgcFixedDigital) == -1) {
+ LOG_RTCERR1(SetRxAgcStatus, rx_auto_gain_control);
+ return false;
+ } else {
+ LOG(LS_VERBOSE) << "Rx auto gain set to " << rx_auto_gain_control
+ << " with mode " << webrtc::kAgcFixedDigital;
+ }
+ }
+ if (options.rx_agc_target_dbov.IsSet() ||
+ options.rx_agc_digital_compression_gain.IsSet() ||
+ options.rx_agc_limiter.IsSet()) {
+ webrtc::AgcConfig config;
+ // If only some of the options are being overridden, get the current
+ // settings for the channel and bail if they aren't available.
+ if (!options.rx_agc_target_dbov.IsSet() ||
+ !options.rx_agc_digital_compression_gain.IsSet() ||
+ !options.rx_agc_limiter.IsSet()) {
+ if (engine()->voe()->processing()->GetRxAgcConfig(
+ voe_channel(), config) != 0) {
+ LOG(LS_ERROR) << "Failed to get default rx agc configuration for "
+ << "channel " << voe_channel() << ". Since not all rx "
+ << "agc options are specified, unable to safely set rx "
+ << "agc options.";
+ return false;
+ }
+ }
+ config.targetLeveldBOv =
+ options.rx_agc_target_dbov.GetWithDefaultIfUnset(
+ config.targetLeveldBOv);
+ config.digitalCompressionGaindB =
+ options.rx_agc_digital_compression_gain.GetWithDefaultIfUnset(
+ config.digitalCompressionGaindB);
+ config.limiterEnable = options.rx_agc_limiter.GetWithDefaultIfUnset(
+ config.limiterEnable);
+ if (engine()->voe()->processing()->SetRxAgcConfig(
+ voe_channel(), config) == -1) {
+ LOG_RTCERR4(SetRxAgcConfig, voe_channel(), config.targetLeveldBOv,
+ config.digitalCompressionGaindB, config.limiterEnable);
+ return false;
+ }
+ }
+
LOG(LS_INFO) << "Set voice channel options. Current options: "
<< options_.ToString();
return true;
diff --git a/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
index acefc38..2e52c8f 100644
--- a/talk/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/talk/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -55,9 +55,10 @@
}
};
-class NullVoETraceWrapper : public cricket::VoETraceWrapper {
+class FakeVoETraceWrapper : public cricket::VoETraceWrapper {
public:
virtual int SetTraceFilter(const unsigned int filter) {
+ filter_ = filter;
return 0;
}
virtual int SetTraceFile(const char* fileNameUTF8) {
@@ -66,6 +67,7 @@
virtual int SetTraceCallback(webrtc::TraceCallback* callback) {
return 0;
}
+ unsigned int filter_;
};
class WebRtcVoiceEngineTestFake : public testing::Test {
@@ -102,9 +104,10 @@
WebRtcVoiceEngineTestFake()
: voe_(kAudioCodecs, ARRAY_SIZE(kAudioCodecs)),
voe_sc_(kAudioCodecs, ARRAY_SIZE(kAudioCodecs)),
+ trace_wrapper_(new FakeVoETraceWrapper()),
engine_(new FakeVoEWrapper(&voe_),
new FakeVoEWrapper(&voe_sc_),
- new NullVoETraceWrapper()),
+ trace_wrapper_),
channel_(NULL), soundclip_(NULL) {
options_conference_.conference_mode.Set(true);
options_adjust_agc_.adjust_agc_delta.Set(-10);
@@ -277,6 +280,7 @@
protected:
cricket::FakeWebRtcVoiceEngine voe_;
cricket::FakeWebRtcVoiceEngine voe_sc_;
+ FakeVoETraceWrapper* trace_wrapper_;
cricket::WebRtcVoiceEngine engine_;
cricket::VoiceMediaChannel* channel_;
cricket::SoundclipMedia* soundclip_;
@@ -1873,6 +1877,84 @@
EXPECT_FALSE(voe_.GetPlayout(channel_num));
}
+TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
+ EXPECT_TRUE(SetupEngine());
+ webrtc::AgcConfig agc_config;
+ EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
+ EXPECT_EQ(0, agc_config.targetLeveldBOv);
+
+ cricket::AudioOptions options;
+ options.tx_agc_target_dbov.Set(3);
+ options.tx_agc_digital_compression_gain.Set(9);
+ options.tx_agc_limiter.Set(true);
+ options.auto_gain_control.Set(true);
+ EXPECT_TRUE(engine_.SetOptions(options));
+
+ EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
+ EXPECT_EQ(3, agc_config.targetLeveldBOv);
+ EXPECT_EQ(9, agc_config.digitalCompressionGaindB);
+ EXPECT_TRUE(agc_config.limiterEnable);
+
+ // Check interaction with adjust_agc_delta. Both should be respected, for
+ // backwards compatibility.
+ options.adjust_agc_delta.Set(-10);
+ EXPECT_TRUE(engine_.SetOptions(options));
+
+ EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
+ EXPECT_EQ(13, agc_config.targetLeveldBOv);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, RxAgcConfigViaOptions) {
+ EXPECT_TRUE(SetupEngine());
+ int channel_num = voe_.GetLastChannel();
+ cricket::AudioOptions options;
+ options.rx_agc_target_dbov.Set(6);
+ options.rx_agc_digital_compression_gain.Set(0);
+ options.rx_agc_limiter.Set(true);
+ options.rx_auto_gain_control.Set(true);
+ EXPECT_TRUE(channel_->SetOptions(options));
+
+ webrtc::AgcConfig agc_config;
+ EXPECT_EQ(0, engine_.voe()->processing()->GetRxAgcConfig(
+ channel_num, agc_config));
+ EXPECT_EQ(6, agc_config.targetLeveldBOv);
+ EXPECT_EQ(0, agc_config.digitalCompressionGaindB);
+ EXPECT_TRUE(agc_config.limiterEnable);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, SampleRatesViaOptions) {
+ EXPECT_TRUE(SetupEngine());
+ cricket::AudioOptions options;
+ options.recording_sample_rate.Set(48000u);
+ options.playout_sample_rate.Set(44100u);
+ EXPECT_TRUE(engine_.SetOptions(options));
+
+ unsigned int recording_sample_rate, playout_sample_rate;
+ EXPECT_EQ(0, voe_.RecordingSampleRate(&recording_sample_rate));
+ EXPECT_EQ(0, voe_.PlayoutSampleRate(&playout_sample_rate));
+ EXPECT_EQ(48000u, recording_sample_rate);
+ EXPECT_EQ(44100u, playout_sample_rate);
+}
+
+TEST_F(WebRtcVoiceEngineTestFake, TraceFilterViaTraceOptions) {
+ EXPECT_TRUE(SetupEngine());
+ engine_.SetLogging(talk_base::LS_INFO, "");
+ EXPECT_EQ(
+ // Info:
+ webrtc::kTraceStateInfo | webrtc::kTraceInfo |
+ // Warning:
+ webrtc::kTraceTerseInfo | webrtc::kTraceWarning |
+ // Error:
+ webrtc::kTraceError | webrtc::kTraceCritical,
+ static_cast<int>(trace_wrapper_->filter_));
+ // Now set it explicitly
+ std::string filter =
+ "tracefilter " + talk_base::ToString(webrtc::kTraceDefault);
+ engine_.SetLogging(talk_base::LS_VERBOSE, filter.c_str());
+ EXPECT_EQ(static_cast<unsigned int>(webrtc::kTraceDefault),
+ trace_wrapper_->filter_);
+}
+
// Test that we can set the outgoing SSRC properly.
// SSRC is set in SetupEngine by calling AddSendStream.
TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrc) {