Add UMA histogram for native audio buffer size in ms
The Android native audio code asks the OS to provide an appropriate
buffer size for real-time audio playout. We should add logging for this
value so we can see what values are used in practice.
Bug: b/157429867
Change-Id: I111a74faefc0e77b5c98921804d6625cba1b84af
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/176126
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@chromium.org>
Commit-Queue: Ivo Creusen <ivoc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31368}
diff --git a/modules/audio_device/android/audio_track_jni.cc b/modules/audio_device/android/audio_track_jni.cc
index 776f0cf..6dd75dd 100644
--- a/modules/audio_device/android/audio_track_jni.cc
+++ b/modules/audio_device/android/audio_track_jni.cc
@@ -19,6 +19,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
namespace webrtc {
@@ -27,7 +28,7 @@
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_track)
: audio_track_(std::move(audio_track)),
- init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
+ init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
@@ -45,8 +46,18 @@
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
- return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
- buffer_size_factor);
+ int buffer_size_bytes = audio_track_->CallIntMethod(
+ init_playout_, sample_rate, channels, buffer_size_factor);
+ if (buffer_size_bytes != -1) {
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
+ const int buffer_size_ms = (buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ buffer_size_ms, 0, 1000, 100);
+ return true;
+ }
+ return false;
}
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 3023c99..1973657 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -215,7 +215,7 @@
}
}
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -244,14 +244,14 @@
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
// Ensure that prevision audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -273,7 +273,7 @@
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -282,11 +282,11 @@
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
private boolean startPlayout() {
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index edc9dd1..07debc3 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -183,7 +183,7 @@
}
@CalledByNative
- private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -212,14 +212,14 @@
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
- return false;
+ return -1;
}
// Ensure that prevision audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
- return false;
+ return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@@ -241,7 +241,7 @@
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
- return false;
+ return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@@ -250,11 +250,11 @@
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
- return false;
+ return -1;
}
logMainParameters();
logMainParametersExtended();
- return true;
+ return minBufferSizeInBytes;
}
@CalledByNative