Allow AudioAttributes to be app/client configurable
WebRtcAudioTrack is hardcoded to configure AudioAttributes with
1. usage=USAGE_VOICE_COMMUNICATION
2. contentType=CONTENT_TYPE_SPEECH
This change allows AudioAttributes to be configured via the
JavaAudioDeviceModule.
Bug: webrtc:12153
Change-Id: I67c7f6e572c5a9f3a8fde674b6600d2adaf17895
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/191941
Commit-Queue: Gaurav Vaish <gvaish@chromium.org>
Reviewed-by: Henrik Andersson <henrika@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32583}
diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
index 3cb9438..43fce4f 100644
--- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
+++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -11,6 +11,7 @@
package org.webrtc.audio;
import android.content.Context;
+import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
@@ -47,6 +48,7 @@
private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
private boolean useStereoInput;
private boolean useStereoOutput;
+ private AudioAttributes audioAttributes;
private Builder(Context context) {
this.context = context;
@@ -194,6 +196,14 @@
}
/**
+ * Set custom {@link AudioAttributes} to use.
+ */
+ public Builder setAudioAttributes(AudioAttributes audioAttributes) {
+ this.audioAttributes = audioAttributes;
+ return this;
+ }
+
+ /**
* Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
* and is responsible for calling release().
*/
@@ -223,7 +233,7 @@
audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
- context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
+ context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback);
return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
}
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index 94eb2a4..3e01b95 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -71,6 +71,7 @@
private ByteBuffer byteBuffer;
+ private @Nullable final AudioAttributes audioAttributes;
private @Nullable AudioTrack audioTrack;
private @Nullable AudioTrackThread audioThread;
private final VolumeLogger volumeLogger;
@@ -162,15 +163,17 @@
@CalledByNative
WebRtcAudioTrack(Context context, AudioManager audioManager) {
- this(context, audioManager, null /* errorCallback */, null /* stateCallback */);
+ this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+ null /* stateCallback */);
}
WebRtcAudioTrack(Context context, AudioManager audioManager,
- @Nullable AudioTrackErrorCallback errorCallback,
+ @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
@Nullable AudioTrackStateCallback stateCallback) {
threadChecker.detachThread();
this.context = context;
this.audioManager = audioManager;
+ this.audioAttributes = audioAttributes;
this.errorCallback = errorCallback;
this.stateCallback = stateCallback;
this.volumeLogger = new VolumeLogger(audioManager);
@@ -231,8 +234,8 @@
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
- audioTrack =
- createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes);
+ audioTrack = createAudioTrackOnLollipopOrHigher(
+ sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
} else {
// Use default constructor for API levels below 21.
audioTrack =
@@ -383,8 +386,8 @@
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
- private static AudioTrack createAudioTrackOnLollipopOrHigher(
- int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+ private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
+ int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
// TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
// performance when Android O is supported. Add some logging in the mean time.
@@ -394,11 +397,26 @@
if (sampleRateInHz != nativeOutputSampleRate) {
Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
}
+
+ AudioAttributes.Builder attributesBuilder =
+ new AudioAttributes.Builder()
+ .setUsage(DEFAULT_USAGE)
+ .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+ if (overrideAttributes != null) {
+ if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+ attributesBuilder.setUsage(overrideAttributes.getUsage());
+ }
+ if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+ attributesBuilder.setContentType(overrideAttributes.getContentType());
+ }
+
+ attributesBuilder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy())
+ .setFlags(overrideAttributes.getFlags());
+ }
+
// Create an audio track where the audio usage is for VoIP and the content type is speech.
- return new AudioTrack(new AudioAttributes.Builder()
- .setUsage(DEFAULT_USAGE)
- .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
- .build(),
+ return new AudioTrack(attributesBuilder.build(),
new AudioFormat.Builder()
.setEncoding(AudioFormat.ENCODING_PCM_16BIT)
.setSampleRate(sampleRateInHz)