Android audio code: Replace C++ template with input/output interface

Bug: webrtc:7452
Change-Id: Id816500051e065918bba5c2235d38ad8eb50a8eb
Reviewed-on: https://webrtc-review.googlesource.com/64442
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22660}
diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 93343a0..fab9201 100644
--- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -595,10 +595,9 @@
     // Enable/disable OpenSL ES playback.
     if (!peerConnectionParameters.useOpenSLES) {
       Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
-      AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
     } else {
       Log.d(TAG, "Allow OpenSL ES audio if device supports it");
-      AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(false);
+      // TODO(magjed): Add support for external OpenSLES ADM.
     }
 
     if (peerConnectionParameters.disableBuiltInAEC) {
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 8b6c0d2..3061170 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -164,7 +164,8 @@
 
   sources = [
     "src/jni/audio_device/audio_common.h",
-    "src/jni/audio_device/audio_device_template_android.h",
+    "src/jni/audio_device/audio_device_module.cc",
+    "src/jni/audio_device/audio_device_module.h",
     "src/jni/audio_device/audio_manager.cc",
     "src/jni/audio_device/audio_manager.h",
     "src/jni/audio_device/audio_record_jni.cc",
diff --git a/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
index cc6f37f..a9c75b8 100644
--- a/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
+++ b/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
@@ -24,10 +24,6 @@
   public AudioDeviceModule() {}
 
   /* AudioManager */
-  public static void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
-    WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(enable);
-  }
-
   public static void setStereoInput(boolean enable) {
     WebRtcAudioManager.setStereoInput(enable);
   }
diff --git a/sdk/android/native_api/audio_device_module/audio_device_android.cc b/sdk/android/native_api/audio_device_module/audio_device_android.cc
index 9821f3f6..2649aa7 100644
--- a/sdk/android/native_api/audio_device_module/audio_device_android.cc
+++ b/sdk/android/native_api/audio_device_module/audio_device_android.cc
@@ -11,31 +11,76 @@
 #include "sdk/android/native_api/audio_device_module/audio_device_android.h"
 
 #include <stdlib.h>
+#include <utility>
+
 #include "rtc_base/logging.h"
+#include "rtc_base/ptr_util.h"
 #include "rtc_base/refcount.h"
 #include "rtc_base/refcountedobject.h"
-#include "system_wrappers/include/metrics.h"
+#include "sdk/android/src/jni/audio_device/aaudio_player.h"
+#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+#include "sdk/android/src/jni/audio_device/opensles_player.h"
+#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
+#include "system_wrappers/include/metrics.h"
 
 namespace webrtc {
 
+namespace {
+
+// This template function takes care of some boilerplate.
+template <typename AudioInputT, typename AudioOutputT>
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleTemplate(
+    AudioDeviceModule::AudioLayer audio_layer,
+    JNIEnv* env,
+    jobject application_context) {
+  auto audio_manager = rtc::MakeUnique<android_adm::AudioManager>(
+      env, audio_layer, JavaParamRef<jobject>(application_context));
+  auto audio_input = rtc::MakeUnique<AudioInputT>(audio_manager.get());
+  auto audio_output = rtc::MakeUnique<AudioOutputT>(audio_manager.get());
+  return CreateAudioDeviceModuleFromInputAndOutput(
+      audio_layer, std::move(audio_manager), std::move(audio_input),
+      std::move(audio_output));
+}
+
+}  // namespace
+
 #if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
 rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
     JNIEnv* env,
     jobject application_context) {
-  return android_adm::AudioManager::CreateAAudioAudioDeviceModule(
-      env, JavaParamRef<jobject>(application_context));
+  RTC_LOG(INFO) << __FUNCTION__;
+  return CreateAudioDeviceModuleTemplate<android_adm::AAudioRecorder,
+                                         android_adm::AAudioPlayer>(
+      AudioDeviceModule::kAndroidAAudioAudio, env, application_context);
 }
 #endif
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
     JNIEnv* env,
-    jobject application_context,
-    bool use_opensles_input,
-    bool use_opensles_output) {
-  return android_adm::AudioManager::CreateAudioDeviceModule(
-      env, JavaParamRef<jobject>(application_context), use_opensles_input,
-      use_opensles_output);
+    jobject application_context) {
+  return CreateAudioDeviceModuleTemplate<android_adm::AudioRecordJni,
+                                         android_adm::AudioTrackJni>(
+      AudioDeviceModule::kAndroidJavaAudio, env, application_context);
+}
+
+rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context) {
+  return CreateAudioDeviceModuleTemplate<android_adm::OpenSLESRecorder,
+                                         android_adm::OpenSLESPlayer>(
+      AudioDeviceModule::kAndroidOpenSLESAudio, env, application_context);
+}
+
+rtc::scoped_refptr<AudioDeviceModule>
+CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
+                                                  jobject application_context) {
+  return CreateAudioDeviceModuleTemplate<android_adm::AudioRecordJni,
+                                         android_adm::OpenSLESPlayer>(
+      AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio, env,
+      application_context);
 }
 
 }  // namespace webrtc
diff --git a/sdk/android/native_api/audio_device_module/audio_device_android.h b/sdk/android/native_api/audio_device_module/audio_device_android.h
index 5f2561d..ef404fd 100644
--- a/sdk/android/native_api/audio_device_module/audio_device_android.h
+++ b/sdk/android/native_api/audio_device_module/audio_device_android.h
@@ -23,11 +23,17 @@
     jobject application_context);
 #endif
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
     JNIEnv* env,
-    jobject application_context,
-    bool use_opensles_input,
-    bool use_opensles_output);
+    jobject application_context);
+
+rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+    JNIEnv* env,
+    jobject application_context);
+
+rtc::scoped_refptr<AudioDeviceModule>
+CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
+                                                  jobject application_context);
 
 }  // namespace webrtc
 
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
index aa83656..e75b3ed 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -42,20 +42,6 @@
   private static boolean useStereoOutput = false;
   private static boolean useStereoInput = false;
 
-  private static boolean blacklistDeviceForOpenSLESUsage = false;
-  private static boolean blacklistDeviceForOpenSLESUsageIsOverridden = false;
-
-  // Call this method to override the default list of blacklisted devices
-  // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
-  // Allows an app to take control over which devices to exclude from using
-  // the OpenSL ES audio output path
-  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
-  @SuppressWarnings("NoSynchronizedMethodCheck")
-  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
-    blacklistDeviceForOpenSLESUsageIsOverridden = true;
-    blacklistDeviceForOpenSLESUsage = enable;
-  }
-
   // Call these methods to override the default mono audio modes for the specified direction(s)
   // (input and/or output).
   // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@@ -204,19 +190,7 @@
     return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
   }
 
-  @CalledByNative
-  private static boolean isDeviceBlacklistedForOpenSLESUsage() {
-    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
-        ? blacklistDeviceForOpenSLESUsage
-        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
-    if (blacklisted) {
-      Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
-    }
-    return blacklisted;
-  }
-
   // Returns true if low-latency audio output is supported.
-  @CalledByNative
   public static boolean isLowLatencyOutputSupported(Context context) {
     return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
   }
@@ -224,7 +198,6 @@
   // Returns true if low-latency audio input is supported.
   // TODO(henrika): remove the hardcoded false return value when OpenSL ES
   // input performance has been evaluated and tested more.
-  @CalledByNative
   public static boolean isLowLatencyInputSupported(Context context) {
     // TODO(henrika): investigate if some sort of device list is needed here
     // as well. The NDK doc states that: "As of API level 21, lower latency
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index d3304ce..5a72300 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -193,7 +193,7 @@
 
   @CalledByNative
   WebRtcAudioTrack(long nativeAudioTrack) {
-    threadChecker.checkIsOnValidThread();
+    threadChecker.detachThread();
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.nativeAudioTrack = nativeAudioTrack;
     audioManager =
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
index 0693922..2be436a 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -35,16 +35,6 @@
 final class WebRtcAudioUtils {
   private static final String TAG = "WebRtcAudioUtils";
 
-  // List of devices where we have seen issues (e.g. bad audio quality) using
-  // the low latency output mode in combination with OpenSL ES.
-  // The device name is given by Build.MODEL.
-  private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] {
-      // It is recommended to maintain a list of blacklisted models outside
-      // this package and instead call
-      // WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true)
-      // from the client for devices where OpenSL ES shall be disabled.
-  };
-
   // List of devices where it has been verified that the built-in effect
   // bad and where it makes sense to avoid using it and instead rely on the
   // native WebRTC version instead. The device name is given by Build.MODEL.
@@ -207,12 +197,6 @@
     return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
   }
 
-  // Returns true if the device is blacklisted for OpenSL ES usage.
-  public static boolean deviceIsBlacklistedForOpenSLESUsage() {
-    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
-    return blackListedModels.contains(Build.MODEL);
-  }
-
   // Information about the current build, taken from system properties.
   static void logDeviceInfo(String tag) {
     Logging.d(tag,
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.h b/sdk/android/src/jni/audio_device/aaudio_player.h
index c23081a..0770b46 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.h
+++ b/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -22,6 +22,7 @@
 #include "rtc_base/thread_annotations.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 
 namespace webrtc {
 
@@ -30,8 +31,6 @@
 
 namespace android_adm {
 
-class AudioManager;
-
 // Implements low-latency 16-bit mono PCM audio output support for Android
 // using the C based AAudio API.
 //
@@ -52,30 +51,31 @@
 // where the internal AAudio buffer can be increased when needed. It will
 // reduce the risk of underruns (~glitches) at the expense of an increased
 // latency.
-class AAudioPlayer final : public AAudioObserverInterface,
+class AAudioPlayer final : public AudioOutput,
+                           public AAudioObserverInterface,
                            public rtc::MessageHandler {
  public:
   explicit AAudioPlayer(AudioManager* audio_manager);
   ~AAudioPlayer();
 
-  int Init();
-  int Terminate();
+  int Init() override;
+  int Terminate() override;
 
-  int InitPlayout();
-  bool PlayoutIsInitialized() const;
+  int InitPlayout() override;
+  bool PlayoutIsInitialized() const override;
 
-  int StartPlayout();
-  int StopPlayout();
-  bool Playing() const;
+  int StartPlayout() override;
+  int StopPlayout() override;
+  bool Playing() const override;
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
   // Not implemented in AAudio.
-  bool SpeakerVolumeIsAvailable();
-  int SetSpeakerVolume(uint32_t volume);
-  rtc::Optional<uint32_t> SpeakerVolume() const;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const;
+  bool SpeakerVolumeIsAvailable() override;
+  int SetSpeakerVolume(uint32_t volume) override;
+  rtc::Optional<uint32_t> SpeakerVolume() const override;
+  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
+  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
 
  protected:
   // AAudioObserverInterface implementation.
diff --git a/sdk/android/src/jni/audio_device/aaudio_recorder.h b/sdk/android/src/jni/audio_device/aaudio_recorder.h
index 9924b75..a424c47 100644
--- a/sdk/android/src/jni/audio_device/aaudio_recorder.h
+++ b/sdk/android/src/jni/audio_device/aaudio_recorder.h
@@ -20,6 +20,7 @@
 #include "rtc_base/thread.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 
 namespace webrtc {
 
@@ -28,8 +29,6 @@
 
 namespace android_adm {
 
-class AudioManager;
-
 // Implements low-latency 16-bit mono PCM audio input support for Android
 // using the C based AAudio API.
 //
@@ -44,30 +43,29 @@
 //
 // TODO(henrika): add comments about device changes and adaptive buffer
 // management.
-class AAudioRecorder : public AAudioObserverInterface,
+class AAudioRecorder : public AudioInput,
+                       public AAudioObserverInterface,
                        public rtc::MessageHandler {
  public:
   explicit AAudioRecorder(AudioManager* audio_manager);
   ~AAudioRecorder();
 
-  int Init();
-  int Terminate();
+  int Init() override;
+  int Terminate() override;
 
-  int InitRecording();
-  bool RecordingIsInitialized() const { return initialized_; }
+  int InitRecording() override;
+  bool RecordingIsInitialized() const override { return initialized_; }
 
-  int StartRecording();
-  int StopRecording();
-  bool Recording() const { return recording_; }
+  int StartRecording() override;
+  int StopRecording() override;
+  bool Recording() const override { return recording_; }
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
-
-  double latency_millis() const { return latency_millis_; }
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
   // TODO(henrika): add support using AAudio APIs when available.
-  int EnableBuiltInAEC(bool enable);
-  int EnableBuiltInAGC(bool enable);
-  int EnableBuiltInNS(bool enable);
+  int EnableBuiltInAEC(bool enable) override;
+  int EnableBuiltInAGC(bool enable) override;
+  int EnableBuiltInNS(bool enable) override;
 
  protected:
   // AAudioObserverInterface implementation.
diff --git a/sdk/android/src/jni/audio_device/audio_device_template_android.h b/sdk/android/src/jni/audio_device/audio_device_module.cc
similarity index 89%
rename from sdk/android/src/jni/audio_device/audio_device_template_android.h
rename to sdk/android/src/jni/audio_device/audio_device_module.cc
index 938a57b..e57d7c6 100644
--- a/sdk/android/src/jni/audio_device/audio_device_template_android.h
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -8,16 +8,14 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_
-#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 
-#include <memory>
+#include <utility>
 
-#include "modules/audio_device/audio_device_buffer.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
+#include "rtc_base/refcountedobject.h"
 #include "rtc_base/thread_checker.h"
-#include "sdk/android/src/jni/audio_device/audio_manager.h"
 #include "system_wrappers/include/metrics.h"
 
 #define CHECKinitialized_() \
@@ -38,6 +36,8 @@
 
 namespace android_adm {
 
+namespace {
+
 // InputType/OutputType can be any class that implements the capturing/rendering
 // part of the AudioDeviceGeneric API.
 // Construction and destruction must be done on one and the same thread. Each
@@ -47,9 +47,7 @@
 // It is possible to call the two static methods (SetAndroidAudioDeviceObjects
 // and ClearAndroidAudioDeviceObjects) from a different thread but both will
 // RTC_CHECK that the calling thread is attached to a Java VM.
-
-template <class InputType, class OutputType>
-class AudioDeviceTemplateAndroid : public AudioDeviceModule {
+class AndroidAudioDeviceModule : public AudioDeviceModule {
  public:
   // For use with UMA logging. Must be kept in sync with histograms.xml in
   // Chrome, located at
@@ -62,17 +60,23 @@
     NUM_STATUSES = 4
   };
 
-  AudioDeviceTemplateAndroid(JNIEnv* env,
-                             const JavaParamRef<jobject>& application_context,
-                             AudioDeviceModule::AudioLayer audio_layer)
+  AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer,
+                           std::unique_ptr<AudioManager> audio_manager,
+                           std::unique_ptr<AudioInput> audio_input,
+                           std::unique_ptr<AudioOutput> audio_output)
       : audio_layer_(audio_layer),
-        audio_manager_(env, audio_layer, application_context),
+        audio_manager_(std::move(audio_manager)),
+        input_(std::move(audio_input)),
+        output_(std::move(audio_output)),
         initialized_(false) {
+    RTC_CHECK(input_);
+    RTC_CHECK(output_);
+    RTC_CHECK(audio_manager_);
     RTC_LOG(INFO) << __FUNCTION__;
     thread_checker_.DetachFromThread();
   }
 
-  virtual ~AudioDeviceTemplateAndroid() { RTC_LOG(INFO) << __FUNCTION__; }
+  virtual ~AndroidAudioDeviceModule() { RTC_LOG(INFO) << __FUNCTION__; }
 
   int32_t ActiveAudioLayer(
       AudioDeviceModule::AudioLayer* audioLayer) const override {
@@ -89,22 +93,20 @@
   int32_t Init() override {
     RTC_LOG(INFO) << __FUNCTION__;
     RTC_DCHECK(thread_checker_.CalledOnValidThread());
-    output_ = rtc::MakeUnique<OutputType>(&audio_manager_);
-    input_ = rtc::MakeUnique<InputType>(&audio_manager_);
     audio_device_buffer_ = rtc::MakeUnique<AudioDeviceBuffer>();
     AttachAudioBuffer();
     if (initialized_) {
       return 0;
     }
     InitStatus status;
-    if (!audio_manager_.Init()) {
+    if (!audio_manager_->Init()) {
       status = InitStatus::OTHER_ERROR;
     } else if (output_->Init() != 0) {
-      audio_manager_.Close();
+      audio_manager_->Close();
       status = InitStatus::PLAYOUT_ERROR;
     } else if (input_->Init() != 0) {
       output_->Terminate();
-      audio_manager_.Close();
+      audio_manager_->Close();
       status = InitStatus::RECORDING_ERROR;
     } else {
       initialized_ = true;
@@ -127,7 +129,7 @@
     RTC_DCHECK(thread_checker_.CalledOnValidThread());
     int32_t err = input_->Terminate();
     err |= output_->Terminate();
-    err |= !audio_manager_.Close();
+    err |= !audio_manager_->Close();
     initialized_ = false;
     RTC_DCHECK_EQ(err, 0);
     return err;
@@ -250,7 +252,7 @@
       return 0;
     }
     audio_device_buffer_->StartPlayout();
-    if (!audio_manager_.IsCommunicationModeEnabled()) {
+    if (!audio_manager_->IsCommunicationModeEnabled()) {
       RTC_LOG(WARNING)
           << "The application should use MODE_IN_COMMUNICATION audio mode!";
     }
@@ -288,7 +290,7 @@
     if (Recording()) {
       return 0;
     }
-    if (!audio_manager_.IsCommunicationModeEnabled()) {
+    if (!audio_manager_->IsCommunicationModeEnabled()) {
       RTC_LOG(WARNING)
           << "The application should use MODE_IN_COMMUNICATION audio mode!";
     }
@@ -474,7 +476,7 @@
   int32_t StereoPlayoutIsAvailable(bool* available) const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized_();
-    *available = audio_manager_.IsStereoPlayoutSupported();
+    *available = audio_manager_->IsStereoPlayoutSupported();
     RTC_LOG(INFO) << "output: " << *available;
     return 0;
   }
@@ -486,7 +488,7 @@
       RTC_LOG(WARNING) << "recording in stereo is not supported";
       return -1;
     }
-    bool available = audio_manager_.IsStereoPlayoutSupported();
+    bool available = audio_manager_->IsStereoPlayoutSupported();
     // Android does not support changes between mono and stero on the fly.
     // Instead, the native audio layer is configured via the audio manager
     // to either support mono or stereo. It is allowed to call this method
@@ -506,7 +508,7 @@
   int32_t StereoPlayout(bool* enabled) const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized_();
-    *enabled = audio_manager_.IsStereoPlayoutSupported();
+    *enabled = audio_manager_->IsStereoPlayoutSupported();
     RTC_LOG(INFO) << "output: " << *enabled;
     return 0;
   }
@@ -514,7 +516,7 @@
   int32_t StereoRecordingIsAvailable(bool* available) const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized_();
-    *available = audio_manager_.IsStereoRecordSupported();
+    *available = audio_manager_->IsStereoRecordSupported();
     RTC_LOG(INFO) << "output: " << *available;
     return 0;
   }
@@ -526,7 +528,7 @@
       RTC_LOG(WARNING) << "recording in stereo is not supported";
       return -1;
     }
-    bool available = audio_manager_.IsStereoRecordSupported();
+    bool available = audio_manager_->IsStereoRecordSupported();
     // Android does not support changes between mono and stero on the fly.
     // Instead, the native audio layer is configured via the audio manager
     // to either support mono or stereo. It is allowed to call this method
@@ -546,7 +548,7 @@
   int32_t StereoRecording(bool* enabled) const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized_();
-    *enabled = audio_manager_.IsStereoRecordSupported();
+    *enabled = audio_manager_->IsStereoRecordSupported();
     RTC_LOG(INFO) << "output: " << *enabled;
     return 0;
   }
@@ -554,7 +556,7 @@
   int32_t PlayoutDelay(uint16_t* delay_ms) const override {
     CHECKinitialized_();
     // Best guess we can do is to use half of the estimated total delay.
-    *delay_ms = audio_manager_.GetDelayEstimateInMilliseconds() / 2;
+    *delay_ms = audio_manager_->GetDelayEstimateInMilliseconds() / 2;
     RTC_DCHECK_GT(*delay_ms, 0);
     return 0;
   }
@@ -574,7 +576,7 @@
   bool BuiltInAECIsAvailable() const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized__BOOL();
-    bool isAvailable = audio_manager_.IsAcousticEchoCancelerSupported();
+    bool isAvailable = audio_manager_->IsAcousticEchoCancelerSupported();
     RTC_LOG(INFO) << "output: " << isAvailable;
     return isAvailable;
   }
@@ -598,7 +600,7 @@
   bool BuiltInNSIsAvailable() const override {
     RTC_LOG(INFO) << __FUNCTION__;
     CHECKinitialized__BOOL();
-    bool isAvailable = audio_manager_.IsNoiseSuppressorSupported();
+    bool isAvailable = audio_manager_->IsNoiseSuppressorSupported();
     RTC_LOG(INFO) << "output: " << isAvailable;
     return isAvailable;
   }
@@ -644,17 +646,26 @@
   rtc::ThreadChecker thread_checker_;
 
   const AudioDeviceModule::AudioLayer audio_layer_;
-
-  AudioManager audio_manager_;
-  std::unique_ptr<OutputType> output_;
-  std::unique_ptr<InputType> input_;
+  const std::unique_ptr<AudioManager> audio_manager_;
+  const std::unique_ptr<AudioInput> input_;
+  const std::unique_ptr<AudioOutput> output_;
   std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
 
   bool initialized_;
 };
 
+}  // namespace
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+    AudioDeviceModule::AudioLayer audio_layer,
+    std::unique_ptr<AudioManager> audio_manager,
+    std::unique_ptr<AudioInput> audio_input,
+    std::unique_ptr<AudioOutput> audio_output) {
+  return new rtc::RefCountedObject<AndroidAudioDeviceModule>(
+      audio_layer, std::move(audio_manager), std::move(audio_input),
+      std::move(audio_output));
+}
+
 }  // namespace android_adm
 
 }  // namespace webrtc
-
-#endif  // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h
new file mode 100644
index 0000000..b11beb4
--- /dev/null
+++ b/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+
+#include <memory>
+
+#include "api/optional.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/audio_device/audio_manager.h"
+
+namespace webrtc {
+
+namespace android_adm {
+
+class AudioManager;
+
+class AudioInput {
+ public:
+  virtual ~AudioInput() {}
+
+  virtual int32_t Init() = 0;
+  virtual int32_t Terminate() = 0;
+
+  virtual int32_t InitRecording() = 0;
+  virtual bool RecordingIsInitialized() const = 0;
+
+  virtual int32_t StartRecording() = 0;
+  virtual int32_t StopRecording() = 0;
+  virtual bool Recording() const = 0;
+
+  virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+
+  virtual int32_t EnableBuiltInAEC(bool enable) = 0;
+  virtual int32_t EnableBuiltInAGC(bool enable) = 0;
+  virtual int32_t EnableBuiltInNS(bool enable) = 0;
+};
+
+class AudioOutput {
+ public:
+  virtual ~AudioOutput() {}
+
+  virtual int32_t Init() = 0;
+  virtual int32_t Terminate() = 0;
+  virtual int32_t InitPlayout() = 0;
+  virtual bool PlayoutIsInitialized() const = 0;
+  virtual int32_t StartPlayout() = 0;
+  virtual int32_t StopPlayout() = 0;
+  virtual bool Playing() const = 0;
+  virtual bool SpeakerVolumeIsAvailable() = 0;
+  virtual int SetSpeakerVolume(uint32_t volume) = 0;
+  virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0;
+  virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0;
+  virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0;
+  virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+};
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+    AudioDeviceModule::AudioLayer audio_layer,
+    std::unique_ptr<AudioManager> audio_manager,
+    std::unique_ptr<AudioInput> audio_input,
+    std::unique_ptr<AudioOutput> audio_output);
+
+}  // namespace android_adm
+
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
diff --git a/sdk/android/src/jni/audio_device/audio_manager.cc b/sdk/android/src/jni/audio_device/audio_manager.cc
index 8e09d8a..41943dc 100644
--- a/sdk/android/src/jni/audio_device/audio_manager.cc
+++ b/sdk/android/src/jni/audio_device/audio_manager.cc
@@ -28,7 +28,7 @@
 #include "sdk/android/src/jni/audio_device/aaudio_player.h"
 #include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
 #endif
-#include "sdk/android/src/jni/audio_device/audio_device_template_android.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 #include "sdk/android/src/jni/audio_device/audio_record_jni.h"
 #include "sdk/android/src/jni/audio_device/audio_track_jni.h"
@@ -39,64 +39,6 @@
 
 namespace android_adm {
 
-#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-rtc::scoped_refptr<AudioDeviceModule>
-AudioManager::CreateAAudioAudioDeviceModule(
-    JNIEnv* env,
-    const JavaParamRef<jobject>& application_context) {
-  RTC_LOG(INFO) << __FUNCTION__;
-  return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-      android_adm::AAudioRecorder, android_adm::AAudioPlayer>>(
-      env, application_context, AudioDeviceModule::kAndroidAAudioAudio);
-}
-#endif
-
-rtc::scoped_refptr<AudioDeviceModule> AudioManager::CreateAudioDeviceModule(
-    JNIEnv* env,
-    const JavaParamRef<jobject>& application_context) {
-  const bool use_opensles_output =
-      !Java_WebRtcAudioManager_isDeviceBlacklistedForOpenSLESUsage(env) &&
-      Java_WebRtcAudioManager_isLowLatencyOutputSupported(env,
-                                                          application_context);
-  const bool use_opensles_input =
-      use_opensles_output && Java_WebRtcAudioManager_isLowLatencyInputSupported(
-                                 env, application_context);
-  return CreateAudioDeviceModule(env, application_context, use_opensles_input,
-                                 use_opensles_output);
-}
-
-rtc::scoped_refptr<AudioDeviceModule> AudioManager::CreateAudioDeviceModule(
-    JNIEnv* env,
-    const JavaParamRef<jobject>& application_context,
-    bool use_opensles_input,
-    bool use_opensles_output) {
-  RTC_LOG(INFO) << __FUNCTION__;
-
-  if (use_opensles_output) {
-    if (use_opensles_input) {
-      // Use OpenSL ES for both playout and recording.
-      return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-          android_adm::OpenSLESRecorder, android_adm::OpenSLESPlayer>>(
-          env, application_context, AudioDeviceModule::kAndroidOpenSLESAudio);
-    } else {
-      // Use OpenSL ES for output and AudioRecord API for input. This
-      // combination provides low-latency output audio and at the same
-      // time support for HW AEC using the AudioRecord Java API.
-      return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-          android_adm::AudioRecordJni, android_adm::OpenSLESPlayer>>(
-          env, application_context,
-          AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio);
-    }
-  } else {
-    RTC_DCHECK(!use_opensles_input)
-        << "Combination of OpenSLES input and Java-based output not supported";
-    // Use Java-based audio in both directions.
-    return new rtc::RefCountedObject<android_adm::AudioDeviceTemplateAndroid<
-        android_adm::AudioRecordJni, android_adm::AudioTrackJni>>(
-        env, application_context, AudioDeviceModule::kAndroidJavaAudio);
-  }
-}
-
 // AudioManager implementation
 AudioManager::AudioManager(JNIEnv* env,
                            AudioDeviceModule::AudioLayer audio_layer,
@@ -120,6 +62,8 @@
                             static_cast<size_t>(output_buffer_size));
   record_parameters_.reset(sample_rate, static_cast<size_t>(input_channels),
                            static_cast<size_t>(input_buffer_size));
+  RTC_CHECK(playout_parameters_.is_valid());
+  RTC_CHECK(record_parameters_.is_valid());
   thread_checker_.DetachFromThread();
 }
 
@@ -217,12 +161,10 @@
 }
 
 bool AudioManager::IsStereoPlayoutSupported() const {
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return (playout_parameters_.channels() == 2);
 }
 
 bool AudioManager::IsStereoRecordSupported() const {
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return (record_parameters_.channels() == 2);
 }
 
@@ -234,13 +176,11 @@
 
 const AudioParameters& AudioManager::GetPlayoutAudioParameters() {
   RTC_CHECK(playout_parameters_.is_valid());
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return playout_parameters_;
 }
 
 const AudioParameters& AudioManager::GetRecordAudioParameters() {
   RTC_CHECK(record_parameters_.is_valid());
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return record_parameters_;
 }
 
diff --git a/sdk/android/src/jni/audio_device/audio_manager.h b/sdk/android/src/jni/audio_device/audio_manager.h
index 201b363..5f32e2a 100644
--- a/sdk/android/src/jni/audio_device/audio_manager.h
+++ b/sdk/android/src/jni/audio_device/audio_manager.h
@@ -33,24 +33,6 @@
 // unless Init() is called.
 class AudioManager {
  public:
-#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-  static rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
-      JNIEnv* env,
-      const JavaParamRef<jobject>& application_context);
-#endif
-
-  static rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
-      JNIEnv* env,
-      const JavaParamRef<jobject>& application_context,
-      bool use_opensles_input,
-      bool use_opensles_output);
-
-  // This function has internal logic checking if OpenSLES is blacklisted and
-  // whether it's supported.
-  static rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
-      JNIEnv* env,
-      const JavaParamRef<jobject>& application_context);
-
   AudioManager(JNIEnv* env,
                AudioDeviceModule::AudioLayer audio_layer,
                const JavaParamRef<jobject>& application_context);
diff --git a/sdk/android/src/jni/audio_device/audio_record_jni.cc b/sdk/android/src/jni/audio_device/audio_record_jni.cc
index 08c54a9..93675f7 100644
--- a/sdk/android/src/jni/audio_device/audio_record_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_record_jni.cc
@@ -45,13 +45,13 @@
   const std::string histogram_name_;
   int64_t start_time_ms_;
 };
+
 }  // namespace
 
 // AudioRecordJni implementation.
 AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
-    : env_(AttachCurrentThreadIfNeeded()),
-      j_audio_record_(
-          Java_WebRtcAudioRecord_Constructor(env_,
+    : j_audio_record_(
+          Java_WebRtcAudioRecord_Constructor(AttachCurrentThreadIfNeeded(),
                                              jni::jlongFromPointer(this))),
       audio_manager_(audio_manager),
       audio_parameters_(audio_manager->GetRecordAudioParameters()),
@@ -64,8 +64,9 @@
       audio_device_buffer_(nullptr) {
   RTC_LOG(INFO) << "ctor";
   RTC_DCHECK(audio_parameters_.is_valid());
-  // Detach from this thread since we want to use the checker to verify calls
-  // from the Java based audio thread.
+  // Detach from this thread since construction is allowed to happen on a
+  // different thread.
+  thread_checker_.DetachFromThread();
   thread_checker_java_.DetachFromThread();
 }
 
@@ -77,6 +78,7 @@
 
 int32_t AudioRecordJni::Init() {
   RTC_LOG(INFO) << "Init";
+  env_ = AttachCurrentThreadIfNeeded();
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return 0;
 }
diff --git a/sdk/android/src/jni/audio_device/audio_record_jni.h b/sdk/android/src/jni/audio_device/audio_record_jni.h
index a8a3177..40cfa0b 100644
--- a/sdk/android/src/jni/audio_device/audio_record_jni.h
+++ b/sdk/android/src/jni/audio_device/audio_record_jni.h
@@ -17,6 +17,7 @@
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/thread_checker.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 
 namespace webrtc {
@@ -41,26 +42,26 @@
 // This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed
 // and detach when the object goes out of scope. Additional thread checking
 // guarantees that no other (possibly non attached) thread is used.
-class AudioRecordJni {
+class AudioRecordJni : public AudioInput {
  public:
   explicit AudioRecordJni(AudioManager* audio_manager);
-  ~AudioRecordJni();
+  ~AudioRecordJni() override;
 
-  int32_t Init();
-  int32_t Terminate();
+  int32_t Init() override;
+  int32_t Terminate() override;
 
-  int32_t InitRecording();
-  bool RecordingIsInitialized() const { return initialized_; }
+  int32_t InitRecording() override;
+  bool RecordingIsInitialized() const override { return initialized_; }
 
-  int32_t StartRecording();
-  int32_t StopRecording();
-  bool Recording() const { return recording_; }
+  int32_t StartRecording() override;
+  int32_t StopRecording() override;
+  bool Recording() const override { return recording_; }
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
-  int32_t EnableBuiltInAEC(bool enable);
-  int32_t EnableBuiltInAGC(bool enable);
-  int32_t EnableBuiltInNS(bool enable);
+  int32_t EnableBuiltInAEC(bool enable) override;
+  int32_t EnableBuiltInAGC(bool enable) override;
+  int32_t EnableBuiltInNS(bool enable) override;
 
   // Called from Java side so we can cache the address of the Java-manged
   // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
@@ -90,7 +91,7 @@
   rtc::ThreadChecker thread_checker_java_;
 
   // Wraps the Java specific parts of the AudioRecordJni class.
-  JNIEnv* const env_;
+  JNIEnv* env_ = nullptr;
   ScopedJavaGlobalRef<jobject> j_audio_record_;
 
   // Raw pointer to the audio manger.
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 48aab44..c5ab6b3 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -27,9 +27,9 @@
 
 // TODO(henrika): possible extend usage of AudioManager and add it as member.
 AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
-    : env_(AttachCurrentThreadIfNeeded()),
-      j_audio_track_(
-          Java_WebRtcAudioTrack_Constructor(env_, jni::jlongFromPointer(this))),
+    : j_audio_track_(
+          Java_WebRtcAudioTrack_Constructor(AttachCurrentThreadIfNeeded(),
+                                            jni::jlongFromPointer(this))),
       audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
       direct_buffer_address_(nullptr),
       direct_buffer_capacity_in_bytes_(0),
@@ -39,8 +39,9 @@
       audio_device_buffer_(nullptr) {
   RTC_LOG(INFO) << "ctor";
   RTC_DCHECK(audio_parameters_.is_valid());
-  // Detach from this thread since we want to use the checker to verify calls
-  // from the Java based audio thread.
+  // Detach from this thread since construction is allowed to happen on a
+  // different thread.
+  thread_checker_.DetachFromThread();
   thread_checker_java_.DetachFromThread();
 }
 
@@ -52,6 +53,7 @@
 
 int32_t AudioTrackJni::Init() {
   RTC_LOG(INFO) << "Init";
+  env_ = AttachCurrentThreadIfNeeded();
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return 0;
 }
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.h b/sdk/android/src/jni/audio_device/audio_track_jni.h
index c73bd70..d56f30c 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.h
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.h
@@ -19,6 +19,7 @@
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 
 namespace webrtc {
@@ -38,28 +39,28 @@
 // This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed
 // and detach when the object goes out of scope. Additional thread checking
 // guarantees that no other (possibly non attached) thread is used.
-class AudioTrackJni {
+class AudioTrackJni : public AudioOutput {
  public:
   explicit AudioTrackJni(AudioManager* audio_manager);
-  ~AudioTrackJni();
+  ~AudioTrackJni() override;
 
-  int32_t Init();
-  int32_t Terminate();
+  int32_t Init() override;
+  int32_t Terminate() override;
 
-  int32_t InitPlayout();
-  bool PlayoutIsInitialized() const { return initialized_; }
+  int32_t InitPlayout() override;
+  bool PlayoutIsInitialized() const override { return initialized_; }
 
-  int32_t StartPlayout();
-  int32_t StopPlayout();
-  bool Playing() const { return playing_; }
+  int32_t StartPlayout() override;
+  int32_t StopPlayout() override;
+  bool Playing() const override { return playing_; }
 
-  bool SpeakerVolumeIsAvailable();
-  int SetSpeakerVolume(uint32_t volume);
-  rtc::Optional<uint32_t> SpeakerVolume() const;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const;
+  bool SpeakerVolumeIsAvailable() override;
+  int SetSpeakerVolume(uint32_t volume) override;
+  rtc::Optional<uint32_t> SpeakerVolume() const override;
+  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
+  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
   // Called from Java side so we can cache the address of the Java-manged
   // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
@@ -86,7 +87,7 @@
   rtc::ThreadChecker thread_checker_java_;
 
   // Wraps the Java specific parts of the AudioTrackJni class.
-  JNIEnv* const env_;
+  JNIEnv* env_ = nullptr;
   ScopedJavaGlobalRef<jobject> j_audio_track_;
 
   // Contains audio parameters provided to this class at construction by the
diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h
index 70629ea..34d3c53 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.h
+++ b/sdk/android/src/jni/audio_device/opensles_player.h
@@ -22,6 +22,7 @@
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 #include "sdk/android/src/jni/audio_device/opensles_common.h"
 
@@ -49,7 +50,7 @@
 // If the device doesn't claim this feature but supports API level 9 (Android
 // platform version 2.3) or later, then we can still use the OpenSL ES APIs but
 // the output latency may be higher.
-class OpenSLESPlayer {
+class OpenSLESPlayer : public AudioOutput {
  public:
   // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
   // required for lower latency. Beginning with API level 18 (Android 4.3), a
@@ -60,25 +61,25 @@
   static const int kNumOfOpenSLESBuffers = 2;
 
   explicit OpenSLESPlayer(AudioManager* audio_manager);
-  ~OpenSLESPlayer();
+  ~OpenSLESPlayer() override;
 
-  int Init();
-  int Terminate();
+  int Init() override;
+  int Terminate() override;
 
-  int InitPlayout();
-  bool PlayoutIsInitialized() const { return initialized_; }
+  int InitPlayout() override;
+  bool PlayoutIsInitialized() const override { return initialized_; }
 
-  int StartPlayout();
-  int StopPlayout();
-  bool Playing() const { return playing_; }
+  int StartPlayout() override;
+  int StopPlayout() override;
+  bool Playing() const override { return playing_; }
 
-  bool SpeakerVolumeIsAvailable();
-  int SetSpeakerVolume(uint32_t volume);
-  rtc::Optional<uint32_t> SpeakerVolume() const;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const;
+  bool SpeakerVolumeIsAvailable() override;
+  int SetSpeakerVolume(uint32_t volume) override;
+  rtc::Optional<uint32_t> SpeakerVolume() const override;
+  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
+  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
  private:
   // These callback methods are called when data is required for playout.
diff --git a/sdk/android/src/jni/audio_device/opensles_recorder.h b/sdk/android/src/jni/audio_device/opensles_recorder.h
index 556e578..81efcb8 100644
--- a/sdk/android/src/jni/audio_device/opensles_recorder.h
+++ b/sdk/android/src/jni/audio_device/opensles_recorder.h
@@ -22,6 +22,7 @@
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
 #include "sdk/android/src/jni/audio_device/opensles_common.h"
 
@@ -52,7 +53,7 @@
 // for input effects preclude the lower latency path.
 // See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html
 // for more details.
-class OpenSLESRecorder {
+class OpenSLESRecorder : public AudioInput {
  public:
   // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
   // required for lower latency. Beginning with API level 18 (Android 4.3), a
@@ -63,24 +64,24 @@
   static const int kNumOfOpenSLESBuffers = 2;
 
   explicit OpenSLESRecorder(AudioManager* audio_manager);
-  ~OpenSLESRecorder();
+  ~OpenSLESRecorder() override;
 
-  int Init();
-  int Terminate();
+  int Init() override;
+  int Terminate() override;
 
-  int InitRecording();
-  bool RecordingIsInitialized() const { return initialized_; }
+  int InitRecording() override;
+  bool RecordingIsInitialized() const override { return initialized_; }
 
-  int StartRecording();
-  int StopRecording();
-  bool Recording() const { return recording_; }
+  int StartRecording() override;
+  int StopRecording() override;
+  bool Recording() const override { return recording_; }
 
-  void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer);
+  void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override;
 
   // TODO(henrika): add support using OpenSL ES APIs when available.
-  int EnableBuiltInAEC(bool enable);
-  int EnableBuiltInAGC(bool enable);
-  int EnableBuiltInNS(bool enable);
+  int EnableBuiltInAEC(bool enable) override;
+  int EnableBuiltInAGC(bool enable) override;
+  int EnableBuiltInNS(bool enable) override;
 
  private:
   // Obtaines the SL Engine Interface from the existing global Engine object.
diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc
index 62873bd..04874bb 100644
--- a/sdk/android/src/jni/pc/peerconnectionfactory.cc
+++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc
@@ -27,6 +27,8 @@
 #include "sdk/android/generated_peerconnection_jni/jni/PeerConnectionFactory_jni.h"
 #include "sdk/android/native_api/jni/java_types.h"
 #include "sdk/android/src/jni/audio_device/audio_manager.h"
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "sdk/android/src/jni/pc/androidnetworkmonitor.h"
 #include "sdk/android/src/jni/pc/audio.h"
@@ -238,10 +240,23 @@
     rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
   }
 
-  rtc::scoped_refptr<AudioDeviceModule> adm =
-      field_trial::IsEnabled(kExternalAndroidAudioDeviceFieldTrialName)
-          ? android_adm::AudioManager::CreateAudioDeviceModule(jni, jcontext)
-          : nullptr;
+  rtc::scoped_refptr<AudioDeviceModule> adm = nullptr;
+  if (field_trial::IsEnabled(kExternalAndroidAudioDeviceFieldTrialName)) {
+    // Only Java AudioDeviceModule is supported as an external ADM at the
+    // moment.
+    const AudioDeviceModule::AudioLayer audio_layer =
+        AudioDeviceModule::kAndroidJavaAudio;
+    auto audio_manager =
+        rtc::MakeUnique<android_adm::AudioManager>(jni, audio_layer, jcontext);
+    auto audio_input =
+        rtc::MakeUnique<android_adm::AudioRecordJni>(audio_manager.get());
+    auto audio_output =
+        rtc::MakeUnique<android_adm::AudioTrackJni>(audio_manager.get());
+    adm = CreateAudioDeviceModuleFromInputAndOutput(
+        audio_layer, std::move(audio_manager), std::move(audio_input),
+        std::move(audio_output));
+  }
+
   rtc::scoped_refptr<AudioMixer> audio_mixer = nullptr;
   std::unique_ptr<CallFactoryInterface> call_factory(CreateCallFactory());
   std::unique_ptr<RtcEventLogFactoryInterface> rtc_event_log_factory(