Remove checks for SDK <= 21
WebRTC’s minSdk is 21, so all those checks are dead code.
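Every deleted guard has the same shape: it tests an SDK level at or below 21, which minSdk now guarantees, so one branch is unreachable. A representative before/after, taken from AppRTCProximitySensor below:

    // Before: guard for an API that has existed since level 21.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      info.append(", max delay: ").append(proximitySensor.getMaxDelay());
    }
    // After: the condition is always true, so the guard collapses.
    info.append(", max delay: ").append(proximitySensor.getMaxDelay());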
Change-Id: I26497fd92259b66d9e5ac6afbb393adf4d904c77
Bug: webrtc:13780
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/253124
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Linus Nilsson <lnilsson@webrtc.org>
Commit-Queue: Xavier Lepaul <xalep@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#36140}
diff --git a/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java b/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
index 5c73b43..604e286 100644
--- a/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
+++ b/examples/androidapp/src/org/appspot/apprtc/AppRTCProximitySensor.java
@@ -149,16 +149,10 @@
info.append(", resolution: ").append(proximitySensor.getResolution());
info.append(", max range: ").append(proximitySensor.getMaximumRange());
info.append(", min delay: ").append(proximitySensor.getMinDelay());
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
- // Added in API level 20.
- info.append(", type: ").append(proximitySensor.getStringType());
- }
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
- // Added in API level 21.
- info.append(", max delay: ").append(proximitySensor.getMaxDelay());
- info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
- info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
- }
+ info.append(", type: ").append(proximitySensor.getStringType());
+ info.append(", max delay: ").append(proximitySensor.getMaxDelay());
+ info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
+ info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
Log.d(TAG, info.toString());
}
}
diff --git a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
index 2da2073..eb5ee82 100644
--- a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
+++ b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java
@@ -384,7 +384,6 @@
}
}
- @TargetApi(17)
private DisplayMetrics getDisplayMetrics() {
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowManager =
@@ -393,16 +392,11 @@
return displayMetrics;
}
- @TargetApi(19)
private static int getSystemUiVisibility() {
- int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
- flags |= View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
- }
- return flags;
+ return View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
}
- @TargetApi(21)
private void startScreenCapture() {
MediaProjectionManager mediaProjectionManager =
(MediaProjectionManager) getApplication().getSystemService(
@@ -460,7 +454,6 @@
return null;
}
- @TargetApi(21)
private @Nullable VideoCapturer createScreenCapturer() {
if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
reportError("User didn't give permission to capture the screen.");
diff --git a/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java b/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
index dd51ab2..1c64621 100644
--- a/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
+++ b/examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
@@ -73,7 +73,6 @@
* correct value, and then returns back to the correct reading. Both when
* jumping up and back down, we might create faulty CPU load readings.
*/
-@TargetApi(Build.VERSION_CODES.KITKAT)
class CpuMonitor {
private static final String TAG = "CpuMonitor";
private static final int MOVING_AVERAGE_SAMPLES = 5;
@@ -159,8 +158,7 @@
}
public static boolean isSupported() {
- return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
- && Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
+ return Build.VERSION.SDK_INT < Build.VERSION_CODES.N;
}
public CpuMonitor(Context context) {
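A hypothetical call site, to show what the simplified check now expresses; only the upper bound remains, presumably because CpuMonitor samples /proc/stat, which Android N restricted:

    if (CpuMonitor.isSupported()) {
      CpuMonitor cpuMonitor = new CpuMonitor(context); // assumes a Context in scope
    }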
diff --git a/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
index ac3fb23..051d737 100644
--- a/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
+++ b/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java
@@ -453,10 +453,6 @@
@Test
@SmallTest
public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
- return;
- }
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@@ -464,10 +460,6 @@
@Test
@SmallTest
public void testLoopbackVp9DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
- return;
- }
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@@ -475,10 +467,6 @@
@Test
@SmallTest
public void testLoopbackH264DecodeToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
- return;
- }
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@@ -486,10 +474,6 @@
@Test
@SmallTest
public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
- return;
- }
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}
@@ -497,10 +481,6 @@
@Test
@SmallTest
public void testLoopbackH264CaptureToTexture() throws InterruptedException {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
- return;
- }
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
index 11ed669..92f1c93 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
@@ -95,8 +95,6 @@
// Returns true if the platform AEC should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
private static boolean isAcousticEchoCancelerExcludedByUUID() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
&& d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
@@ -109,8 +107,6 @@
// Returns true if the platform NS should be excluded based on its UUID.
// AudioEffect.queryEffects() can throw IllegalStateException.
private static boolean isNoiseSuppressorExcludedByUUID() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
for (Descriptor d : getAvailableEffects()) {
if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
return true;
@@ -121,15 +117,11 @@
// Returns true if the device supports Acoustic Echo Cancellation (AEC).
private static boolean isAcousticEchoCancelerEffectAvailable() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
}
// Returns true if the device supports Noise Suppression (NS).
private static boolean isNoiseSuppressorEffectAvailable() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
}
@@ -277,9 +269,6 @@
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
private boolean effectTypeIsVoIP(UUID type) {
- if (Build.VERSION.SDK_INT < 18)
- return false;
-
return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}
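The pre-18 early returns were dead because AudioEffect.queryEffects() predates API 21 and the AEC/NS effect type constants were added in API 18. A minimal sketch of an availability check in the style of isEffectTypeAvailable() as used in this file (the real code additionally applies the UUID exclusions above):

    private static boolean isEffectTypeAvailable(UUID effectType) {
      for (AudioEffect.Descriptor d : AudioEffect.queryEffects()) {
        if (d.type.equals(effectType)) {
          return true;
        }
      }
      return false;
    }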
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index b057c3a..43c416f 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -258,7 +258,7 @@
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
- return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
+ return isLowLatencyOutputSupported();
}
// Returns true if the device has professional audio level of functionality
@@ -301,9 +301,6 @@
}
private int getSampleRateForApiLevel() {
- if (Build.VERSION.SDK_INT < 17) {
- return WebRtcAudioUtils.getDefaultSampleRateHz();
- }
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
: Integer.parseInt(sampleRateString);
@@ -312,9 +309,6 @@
// Returns the native output buffer size for low-latency output streams.
private int getLowLatencyOutputFramesPerBuffer() {
assertTrue(isLowLatencyOutputSupported());
- if (Build.VERSION.SDK_INT < 17) {
- return DEFAULT_FRAME_PER_BUFFER;
- }
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
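Both AudioManager properties read here were added in API 17, so the guards were dead; only the null fallback is still needed, since getProperty() may return null on devices that do not report a value. A sketch of the remaining pattern (fallback values illustrative):

    String rate = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    String frames = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    int sampleRateHz = (rate == null) ? 48000 : Integer.parseInt(rate);
    int framesPerBuffer = (frames == null) ? 256 : Integer.parseInt(frames);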
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index e3988e1..3e1875c 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -10,7 +10,6 @@
package org.webrtc.voiceengine;
-import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFormat;
@@ -46,7 +45,7 @@
// By default, WebRTC creates audio tracks with a usage attribute
// corresponding to voice communications, such as telephony or VoIP.
- private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
private static int usageAttribute = DEFAULT_USAGE;
// This method overrides the default usage attribute and allows the user
@@ -60,15 +59,6 @@
usageAttribute = usage;
}
- private static int getDefaultUsageAttribute() {
- if (Build.VERSION.SDK_INT >= 21) {
- return AudioAttributes.USAGE_VOICE_COMMUNICATION;
- } else {
- // Not used on SDKs lower than 21.
- return 0;
- }
- }
-
private final long nativeAudioTrack;
private final AudioManager audioManager;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
@@ -154,7 +144,7 @@
byteBuffer.put(emptyBytes);
byteBuffer.position(0);
}
- int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
// If a write() returns a negative value, an error has occurred.
@@ -188,14 +178,6 @@
}
}
- private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
- if (Build.VERSION.SDK_INT >= 21) {
- return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
- } else {
- return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
- }
- }
-
// Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread.
public void stopThread() {
@@ -257,19 +239,12 @@
// Create an AudioTrack object and initialize its associated audio buffer.
// The size of this buffer determines how long an AudioTrack can play
// before running out of data.
- if (Build.VERSION.SDK_INT >= 21) {
- // If we are on API level 21 or higher, it is possible to use a special AudioTrack
- // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
- // supersede the notion of stream types for defining the behavior of audio playback,
- // and to allow certain platforms or routing policies to use this information for more
- // refined volume or routing decisions.
- audioTrack = createAudioTrackOnLollipopOrHigher(
- sampleRate, channelConfig, minBufferSizeInBytes);
- } else {
- // Use default constructor for API levels below 21.
- audioTrack =
- createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
- }
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
+ // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+ // supersede the notion of stream types for defining the behavior of audio playback,
+ // and to allow certain platforms or routing policies to use this information for more
+ // refined volume or routing decisions.
+ audioTrack = createAudioTrack(sampleRate, channelConfig, minBufferSizeInBytes);
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
@@ -353,7 +328,7 @@
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
assertTrue(audioManager != null);
- if (isVolumeFixed()) {
+ if (audioManager.isVolumeFixed()) {
Logging.e(TAG, "The device implements a fixed volume policy.");
return false;
}
@@ -361,12 +336,6 @@
return true;
}
- private boolean isVolumeFixed() {
- if (Build.VERSION.SDK_INT < 21)
- return false;
- return audioManager.isVolumeFixed();
- }
-
/** Get current volume level for a phone call audio stream. */
private int getStreamVolume() {
threadChecker.checkIsOnValidThread();
@@ -387,10 +356,9 @@
// Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
- @TargetApi(21)
- private static AudioTrack createAudioTrackOnLollipopOrHigher(
+ private static AudioTrack createAudioTrack(
int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
- Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
+ Logging.d(TAG, "createAudioTrack");
// TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
// performance when Android O is supported. Add some logging in the mean time.
final int nativeOutputSampleRate =
@@ -418,13 +386,6 @@
AudioManager.AUDIO_SESSION_ID_GENERATE);
}
- @SuppressWarnings("deprecation") // Deprecated in API level 25.
- private static AudioTrack createAudioTrackOnLowerThanLollipop(
- int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
- return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
- AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
- }
-
private void logBufferSizeInFrames() {
if (Build.VERSION.SDK_INT >= 23) {
Logging.d(TAG, "AudioTrack: "
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
index 61cab58..0472114 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -214,13 +214,6 @@
+ "BT SCO: " + audioManager.isBluetoothScoOn());
}
- private static boolean isVolumeFixed(AudioManager audioManager) {
- if (Build.VERSION.SDK_INT < 21) {
- return false;
- }
- return audioManager.isVolumeFixed();
- }
-
// Adds volume information for all possible stream types.
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
final int[] streams = {
@@ -233,7 +226,7 @@
};
Logging.d(tag, "Audio State: ");
// Some devices may not have volume controls and might use a fixed volume.
- boolean fixedVolume = isVolumeFixed(audioManager);
+ boolean fixedVolume = audioManager.isVolumeFixed();
Logging.d(tag, " fixed volume=" + fixedVolume);
if (!fixedVolume) {
for (int stream : streams) {
diff --git a/sdk/android/api/org/webrtc/Camera2Capturer.java b/sdk/android/api/org/webrtc/Camera2Capturer.java
index 3c1d477..c4becf4 100644
--- a/sdk/android/api/org/webrtc/Camera2Capturer.java
+++ b/sdk/android/api/org/webrtc/Camera2Capturer.java
@@ -10,12 +10,10 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import androidx.annotation.Nullable;
-@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
private final Context context;
@Nullable private final CameraManager cameraManager;
diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java
index 961ab75..8a0ce4a 100644
--- a/sdk/android/api/org/webrtc/Camera2Enumerator.java
+++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
@@ -30,7 +29,6 @@
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
@@ -107,10 +105,6 @@
* Checks if API is supported and all cameras have better than legacy support.
*/
public static boolean isSupported(Context context) {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
- return false;
- }
-
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
String[] cameraIds = cameraManager.getCameraIdList();
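A typical call site, now that isSupported() only verifies better-than-LEGACY hardware support; a sketch in the style of the example app's camera selection:

    CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
        ? new Camera2Enumerator(context)
        : new Camera1Enumerator(true /* captureToTexture */);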
diff --git a/sdk/android/api/org/webrtc/EglBase.java b/sdk/android/api/org/webrtc/EglBase.java
index 763f4e5..64771d0 100644
--- a/sdk/android/api/org/webrtc/EglBase.java
+++ b/sdk/android/api/org/webrtc/EglBase.java
@@ -147,13 +147,11 @@
/**
* Create a new context with the specified config attributes, sharing data with `sharedContext`.
- * If `sharedContext` is null, a root context is created. This function will try to create an EGL
- * 1.4 context if possible, and an EGL 1.0 context otherwise.
+ * If `sharedContext` is null, a root EGL 1.4 context is created.
*/
public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
if (sharedContext == null) {
- return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
- : createEgl10(configAttributes);
+ return createEgl14(configAttributes);
} else if (sharedContext instanceof EglBase14.Context) {
return createEgl14((EglBase14.Context) sharedContext, configAttributes);
} else if (sharedContext instanceof EglBase10.Context) {
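Callers are unaffected: a root context is still requested the same way, it just can no longer fall back to EGL 1.0. Sketch:

    EglBase eglBase = EglBase.create(null /* sharedContext */, EglBase.CONFIG_PLAIN);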
diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
index 2454182..b48a39a 100644
--- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
@@ -94,11 +94,6 @@
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo input) {
- // HW encoding is not supported below Android Kitkat.
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- return null;
- }
-
VideoCodecMimeType type = VideoCodecMimeType.valueOf(input.getName());
MediaCodecInfo info = findCodecForType(type);
@@ -135,11 +130,6 @@
@Override
public VideoCodecInfo[] getSupportedCodecs() {
- // HW encoding is not supported below Android Kitkat.
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- return new VideoCodecInfo[0];
- }
-
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
// Generate a list of supported codecs in order of preference:
// VP8, VP9, H264 (high profile), H264 (baseline profile) and AV1.
@@ -219,13 +209,12 @@
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
String name = info.getName();
- // QCOM Vp8 encoder is supported in KITKAT or later.
- return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
+ // QCOM Vp8 encoder is always supported.
+ return name.startsWith(QCOM_PREFIX)
// Exynos VP8 encoder is supported in M or later.
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
- // Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
- || (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
- && enableIntelVp8Encoder);
+ // Intel Vp8 encoder is supported when the Intel encoder is enabled.
+ || (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder);
}
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
@@ -241,11 +230,8 @@
return false;
}
String name = info.getName();
- // QCOM H264 encoder is supported in KITKAT or later.
- return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
- // Exynos H264 encoder is supported in LOLLIPOP or later.
- || (name.startsWith(EXYNOS_PREFIX)
- && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
+ // QCOM and Exynos H264 encoders are always supported.
+ return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
}
private boolean isMediaCodecAllowed(MediaCodecInfo info) {
@@ -257,14 +243,13 @@
private int getForcedKeyFrameIntervalMs(VideoCodecMimeType type, String codecName) {
if (type == VideoCodecMimeType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
- if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
- || Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
- } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
- return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
- } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
- return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
+ if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
+ return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
+ }
+ return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
}
// Other codecs don't need key frame forcing.
return 0;
diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
index adcb114..231d507 100644
--- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
+++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
@@ -31,10 +30,7 @@
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
* frames. At any time, at most one frame is being processed.
- *
- * @note This class is only supported on Android Lollipop and above.
*/
-@TargetApi(21)
public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
private static final int DISPLAY_FLAGS =
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index e186f9a..3ea2273 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -198,7 +198,7 @@
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
- setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
+ surfaceTexture.setOnFrameAvailableListener(st -> {
if (hasPendingTexture) {
Logging.d(TAG, "A frame is already pending, dropping frame.");
}
@@ -208,20 +208,6 @@
}, handler);
}
- @TargetApi(21)
- private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
- SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
- surfaceTexture.setOnFrameAvailableListener(listener, handler);
- } else {
- // The documentation states that the listener will be called on an arbitrary thread, but in
- // pratice, it is always the thread on which the SurfaceTexture was constructed. There are
- // assertions in place in case this ever changes. For API >= 21, we use the new API to
- // explicitly specify the handler.
- surfaceTexture.setOnFrameAvailableListener(listener);
- }
- }
-
/**
* Start to stream textures to the given `listener`. If you need to change listener, you need to
* call stopListening() first.
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
index 5a09795..ae84df0 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
@@ -12,7 +12,6 @@
import static org.junit.Assert.fail;
-import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
@@ -31,7 +30,6 @@
import org.junit.Test;
import org.junit.runner.RunWith;
-@TargetApi(21)
@RunWith(BaseJUnit4ClassRunner.class)
public class Camera2CapturerTest {
static final String TAG = "Camera2CapturerTest";
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
index 3a61b6e..fa1e46a 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -16,7 +16,6 @@
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.util.Log;
@@ -38,7 +37,6 @@
import org.junit.Test;
import org.junit.runner.RunWith;
-@TargetApi(16)
@RunWith(ParameterizedRunner.class)
@UseRunnerDelegate(BaseJUnit4RunnerDelegate.class)
public class HardwareVideoEncoderTest {
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java b/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
index a80cd95..e045285 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
@@ -296,14 +296,12 @@
ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
InstrumentationRegistry.getTargetContext(), new HashSet<>());
delegate.getNetworkState();
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
- Network[] networks = delegate.getAllNetworks();
- if (networks.length >= 1) {
- delegate.getNetworkState(networks[0]);
- delegate.hasInternetCapability(networks[0]);
- }
- delegate.getDefaultNetId();
+ Network[] networks = delegate.getAllNetworks();
+ if (networks.length >= 1) {
+ delegate.getNetworkState(networks[0]);
+ delegate.hasInternetCapability(networks[0]);
}
+ delegate.getDefaultNetId();
}
/** Tests that ConnectivityManagerDelegate preferentially reads from the cache */
diff --git a/sdk/android/src/java/org/webrtc/Camera2Session.java b/sdk/android/src/java/org/webrtc/Camera2Session.java
index d0f92be..e0b898f 100644
--- a/sdk/android/src/java/org/webrtc/Camera2Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
@@ -29,7 +28,6 @@
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-@TargetApi(21)
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
diff --git a/sdk/android/src/java/org/webrtc/EglBase14Impl.java b/sdk/android/src/java/org/webrtc/EglBase14Impl.java
index aa27cf8..e53dda6 100644
--- a/sdk/android/src/java/org/webrtc/EglBase14Impl.java
+++ b/sdk/android/src/java/org/webrtc/EglBase14Impl.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
@@ -29,25 +28,13 @@
* and an EGLSurface.
*/
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
-@TargetApi(18)
class EglBase14Impl implements EglBase14 {
private static final String TAG = "EglBase14Impl";
- private static final int EGLExt_SDK_VERSION = Build.VERSION_CODES.JELLY_BEAN_MR2;
- private static final int CURRENT_SDK_VERSION = Build.VERSION.SDK_INT;
private EGLContext eglContext;
@Nullable private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
- // EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
- // time stamp on a surface is supported from 18 so we require 18.
- public static boolean isEGL14Supported() {
- Logging.d(TAG,
- "SDK version: " + CURRENT_SDK_VERSION
- + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
- return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
- }
-
public static class Context implements EglBase14.Context {
private final EGLContext egl14Context;
@@ -57,11 +44,8 @@
}
@Override
- @SuppressWarnings("deprecation")
- @TargetApi(Build.VERSION_CODES.LOLLIPOP)
public long getNativeEglContext() {
- return CURRENT_SDK_VERSION >= Build.VERSION_CODES.LOLLIPOP ? egl14Context.getNativeHandle()
- : egl14Context.getHandle();
+ return egl14Context.getNativeHandle();
}
public Context(android.opengl.EGLContext eglContext) {
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
index 0825c1e..743edb0b 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
@@ -29,11 +28,7 @@
/**
* Android hardware video encoder.
- *
- * @note This class is only supported on Android Kitkat and above.
*/
-@TargetApi(19)
-@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
index 5d83014..d5ccae9 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -56,15 +56,8 @@
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by texture mode encoding - in order of preference.
- static final int[] TEXTURE_COLOR_FORMATS = getTextureColorFormats();
-
- private static int[] getTextureColorFormats() {
- if (Build.VERSION.SDK_INT >= 18) {
- return new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
- } else {
- return new int[] {};
- }
- }
+ static final int[] TEXTURE_COLOR_FORMATS =
+ new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
static @Nullable Integer selectColorFormat(
int[] supportedColorFormats, CodecCapabilities capabilities) {
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
index 4ee8b1a..bf591dd 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java
@@ -83,11 +83,6 @@
}
private @Nullable MediaCodecInfo findCodecForType(VideoCodecMimeType type) {
- // HW decoding is not supported on builds before KITKAT.
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
- return null;
- }
-
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
MediaCodecInfo info = null;
try {
@@ -131,8 +126,8 @@
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
String name = info.getName();
- // Support H.264 HP decoding on QCOM chips for Android L and above.
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && name.startsWith(QCOM_PREFIX)) {
+ // Support H.264 HP decoding on QCOM chips.
+ if (name.startsWith(QCOM_PREFIX)) {
return true;
}
// Support H.264 HP decoding on Exynos chips for Android M and above.
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java b/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
index 29edf6e..3b4647b 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java
@@ -10,7 +10,6 @@
package org.webrtc;
-import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCrypto;
@@ -99,13 +98,11 @@
}
@Override
- @TargetApi(18)
public Surface createInputSurface() {
return mediaCodec.createInputSurface();
}
@Override
- @TargetApi(19)
public void setParameters(Bundle params) {
mediaCodec.setParameters(params);
}
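Both wrapped calls are now safely below minSdk (createInputSurface() is API 18, setParameters() is API 19). As an illustration, setParameters() is how an encoder is asked for a key frame on demand (sketch):

    Bundle params = new Bundle();
    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    mediaCodec.setParameters(params);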
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
index fb132c6..a9ff101 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -54,15 +54,11 @@
// Returns true if all conditions for supporting HW Acoustic Echo Cancellation (AEC) are
// fulfilled.
public static boolean isAcousticEchoCancelerSupported() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC, AOSP_ACOUSTIC_ECHO_CANCELER);
}
// Returns true if all conditions for supporting HW Noise Suppression (NS) are fulfilled.
public static boolean isNoiseSuppressorSupported() {
- if (Build.VERSION.SDK_INT < 18)
- return false;
return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS, AOSP_NOISE_SUPPRESSOR);
}
@@ -188,9 +184,6 @@
// As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
// AutomaticGainControl.isAvailable() returns false.
private boolean effectTypeIsVoIP(UUID type) {
- if (Build.VERSION.SDK_INT < 18)
- return false;
-
return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
|| (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
}
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
index f016dad..f398602 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -64,7 +64,7 @@
// as well. The NDK doc states that: "As of API level 21, lower latency
// audio input is supported on select devices. To take advantage of this
// feature, first confirm that lower latency output is available".
- return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported(context);
+ return isLowLatencyOutputSupported(context);
}
/**
@@ -85,18 +85,12 @@
}
private static int getSampleRateForApiLevel(AudioManager audioManager) {
- if (Build.VERSION.SDK_INT < 17) {
- return DEFAULT_SAMPLE_RATE_HZ;
- }
String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
}
// Returns the native output buffer size for low-latency output streams.
private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
- if (Build.VERSION.SDK_INT < 17) {
- return DEFAULT_FRAME_PER_BUFFER;
- }
String framesPerBuffer =
audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index 885df55..f0cfd73 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -47,16 +47,7 @@
// By default, WebRTC creates audio tracks with a usage attribute
// corresponding to voice communications, such as telephony or VoIP.
- private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
-
- private static int getDefaultUsageAttribute() {
- if (Build.VERSION.SDK_INT >= 21) {
- return AudioAttributes.USAGE_VOICE_COMMUNICATION;
- } else {
- // Not used on SDKs lower than L.
- return 0;
- }
- }
+ private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION;
// Indicates the AudioTrack has started playing audio.
private static final int AUDIO_TRACK_START = 0;
@@ -128,7 +119,7 @@
byteBuffer.put(emptyBytes);
byteBuffer.position(0);
}
- int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
+ int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
if (bytesWritten != sizeInBytes) {
Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
// If a write() returns a negative value, an error has occurred.
@@ -152,14 +143,6 @@
}
}
- private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
- if (Build.VERSION.SDK_INT >= 21) {
- return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
- } else {
- return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
- }
- }
-
// Stops the inner thread loop which results in calling AudioTrack.stop().
// Does not block the calling thread.
public void stopThread() {
@@ -247,18 +230,14 @@
// On API level 26 or higher, we can use a low latency mode.
audioTrack = createAudioTrackOnOreoOrHigher(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
- } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
- // If we are on API level 21 or higher, it is possible to use a special AudioTrack
+ } else {
+ // As we are on API level 21 or higher, it is possible to use a special AudioTrack
// constructor that uses AudioAttributes and AudioFormat as input. It allows us to
// supersede the notion of stream types for defining the behavior of audio playback,
// and to allow certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
- audioTrack = createAudioTrackOnLollipopOrHigher(
+ audioTrack = createAudioTrackBeforeOreo(
sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
- } else {
- // Use default constructor for API levels below 21.
- audioTrack =
- createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
}
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
@@ -360,7 +339,7 @@
private boolean setStreamVolume(int volume) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG, "setStreamVolume(" + volume + ")");
- if (isVolumeFixed()) {
+ if (audioManager.isVolumeFixed()) {
Logging.e(TAG, "The device implements a fixed volume policy.");
return false;
}
@@ -368,12 +347,6 @@
return true;
}
- private boolean isVolumeFixed() {
- if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
- return false;
- return audioManager.isVolumeFixed();
- }
-
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
@@ -441,10 +414,9 @@
// Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
// It allows certain platforms or routing policies to use this information for more
// refined volume or routing decisions.
- @TargetApi(Build.VERSION_CODES.LOLLIPOP)
- private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
- int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
- Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
+ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
+ int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+ Logging.d(TAG, "createAudioTrackBeforeOreo");
logNativeOutputSampleRate(sampleRateInHz);
// Create an audio track where the audio usage is for VoIP and the content type is speech.
@@ -489,13 +461,6 @@
return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
}
- @SuppressWarnings("deprecation") // Deprecated in API level 25.
- private static AudioTrack createAudioTrackOnLowerThanLollipop(
- int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
- return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
- AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
- }
-
private void logBufferSizeInFrames() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
Logging.d(TAG,
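The O+ branch kept above (createAudioTrackOnOreoOrHigher) differs from createAudioTrackBeforeOreo mainly in opting into the builder's low-latency performance mode, roughly like this (sketch; audioAttributes, audioFormat and bufferSizeInBytes assumed in scope):

    AudioTrack track = new AudioTrack.Builder()
        .setAudioAttributes(audioAttributes)
        .setAudioFormat(audioFormat)
        .setBufferSizeInBytes(bufferSizeInBytes)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
        .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
        .build();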
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
index 591fc9e..7894659 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -201,13 +201,6 @@
+ "BT SCO: " + audioManager.isBluetoothScoOn());
}
- private static boolean isVolumeFixed(AudioManager audioManager) {
- if (Build.VERSION.SDK_INT < 21) {
- return false;
- }
- return audioManager.isVolumeFixed();
- }
-
// Adds volume information for all possible stream types.
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
@@ -215,7 +208,7 @@
AudioManager.STREAM_SYSTEM};
Logging.d(tag, "Audio State: ");
// Some devices may not have volume controls and might use a fixed volume.
- boolean fixedVolume = isVolumeFixed(audioManager);
+ boolean fixedVolume = audioManager.isVolumeFixed();
Logging.d(tag, " fixed volume=" + fixedVolume);
if (!fixedVolume) {
for (int stream : streams) {