Introduces Android API level linting, fixes all current API lint errors.

This CL attempts to annotate accesses to above-API-16 calls using as
small scopes as possible. The TargetApi annotations mean "yes, I know
I'm accessing a higher API and I take responsibility for gating the
call on Android API level". The Encoder/Decoder classes are annotated
on the whole class, but they're only accessed through JNI; we should
annotate on method level otherwise and preferably on private methods.

This patch also fixes some compiler-level deprecation warnings (i.e.
-Xlint:deprecation), but probably not all of them.

BUG=webrtc:5063
R=henrika@webrtc.org, kjellander@webrtc.org, magjed@webrtc.org

Review URL: https://codereview.webrtc.org/1412673008 .

Cr-Original-Commit-Position: refs/heads/master@{#10624}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 68876f990ea1ea365d2d8155df261b38ec9fbeff
diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..bb6d354
--- /dev/null
+++ b/build/android/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+  2. lint: [min|target]SdkVersion are required by lint and should
+     be kept up-to-date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dummy.package">
+
+      <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
+
+</manifest>
diff --git a/build/android/suppressions.xml b/build/android/suppressions.xml
new file mode 100644
index 0000000..0fc22e0
--- /dev/null
+++ b/build/android/suppressions.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+  <!-- These lint settings are for the Android linter that gets run by
+       lint_action.gypi on compile of WebRTC java code. All WebRTC java code
+       should lint cleanly for the issues below. -->
+  <!-- TODO(phoglund): make work with suppress.py or remove printout referring
+       to suppress.py. -->
+  <issue id="NewApi"></issue>
+
+  <issue id="Locale" severity="ignore"/>
+  <issue id="SdCardPath" severity="ignore"/>
+  <issue id="UseValueOf" severity="ignore"/>
+  <issue id="InlinedApi" severity="ignore"/>
+  <issue id="DefaultLocale" severity="ignore"/>
+  <issue id="Assert" severity="ignore"/>
+  <issue id="UseSparseArrays" severity="ignore"/>
+
+  <!-- These are just from the dummy AndroidManifest.xml we use for linting.
+       It's in the same directory as this file. -->
+  <issue id="MissingApplicationIcon" severity="ignore"/>
+  <issue id="AllowBackup" severity="ignore"/>
+  <issue id="MissingVersion" severity="ignore"/>
+</lint>
diff --git a/examples/androidapp/AndroidManifest.xml b/examples/androidapp/AndroidManifest.xml
index 6a91cfd..bd0dee8 100644
--- a/examples/androidapp/AndroidManifest.xml
+++ b/examples/androidapp/AndroidManifest.xml
@@ -7,7 +7,7 @@
     <uses-feature android:name="android.hardware.camera" />
     <uses-feature android:name="android.hardware.camera.autofocus" />
     <uses-feature android:glEsVersion="0x00020000" android:required="true" />
-    <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
 
     <uses-permission android:name="android.permission.CAMERA" />
     <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
index 7cd769a..c3ab043 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
@@ -10,6 +10,7 @@
 
 package org.webrtc.voiceengine;
 
+import android.annotation.TargetApi;
 import android.media.audiofx.AcousticEchoCanceler;
 import android.media.audiofx.AudioEffect;
 import android.media.audiofx.AudioEffect.Descriptor;
@@ -119,6 +120,7 @@
 
   // Returns true if the platform AEC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isAcousticEchoCancelerExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) &&
@@ -131,6 +133,7 @@
 
   // Returns true if the platform AGC should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isAutomaticGainControlExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_AGC) &&
@@ -143,6 +146,7 @@
 
   // Returns true if the platform NS should be excluded based on its UUID.
   // AudioEffect.queryEffects() can throw IllegalStateException.
+  @TargetApi(18)
   private static boolean isNoiseSuppressorExcludedByUUID() {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) &&
@@ -368,7 +372,11 @@
   // AudioEffect.Descriptor array that are actually not available on the device.
   // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
   // AutomaticGainControl.isAvailable() returns false.
+  @TargetApi(18)
   private boolean effectTypeIsVoIP(UUID type) {
+    if (!WebRtcAudioUtils.runningOnJellyBeanMR2OrHigher())
+      return false;
+
     return (AudioEffect.EFFECT_TYPE_AEC.equals(type)
         && isAcousticEchoCancelerSupported())
         || (AudioEffect.EFFECT_TYPE_AGC.equals(type)
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
index 7359486..f40317b 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
@@ -10,6 +10,7 @@
 
 package org.webrtc.voiceengine;
 
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.content.pm.PackageManager;
 import android.media.AudioFormat;
@@ -189,20 +190,26 @@
     // No overrides available. Deliver best possible estimate based on default
     // Android AudioManager APIs.
     final int sampleRateHz;
-    if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
-      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
+    if (WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
+      sampleRateHz = getSampleRateOnJellyBeanMR10OrHigher();
     } else {
-      String sampleRateString = audioManager.getProperty(
-          AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
-      sampleRateHz = (sampleRateString == null)
-          ? WebRtcAudioUtils.getDefaultSampleRateHz()
-          : Integer.parseInt(sampleRateString);
+      sampleRateHz = WebRtcAudioUtils.getDefaultSampleRateHz();
     }
     Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
     return sampleRateHz;
   }
 
+  @TargetApi(17)
+  private int getSampleRateOnJellyBeanMR10OrHigher() {
+    String sampleRateString = audioManager.getProperty(
+        AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null)
+        ? WebRtcAudioUtils.getDefaultSampleRateHz()
+        : Integer.parseInt(sampleRateString);
+  }
+
   // Returns the native output buffer size for low-latency output streams.
+  @TargetApi(17)
   private int getLowLatencyOutputFramesPerBuffer() {
     assertTrue(isLowLatencyOutputSupported());
     if (!WebRtcAudioUtils.runningOnJellyBeanMR1OrHigher()) {
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 0602e44..11eb513 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -13,6 +13,7 @@
 import java.lang.Thread;
 import java.nio.ByteBuffer;
 
+import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.AudioFormat;
 import android.media.AudioManager;
@@ -90,13 +91,9 @@
         assertTrue(sizeInBytes <= byteBuffer.remaining());
         int bytesWritten = 0;
         if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-          bytesWritten = audioTrack.write(byteBuffer,
-                                          sizeInBytes,
-                                          AudioTrack.WRITE_BLOCKING);
+          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
         } else {
-          bytesWritten = audioTrack.write(byteBuffer.array(),
-                                          byteBuffer.arrayOffset(),
-                                          sizeInBytes);
+          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
         }
         if (bytesWritten != sizeInBytes) {
           Logging.e(TAG, "AudioTrack.write failed: " + bytesWritten);
@@ -123,6 +120,15 @@
       audioTrack.flush();
     }
 
+    @TargetApi(21)
+    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+    }
+
+    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+    }
+
     public void joinThread() {
       keepAlive = false;
       while (isAlive()) {
@@ -224,16 +230,21 @@
   private boolean setStreamVolume(int volume) {
     Logging.d(TAG, "setStreamVolume(" + volume + ")");
     assertTrue(audioManager != null);
-    if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
-      if (audioManager.isVolumeFixed()) {
-        Logging.e(TAG, "The device implements a fixed volume policy.");
-        return false;
-      }
+    if (isVolumeFixed()) {
+      Logging.e(TAG, "The device implements a fixed volume policy.");
+      return false;
     }
     audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
     return true;
   }
 
+  @TargetApi(21)
+  private boolean isVolumeFixed() {
+    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
+      return false;
+    return audioManager.isVolumeFixed();
+  }
+
   /** Get current volume level for a phone call audio stream. */
   private int getStreamVolume() {
     Logging.d(TAG, "getStreamVolume");
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
index 9d7a600..45f564a 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
@@ -144,6 +144,11 @@
     return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1;
   }
 
+  public static boolean runningOnJellyBeanMR2OrHigher() {
+    // July 24, 2013: Android 4.3. API Level 18.
+    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2;
+  }
+
   public static boolean runningOnLollipopOrHigher() {
     // API Level 21.
     return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;