Reland of Android: Add FramerateRange class (patchset #1 id:1 of https://codereview.webrtc.org/2024573002/ )

Reason for revert:
Updated the JNI field signature to work with other JNI versions. We would need to compile it differently in order to catch failures like this in WebRTC in the future.

Original issue's description:
> Revert of Android: Add FramerateRange class (patchset #2 id:60001 of https://codereview.webrtc.org/2010763003/ )
>
> Reason for revert:
> Breaks downstream Android tests:
> java.lang.NoSuchFieldError: no field with name='framerate' signature='org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange' in class Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat;
>
> We should have a similar test in WebRTC so we can catch such errors pre-commit.
>
> Original issue's description:
> > Android: Add FramerateRange class
> >
> > The Camera1 and Camera2 API use different framerate range types. Camera1
> > uses int[2] and Camera2 uses Range<Integer>. Range<Integer> is
> > unfortunately only available on Lollipop and later, so this CL adds a
> > similar FramerateRange class in CaptureFormat.
> >
> > The purpose of this CL is to have a common framerate range type that can
> > be reused from both Camera1 and Camera2 in helper functions such as
> > CameraEnumerationAndroid.getClosestSupportedFramerateRange().
> >
> > BUG=webrtc:5519
> > R=sakal@webrtc.org
> >
> > Committed: https://crrev.com/94cb67d6df1a78e7fa25e469f719c1a8809dc583
> > Cr-Commit-Position: refs/heads/master@{#12942}
>
> TBR=sakal@webrtc.org,magjed@webrtc.org
> NOTRY=True
> BUG=webrtc:5519
>
> Committed: https://crrev.com/bd5621f065fd25e0a77307f10dc9ddaf76e7945f
> Cr-Commit-Position: refs/heads/master@{#12956}

TBR=sakal@webrtc.org,kjellander@webrtc.org
# Skipping CQ checks because the original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5519

Review-Url: https://codereview.webrtc.org/2019333002
Cr-Commit-Position: refs/heads/master@{#12957}
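
The NoSuchFieldError quoted above appears to show a field lookup whose signature string is the bare
class name, whereas the standard JNI descriptor for an object-typed field wraps the slash-separated
binary name in "L...;" (this is what the androidvideocapturer_jni.cc change below passes to
GetFieldID). As a hedged illustration only, and not part of this CL, the small Java sketch below
derives that descriptor for the new framerate field; it assumes the org.webrtc classes are on the
classpath.

// Illustrative sketch only (not part of this CL): derive the JNI object-field
// descriptor for CaptureFormat.framerate. Nested classes keep the '$' separator,
// and the descriptor is the slash-separated binary name wrapped in "L...;".
public final class JniFieldDescriptorSketch {
  static String fieldDescriptor(Class<?> fieldType) {
    // org.webrtc.CameraEnumerationAndroid$CaptureFormat$FramerateRange
    //   -> Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange;
    return "L" + fieldType.getName().replace('.', '/') + ";";
  }

  public static void main(String[] args) {
    System.out.println(fieldDescriptor(
        org.webrtc.CameraEnumerationAndroid.CaptureFormat.FramerateRange.class));
  }
}
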
diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index ed7e250..04643f1 100644
--- a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -286,7 +286,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and first frame is received and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
@@ -310,7 +310,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
@@ -359,7 +359,7 @@
 
     for(int i = 0; i < 3 ; ++i) {
       CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
-      capturer.startCapture(format.width, format.height, format.maxFramerate,
+      capturer.startCapture(format.width, format.height, format.framerate.max,
           surfaceTextureHelper, appContext, observer);
       assertTrue(observer.WaitForCapturerToStart());
       observer.WaitForNextCapturedFrame();
@@ -408,7 +408,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
 
     if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
@@ -433,7 +433,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     waitUntilIdle(capturer);
 
@@ -459,7 +459,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     capturer.stopCapture();
     release(capturer);
@@ -475,7 +475,7 @@
 
     List<CaptureFormat> formats = capturer.getSupportedFormats();
     CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     assertTrue(observer.WaitForCapturerToStart());
 
@@ -485,7 +485,7 @@
     assertTrue(listOftimestamps.size() >= 1);
 
     format = formats.get(1);
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     observer.WaitForCapturerToStart();
     if (capturer.isCapturingToTexture()) {
@@ -548,7 +548,7 @@
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started.
     assertTrue(observer.WaitForCapturerToStart());
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
index c5c8be3..2fa685d 100644
--- a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -43,21 +43,56 @@
   }
 
   public static class CaptureFormat {
+    // Class to represent a framerate range. The framerate varies because of lighting conditions.
+    // The values are multiplied by 1000, so 1000 represents one frame per second.
+    public static class FramerateRange {
+      public int min;
+      public int max;
+
+      public FramerateRange(int min, int max) {
+        this.min = min;
+        this.max = max;
+      }
+
+      @Override
+      public String toString() {
+        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+      }
+
+      @Override
+      public boolean equals(Object other) {
+        if (!(other instanceof FramerateRange)) {
+          return false;
+        }
+        final FramerateRange otherFramerate = (FramerateRange) other;
+        return min == otherFramerate.min && max == otherFramerate.max;
+      }
+
+      @Override
+      public int hashCode() {
+        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+        return 1 + 65537 * min + max;
+      }
+    }
+
     public final int width;
     public final int height;
-    public final int maxFramerate;
-    public final int minFramerate;
+    public final FramerateRange framerate;
     // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
     // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
     // all imageFormats.
     public final int imageFormat = ImageFormat.NV21;
 
-    public CaptureFormat(int width, int height, int minFramerate,
-        int maxFramerate) {
+    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
       this.width = width;
       this.height = height;
-      this.minFramerate = minFramerate;
-      this.maxFramerate = maxFramerate;
+      this.framerate = new FramerateRange(minFramerate, maxFramerate);
+    }
+
+    public CaptureFormat(int width, int height, FramerateRange framerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = framerate;
     }
 
     // Calculates the frame size of this capture format.
@@ -79,15 +114,14 @@
 
     @Override
     public String toString() {
-      return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
+      return width + "x" + height + "@" + framerate;
     }
 
     public boolean isSameFormat(final CaptureFormat that) {
       if (that == null) {
         return false;
       }
-      return width == that.width && height == that.height && maxFramerate == that.maxFramerate
-          && minFramerate == that.minFramerate;
+      return width == that.width && height == that.height && framerate.equals(that.framerate);
     }
   }
 
@@ -134,7 +168,9 @@
     return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
   }
 
-  // Helper class for finding the closest supported format for the two functions below.
+  // Helper class for finding the closest supported format for the two functions below. It compares
+  // supported elements by their difference, as defined by diff(), from the requested parameters;
+  // the element with the minimum difference is considered the closest one.
   private static abstract class ClosestComparator<T> implements Comparator<T> {
     // Difference between supported and requested parameter.
     abstract int diff(T supportedParameter);
@@ -145,20 +181,15 @@
     }
   }
 
-  public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
-      final int framerate) {
-    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
-    if (listFpsRange.isEmpty()) {
-      Logging.w(TAG, "No supported preview fps range");
-      return new int[]{0, 0};
-    }
-    return Collections.min(listFpsRange,
-        new ClosestComparator<int[]>() {
-          @Override int diff(int[] range) {
-            final int maxFpsWeight = 10;
-            return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
-                + maxFpsWeight * abs(framerate
-                    - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+    return Collections.min(supportedFramerates,
+        new ClosestComparator<CaptureFormat.FramerateRange>() {
+          private static final int MAX_FPS_WEIGHT = 10;
+
+          @Override
+          int diff(CaptureFormat.FramerateRange range) {
+            return range.min + MAX_FPS_WEIGHT * abs(requestedFps * 1000 - range.max);
           }
      });
   }
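
To make the selection rule above concrete: diff() scores each supported range as
range.min + MAX_FPS_WEIGHT * abs(requestedFps * 1000 - range.max), with all values in fps * 1000 as
documented on FramerateRange. The standalone sketch below is illustrative only (not part of this CL)
and mirrors that arithmetic for a 30 fps request over a few typical Camera1 ranges.

import java.util.Arrays;
import java.util.List;

// Illustrative only: mirrors the diff() used by getClosestSupportedFramerateRange().
public final class FramerateScoringSketch {
  static int score(int min, int max, int requestedFps) {
    final int maxFpsWeight = 10;  // Same weight as MAX_FPS_WEIGHT above.
    return min + maxFpsWeight * Math.abs(requestedFps * 1000 - max);
  }

  public static void main(String[] args) {
    final int requestedFps = 30;
    // Typical Camera1 ranges, in fps * 1000.
    final List<int[]> ranges = Arrays.asList(
        new int[] {15000, 30000},  // score = 15000 + 10 * 0     = 15000  <- chosen
        new int[] {30000, 30000},  // score = 30000 + 10 * 0     = 30000
        new int[] {7000, 60000});  // score = 7000  + 10 * 30000 = 307000
    for (int[] range : ranges) {
      System.out.println(Arrays.toString(range) + " -> " + score(range[0], range[1], requestedFps));
    }
    // [15000, 30000] wins: its max matches the requested 30 fps, and between ranges with the same
    // max the lower min is preferred, which lets the camera drop the framerate in low light.
  }
}
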
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
index 59a7847..04edba8 100644
--- a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
@@ -81,4 +81,16 @@
         + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
     return formatList;
   }
+
+  // Convert from int[2] to CaptureFormat.FramerateRange.
+  public static List<CaptureFormat.FramerateRange> convertFramerates(
+      List<int[]> arrayRanges) {
+    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+    for (int[] range : arrayRanges) {
+      ranges.add(new CaptureFormat.FramerateRange(
+          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+    }
+    return ranges;
+  }
 }
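
The convertFramerates() helper above covers the Camera1 int[2] representation. For the Camera2 side
that the original description mentions, a counterpart conversion might look like the sketch below.
It is illustrative only and not part of this CL: it requires API 21 for android.util.Range, and it
assumes Camera2 reports plain fps values that must be scaled by 1000 to match FramerateRange's unit.

import android.util.Range;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import java.util.ArrayList;
import java.util.List;

// Hypothetical Camera2-side counterpart to CameraEnumerator.convertFramerates() (not in this CL).
public class Camera2FramerateConversionSketch {
  public static List<CaptureFormat.FramerateRange> convertFramerates(
      Range<Integer>[] camera2Ranges) {
    final List<CaptureFormat.FramerateRange> ranges =
        new ArrayList<CaptureFormat.FramerateRange>();
    for (Range<Integer> range : camera2Ranges) {
      // Camera2 ranges are typically in whole fps; FramerateRange stores fps * 1000.
      ranges.add(new CaptureFormat.FramerateRange(
          range.getLower() * 1000, range.getUpper() * 1000));
    }
    return ranges;
  }
}
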
diff --git a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
index 6515c07..d9793cf 100644
--- a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -401,19 +401,24 @@
 
     // Find closest supported format for |width| x |height| @ |framerate|.
     final android.hardware.Camera.Parameters parameters = camera.getParameters();
-    for (int[] fpsRange : parameters.getSupportedPreviewFpsRange()) {
-      Logging.d(TAG, "Available fps range: " +
-          fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + ":" +
-          fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+    final List<CaptureFormat.FramerateRange> supportedFramerates =
+        CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+    final CaptureFormat.FramerateRange bestFpsRange;
+    if (supportedFramerates.isEmpty()) {
+      Logging.w(TAG, "No supported preview fps range");
+      bestFpsRange = new CaptureFormat.FramerateRange(0, 0);
+    } else {
+      bestFpsRange = CameraEnumerationAndroid.getClosestSupportedFramerateRange(
+            supportedFramerates, framerate);
     }
-    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
+
     final android.hardware.Camera.Size previewSize =
         CameraEnumerationAndroid.getClosestSupportedSize(
             parameters.getSupportedPreviewSizes(), width, height);
     final CaptureFormat captureFormat = new CaptureFormat(
-        previewSize.width, previewSize.height,
-        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
-        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+        previewSize.width, previewSize.height, bestFpsRange);
 
     // Check if we are already using this capture format, then we don't need to do anything.
     if (captureFormat.isSameFormat(this.captureFormat)) {
@@ -428,8 +433,8 @@
     }
     // Note: setRecordingHint(true) actually decrease frame rate on N5.
     // parameters.setRecordingHint(true);
-    if (captureFormat.maxFramerate > 0) {
-      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
+    if (captureFormat.framerate.max > 0) {
+      parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
     }
     parameters.setPreviewSize(captureFormat.width, captureFormat.height);
 
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 3f79f3a..0abc9c0 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -127,21 +127,27 @@
   jclass j_list_class = jni->FindClass("java/util/List");
   jclass j_format_class =
       jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat");
+  jclass j_framerate_class = jni->FindClass(
+      "org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
   const int size = jni->CallIntMethod(
       j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I"));
   jmethodID j_get =
       GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;");
+  jfieldID j_framerate_field = GetFieldID(
+      jni, j_format_class, "framerate",
+      "Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange;");
   jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I");
   jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I");
   jfieldID j_max_framerate_field =
-      GetFieldID(jni, j_format_class, "maxFramerate", "I");
+      GetFieldID(jni, j_framerate_class, "max", "I");
 
   std::vector<cricket::VideoFormat> formats;
   formats.reserve(size);
   for (int i = 0; i < size; ++i) {
     jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i);
+    jobject j_framerate = GetObjectField(jni, j_format, j_framerate_field);
     const int frame_interval = cricket::VideoFormat::FpsToInterval(
-        (GetIntField(jni, j_format, j_max_framerate_field) + 999) / 1000);
+        (GetIntField(jni, j_framerate, j_max_framerate_field) + 999) / 1000);
     formats.emplace_back(GetIntField(jni, j_format, j_width_field),
                          GetIntField(jni, j_format, j_height_field),
                          frame_interval, cricket::FOURCC_NV21);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
index dfd9558..d231c41 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -77,7 +77,7 @@
     long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
     for (CaptureFormat format : formats) {
       maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
-          (long) format.width * format.height * format.maxFramerate);
+          (long) format.width * format.height * format.framerate.max);
     }
 
     // Fraction between 0 and 1.
@@ -107,7 +107,7 @@
 
   // Return the highest frame rate possible based on bandwidth and format.
   private int calculateFramerate(double bandwidth, CaptureFormat format) {
-    return (int) Math.round(Math.min(format.maxFramerate,
+    return (int) Math.round(Math.min(format.framerate.max,
         (int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
   }
 }
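
A closing note on units: framerate.max is stored in fps * 1000, which is why calculateFramerate()
above divides by 1000.0 at the end. The sketch below is illustrative only (not part of this CL) and
spells out the same arithmetic for one data point.

// Illustrative only: repeats calculateFramerate()'s arithmetic for a single example.
public final class CalculateFramerateSketch {
  static int calculateFramerate(double bandwidth, int width, int height, int maxFramerate) {
    return (int) Math.round(
        Math.min(maxFramerate, (int) Math.round(bandwidth / (width * height))) / 1000.0);
  }

  public static void main(String[] args) {
    // 1280x720 with a 30 fps cap (30000 in fps * 1000 units) and a bandwidth budget of
    // 1280 * 720 * 15000, i.e. 15 fps worth of pixels: min(30000, 15000) / 1000 = 15.
    System.out.println(calculateFramerate(1280.0 * 720.0 * 15000.0, 1280, 720, 30000));
  }
}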