VideoCaptureAndroid: support multiple frame-rates per resolution.
Also enables running video_capture_tests_apk on the WebRTC/Chromium APK bots,
assuming GYP_DEFINES includes include_tests=1 and
include_internal_video_capture=1.
This required running VideoCaptureAndroid's camera capture on a dedicated
thread, matching the other platforms' video_capture implementations.
BUG=2974,3152
R=wu@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/11359004
git-svn-id: http://webrtc.googlecode.com/svn/trunk/webrtc@5868 4adac7df-926f-26a2-2b94-8c16560cd09d
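
Not part of the patch, for reviewers: a minimal, self-contained sketch of the
dedicated-camera-thread handoff pattern this CL adopts, condensed from the
VideoCaptureAndroid.java changes below (class and method names here are
illustrative only):

  import java.util.concurrent.Exchanger;

  import android.os.Handler;
  import android.os.Looper;

  // Calls are forwarded to a dedicated thread that owns a Looper; results
  // come back synchronously via Exchanger, mirroring the pattern below.
  class CameraThreadSketch {
    private final Handler handler;

    CameraThreadSketch() {
      final Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
      new Thread(new Runnable() {
        @Override public void run() {
          Looper.prepare();
          exchange(handlerExchanger, new Handler());  // Hand the Handler back.
          Looper.loop();  // Service posted Runnables until quit.
        }
      }).start();
      handler = exchange(handlerExchanger, null);
    }

    // The caller blocks until the camera thread produces a result, giving the
    // synchronous semantics the ViE API expects.
    boolean doWorkSynchronously() {
      final Exchanger<Boolean> result = new Exchanger<Boolean>();
      handler.post(new Runnable() {
        @Override public void run() {
          // Real work (e.g. Camera.open()) would happen here, on the
          // camera thread.
          exchange(result, true);
        }
      });
      return exchange(result, false);  // |false| is a dummy value.
    }

    // Converts InterruptedException to RuntimeException, as in the CL.
    private static <T> T exchange(Exchanger<T> exchanger, T value) {
      try {
        return exchanger.exchange(value);
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    }
  }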
diff --git a/build/apk_tests.gyp b/build/apk_tests.gyp
index 1df0469..4914886 100644
--- a/build/apk_tests.gyp
+++ b/build/apk_tests.gyp
@@ -213,7 +213,36 @@
'includes': [
'../../../build/apk_test.gypi',
],
- }
+ },
+ {
+ 'target_name': 'video_capture_tests_apk',
+ 'type': 'none',
+ 'variables': {
+ 'test_suite_name': 'video_capture_tests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_capture_tests<(SHARED_LIB_SUFFIX)',
+ },
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:video_capture_tests',
+ 'video_capture_java',
+ ],
+ 'includes': [
+ '../../../build/apk_test.gypi',
+ ],
+ },
+ {
+ # Used only by video_capture_tests_apk above, and impossible to use in the
+ # standalone build, which is why it's declared here instead of under
+ # modules/video_capture/ (avoiding the need for a forked _noop.gyp
+ # counterpart like the one this file has; see the comment at the top of
+ # this file).
+ 'target_name': 'video_capture_java',
+ 'type': 'none',
+ 'variables': {
+ 'java_in_dir': '<(webrtc_root)/modules/video_capture/android/java',
+ },
+ 'includes': [
+ '../../../build/java.gypi',
+ ],
+ },
],
}
diff --git a/build/apk_tests_noop.gyp b/build/apk_tests_noop.gyp
index ed64863..e8d39d7 100644
--- a/build/apk_tests_noop.gyp
+++ b/build/apk_tests_noop.gyp
@@ -65,5 +65,9 @@
'target_name': 'audio_codec_speed_tests_apk',
'type': 'none',
},
+ {
+ 'target_name': 'video_capture_tests_apk',
+ 'type': 'none',
+ },
],
}
diff --git a/modules/video_capture/android/device_info_android.cc b/modules/video_capture/android/device_info_android.cc
index 10c277e..144fcbd 100644
--- a/modules/video_capture/android/device_info_android.cc
+++ b/modules/video_capture/android/device_info_android.cc
@@ -21,36 +21,40 @@
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
-namespace webrtc
-{
+namespace webrtc {
-namespace videocapturemodule
-{
+namespace videocapturemodule {
-static std::string ResolutionsToString(
- const std::vector<std::pair<int, int> >& pairs) {
+// Helper for storing lists of pairs of ints. Used e.g. for resolutions & FPS
+// ranges.
+typedef std::pair<int, int> IntPair;
+typedef std::vector<IntPair> IntPairs;
+
+static std::string IntPairsToString(const IntPairs& pairs, char separator) {
std::stringstream stream;
for (size_t i = 0; i < pairs.size(); ++i) {
if (i > 0)
stream << ", ";
- stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
+ stream << "(" << pairs[i].first << separator << pairs[i].second << ")";
}
return stream.str();
}
struct AndroidCameraInfo {
std::string name;
- int min_mfps, max_mfps; // FPS*1000.
bool front_facing;
int orientation;
- std::vector<std::pair<int, int> > resolutions; // Pairs are: (width,height).
+ IntPairs resolutions; // Pairs are: (width,height).
+ // Pairs are (min,max) in units of FPS*1000 ("milli-frame-per-second").
+ IntPairs mfpsRanges;
std::string ToString() {
std::stringstream stream;
- stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
+ stream << "Name: [" << name << "], MFPS ranges: ["
+ << IntPairsToString(mfpsRanges, ':')
<< "], front_facing: " << front_facing
<< ", orientation: " << orientation << ", resolutions: ["
- << ResolutionsToString(resolutions) << "]";
+ << IntPairsToString(resolutions, 'x') << "]";
return stream.str();
}
};
@@ -120,8 +124,6 @@
const Json::Value& camera = cameras[i];
AndroidCameraInfo info;
info.name = camera["name"].asString();
- info.min_mfps = camera["min_mfps"].asInt();
- info.max_mfps = camera["max_mfps"].asInt();
info.front_facing = camera["front_facing"].asBool();
info.orientation = camera["orientation"].asInt();
Json::Value sizes = camera["sizes"];
@@ -130,6 +132,12 @@
info.resolutions.push_back(std::make_pair(
size["width"].asInt(), size["height"].asInt()));
}
+ Json::Value mfpsRanges = camera["mfpsRanges"];
+ for (Json::ArrayIndex j = 0; j < mfpsRanges.size(); ++j) {
+ const Json::Value& mfpsRange = mfpsRanges[j];
+ info.mfpsRanges.push_back(std::make_pair(mfpsRange["min_mfps"].asInt(),
+ mfpsRange["max_mfps"].asInt()));
+ }
g_camera_info->push_back(info);
}
}
@@ -187,14 +195,17 @@
return -1;
for (size_t i = 0; i < info->resolutions.size(); ++i) {
- const std::pair<int, int>& size = info->resolutions[i];
- VideoCaptureCapability cap;
- cap.width = size.first;
- cap.height = size.second;
- cap.maxFPS = info->max_mfps / 1000;
- cap.expectedCaptureDelay = kExpectedCaptureDelay;
- cap.rawType = kVideoNV21;
- _captureCapabilities.push_back(cap);
+ for (size_t j = 0; j < info->mfpsRanges.size(); ++j) {
+ const IntPair& size = info->resolutions[i];
+ const IntPair& mfpsRange = info->mfpsRanges[j];
+ VideoCaptureCapability cap;
+ cap.width = size.first;
+ cap.height = size.second;
+ cap.maxFPS = mfpsRange.second / 1000;
+ cap.expectedCaptureDelay = kExpectedCaptureDelay;
+ cap.rawType = kVideoNV21;
+ _captureCapabilities.push_back(cap);
+ }
}
return _captureCapabilities.size();
}
@@ -210,13 +221,22 @@
return 0;
}
-void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps, int* max_mfps) {
+void DeviceInfoAndroid::GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps, int* max_mfps) {
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL)
return;
- *min_mfps = info->min_mfps;
- *max_mfps = info->max_mfps;
+ // CameraParameters.getSupportedPreviewFpsRange() returns its ranges sorted
+ // (per its documentation), so pick the first (most flexible) range whose
+ // high end is at least as high as the requested frame-rate.
+ for (size_t i = 0; i < info->mfpsRanges.size(); ++i) {
+ if (info->mfpsRanges[i].second / 1000 >= max_fps_to_match) {
+ *min_mfps = info->mfpsRanges[i].first;
+ *max_mfps = info->mfpsRanges[i].second;
+ return;
+ }
+ }
}
} // namespace videocapturemodule
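
Not part of the patch: the selection rule GetMFpsRange() applies above,
restated as a standalone sketch (Java, to match the capture-side code; the
input is assumed to be the sorted {min, max} milli-FPS pairs reported by
CameraParameters.getSupportedPreviewFpsRange()):

  import java.util.List;

  class MFpsRangePicker {
    // Returns the first (most flexible) {min_mfps, max_mfps} range whose high
    // end covers |maxFpsToMatch|, or null if none does (the C++ above leaves
    // its outputs untouched in that case).
    static int[] pickMFpsRange(List<int[]> sortedMFpsRanges,
                               int maxFpsToMatch) {
      for (int[] range : sortedMFpsRanges) {
        if (range[1] / 1000 >= maxFpsToMatch) {
          return range;
        }
      }
      return null;
    }
  }

With ranges {15000, 15000} and {24000, 30000}, for example, a request for
15 fps picks the first range while a request for 30 fps picks the second.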
diff --git a/modules/video_capture/android/device_info_android.h b/modules/video_capture/android/device_info_android.h
index d277113..b8d838c 100644
--- a/modules/video_capture/android/device_info_android.h
+++ b/modules/video_capture/android/device_info_android.h
@@ -53,10 +53,12 @@
virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation);
- // Populate |min_mfps| and |max_mfps| with the supported range of the device.
- void GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps,
- int* max_mfps);
+ // Populate |min_mfps| and |max_mfps| with the device's supported range that
+ // most closely matches |max_fps_to_match|.
+ void GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps,
+ int* max_mfps);
private:
enum { kExpectedCaptureDelay = 190};
diff --git a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
index 6e308af..80f6f63 100644
--- a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -12,6 +12,7 @@
import java.io.IOException;
import java.util.Locale;
+import java.util.concurrent.Exchanger;
import java.util.concurrent.locks.ReentrantLock;
import android.graphics.ImageFormat;
@@ -19,11 +20,13 @@
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
-import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.Looper;
import android.util.Log;
-import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
+import android.view.SurfaceHolder;
// Wrapper for android Camera, with support for direct local preview rendering.
// Threading notes: this class is called from ViE C++ code, and from Camera &
@@ -39,6 +42,8 @@
private static SurfaceHolder localPreview;
private Camera camera; // Only non-null while capturing.
+ private CameraThread cameraThread;
+ private Handler cameraThreadHandler;
private final int id;
private final Camera.CameraInfo info;
private final long native_capturer; // |VideoCaptureAndroid*| in C++.
@@ -61,15 +66,45 @@
this.native_capturer = native_capturer;
this.info = new Camera.CameraInfo();
Camera.getCameraInfo(id, info);
+ Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
+ cameraThread = new CameraThread(handlerExchanger);
+ cameraThread.start();
+ cameraThreadHandler = exchange(handlerExchanger, null);
+ }
+
+ private class CameraThread extends Thread {
+ private Exchanger<Handler> handlerExchanger;
+ public CameraThread(Exchanger<Handler> handlerExchanger) {
+ this.handlerExchanger = handlerExchanger;
+ }
+
+ @Override public void run() {
+ Looper.prepare();
+ exchange(handlerExchanger, new Handler());
+ Looper.loop();
+ }
}
// Called by native code. Returns true if capturer is started.
//
- // Note that this actually opens the camera, which can be a slow operation and
- // thus might be done on a background thread, but ViE API needs a
- // synchronous success return value so we can't do that.
+ // Note that this actually opens the camera, and Camera callbacks run on the
+ // thread that calls open(), so this is done on the CameraThread. Since the
+ // ViE API needs a synchronous success return value, we wait for the result.
private synchronized boolean startCapture(
- int width, int height, int min_mfps, int max_mfps) {
+ final int width, final int height,
+ final int min_mfps, final int max_mfps) {
+ final Exchanger<Boolean> result = new Exchanger<Boolean>();
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
+ }
+ });
+ return exchange(result, false); // |false| is a dummy value here.
+ }
+
+ private void startCaptureOnCameraThread(
+ int width, int height, int min_mfps, int max_mfps,
+ Exchanger<Boolean> result) {
Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
min_mfps + ":" + max_mfps);
Throwable error = null;
@@ -114,7 +149,8 @@
}
camera.setPreviewCallbackWithBuffer(this);
camera.startPreview();
- return true;
+ exchange(result, true);
+ return;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
@@ -122,13 +158,27 @@
}
Log.e(TAG, "startCapture failed", error);
if (camera != null) {
- stopCapture();
+ Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
+ stopCaptureOnCameraThread(resultDropper);
+ exchange(resultDropper, false);
}
- return false;
+ exchange(result, false);
+ return;
}
// Called by native code. Returns true when camera is known to be stopped.
private synchronized boolean stopCapture() {
+ final Exchanger<Boolean> result = new Exchanger<Boolean>();
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ stopCaptureOnCameraThread(result);
+ }
+ });
+ return exchange(result, false); // |false| is a dummy value here.
+ }
+
+ private void stopCaptureOnCameraThread(
+ Exchanger<Boolean> result) {
Log.d(TAG, "stopCapture");
if (camera == null) {
throw new RuntimeException("Camera is already stopped!");
@@ -145,14 +195,16 @@
}
camera.release();
camera = null;
- return true;
+ exchange(result, true);
+ return;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "Failed to stop camera", error);
- return false;
+ exchange(result, false);
+ return;
}
private native void ProvideCameraFrame(
@@ -172,7 +224,15 @@
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
// Called by native code.
- private synchronized void setPreviewRotation(int rotation) {
+ private synchronized void setPreviewRotation(final int rotation) {
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ setPreviewRotationOnCameraThread(rotation);
+ }
+ });
+ }
+
+ private void setPreviewRotationOnCameraThread(int rotation) {
Log.v(TAG, "setPreviewRotation:" + rotation);
if (camera == null) {
@@ -197,14 +257,19 @@
format + ": " + width + "x" + height);
}
- public synchronized void surfaceCreated(SurfaceHolder holder) {
+ public synchronized void surfaceCreated(final SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
if (camera == null) {
return;
}
- try {
- camera.setPreviewDisplay(holder);
- } catch (IOException e) {
+ final Exchanger<IOException> result = new Exchanger<IOException>();
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ setPreviewDisplayOnCameraThread(holder, result);
+ }
+ });
+ IOException e = exchange(result, null); // |null| is a dummy value here.
+ if (e != null) {
throw new RuntimeException(e);
}
}
@@ -214,9 +279,36 @@
if (camera == null) {
return;
}
+ final Exchanger<IOException> result = new Exchanger<IOException>();
+ cameraThreadHandler.post(new Runnable() {
+ @Override public void run() {
+ setPreviewDisplayOnCameraThread(null, result);
+ }
+ });
+ IOException e = exchange(result, null); // |null| is a dummy value here.
+ if (e != null) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private void setPreviewDisplayOnCameraThread(
+ SurfaceHolder holder, Exchanger<IOException> result) {
try {
- camera.setPreviewDisplay(null);
+ camera.setPreviewDisplay(holder);
} catch (IOException e) {
+ exchange(result, e);
+ return;
+ }
+ exchange(result, null);
+ return;
+ }
+
+ // Exchanges |value| with |exchanger|, converting InterruptedExceptions to
+ // RuntimeExceptions (since we expect never to see these).
+ private static <T> T exchange(Exchanger<T> exchanger, T value) {
+ try {
+ return exchanger.exchange(value);
+ } catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
diff --git a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
index f23e9a8..1d681c0 100644
--- a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -57,17 +57,22 @@
devices.put(cameraDict);
List<Size> supportedSizes;
List<int[]> supportedFpsRanges;
+ Camera camera = null;
try {
- Camera camera = Camera.open(i);
+ camera = Camera.open(i);
Parameters parameters = camera.getParameters();
supportedSizes = parameters.getSupportedPreviewSizes();
supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
- camera.release();
Log.d(TAG, uniqueName);
} catch (RuntimeException e) {
- Log.e(TAG, "Failed to open " + uniqueName + ", skipping");
+ Log.e(TAG, "Failed to open " + uniqueName + ", skipping", e);
continue;
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
}
+
JSONArray sizes = new JSONArray();
for (Size supportedSize : supportedSizes) {
JSONObject size = new JSONObject();
@@ -75,16 +80,23 @@
size.put("height", supportedSize.height);
sizes.put(size);
}
- // Android SDK deals in integral "milliframes per second"
- // (i.e. fps*1000, instead of floating-point frames-per-second) so we
- // preserve that through the Java->C++->Java round-trip.
- int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
+
+ JSONArray mfpsRanges = new JSONArray();
+ for (int[] range : supportedFpsRanges) {
+ JSONObject mfpsRange = new JSONObject();
+ // Android SDK deals in integral "milliframes per second"
+ // (i.e. fps*1000, instead of floating-point frames-per-second) so we
+ // preserve that through the Java->C++->Java round-trip.
+ mfpsRange.put("min_mfps", range[Parameters.PREVIEW_FPS_MIN_INDEX]);
+ mfpsRange.put("max_mfps", range[Parameters.PREVIEW_FPS_MAX_INDEX]);
+ mfpsRanges.put(mfpsRange);
+ }
+
cameraDict.put("name", uniqueName);
cameraDict.put("front_facing", isFrontFacing(info))
.put("orientation", info.orientation)
.put("sizes", sizes)
- .put("min_mfps", mfps[Parameters.PREVIEW_FPS_MIN_INDEX])
- .put("max_mfps", mfps[Parameters.PREVIEW_FPS_MAX_INDEX]);
+ .put("mfpsRanges", mfpsRanges);
}
String ret = devices.toString(2);
return ret;
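
Not part of the patch: a hypothetical example of one camera's entry in the
JSON that getDeviceInfo() now emits (and that device_info_android.cc parses),
with the per-range "mfpsRanges" array replacing the old top-level
min_mfps/max_mfps pair. The values and name below are made up; the round-trip
just uses org.json to show the shape:

  import org.json.JSONArray;
  import org.json.JSONException;
  import org.json.JSONObject;

  class DeviceInfoJsonExample {
    // Hypothetical single-camera entry; real names, sizes and ranges are
    // device-dependent.
    static final String EXAMPLE_CAMERA =
        "{\"name\": \"Camera 0, Facing back, Orientation 90\"," +
        " \"front_facing\": false, \"orientation\": 90," +
        " \"sizes\": [{\"width\": 640, \"height\": 480}," +
        "             {\"width\": 1280, \"height\": 720}]," +
        " \"mfpsRanges\": [{\"min_mfps\": 15000, \"max_mfps\": 15000}," +
        "                  {\"min_mfps\": 24000, \"max_mfps\": 30000}]}";

    public static void main(String[] args) throws JSONException {
      JSONObject camera = new JSONObject(EXAMPLE_CAMERA);
      JSONArray ranges = camera.getJSONArray("mfpsRanges");
      for (int i = 0; i < ranges.length(); ++i) {
        JSONObject range = ranges.getJSONObject(i);
        System.out.println(range.getInt("min_mfps") + "-"
            + range.getInt("max_mfps") + " milli-fps");
      }
    }
  }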
diff --git a/modules/video_capture/android/video_capture_android.cc b/modules/video_capture/android/video_capture_android.cc
index 2b6d606..93c7192 100644
--- a/modules/video_capture/android/video_capture_android.cc
+++ b/modules/video_capture/android/video_capture_android.cc
@@ -39,6 +39,7 @@
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
+ assert(!g_jvm);
g_jvm = javaVM;
AttachThreadScoped ats(g_jvm);
@@ -143,7 +144,8 @@
assert(j_start);
int min_mfps = 0;
int max_mfps = 0;
- _deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
+ _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
+ &min_mfps, &max_mfps);
bool started = env->CallBooleanMethod(_jCapturer, j_start,
_captureCapability.width,
_captureCapability.height,
diff --git a/modules/video_capture/ensure_initialized.cc b/modules/video_capture/ensure_initialized.cc
new file mode 100644
index 0000000..69f22ed
--- /dev/null
+++ b/modules/video_capture/ensure_initialized.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Platform-specific initialization bits, if any, go here.
+
+#if !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+namespace webrtc {
+namespace videocapturemodule {
+void EnsureInitialized() {}
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#else // !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+#include <assert.h>
+#include <pthread.h>
+
+#include "base/android/jni_android.h"
+
+namespace webrtc {
+
+// Declared in webrtc/modules/video_capture/include/video_capture.h.
+int32_t SetCaptureAndroidVM(JavaVM* javaVM);
+
+namespace videocapturemodule {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+void EnsureInitializedOnce() {
+ JNIEnv* jni = ::base::android::AttachCurrentThread();
+ JavaVM* jvm = NULL;
+ int status = jni->GetJavaVM(&jvm);
+ assert(status == 0);
+ status = webrtc::SetCaptureAndroidVM(jvm) == 0;
+ assert(status);
+}
+
+void EnsureInitialized() {
+ int ret = pthread_once(&g_initialize_once, &EnsureInitializedOnce);
+ assert(ret == 0);
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // ANDROID & WEBRTC_CHROMIUM_BUILD
diff --git a/modules/video_capture/ensure_initialized.h b/modules/video_capture/ensure_initialized.h
new file mode 100644
index 0000000..4298795
--- /dev/null
+++ b/modules/video_capture/ensure_initialized.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Ensure any necessary initialization of webrtc::videocapturemodule has
+// completed.
+void EnsureInitialized();
+
+} // namespace videocapturemodule.
+} // namespace webrtc.
diff --git a/modules/video_capture/include/video_capture.h b/modules/video_capture/include/video_capture.h
index 5340cb7..6966c23 100644
--- a/modules/video_capture/include/video_capture.h
+++ b/modules/video_capture/include/video_capture.h
@@ -20,7 +20,7 @@
namespace webrtc {
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+#if defined(ANDROID)
int32_t SetCaptureAndroidVM(JavaVM* javaVM);
#endif
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index 98b6aa6..db0c818 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -10,10 +10,14 @@
#include <stdio.h>
+#include <map>
+#include <sstream>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/video_capture/ensure_initialized.h"
#include "webrtc/modules/video_capture/include/video_capture.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -231,7 +235,9 @@
VideoCaptureTest() : number_of_devices_(0) {}
void SetUp() {
- device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(5));
+ webrtc::videocapturemodule::EnsureInitialized();
+ device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(0));
+ assert(device_info_.get());
number_of_devices_ = device_info_->NumberOfDevices();
ASSERT_GT(number_of_devices_, 0u);
}
@@ -258,7 +264,7 @@
void StartCapture(VideoCaptureModule* capture_module,
VideoCaptureCapability capability) {
- EXPECT_EQ(0, capture_module->StartCapture(capability));
+ ASSERT_EQ(0, capture_module->StartCapture(capability));
EXPECT_TRUE(capture_module->CaptureStarted());
VideoCaptureCapability resulting_capability;
@@ -289,7 +295,7 @@
capability.rawType = webrtc::kVideoUnknown;
#endif
capture_observer.SetExpectedCapability(capability);
- StartCapture(module.get(), capability);
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
// Less than 4s to start the camera.
EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
@@ -323,17 +329,50 @@
int number_of_capabilities = device_info_->NumberOfCapabilities(
module->CurrentDeviceName());
EXPECT_GT(number_of_capabilities, 0);
+ // Key is <width>x<height>, value is vector of maxFPS values at that
+ // resolution.
+ typedef std::map<std::string, std::vector<int> > FrameRatesByResolution;
+ FrameRatesByResolution frame_rates_by_resolution;
for (int i = 0; i < number_of_capabilities; ++i) {
VideoCaptureCapability capability;
EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i,
capability));
+ std::ostringstream resolutionStream;
+ resolutionStream << capability.width << "x" << capability.height;
+ resolutionStream.flush();
+ std::string resolution = resolutionStream.str();
+ frame_rates_by_resolution[resolution].push_back(capability.maxFPS);
+
+ // Since Android presents so many resolution/FPS combinations and the test
+ // runner imposes a timeout, only start capture and verify that a frame is
+ // delivered for at most two frame-rates at each resolution.
+ if (frame_rates_by_resolution[resolution].size() > 2)
+ continue;
+
capture_observer.SetExpectedCapability(capability);
- StartCapture(module.get(), capability);
- // Make sure 5 frames are captured.
- EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
+ // Make sure at least one frame is captured.
+ EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 1, kTimeOut);
EXPECT_EQ(0, module->StopCapture());
}
+
+#if ANDROID
+ // There's no reason for this to _necessarily_ be true, but in practice all
+ // Android devices this test runs on do support multiple capture resolutions
+ // and multiple frame-rates per resolution, so assert that here as a
+ // regression test for the bug where only a single frame-rate per resolution
+ // was reported (bug 2974). If this test starts running on devices for which
+ // this is untrue (e.g. Nexus4), the following should probably be wrapped in
+ // a base::android::BuildInfo::model()/device() check.
+ EXPECT_GT(frame_rates_by_resolution.size(), 1U);
+ for (FrameRatesByResolution::const_iterator it =
+ frame_rates_by_resolution.begin();
+ it != frame_rates_by_resolution.end();
+ ++it) {
+ EXPECT_GT(it->second.size(), 1U) << it->first;
+ }
+#endif // ANDROID
}
// NOTE: flaky, crashes sometimes.
@@ -376,10 +415,12 @@
#endif
capture_observer2.SetExpectedCapability(capability2);
- StartCapture(module1.get(), capability1);
- StartCapture(module2.get(), capability2);
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability1));
+ ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability2));
EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut);
EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut);
+ EXPECT_EQ(0, module2->StopCapture());
+ EXPECT_EQ(0, module1->StopCapture());
}
// Test class for testing external capture and capture feedback information
diff --git a/modules/video_capture/video_capture.gypi b/modules/video_capture/video_capture.gypi
index 970b058..7496683 100644
--- a/modules/video_capture/video_capture.gypi
+++ b/modules/video_capture/video_capture.gypi
@@ -19,6 +19,8 @@
'sources': [
'device_info_impl.cc',
'device_info_impl.h',
+ 'ensure_initialized.cc',
+ 'ensure_initialized.h',
'include/video_capture.h',
'include/video_capture_defines.h',
'include/video_capture_factory.h',
@@ -137,7 +139,7 @@
'targets': [
{
'target_name': 'video_capture_tests',
- 'type': 'executable',
+ 'type': '<(gtest_target_type)',
'dependencies': [
'video_capture_module',
'webrtc_utility',
@@ -164,6 +166,13 @@
'-lX11',
],
}],
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
['OS=="mac"', {
'dependencies': [
# Link with a special main for mac so we can use the webcam.