VideoCaptureAndroid: rewrote the (standalone) implementation of video capture on Android.
Besides being ~40% the size of the previous implementation, the new code lets
VideoCaptureAndroid stop and restart capture, which is necessary to support
onPause/onResume reasonably on Android.
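The practical effect is that an application can now mirror Activity lifecycle
events onto the capture module. A minimal sketch of that usage (host-side
function names are illustrative, not part of this change; checking of the
int32_t return values is omitted):

  // |module| is the VideoCaptureModule returned by VideoCaptureImpl::Create();
  // |capability| is whatever was passed to the original StartCapture() call.
  void OnHostActivityPause(webrtc::VideoCaptureModule* module) {
    module->StopCapture();  // releases the camera for other applications
  }

  void OnHostActivityResume(webrtc::VideoCaptureModule* module,
                            const webrtc::VideoCaptureCapability& capability) {
    module->StartCapture(capability);  // re-opens the camera; frames resume
  }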
BUG=1407
R=henrike@webrtc.org, wu@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/2334004
git-svn-id: http://webrtc.googlecode.com/svn/trunk/webrtc@4915 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/modules/video_capture/android/video_capture_android.cc b/modules/video_capture/android/video_capture_android.cc
index a66ce5c..2b6d606 100644
--- a/modules/video_capture/android/video_capture_android.cc
+++ b/modules/video_capture/android/video_capture_android.cc
@@ -10,599 +10,173 @@
#include "webrtc/modules/video_capture/android/video_capture_android.h"
-#include <stdio.h>
-
+#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
-namespace webrtc
-{
-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-// TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only
-// keep and reference java vm.
-int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext) {
- return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(
- javaVM,
- javaContext);
-}
-#endif
+static JavaVM* g_jvm = NULL;
+static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
-namespace videocapturemodule
-{
+namespace webrtc {
+
+// Called by Java when the camera has a new frame to deliver.
+void JNICALL ProvideCameraFrame(
+ JNIEnv* env,
+ jobject,
+ jbyteArray javaCameraFrame,
+ jint length,
+ jlong context) {
+ webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
+ reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
+ context);
+ jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
+ captureModule->OnIncomingFrame(
+ reinterpret_cast<uint8_t*>(cameraFrame), length, 0);
+ env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
+}
+
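+// Must be called with the application's JavaVM before any capture module is
+// created: caches the VM, initializes DeviceInfoAndroid, and registers the
+// native ProvideCameraFrame() callback on the Java VideoCaptureAndroid class.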
+int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
+ g_jvm = javaVM;
+ AttachThreadScoped ats(g_jvm);
+
+ videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
+
+ jclass j_capture_class =
+ ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
+ assert(j_capture_class);
+ g_java_capturer_class =
+ reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
+ assert(g_java_capturer_class);
+
+ JNINativeMethod native_method = {
+ "ProvideCameraFrame", "([BIJ)V",
+ reinterpret_cast<void*>(&ProvideCameraFrame)
+ };
+ if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0)
+ assert(false);
+
+ return 0;
+}
+
+namespace videocapturemodule {
VideoCaptureModule* VideoCaptureImpl::Create(
const int32_t id,
const char* deviceUniqueIdUTF8) {
-
RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
-
- if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) {
+ if (implementation->Init(id, deviceUniqueIdUTF8) != 0) {
delete implementation;
implementation = NULL;
}
return implementation;
}
-// Android logging, uncomment to print trace to
-// logcat instead of trace file/callback
-// #include <android/log.h>
-// #undef WEBRTC_TRACE
-// #define WEBRTC_TRACE(a,b,c,...)
-// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-
-JavaVM* VideoCaptureAndroid::g_jvm = NULL;
-//VideoCaptureAndroid.java
-jclass VideoCaptureAndroid::g_javaCmClass = NULL;
-//VideoCaptureDeviceInfoAndroid.java
-jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
-//static instance of VideoCaptureDeviceInfoAndroid.java
-jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
-jobject VideoCaptureAndroid::g_javaContext = NULL;
-
-/*
- * Register references to Java Capture class.
- */
-int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
- void* javaContext) {
-
- g_jvm = static_cast<JavaVM*> (javaVM);
- g_javaContext = static_cast<jobject> (javaContext);
-
- if (javaVM) {
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not get Java environment", __FUNCTION__);
- return -1;
- }
- // get java capture class type (note path to class packet)
- jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
- if (!javaCmClassLocal) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not find java class", __FUNCTION__);
- return -1;
- }
- // create a global reference to the class
- // (to tell JNI that we are referencing it
- // after this function has returned)
- g_javaCmClass = static_cast<jclass>
- (env->NewGlobalRef(javaCmClassLocal));
- if (!g_javaCmClass) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: InitVideoEngineJava(): could not create"
- " Java Camera class reference",
- __FUNCTION__);
- return -1;
- }
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaCmClassLocal);
- JNINativeMethod nativeFunctions =
- { "ProvideCameraFrame", "([BIJ)V",
- (void*) &VideoCaptureAndroid::ProvideCameraFrame };
- if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: Registered native functions", __FUNCTION__);
- }
- else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Failed to register native functions",
- __FUNCTION__);
- return -1;
- }
-
- jclass capabilityClassLocal = env->FindClass(
- "org/webrtc/videoengine/CaptureCapabilityAndroid");
- if (!capabilityClassLocal) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not find java class", __FUNCTION__);
- return -1;
- }
- jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
- capabilityClassLocal));
- DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);
-
- // get java capture class type (note path to class packet)
- jclass javaCmDevInfoClassLocal = env->FindClass(
- "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
- if (!javaCmDevInfoClassLocal) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not find java class", __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class
- // (to tell JNI that we are referencing it
- // after this function has returned)
- g_javaCmDevInfoClass = static_cast<jclass>
- (env->NewGlobalRef(javaCmDevInfoClassLocal));
- if (!g_javaCmDevInfoClass) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: InitVideoEngineJava(): could not create Java "
- "Camera Device info class reference",
- __FUNCTION__);
- return -1;
- }
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaCmDevInfoClassLocal);
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "VideoCaptureDeviceInfoAndroid get method id");
-
- // get the method ID for the Android Java CaptureClass static
- //CreateVideoCaptureAndroid factory method.
- jmethodID cid = env->GetStaticMethodID(
- g_javaCmDevInfoClass,
- "CreateVideoCaptureDeviceInfoAndroid",
- "(ILandroid/content/Context;)"
- "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
- if (cid == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not get java"
- "VideoCaptureDeviceInfoAndroid constructor ID",
- __FUNCTION__);
- return -1;
- }
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: construct static java device object", __FUNCTION__);
-
- // construct the object by calling the static constructor object
- jobject javaCameraDeviceInfoObjLocal =
- env->CallStaticObjectMethod(g_javaCmDevInfoClass,
- cid, (int) -1,
- g_javaContext);
- if (!javaCameraDeviceInfoObjLocal) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
- "%s: could not create Java Capture Device info object",
- __FUNCTION__);
- return -1;
- }
- // create a reference to the object (to tell JNI that
- // we are referencing it after this function has returned)
- g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
- if (!g_javaCmDevInfoObject) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceAudioDevice,
- -1,
- "%s: could not create Java"
- "cameradevinceinfo object reference",
- __FUNCTION__);
- return -1;
- }
- // Delete local object ref, we only use the global ref
- env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);
- return 0;
- }
- else {
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: JVM is NULL, assuming deinit", __FUNCTION__);
- if (!g_jvm) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: SetAndroidObjects not called with a valid JVM.",
- __FUNCTION__);
- return -1;
- }
- JNIEnv* env = NULL;
- bool attached = false;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
- -1, "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- attached = true;
- }
- env->DeleteGlobalRef(g_javaCmDevInfoObject);
- env->DeleteGlobalRef(g_javaCmDevInfoClass);
- env->DeleteGlobalRef(g_javaCmClass);
- if (attached && g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- return -1;
- }
- return 0;
- env = (JNIEnv *) NULL;
- }
- return 0;
+int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
+ int32_t videoFrameLength,
+ int64_t captureTime) {
+ return IncomingFrame(
+ videoFrame, videoFrameLength, _captureCapability, captureTime);
}
-int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
- JNIEnv*& env,
- jclass& javaCmDevInfoClass,
- jobject& javaCmDevInfoObject,
- bool& attached) {
- // get the JNI env for this thread
- if (!g_jvm) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: SetAndroidObjects not called with a valid JVM.",
- __FUNCTION__);
- return -1;
- }
- attached = false;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- attached = true;
- }
- javaCmDevInfoClass = g_javaCmDevInfoClass;
- javaCmDevInfoObject = g_javaCmDevInfoObject;
- return 0;
-
-}
-
-int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
- bool attached) {
- if (attached && g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- return -1;
- }
- return 0;
-}
-
-/*
- * JNI callback from Java class. Called
- * when the camera has a new frame to deliver
- * Class: org_webrtc_capturemodule_VideoCaptureAndroid
- * Method: ProvideCameraFrame
- * Signature: ([BIJ)V
- */
-void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
- jobject,
- jbyteArray javaCameraFrame,
- jint length,
- jlong context) {
- VideoCaptureAndroid* captureModule =
- reinterpret_cast<VideoCaptureAndroid*>(context);
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
- -1, "%s: IncomingFrame %d", __FUNCTION__,length);
- jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
- captureModule->IncomingFrame((uint8_t*) cameraFrame,
- length,captureModule->_frameInfo,0);
- env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT);
-}
-
-
-
VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id)
- : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL),
+ : VideoCaptureImpl(id),
+ _deviceInfo(id),
+ _jCapturer(NULL),
_captureStarted(false) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: context %x", __FUNCTION__, (int) this);
}
-// ----------------------------------------------------------------------------
-// Init
-//
-// Initializes needed Java resources like the JNI interface to
-// VideoCaptureAndroid.java
-// ----------------------------------------------------------------------------
int32_t VideoCaptureAndroid::Init(const int32_t id,
const char* deviceUniqueIdUTF8) {
const int nameLength = strlen(deviceUniqueIdUTF8);
- if (nameLength >= kVideoCaptureUniqueNameLength) {
+ if (nameLength >= kVideoCaptureUniqueNameLength)
return -1;
- }
// Store the device name
_deviceUniqueId = new char[nameLength + 1];
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
- if (_capInfo.Init() != 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCapture,
- _id,
- "%s: Failed to initialize CaptureDeviceInfo",
- __FUNCTION__);
+ AttachThreadScoped ats(g_jvm);
+ JNIEnv* env = ats.env();
+
+ jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
+ assert(ctor);
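+  // |this| is passed to the Java constructor as a jlong and handed back as
+  // the |context| argument of ProvideCameraFrame(), which routes frames to
+  // this module.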
+ jlong j_this = reinterpret_cast<intptr_t>(this);
+ size_t camera_id = 0;
+ if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
return -1;
- }
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
- __FUNCTION__);
- // use the jvm that has been set
- if (!g_jvm) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Not a valid Java VM pointer", __FUNCTION__);
- return -1;
- }
- // get the JNI env for this thread
- JNIEnv *env;
- bool isAttached = false;
-
- // get the JNI env for this thread
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
- "get method id");
-
- // get the method ID for the Android Java
- // CaptureDeviceInfoClass AllocateCamera factory method.
- char signature[256];
- sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
-
- jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
- signature);
- if (cid == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: could not get constructor ID", __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
- // construct the object by calling the static constructor object
- jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
- cid, (jint) id,
- (jlong) this,
- capureIdString);
- if (!javaCameraObjLocal) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
- "%s: could not create Java Capture object", __FUNCTION__);
- return -1;
- }
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
- if (!_javaCaptureObj) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
- "%s: could not create Java camera object reference",
- __FUNCTION__);
- return -1;
- }
-
- // Delete local object ref, we only use the global ref
- env->DeleteLocalRef(javaCameraObjLocal);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
+ _jCapturer = env->NewGlobalRef(
+ env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
+ assert(_jCapturer);
return 0;
}
VideoCaptureAndroid::~VideoCaptureAndroid() {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
- __FUNCTION__);
- if (_javaCaptureObj == NULL || g_jvm == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Nothing to clean", __FUNCTION__);
- }
- else {
- bool isAttached = false;
- // get the JNI env for this thread
- JNIEnv *env;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- }
- else {
- isAttached = true;
- }
- }
-
- // get the method ID for the Android Java CaptureClass static
- // DeleteVideoCaptureAndroid method. Call this to release the camera so
- // another application can use it.
- jmethodID cid = env->GetStaticMethodID(
- g_javaCmClass,
- "DeleteVideoCaptureAndroid",
- "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
- if (cid != NULL) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
- // Close the camera by calling the static destruct function.
- env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
-
- // Delete global object ref to the camera.
- env->DeleteGlobalRef(_javaCaptureObj);
- _javaCaptureObj = NULL;
- }
- else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Failed to find DeleteVideoCaptureAndroid id",
- __FUNCTION__);
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
- _id, "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
+ // Ensure Java camera is released even if our caller didn't explicitly Stop.
+ if (_captureStarted)
+ StopCapture();
+ AttachThreadScoped ats(g_jvm);
+ ats.env()->DeleteGlobalRef(_jCapturer);
}
int32_t VideoCaptureAndroid::StartCapture(
const VideoCaptureCapability& capability) {
CriticalSectionScoped cs(&_apiCs);
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: ", __FUNCTION__);
+ AttachThreadScoped ats(g_jvm);
+ JNIEnv* env = ats.env();
- bool isAttached = false;
- int32_t result = 0;
- // get the JNI env for this thread
- JNIEnv *env;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- }
- else {
- isAttached = true;
- }
- }
-
- if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
- _frameInfo) < 0) {
+ if (_deviceInfo.GetBestMatchedCapability(
+ _deviceUniqueId, capability, _captureCapability) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: GetBestMatchedCapability failed. Req cap w%d h%d",
+ "%s: GetBestMatchedCapability failed: %dx%d",
__FUNCTION__, capability.width, capability.height);
return -1;
}
- // Store the new expected capture delay
- _captureDelay = _frameInfo.expectedCaptureDelay;
+ _captureDelay = _captureCapability.expectedCaptureDelay;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
- _frameInfo.height);
-
- // get the method ID for the Android Java
- // CaptureClass static StartCapture method.
- jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
- if (cid != NULL) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: Call StartCapture", __FUNCTION__);
- // Close the camera by calling the static destruct function.
- result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
- _frameInfo.height, _frameInfo.maxFPS);
- }
- else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Failed to find StartCapture id", __FUNCTION__);
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
- if (result == 0) {
+ jmethodID j_start =
+ env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
+ assert(j_start);
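+  // The "mfps" values are milli-frames-per-second, the unit used by Android's
+  // Camera.Parameters preview fps ranges.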
+ int min_mfps = 0;
+ int max_mfps = 0;
+ _deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
+ bool started = env->CallBooleanMethod(_jCapturer, j_start,
+ _captureCapability.width,
+ _captureCapability.height,
+ min_mfps, max_mfps);
+ if (started) {
_requestedCapability = capability;
_captureStarted = true;
}
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: result %d", __FUNCTION__, result);
- return result;
+ return started ? 0 : -1;
}
int32_t VideoCaptureAndroid::StopCapture() {
CriticalSectionScoped cs(&_apiCs);
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: ", __FUNCTION__);
-
- bool isAttached = false;
- int32_t result = 0;
- // get the JNI env for this thread
- JNIEnv *env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- }
- else {
- isAttached = true;
- }
- }
+ AttachThreadScoped ats(g_jvm);
+ JNIEnv* env = ats.env();
memset(&_requestedCapability, 0, sizeof(_requestedCapability));
- memset(&_frameInfo, 0, sizeof(_frameInfo));
-
- // get the method ID for the Android Java CaptureClass StopCapture method.
- jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
- if (cid != NULL) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
- "%s: Call StopCapture", __FUNCTION__);
- // Close the camera by calling the static destruct function.
- result = env->CallIntMethod(_javaCaptureObj, cid);
- }
- else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: Failed to find StopCapture id", __FUNCTION__);
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
+ memset(&_captureCapability, 0, sizeof(_captureCapability));
_captureStarted = false;
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: result %d", __FUNCTION__, result);
- return result;
+ jmethodID j_stop =
+ env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
+ return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
}
bool VideoCaptureAndroid::CaptureStarted() {
CriticalSectionScoped cs(&_apiCs);
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: ", __FUNCTION__);
return _captureStarted;
}
int32_t VideoCaptureAndroid::CaptureSettings(
VideoCaptureCapability& settings) {
CriticalSectionScoped cs(&_apiCs);
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: ", __FUNCTION__);
settings = _requestedCapability;
return 0;
}
@@ -610,64 +184,20 @@
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
- if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
- if (!g_jvm)
- return -1;
+ if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
+ return 0;
- // get the JNI env for this thread
- JNIEnv *env;
- bool isAttached = false;
+ AttachThreadScoped ats(g_jvm);
+ JNIEnv* env = ats.env();
- // get the JNI env for this thread
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
- if ((res < 0) || !env) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
- "(I)V");
- if (cid == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
- "%s: could not get java SetPreviewRotation ID",
- __FUNCTION__);
- return -1;
- }
- jint rotateFrame = 0;
- switch (rotation) {
- case kCameraRotate0:
- rotateFrame = 0;
- break;
- case kCameraRotate90:
- rotateFrame = 90;
- break;
- case kCameraRotate180:
- rotateFrame = 180;
- break;
- case kCameraRotate270:
- rotateFrame = 270;
- break;
- }
- env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
- _id, "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
-
+ jmethodID j_spr =
+ env->GetMethodID(g_java_capturer_class, "setPreviewRotation", "(I)V");
+ assert(j_spr);
+ int rotation_degrees;
+ if (RotationInDegrees(rotation, &rotation_degrees) != 0) {
+ assert(false);
}
+ env->CallVoidMethod(_jCapturer, j_spr, rotation_degrees);
return 0;
}