Revert of Remove the old AndroidVideoCapturer stack code. (patchset #2 id:20001 of https://codereview.webrtc.org/2235893003/ )

Reason for revert:
Breaks downstream.

Original issue's description:
> Remove the old AndroidVideoCapturer stack code.
>
> This code is no longer needed. Apps should be using the new API introduced here: https://codereview.webrtc.org/2127893002/
>
> Committed: https://crrev.com/1b365a8db070f9cdcbf35ec871f758dcd909e51d
> Cr-Commit-Position: refs/heads/master@{#13950}

TBR=magjed@webrtc.org,glaznev@webrtc.org,kjellander@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review-Url: https://codereview.webrtc.org/2291583002
Cr-Commit-Position: refs/heads/master@{#13958}
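
Note (illustration only, not part of the patch): a minimal sketch of how an application exercises the two source-creation paths touched by this revert. The class name, the factory/capturer instances, and the 1280x720@30 format are illustrative assumptions; the createVideoSource and startCapture calls correspond to the Java methods restored or kept by the diff below.

    import org.webrtc.MediaConstraints;
    import org.webrtc.PeerConnectionFactory;
    import org.webrtc.VideoCapturer;
    import org.webrtc.VideoSource;

    final class VideoSourceExample {
      // Restored (old-stack) path: the returned VideoSource takes ownership of
      // |capturer| and starts it automatically, so the application must not
      // call startCapture() or capturer.dispose() afterwards.
      static VideoSource createWithOldStack(
          PeerConnectionFactory factory, VideoCapturer capturer) {
        return factory.createVideoSource(capturer, new MediaConstraints());
      }

      // Newer path (unaffected by this revert): the application keeps
      // ownership of |capturer| and starts capture explicitly once the
      // source exists.
      static VideoSource createWithNewStack(
          PeerConnectionFactory factory, VideoCapturer capturer) {
        VideoSource source = factory.createVideoSource(capturer);
        capturer.startCapture(1280, 720, 30);  // Illustrative capture format.
        return source;
      }
    }

The old-stack path is the one downstream projects still depend on, which is why the files below are being added back.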
diff --git a/webrtc/api/BUILD.gn b/webrtc/api/BUILD.gn
index 547fc87..2c26724 100644
--- a/webrtc/api/BUILD.gn
+++ b/webrtc/api/BUILD.gn
@@ -156,6 +156,8 @@
"android/jni/androidmetrics_jni.cc",
"android/jni/androidnetworkmonitor_jni.cc",
"android/jni/androidnetworkmonitor_jni.h",
+ "android/jni/androidvideocapturer_jni.cc",
+ "android/jni/androidvideocapturer_jni.h",
"android/jni/androidvideotracksource_jni.cc",
"android/jni/classreferenceholder.cc",
"android/jni/classreferenceholder.h",
@@ -166,6 +168,8 @@
"android/jni/peerconnection_jni.cc",
"android/jni/surfacetexturehelper_jni.cc",
"android/jni/surfacetexturehelper_jni.h",
+ "androidvideocapturer.cc",
+ "androidvideocapturer.h",
"androidvideotracksource.cc",
"androidvideotracksource.h",
]
diff --git a/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
index 860c5a8..8e3e11d 100644
--- a/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
@@ -111,10 +111,21 @@
nativeCreateLocalMediaStream(nativeFactory, label));
}
+ // The VideoSource takes ownership of |capturer|, so capturer.dispose() should not be called
+ // manually after this. Video capturer is automatically started so there is no need to call
+ // startCapture after this method.
+ public VideoSource createVideoSource(
+ VideoCapturer capturer, MediaConstraints constraints) {
+ final EglBase.Context eglContext =
+ localEglbase == null ? null : localEglbase.getEglBaseContext();
+ return new VideoSource(nativeCreateVideoSource(nativeFactory,
+ eglContext, capturer, constraints));
+ }
+
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
- long nativeAndroidVideoTrackSource = nativeCreateVideoSource(nativeFactory, eglContext);
+ long nativeAndroidVideoTrackSource = nativeCreateVideoSource2(nativeFactory, eglContext);
VideoCapturer.CapturerObserver capturerObserver
= new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
@@ -237,6 +248,10 @@
long nativeFactory, String label);
private static native long nativeCreateVideoSource(
+ long nativeFactory, EglBase.Context eglContext, VideoCapturer videoCapturer,
+ MediaConstraints constraints);
+
+ private static native long nativeCreateVideoSource2(
long nativeFactory, EglBase.Context eglContext);
private static native void nativeInitializeVideoCapturer(
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
index 0ecc44f..c92f82a 100644
--- a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
@@ -41,6 +41,53 @@
// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
+ static class NativeObserver implements CapturerObserver {
+ private final long nativeCapturer;
+
+ public NativeObserver(long nativeCapturer) {
+ this.nativeCapturer = nativeCapturer;
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeCapturerStarted(nativeCapturer, success);
+ }
+
+ @Override
+ public void onCapturerStopped() {}
+
+ @Override
+ public void onByteBufferFrameCaptured(byte[] data, int width, int height,
+ int rotation, long timeStamp) {
+ nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
+ timeStamp);
+ }
+
+ @Override
+ public void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp) {
+ nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
+ rotation, timestamp);
+ }
+
+ @Override
+ public void onOutputFormatRequest(int width, int height, int framerate) {
+ nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
+ }
+
+ private native void nativeCapturerStarted(long nativeCapturer,
+ boolean success);
+ private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
+ byte[] data, int length, int width, int height, int rotation, long timeStamp);
+ private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
+ int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
+ private native void nativeOnOutputFormatRequest(long nativeCapturer,
+ int width, int height, int framerate);
+ }
+
+ // An implementation of CapturerObserver that forwards all calls from
+ // Java to the C layer.
static class AndroidVideoTrackSourceObserver implements CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;
diff --git a/webrtc/api/android/jni/androidvideocapturer_jni.cc b/webrtc/api/android/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000..d31ce26
--- /dev/null
+++ b/webrtc/api/android/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,349 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/android/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc_jni {
+
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+ jobject appliction_context) {
+ if (application_context_) {
+ jni->DeleteGlobalRef(application_context_);
+ }
+ application_context_ = NewGlobalRef(jni, appliction_context);
+
+ return 0;
+}
+
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_egl_context)
+ : j_video_capturer_(jni, j_video_capturer),
+ j_video_capturer_class_(jni, FindClass(jni, "org/webrtc/VideoCapturer")),
+ j_observer_class_(
+ jni,
+ FindClass(jni,
+ "org/webrtc/VideoCapturer$NativeObserver")),
+ surface_texture_helper_(SurfaceTextureHelper::create(
+ jni, "Camera SurfaceTextureHelper", j_egl_context)),
+ capturer_(nullptr) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+ jobject j_frame_observer =
+ jni->NewObject(*j_observer_class_,
+ GetMethodID(jni, *j_observer_class_, "<init>", "(J)V"),
+ jlongFromPointer(this));
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ jni->CallVoidMethod(
+ *j_video_capturer_,
+ GetMethodID(jni, *j_video_capturer_class_, "initialize",
+ "(Lorg/webrtc/SurfaceTextureHelper;Landroid/content/"
+ "Context;Lorg/webrtc/VideoCapturer$CapturerObserver;)V"),
+ surface_texture_helper_
+ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+ : nullptr,
+ application_context_, j_frame_observer);
+ CHECK_EXCEPTION(jni) << "error during VideoCapturer.initialize()";
+ thread_checker_.DetachFromThread();
+}
+
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+ jni()->CallVoidMethod(
+ *j_video_capturer_,
+ GetMethodID(jni(), *j_video_capturer_class_, "dispose", "()V"));
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.dispose()";
+}
+
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+ rtc::CritScope cs(&capturer_lock_);
+ RTC_CHECK(capturer_ == nullptr);
+ RTC_CHECK(invoker_.get() == nullptr);
+ capturer_ = capturer;
+ invoker_.reset(new rtc::GuardedAsyncInvoker());
+ }
+ jmethodID m =
+ GetMethodID(jni(), *j_video_capturer_class_, "startCapture", "(III)V");
+ jni()->CallVoidMethod(*j_video_capturer_, m, width, height, framerate);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.startCapture";
+}
+
+void AndroidVideoCapturerJni::Stop() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+ // TODO(nisse): Consider moving this block until *after* the call to
+ // stopCapturer. stopCapturer should ensure that we get no
+ // more frames, and then we shouldn't need the if (!capturer_)
+ // checks in OnMemoryBufferFrame and OnTextureFrame.
+ rtc::CritScope cs(&capturer_lock_);
+ // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+ invoker_ = nullptr;
+ capturer_ = nullptr;
+ }
+ jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+ "stopCapture", "()V");
+ jni()->CallVoidMethod(*j_video_capturer_, m);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.stopCapture";
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+ const rtc::Location& posted_from,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args) {
+ rtc::CritScope cs(&capturer_lock_);
+ if (!invoker_) {
+ LOG(LS_WARNING) << posted_from.function_name()
+ << "() called for closed capturer.";
+ return;
+ }
+ invoker_->AsyncInvoke<void>(posted_from,
+ rtc::Bind(method, capturer_, args...));
+}
+
+std::vector<cricket::VideoFormat>
+AndroidVideoCapturerJni::GetSupportedFormats() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jobject j_list_of_formats = jni->CallObjectMethod(
+ *j_video_capturer_,
+ GetMethodID(jni, *j_video_capturer_class_, "getSupportedFormats",
+ "()Ljava/util/List;"));
+ CHECK_EXCEPTION(jni) << "error during getSupportedFormats";
+
+ // Extract Java List<CaptureFormat> to std::vector<cricket::VideoFormat>.
+ jclass j_list_class = jni->FindClass("java/util/List");
+ jclass j_format_class =
+ jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat");
+ jclass j_framerate_class = jni->FindClass(
+ "org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
+ const int size = jni->CallIntMethod(
+ j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I"));
+ jmethodID j_get =
+ GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;");
+ jfieldID j_framerate_field = GetFieldID(
+ jni, j_format_class, "framerate",
+ "Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange;");
+ jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I");
+ jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I");
+ jfieldID j_max_framerate_field =
+ GetFieldID(jni, j_framerate_class, "max", "I");
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.reserve(size);
+ for (int i = 0; i < size; ++i) {
+ jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i);
+ jobject j_framerate = GetObjectField(jni, j_format, j_framerate_field);
+ const int frame_interval = cricket::VideoFormat::FpsToInterval(
+ (GetIntField(jni, j_framerate, j_max_framerate_field) + 999) / 1000);
+ formats.emplace_back(GetIntField(jni, j_format, j_width_field),
+ GetIntField(jni, j_format, j_height_field),
+ frame_interval, cricket::FOURCC_NV21);
+ }
+ CHECK_EXCEPTION(jni) << "error while extracting formats";
+ return formats;
+}
+
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+ AsyncCapturerInvoke(
+ RTC_FROM_HERE, &webrtc::AndroidVideoCapturer::OnCapturerStarted, success);
+}
+
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+ int length,
+ int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ rtc::CritScope cs(&capturer_lock_);
+ if (!capturer_) {
+ LOG(LS_WARNING) << "OnMemoryBufferFrame() called for closed capturer.";
+ return;
+ }
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+ int64_t translated_camera_time_us;
+
+ if (!capturer_->AdaptFrame(width, height,
+ timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros(),
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y,
+ &translated_camera_time_us)) {
+ return;
+ }
+
+ int rotated_width = crop_width;
+ int rotated_height = crop_height;
+
+ if (capturer_->apply_rotation() && (rotation == 90 || rotation == 270)) {
+ std::swap(adapted_width, adapted_height);
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);
+
+ const uint8_t* y_plane = static_cast<const uint8_t*>(video_frame);
+ const uint8_t* uv_plane = y_plane + width * height;
+
+ // Can only crop at even pixels.
+ crop_x &= ~1;
+ crop_y &= ~1;
+ int uv_width = (width + 1) / 2;
+
+ libyuv::NV12ToI420Rotate(
+ y_plane + width * crop_y + crop_x, width,
+ uv_plane + uv_width * crop_y + crop_x, width,
+ buffer->MutableDataY(), buffer->StrideY(),
+ // Swap U and V, since we have NV21, not NV12.
+ buffer->MutableDataV(), buffer->StrideV(),
+ buffer->MutableDataU(), buffer->StrideU(),
+ crop_width, crop_height, static_cast<libyuv::RotationMode>(
+ capturer_->apply_rotation() ? rotation : 0));
+
+ if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
+ rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
+ post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
+ scaled_buffer->ScaleFrom(buffer);
+ buffer = scaled_buffer;
+ }
+ capturer_->OnFrame(
+ cricket::WebRtcVideoFrame(
+ buffer, capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation),
+ translated_camera_time_us, 0),
+ width, height);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ rtc::CritScope cs(&capturer_lock_);
+ if (!capturer_) {
+ LOG(LS_WARNING) << "OnTextureFrame() called for closed capturer.";
+ surface_texture_helper_->ReturnTextureFrame();
+ return;
+ }
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+ int64_t translated_camera_time_us;
+
+ if (!capturer_->AdaptFrame(width, height,
+ timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros(),
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y,
+ &translated_camera_time_us)) {
+ surface_texture_helper_->ReturnTextureFrame();
+ return;
+ }
+
+ Matrix matrix = handle.sampling_matrix;
+
+ matrix.Crop(crop_width / static_cast<float>(width),
+ crop_height / static_cast<float>(height),
+ crop_x / static_cast<float>(width),
+ crop_y / static_cast<float>(height));
+
+ if (capturer_->apply_rotation()) {
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(adapted_width, adapted_height);
+ }
+ matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
+ }
+
+ capturer_->OnFrame(cricket::WebRtcVideoFrame(
+ surface_texture_helper_->CreateTextureFrame(
+ adapted_width, adapted_height,
+ NativeHandleImpl(handle.oes_texture_id, matrix)),
+ capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation),
+ translated_camera_time_us, 0),
+ width, height);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+ int height,
+ int fps) {
+ AsyncCapturerInvoke(RTC_FROM_HERE,
+ &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+ width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
+JOW(void,
+ VideoCapturer_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+ jint width, jint height, jint rotation, jlong timestamp) {
+ jboolean is_copy = true;
+ jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+ jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnTextureFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_oes_texture_id, jfloatArray j_transform_matrix,
+ jint j_rotation, jlong j_timestamp) {
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeCapturerStarted)
+ (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+ LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+ j_success);
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_fps) {
+ LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+ j_width, j_height, j_fps);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/androidvideocapturer_jni.h b/webrtc/api/android/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000..3d8db6e
--- /dev/null
+++ b/webrtc/api/android/jni/androidvideocapturer_jni.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <memory>
+#include <string>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+
+namespace webrtc_jni {
+
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++ only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+ static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
+
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_egl_context);
+
+ void Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) override;
+ void Stop() override;
+
+ std::vector<cricket::VideoFormat> GetSupportedFormats() override;
+
+ // Called from VideoCapturer::NativeObserver on a Java thread.
+ void OnCapturerStarted(bool success);
+ void OnMemoryBufferFrame(void* video_frame, int length, int width,
+ int height, int rotation, int64_t timestamp_ns);
+ void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+ const NativeHandleImpl& handle);
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+ ~AndroidVideoCapturerJni();
+
+ private:
+ JNIEnv* jni();
+
+ // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
+ template <typename T>
+ struct Identity {
+ typedef T type;
+ };
+
+ // Helper function to make safe asynchronous calls to |capturer_|. The calls
+ // are not guaranteed to be delivered.
+ template <typename... Args>
+ void AsyncCapturerInvoke(
+ const rtc::Location& posted_from,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args);
+
+ const ScopedGlobalRef<jobject> j_video_capturer_;
+ const ScopedGlobalRef<jclass> j_video_capturer_class_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+
+ // Used on the Java thread running the camera.
+ webrtc::I420BufferPool pre_scale_pool_;
+ webrtc::I420BufferPool post_scale_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+ rtc::ThreadChecker thread_checker_;
+
+ // |capturer_| is guaranteed to be a valid pointer from a call to
+ // AndroidVideoCapturerDelegate::Start until the corresponding call to
+ // AndroidVideoCapturerDelegate::Stop.
+ rtc::CriticalSection capturer_lock_;
+ webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+ // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+ // called on.
+ std::unique_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+ static jobject application_context_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/webrtc/api/android/jni/classreferenceholder.cc b/webrtc/api/android/jni/classreferenceholder.cc
index bfc03a3..be5cde2 100644
--- a/webrtc/api/android/jni/classreferenceholder.cc
+++ b/webrtc/api/android/jni/classreferenceholder.cc
@@ -94,6 +94,7 @@
LoadClass(jni, "org/webrtc/StatsReport$Value");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
LoadClass(jni, "org/webrtc/VideoCapturer");
+ LoadClass(jni, "org/webrtc/VideoCapturer$NativeObserver");
LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
LoadClass(jni, "org/webrtc/VideoTrack");
}
diff --git a/webrtc/api/android/jni/peerconnection_jni.cc b/webrtc/api/android/jni/peerconnection_jni.cc
index 4fd04a7..5095b15 100644
--- a/webrtc/api/android/jni/peerconnection_jni.cc
+++ b/webrtc/api/android/jni/peerconnection_jni.cc
@@ -43,10 +43,12 @@
#include <memory>
#include <utility>
+#include "webrtc/api/androidvideocapturer.h"
#include "webrtc/api/androidvideotracksource.h"
#include "webrtc/api/android/jni/androidmediadecoder_jni.h"
#include "webrtc/api/android/jni/androidmediaencoder_jni.h"
#include "webrtc/api/android/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/api/android/jni/androidvideocapturer_jni.h"
#include "webrtc/api/android/jni/classreferenceholder.h"
#include "webrtc/api/android/jni/jni_helpers.h"
#include "webrtc/api/android/jni/native_handle_impl.h"
@@ -1001,6 +1003,9 @@
RTC_DCHECK(j_application_context == nullptr);
j_application_context = NewGlobalRef(jni, context);
+ if (initialize_video) {
+ failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+ }
if (initialize_audio)
failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
factory_static_initialized = true;
@@ -1257,7 +1262,27 @@
return (jlong)stream.release();
}
-JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context,
+ jobject j_video_capturer, jobject j_constraints) {
+ // Create a cricket::VideoCapturer from |j_video_capturer|.
+ rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_egl_context);
+ std::unique_ptr<cricket::VideoCapturer> capturer(
+ new webrtc::AndroidVideoCapturer(delegate));
+ // Create a webrtc::VideoTrackSourceInterface from the cricket::VideoCapturer,
+ // native factory and constraints.
+ std::unique_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<VideoTrackSourceInterface> source(
+ factory->CreateVideoSource(capturer.release(), constraints.get()));
+ return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource2)
(JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
diff --git a/webrtc/api/androidvideocapturer.cc b/webrtc/api/androidvideocapturer.cc
new file mode 100644
index 0000000..0bdf214
--- /dev/null
+++ b/webrtc/api/androidvideocapturer.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/androidvideocapturer.h"
+
+#include <memory>
+
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/media/engine/webrtcvideoframe.h"
+
+namespace webrtc {
+
+AndroidVideoCapturer::AndroidVideoCapturer(
+ const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+ : running_(false),
+ delegate_(delegate) {
+ thread_checker_.DetachFromThread();
+ SetSupportedFormats(delegate_->GetSupportedFormats());
+}
+
+AndroidVideoCapturer::~AndroidVideoCapturer() {
+ RTC_CHECK(!running_);
+}
+
+cricket::CaptureState AndroidVideoCapturer::Start(
+ const cricket::VideoFormat& capture_format) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!running_);
+ const int fps = cricket::VideoFormat::IntervalToFps(capture_format.interval);
+ LOG(LS_INFO) << " AndroidVideoCapturer::Start " << capture_format.width << "x"
+ << capture_format.height << "@" << fps;
+
+ running_ = true;
+ delegate_->Start(capture_format.width, capture_format.height, fps, this);
+ SetCaptureFormat(&capture_format);
+ return cricket::CS_STARTING;
+}
+
+void AndroidVideoCapturer::Stop() {
+ LOG(LS_INFO) << " AndroidVideoCapturer::Stop ";
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ RTC_CHECK(running_);
+ running_ = false;
+ SetCaptureFormat(NULL);
+
+ delegate_->Stop();
+ SetCaptureState(cricket::CS_STOPPED);
+}
+
+bool AndroidVideoCapturer::IsRunning() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ return running_;
+}
+
+bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ fourccs->push_back(cricket::FOURCC_YV12);
+ return true;
+}
+
+void AndroidVideoCapturer::OnCapturerStarted(bool success) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ const cricket::CaptureState new_state =
+ success ? cricket::CS_RUNNING : cricket::CS_FAILED;
+ SetCaptureState(new_state);
+}
+
+void AndroidVideoCapturer::OnOutputFormatRequest(
+ int width, int height, int fps) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ cricket::VideoFormat format(width, height,
+ cricket::VideoFormat::FpsToInterval(fps), 0);
+ video_adapter()->OnOutputFormatRequest(format);
+}
+
+bool AndroidVideoCapturer::GetBestCaptureFormat(
+ const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) {
+ // Delegate this choice to VideoCapturer.startCapture().
+ *best_format = desired;
+ return true;
+}
+
+} // namespace webrtc
diff --git a/webrtc/api/androidvideocapturer.h b/webrtc/api/androidvideocapturer.h
new file mode 100644
index 0000000..24294ef
--- /dev/null
+++ b/webrtc/api/androidvideocapturer.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
+#define WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/media/base/videocapturer.h"
+
+namespace webrtc {
+
+class AndroidVideoCapturer;
+
+class AndroidVideoCapturerDelegate : public rtc::RefCountInterface {
+ public:
+ virtual ~AndroidVideoCapturerDelegate() {}
+ // Start capturing. The implementation of the delegate must call
+ // AndroidVideoCapturer::OnCapturerStarted with the result of this request.
+ virtual void Start(int width, int height, int framerate,
+ AndroidVideoCapturer* capturer) = 0;
+
+ // Stops capturing.
+ // The delegate may not call into AndroidVideoCapturer after this call.
+ virtual void Stop() = 0;
+
+ virtual std::vector<cricket::VideoFormat> GetSupportedFormats() = 0;
+};
+
+// Android implementation of cricket::VideoCapturer for use with WebRtc
+// PeerConnection.
+class AndroidVideoCapturer : public cricket::VideoCapturer {
+ public:
+ explicit AndroidVideoCapturer(
+ const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate);
+ virtual ~AndroidVideoCapturer();
+
+ // Called from JNI when the capturer has been started.
+ void OnCapturerStarted(bool success);
+
+ // Called from JNI to request a new video format.
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); }
+
+ // cricket::VideoCapturer implementation.
+ bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+ cricket::VideoFormat* best_format) override;
+
+ // Expose these protected methods as public, to be used by the
+ // AndroidVideoCapturerJni.
+ using VideoCapturer::AdaptFrame;
+ using VideoCapturer::OnFrame;
+
+ private:
+ // cricket::VideoCapturer implementation.
+ // Video frames will be delivered using
+ // cricket::VideoCapturer::SignalFrameCaptured on the thread that calls Start.
+ cricket::CaptureState Start(
+ const cricket::VideoFormat& capture_format) override;
+ void Stop() override;
+ bool IsRunning() override;
+ bool IsScreencast() const override { return false; }
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+
+ bool running_;
+ rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+
+ rtc::ThreadChecker thread_checker_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
diff --git a/webrtc/api/api.gyp b/webrtc/api/api.gyp
index c781ec1..f9f846b 100644
--- a/webrtc/api/api.gyp
+++ b/webrtc/api/api.gyp
@@ -43,6 +43,8 @@
'android/jni/androidmetrics_jni.cc',
'android/jni/androidnetworkmonitor_jni.cc',
'android/jni/androidnetworkmonitor_jni.h',
+ 'android/jni/androidvideocapturer_jni.cc',
+ 'android/jni/androidvideocapturer_jni.h',
'android/jni/androidvideotracksource_jni.cc',
'android/jni/classreferenceholder.cc',
'android/jni/classreferenceholder.h',
@@ -53,6 +55,8 @@
'android/jni/peerconnection_jni.cc',
'android/jni/surfacetexturehelper_jni.cc',
'android/jni/surfacetexturehelper_jni.h',
+ 'androidvideocapturer.cc',
+ 'androidvideocapturer.h',
'androidvideotracksource.cc',
'androidvideotracksource.h',
],