Android VideoCapturer: Send ByteBuffer instead of byte[] The purpose of this CL is to replace GetByteArrayElements() and ReleaseByteArrayElements() with GetDirectBufferAddress(). R=hbos@webrtc.org Review URL: https://codereview.webrtc.org/1372813002 . Cr-Commit-Position: refs/heads/master@{#10091}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java index 0fe827d..73ed381 100644 --- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java +++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -35,6 +35,7 @@ import org.webrtc.CameraEnumerationAndroid.CaptureFormat; import org.webrtc.VideoRenderer.I420Frame; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -104,11 +105,11 @@ } @Override - public void OnFrameCaptured(byte[] frame, int length, int width, int height, + public void OnFrameCaptured(ByteBuffer frame, int width, int height, int rotation, long timeStamp) { synchronized (frameLock) { ++framesCaptured; - frameSize = length; + frameSize = frame.capacity(); timestamps.add(timeStamp); frameLock.notify(); }
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java index ee01eed..72b62c3 100644 --- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java +++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -569,12 +569,11 @@ } rotation = (info.orientation + rotation) % 360; // Mark the frame owning |data| as used. - // Note that since data is directBuffer, - // data.length >= videoBuffers.frameSize. - if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) { + final ByteBuffer buffer = videoBuffers.reserveByteBuffer(data, captureTimeNs); + if (buffer != null) { cameraFramesCount++; - frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat.width, - captureFormat.height, rotation, captureTimeNs); + frameObserver.OnFrameCaptured(buffer, captureFormat.width, captureFormat.height, + rotation, captureTimeNs); } else { Logging.w(TAG, "reserveByteBuffer failed - dropping frame."); } @@ -656,7 +655,8 @@ : " Pending buffers: " + pendingFramesTimeStamps() + ".")); } - public boolean reserveByteBuffer(byte[] data, long timeStamp) { + // Returns the reserved byte buffer, or null on failure. + public ByteBuffer reserveByteBuffer(byte[] data, long timeStamp) { checkIsOnValidThread(); final ByteBuffer buffer = queuedBuffers.remove(data); if (buffer == null) { @@ -664,21 +664,21 @@ // capture format in |startPreviewOnCameraThread|. Drop these old frames. Logging.w(TAG, "Received callback buffer from previous configuration with length: " + (data == null ? "null" : data.length)); - return false; + return null; } if (buffer.capacity() != frameSize) { throw new IllegalStateException("Callback buffer has unexpected frame size"); } if (pendingBuffers.containsKey(timeStamp)) { Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique"); - return false; + return null; } pendingBuffers.put(timeStamp, buffer); if (queuedBuffers.isEmpty()) { Logging.v(TAG, "Camera is running out of capture buffers." + " Pending buffers: " + pendingFramesTimeStamps()); } - return true; + return buffer; } public void returnBuffer(long timeStamp) { @@ -722,8 +722,8 @@ // Delivers a captured frame. Called on a Java thread owned by // VideoCapturerAndroid. 
- abstract void OnFrameCaptured(byte[] data, int length, int width, int height, - int rotation, long timeStamp); + abstract void OnFrameCaptured(ByteBuffer buffer, int width, int height, int rotation, + long timeStamp); // Requests an output format from the video capturer. Captured frames // by the camera will be scaled/or dropped by the video capturer. @@ -746,9 +746,9 @@ } @Override - public void OnFrameCaptured(byte[] data, int length, int width, int height, - int rotation, long timeStamp) { - nativeOnFrameCaptured(nativeCapturer, data, length, width, height, rotation, timeStamp); + public void OnFrameCaptured(ByteBuffer buffer, int width, int height, int rotation, + long timeStamp) { + nativeOnFrameCaptured(nativeCapturer, buffer, width, height, rotation, timeStamp); } @Override @@ -759,7 +759,7 @@ private native void nativeCapturerStarted(long nativeCapturer, boolean success); private native void nativeOnFrameCaptured(long nativeCapturer, - byte[] data, int length, int width, int height, int rotation, long timeStamp); + ByteBuffer buffer, int width, int height, int rotation, long timeStamp); private native void nativeOnOutputFormatRequest(long nativeCapturer, int width, int height, int fps); }
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc index 74a9372..eae87bd 100644 --- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc +++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -176,13 +176,13 @@ success); } -void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame, +void AndroidVideoCapturerJni::OnIncomingFrame(const uint8_t* video_frame, int length, int width, int height, int rotation, int64 time_stamp) { - const uint8_t* y_plane = static_cast<uint8_t*>(video_frame); + const uint8_t* y_plane = video_frame; // Android guarantees that the stride is a multiple of 16. // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29 int y_stride; @@ -215,20 +215,14 @@ JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); } JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnFrameCaptured) - (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length, + (JNIEnv* jni, jclass, jlong j_capturer, jobject j_byte_buffer, jint width, jint height, jint rotation, jlong ts) { - jboolean is_copy = true; - jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy); - // If this is a copy of the original frame, it means that the memory - // is not direct memory and thus VideoCapturerAndroid does not guarantee - // that the memory is valid when we have released |j_frame|. - // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and - // remove this check. - RTC_CHECK(!is_copy) - << "NativeObserver_nativeOnFrameCaptured: frame is a copy"; + const uint8_t* bytes = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_byte_buffer)); + const int length = jni->GetDirectBufferCapacity(j_byte_buffer); + RTC_CHECK(bytes != nullptr && length != -1) << "ByteBuffer is not direct"; reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer) ->OnIncomingFrame(bytes, length, width, height, rotation, ts); - jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); } JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h index c270439..0c2b5fd 100644 --- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h +++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -61,7 +61,7 @@ // Called from VideoCapturerAndroid::NativeObserver on a Java thread. void OnCapturerStarted(bool success); - void OnIncomingFrame(void* video_frame, + void OnIncomingFrame(const uint8_t* video_frame, int length, int width, int height,