Implement NativeToI420Buffer in C++ by calling a new Java method,
SurfaceTextureHelper.textureToYUV, which does the conversion using an
OpenGL fragment shader.
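
The fragment shader samples the OES texture at four adjacent x
positions and packs the results into the RGBA channels of each output
pixel, so that a glReadPixels of the RGBA pbuffer yields the Y, U and V
planes directly. The conversion uses the coefficients set in the shader:

  Y =  0.299 R + 0.587 G + 0.114 B
  U = -0.169 R - 0.331 G + 0.499 B + 0.5
  V =  0.499 R - 0.418 G - 0.0813 B + 0.5

with R, G, B and the results in the range [0, 1].
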
BUG=webrtc:4993
Review URL: https://codereview.webrtc.org/1460703002
Cr-Commit-Position: refs/heads/master@{#10972}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
index 60832ad..1a6731b 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -97,6 +97,14 @@
}
}
+ /** Assert that two integers are close, with difference at most
+ * {@code threshold}. */
+ public static void assertClose(int threshold, int expected, int actual) {
+ if (Math.abs(expected - actual) <= threshold)
+ return;
+ failNotEquals("Not close enough, threshold " + threshold, expected, actual);
+ }
+
/**
* Test normal use by receiving three uniform texture frames. Texture frames are returned as early
* as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
@@ -351,4 +359,81 @@
surfaceTextureHelper.returnTextureFrame();
}
+
+ @MediumTest
+ public static void testTextureToYUV() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.setListener(listener);
+ surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
+ // |surfaceTextureHelper| as the target EGLSurface.
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(width, eglBase.surfaceWidth());
+ assertEquals(height, eglBase.surfaceHeight());
+
+ final int[] red = new int[] {79, 144, 185};
+ final int[] green = new int[] {66, 210, 162};
+ final int[] blue = new int[] {161, 117, 158};
+
+ final int[] ref_y = new int[] {81, 180, 168};
+ final int[] ref_u = new int[] {173, 93, 122};
+ final int[] ref_v = new int[] {127, 103, 140};
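+
+ // The reference values follow from the coefficients used by the
+ // fragment shader (see YuvConverter in SurfaceTextureHelper.java);
+ // the shader's 0.5 offset for U and V corresponds to 128 here.
+ // For the first frame:
+ //   Y = 0.299 * 79 + 0.587 * 66 + 0.114 * 161 = 80.7 ~ 81
+ //   U = -0.169 * 79 - 0.331 * 66 + 0.499 * 161 + 128 = 173.1 ~ 173
+ //   V = 0.499 * 79 - 0.418 * 66 - 0.0813 * 161 + 128 = 126.7 ~ 127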
+
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+
+ // Wait for an OES texture to arrive.
+ listener.waitForNewFrame();
+
+ // Memory layout: Lines are 16 bytes. First 16 lines are
+ // the Y data. These are followed by 8 lines with 8 bytes of U
+ // data on the left and 8 bytes of V data on the right.
+ //
+ // Offset
+ // 0 YYYYYYYY YYYYYYYY
+ // 16 YYYYYYYY YYYYYYYY
+ // ...
+ // 240 YYYYYYYY YYYYYYYY
+ // 256 UUUUUUUU VVVVVVVV
+ // 272 UUUUUUUU VVVVVVVV
+ // ...
+ // 368 UUUUUUUU VVVVVVVV
+ // 384 buffer end
+ ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
+ surfaceTextureHelper.textureToYUV(buffer, width, height, width,
+ listener.oesTextureId, listener.transformMatrix);
+
+ surfaceTextureHelper.returnTextureFrame();
+
+ // Allow off-by-one differences due to different rounding.
+ while (buffer.position() < width * height) {
+ assertClose(1, ref_y[i], buffer.get() & 0xff);
+ }
+ while (buffer.hasRemaining()) {
+ if (buffer.position() % width < width / 2) {
+ assertClose(1, ref_u[i], buffer.get() & 0xff);
+ } else {
+ assertClose(1, ref_v[i], buffer.get() & 0xff);
+ }
+ }
+ }
+
+ surfaceTextureHelper.disconnect();
+ eglBase.release();
+ }
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
index e44443b..c45aa29 100644
--- a/talk/app/webrtc/java/android/org/webrtc/EglBase.java
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -87,6 +87,15 @@
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_NONE
};
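+ // An RGBA pbuffer config, used by SurfaceTextureHelper's YuvConverter.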
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
public static final int[] CONFIG_RECORDABLE = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
index c730a41..39c3e6f 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -35,6 +35,8 @@
import android.os.HandlerThread;
import android.os.SystemClock;
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -93,11 +95,225 @@
});
}
+ // State for YUV conversion, instantiated on demand.
+ private static class YuvConverter {
+ private final EglBase eglBase;
+ private final GlShader shader;
+ private boolean released = false;
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e.
+ // (-1, -1) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer DEVICE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer TEXTURE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
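+ // The fragment shader samples the OES texture at four adjacent x
+ // positions and packs the results into the RGBA channels of the
+ // output pixel. Each pixel of the render target thus holds four
+ // consecutive bytes of one Y, U or V row; see convert() below for
+ // the resulting memory layout.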
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oesTex;\n"
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ + "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources. TODO(nisse): Could also
+ // try to do it as a vec3 x mat3x4, followed by adding in a
+ // constant vector.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private int texMatrixLoc;
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ YuvConverter(EglBase.Context sharedContext) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+ shader.useProgram();
+ texMatrixLoc = shader.getUniformLocation("texMatrix");
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+ // If the width is not a multiple of 4 pixels, the texture
+ // will be scaled up slightly and clipped at the right border.
+ shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void convert(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (released) {
+ throw new IllegalStateException(
+ "YuvConverter.convert called on released object");
+ }
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset |height| * |stride| from the Y data,
+ // and the V data starts at offset |stride/2| from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+ // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: Allocate an
+ // RGBA buffer, of width |stride|/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that |stride| is even; it
+ // has to be a multiple of 8 pixels.
+
+ if (stride % 8 != 0) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be a multiple of 8");
+ }
+ if (stride < width) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be >= width");
+ }
+
+ int y_width = (width + 3) / 4;
+ int uv_width = (width + 7) / 8;
+ int uv_height = (height + 1) / 2;
+ int total_height = height + uv_height;
+ int size = stride * total_height;
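+ // E.g., for the 16x16 test frame with stride 16: y_width = 4,
+ // uv_width = 2, uv_height = 8, total_height = 24, and
+ // size = 384 = 16 * 16 * 3 / 2 bytes.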
+
+ if (buf.capacity() < size) {
+ throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+ }
+ // Produce a frame buffer starting at top-left corner, not
+ // bottom-left.
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix,
+ RendererCommon.verticalFlipMatrix());
+
+ // Create a new pbuffer surface with the correct size if needed.
+ if (eglBase.hasSurface()) {
+ if (eglBase.surfaceWidth() != stride/4 ||
+ eglBase.surfaceHeight() != total_height) {
+ eglBase.releaseSurface();
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+ } else {
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+
+ eglBase.makeCurrent();
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+ // Draw Y
+ GLES20.glViewport(0, 0, y_width, height);
+ // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / width,
+ transformMatrix[1] / width);
+ // Y'UV444 to RGB888, see
+ // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+ // We use the ITU-R coefficients for U and V.
+ GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw U
+ GLES20.glViewport(0, height, uv_width, uv_height);
+ // Matrix * (1;0;0;0) / (2*width). Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / (2.0f*width),
+ transformMatrix[1] / (2.0f*width));
+ GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw V
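+ // The xUnit value set for the U pass above is reused; U and V have
+ // the same horizontal subsampling.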
+ GLES20.glViewport(stride/8, height, uv_width, uv_height);
+ GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, buf);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Unbind texture. Reportedly needed on some devices to get
+ // the texture updated from the camera.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void release() {
+ released = true;
+ eglBase.makeCurrent();
+ shader.release();
+ eglBase.release();
+ }
+ }
+
private final Handler handler;
private boolean isOwningThread;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
+ private volatile YuvConverter yuvConverter;
+
private OnTextureFrameAvailableListener listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
@@ -120,6 +336,18 @@
surfaceTexture = new SurfaceTexture(oesTextureId);
}
+ private YuvConverter getYuvConverter() {
+ // yuvConverter is assigned once; the field is volatile so that this
+ // unsynchronized read is safe (double-checked locking).
+ if (yuvConverter != null)
+ return yuvConverter;
+
+ synchronized (this) {
+ if (yuvConverter == null)
+ yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+ return yuvConverter;
+ }
+ }
+
/**
* Start to stream textures to the given |listener|.
* A Listener can only be set once.
@@ -207,6 +435,14 @@
disconnect();
}
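+ /**
+ * Convert the texture frame identified by |textureId| to YUV data in
+ * |buf|, laid out as described in YuvConverter.convert. The conversion
+ * runs synchronously on the caller's thread.
+ */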
+ public void textureToYUV(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (textureId != oesTextureId)
+ throw new IllegalStateException("textureToYUV called with unexpected textureId");
+
+ getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+ }
+
private void tryDeliverTextureFrame() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
@@ -235,6 +471,10 @@
if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release.");
}
+ synchronized (this) {
+ if (yuvConverter != null)
+ yuvConverter.release();
+ }
eglBase.makeCurrent();
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
surfaceTexture.release();
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
index 2bfa978..9a3b5ca 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -240,7 +240,8 @@
final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
sharedEglContext);
- capturer.setNativeCapturer(nativeCreateVideoCapturer(capturer));
+ capturer.setNativeCapturer(
+ nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
return capturer;
}
@@ -944,5 +945,7 @@
int width, int height, int framerate);
}
- private static native long nativeCreateVideoCapturer(VideoCapturerAndroid videoCapturer);
+ private static native long nativeCreateVideoCapturer(
+ VideoCapturerAndroid videoCapturer,
+ SurfaceTextureHelper surfaceHelper);
}
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 054719a..9b3053c 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -47,9 +47,12 @@
return 0;
}
-AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
- jobject j_video_capturer)
- : j_capturer_global_(jni, j_video_capturer),
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+ JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper)
+ : j_video_capturer_(jni, j_video_capturer),
+ j_surface_texture_helper_(jni, j_surface_texture_helper),
j_video_capturer_class_(
jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
j_observer_class_(
@@ -64,7 +67,7 @@
AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
jni()->CallVoidMethod(
- *j_capturer_global_,
+ *j_video_capturer_,
GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
}
@@ -90,7 +93,7 @@
jni(), *j_video_capturer_class_, "startCapture",
"(IIILandroid/content/Context;"
"Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
- jni()->CallVoidMethod(*j_capturer_global_,
+ jni()->CallVoidMethod(*j_video_capturer_,
m, width, height,
framerate,
application_context_,
@@ -109,7 +112,7 @@
}
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"stopCapture", "()V");
- jni()->CallVoidMethod(*j_capturer_global_, m);
+ jni()->CallVoidMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
}
@@ -130,7 +133,7 @@
void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"returnBuffer", "(J)V");
- jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
+ jni()->CallVoidMethod(*j_video_capturer_, m, time_stamp);
CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
}
@@ -139,7 +142,7 @@
GetMethodID(jni(), *j_video_capturer_class_,
"getSupportedFormatsAsJson", "()Ljava/lang/String;");
jstring j_json_caps =
- (jstring) jni()->CallObjectMethod(*j_capturer_global_, m);
+ (jstring) jni()->CallObjectMethod(*j_video_capturer_, m);
CHECK_EXCEPTION(jni()) << "error during supportedFormatsAsJson";
return JavaToStdString(jni(), j_json_caps);
}
@@ -186,7 +189,7 @@
const NativeHandleImpl& handle) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, handle,
+ width, height, handle, *j_surface_texture_helper_,
rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
timestamp_ns)));
AsyncCapturerInvoke("OnIncomingFrame",
@@ -248,9 +251,11 @@
}
JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
- (JNIEnv* jni, jclass, jobject j_video_capturer) {
+ (JNIEnv* jni, jclass,
+ jobject j_video_capturer, jobject j_surface_texture_helper) {
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_surface_texture_helper);
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
// Caller takes ownership of the cricket::VideoCapturer* pointer.
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index 96def5e..4c0b48c 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -48,7 +48,9 @@
public:
static int SetAndroidObjects(JNIEnv* jni, jobject appliction_context);
- AndroidVideoCapturerJni(JNIEnv* jni, jobject j_video_capturer);
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_surface_texture_helper);
void Start(int width, int height, int framerate,
webrtc::AndroidVideoCapturer* capturer) override;
@@ -85,7 +87,8 @@
void (webrtc::AndroidVideoCapturer::*method)(Args...),
typename Identity<Args>::type... args);
- const ScopedGlobalRef<jobject> j_capturer_global_;
+ const ScopedGlobalRef<jobject> j_video_capturer_;
+ const ScopedGlobalRef<jobject> j_surface_texture_helper_;
const ScopedGlobalRef<jclass> j_video_capturer_class_;
const ScopedGlobalRef<jclass> j_observer_class_;
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index 583a038..f589447 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -27,18 +27,24 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
using webrtc::NativeHandleBuffer;
namespace webrtc_jni {
+// Align the buffer to 64 bytes for improved performance, e.g. for SIMD use.
+static const int kBufferAlignment = 64;
+
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
- : oes_texture_id(j_oes_texture_id) {
+ : oes_texture_id(j_oes_texture_id) {
RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
jfloat* transform_matrix_ptr =
jni->GetFloatArrayElements(j_transform_matrix, nullptr);
@@ -52,9 +58,11 @@
int width,
int height,
const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle),
+ surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
@@ -63,9 +71,53 @@
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
AndroidTextureBuffer::NativeToI420Buffer() {
- RTC_NOTREACHED()
- << "AndroidTextureBuffer::NativeToI420Buffer not implemented.";
- return nullptr;
+ int uv_width = (width() + 7) / 8;
+ int stride = 8 * uv_width;
+ int uv_height = (height() + 1) / 2;
+ size_t size = stride * (height() + uv_height);
+ // The data is owned by the frame, and the normal case is that the
+ // data is deleted by the frame's destructor callback.
+ //
+ // TODO(nisse): Use an I420BufferPool. We then need to extend that
+ // class, and I420Buffer, to support our memory layout.
+ rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+ // See SurfaceTextureHelper.java for the required layout.
+ uint8_t* y_data = yuv_data.get();
+ uint8_t* u_data = y_data + height() * stride;
+ uint8_t* v_data = u_data + stride/2;
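+ // Each row of |stride| bytes below the Y plane holds one row of U
+ // data in its first half and one row of V data in its second half.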
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+ new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+ width(), height(),
+ y_data, stride,
+ u_data, stride,
+ v_data, stride,
+ rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ jmethodID transform_mid = GetMethodID(
+ jni,
+ GetObjectClass(jni, surface_texture_helper_),
+ "textureToYUV",
+ "(Ljava/nio/ByteBuffer;IIII[F)V");
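+ // The signature corresponds to the Java method
+ // textureToYUV(ByteBuffer buf, int width, int height, int stride,
+ // int textureId, float[] transformMatrix).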
+
+ jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+ // TODO(nisse): Keep java transform matrix around.
+ jfloatArray sampling_matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+ native_handle_.sampling_matrix);
+
+ jni->CallVoidMethod(surface_texture_helper_,
+ transform_mid,
+ byte_buffer, width(), height(), stride,
+ native_handle_.oes_texture_id, sampling_matrix);
+ CHECK_EXCEPTION(jni) << "textureToYUV threw an exception";
+
+ return copy;
}
rtc::scoped_refptr<AndroidTextureBuffer> AndroidTextureBuffer::CropAndScale(
@@ -82,7 +134,7 @@
// called that happens and when it finishes, the reference count to |this|
// will be decreased by one.
return new rtc::RefCountedObject<AndroidTextureBuffer>(
- dst_widht, dst_height, native_handle_,
+ dst_widht, dst_height, native_handle_, surface_texture_helper_,
rtc::KeepRefUntilDone(this));
}
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 911a3c4..2026486 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -50,6 +50,7 @@
AndroidTextureBuffer(int width,
int height,
const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
@@ -62,6 +63,12 @@
private:
NativeHandleImpl native_handle_;
+ // Raw object pointer, relying on the caller, i.e.,
+ // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+ // a global reference. TODO(nisse): Make this a reference to the C++
+ // SurfaceTextureHelper instead, but that requires some refactoring
+ // of AndroidVideoCapturerJni.
+ jobject surface_texture_helper_;
rtc::Callback0<void> no_longer_used_cb_;
};
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index f22d604..c5de965 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -1934,6 +1934,7 @@
// Since we can't create platform specific java implementations in Java, we
// defer the creation to C land.
#if defined(ANDROID)
+ // TODO(nisse): This case is intended to be deleted.
jclass j_video_capturer_class(
FindClass(jni, "org/webrtc/VideoCapturerAndroid"));
const int camera_id = jni->CallStaticIntMethod(
@@ -1948,8 +1949,13 @@
j_video_capturer_class,
GetMethodID(jni, j_video_capturer_class, "<init>", "(I)V"), camera_id);
CHECK_EXCEPTION(jni) << "error during creation of VideoCapturerAndroid";
+ jfieldID helper_fid = GetFieldID(jni, j_video_capturer_class, "surfaceHelper",
+ "Lorg/webrtc/SurfaceTextureHelper;");
+
rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
- new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer);
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer,
+ GetObjectField(jni, j_video_capturer, helper_fid));
rtc::scoped_ptr<cricket::VideoCapturer> capturer(
new webrtc::AndroidVideoCapturer(delegate));
diff --git a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
index 8fd42a0..8b5742f 100644
--- a/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
+++ b/talk/app/webrtc/java/jni/surfacetexturehelper_jni.cc
@@ -72,7 +72,7 @@
SurfaceTextureHelper::CreateTextureFrame(int width, int height,
const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
- width, height, native_handle,
+ width, height, native_handle, *j_surface_texture_helper_,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
}