Add a field trial to produce VideoFrames in camera capturers.

The field trial enables the camera capturer classes to deliver captured
frames as VideoFrame objects instead of via the legacy byte-buffer and
texture callbacks. The trial should be enabled whenever VideoSinks are
used to consume the frames.
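
For example, an application can enable the trial before creating the
factory, mirroring the initializeFieldTrials() calls added to the
instrumentation tests below:

  PeerConnectionFactory.initializeFieldTrials(
      PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
      + PeerConnectionFactory.TRIAL_ENABLED + "/");  // "VideoFrameEmit/Enabled/"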

BUG=webrtc:7749, webrtc:7760

Review-Url: https://codereview.webrtc.org/2984633002
Cr-Original-Commit-Position: refs/heads/master@{#19467}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 91073ae78fb9d49f302ffb8a3142cf67a55e8118
diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 02dac15..28e7c7a 100644
--- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -26,8 +26,8 @@
 import java.util.List;
 import java.util.Timer;
 import java.util.TimerTask;
-import java.util.concurrent.Executors;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.appspot.apprtc.AppRTCClient.SignalingParameters;
@@ -56,9 +56,9 @@
 import org.webrtc.VideoTrack;
 import org.webrtc.voiceengine.WebRtcAudioManager;
 import org.webrtc.voiceengine.WebRtcAudioRecord;
-import org.webrtc.voiceengine.WebRtcAudioTrack;
 import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
 import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
+import org.webrtc.voiceengine.WebRtcAudioTrack;
 import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
 import org.webrtc.voiceengine.WebRtcAudioUtils;
 
@@ -89,6 +89,9 @@
       "WebRTC-H264HighProfile/Enabled/";
   private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
       "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
+  private static final String VIDEO_FRAME_EMIT_FIELDTRIAL =
+      PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/" + PeerConnectionFactory.TRIAL_ENABLED
+      + "/";
   private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
   private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
   private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
@@ -398,6 +401,7 @@
       fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
       Log.d(TAG, "Disable WebRTC AGC field trial.");
     }
+    fieldTrials += VIDEO_FRAME_EMIT_FIELDTRIAL;
 
     // Check preferred video codec.
     preferredVideoCodec = VIDEO_CODEC_VP8;
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 48ed324..b79e414 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -98,6 +98,7 @@
     "src/jni/native_handle_impl.cc",
     "src/jni/native_handle_impl.h",
     "src/jni/nv12buffer_jni.cc",
+    "src/jni/nv21buffer_jni.cc",
     "src/jni/pc/video_jni.cc",
     "src/jni/surfacetexturehelper_jni.cc",
     "src/jni/surfacetexturehelper_jni.h",
@@ -438,6 +439,7 @@
     "src/java/org/webrtc/I420BufferImpl.java",
     "src/java/org/webrtc/MediaCodecUtils.java",
     "src/java/org/webrtc/NV12Buffer.java",
+    "src/java/org/webrtc/NV21Buffer.java",
     "src/java/org/webrtc/TextureBufferImpl.java",
     "src/java/org/webrtc/VideoCodecType.java",
     "src/java/org/webrtc/VideoDecoderWrapperCallback.java",
diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index 34dbe3a..59ee91c 100644
--- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -29,6 +29,9 @@
     }
   }
 
+  public static final String TRIAL_ENABLED = "Enabled";
+  public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
+
   private static final String TAG = "PeerConnectionFactory";
   private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
   private final long nativeFactory;
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
index 3ab0554..cb8ad5f 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
@@ -59,6 +59,9 @@
 
   @Before
   public void setUp() {
+    // Enable VideoFrame capture.
+    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+        + PeerConnectionFactory.TRIAL_ENABLED + "/");
     fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
   }
 
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
index 10fcad2..fd74ee5 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
@@ -54,6 +54,9 @@
 
   @Before
   public void setUp() {
+    // Enable VideoFrame capture.
+    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+        + PeerConnectionFactory.TRIAL_ENABLED + "/");
     fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
   }
 
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
index 6b6b18d..a256294 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
@@ -183,6 +183,9 @@
 
   @Before
   public void setUp() {
+    // Enable VideoFrame capture.
+    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+        + PeerConnectionFactory.TRIAL_ENABLED + "/");
     fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
   }
 
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
index d9af85b..d7130b8 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -100,9 +100,7 @@
 
   static private class FakeCapturerObserver implements CameraVideoCapturer.CapturerObserver {
     private int framesCaptured = 0;
-    private int frameSize = 0;
-    private int frameWidth = 0;
-    private int frameHeight = 0;
+    private VideoFrame videoFrame;
     final private Object frameLock = new Object();
     final private Object capturerStartLock = new Object();
     private boolean capturerStartResult = false;
@@ -126,32 +124,27 @@
     @Override
     public void onByteBufferFrameCaptured(
         byte[] frame, int width, int height, int rotation, long timeStamp) {
-      synchronized (frameLock) {
-        ++framesCaptured;
-        frameSize = frame.length;
-        frameWidth = width;
-        frameHeight = height;
-        timestamps.add(timeStamp);
-        frameLock.notify();
-      }
+      throw new RuntimeException("onByteBufferFrameCaptured called");
     }
 
     @Override
     public void onTextureFrameCaptured(int width, int height, int oesTextureId,
         float[] transformMatrix, int rotation, long timeStamp) {
-      synchronized (frameLock) {
-        ++framesCaptured;
-        frameWidth = width;
-        frameHeight = height;
-        frameSize = 0;
-        timestamps.add(timeStamp);
-        frameLock.notify();
-      }
+      throw new RuntimeException("onTextureFrameCaptured called");
     }
 
     @Override
     public void onFrameCaptured(VideoFrame frame) {
-      // Empty on purpose.
+      synchronized (frameLock) {
+        ++framesCaptured;
+        if (videoFrame != null) {
+          videoFrame.release();
+        }
+        videoFrame = frame;
+        videoFrame.retain();
+        timestamps.add(videoFrame.getTimestampNs());
+        frameLock.notify();
+      }
     }
 
     public boolean waitForCapturerToStart() throws InterruptedException {
@@ -170,21 +163,24 @@
       }
     }
 
-    int frameSize() {
-      synchronized (frameLock) {
-        return frameSize;
-      }
-    }
-
     int frameWidth() {
       synchronized (frameLock) {
-        return frameWidth;
+        return videoFrame.getBuffer().getWidth();
       }
     }
 
     int frameHeight() {
       synchronized (frameLock) {
-        return frameHeight;
+        return videoFrame.getBuffer().getHeight();
+      }
+    }
+
+    void releaseFrame() {
+      synchronized (frameLock) {
+        if (videoFrame != null) {
+          videoFrame.release();
+          videoFrame = null;
+        }
       }
     }
 
@@ -385,7 +381,7 @@
     instance.capturer.stopCapture();
     instance.cameraEvents.waitForCameraClosed();
     instance.capturer.dispose();
-    instance.surfaceTextureHelper.returnTextureFrame();
+    instance.observer.releaseFrame();
     instance.surfaceTextureHelper.dispose();
   }
 
@@ -637,7 +633,7 @@
     // Make sure camera is started and then stop it.
     assertTrue(capturerInstance.observer.waitForCapturerToStart());
     capturerInstance.capturer.stopCapture();
-    capturerInstance.surfaceTextureHelper.returnTextureFrame();
+    capturerInstance.observer.releaseFrame();
 
     // We can't change |capturer| at this point, but we should not crash.
     capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
@@ -687,13 +683,8 @@
             + capturerInstance.format.height + "x" + capturerInstance.format.width);
       }
 
-      if (testObjectFactory.isCapturingToTexture()) {
-        assertEquals(0, capturerInstance.observer.frameSize());
-      } else {
-        assertTrue(capturerInstance.format.frameSize() <= capturerInstance.observer.frameSize());
-      }
       capturerInstance.capturer.stopCapture();
-      capturerInstance.surfaceTextureHelper.returnTextureFrame();
+      capturerInstance.observer.releaseFrame();
     }
     disposeCapturer(capturerInstance);
   }
@@ -710,7 +701,7 @@
 
     startCapture(capturerInstance, 1);
     capturerInstance.observer.waitForCapturerToStart();
-    capturerInstance.surfaceTextureHelper.returnTextureFrame();
+    capturerInstance.observer.releaseFrame();
 
     capturerInstance.observer.waitForNextCapturedFrame();
     capturerInstance.capturer.stopCapture();
diff --git a/sdk/android/src/java/org/webrtc/Camera1Session.java b/sdk/android/src/java/org/webrtc/Camera1Session.java
index d195069..61a51c9 100644
--- a/sdk/android/src/java/org/webrtc/Camera1Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -36,6 +36,8 @@
 
   private static enum SessionState { RUNNING, STOPPED }
 
+  private final boolean videoFrameEmitTrialEnabled;
+
   private final Handler cameraThreadHandler;
   private final Events events;
   private final boolean captureToTexture;
@@ -85,7 +87,6 @@
 
     updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
 
-    // Initialize the capture buffers.
     if (!captureToTexture) {
       final int frameSize = captureFormat.frameSize();
       for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
@@ -151,6 +152,9 @@
       android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
       CaptureFormat captureFormat, long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+    videoFrameEmitTrialEnabled =
+        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
+            .equals(PeerConnectionFactory.TRIAL_ENABLED);
 
     this.cameraThreadHandler = new Handler();
     this.events = events;
@@ -267,8 +271,17 @@
           transformMatrix = RendererCommon.multiplyMatrices(
               transformMatrix, RendererCommon.horizontalFlipMatrix());
         }
-        events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
-            captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+        if (videoFrameEmitTrialEnabled) {
+          final VideoFrame.Buffer buffer =
+              surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
+                  RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+          final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+          events.onFrameCaptured(Camera1Session.this, frame);
+          frame.release();
+        } else {
+          events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
+              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+        }
       }
     });
   }
@@ -276,7 +289,7 @@
   private void listenForBytebufferFrames() {
     camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
       @Override
-      public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
+      public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
         checkIsOnCameraThread();
 
         if (callbackCamera != camera) {
@@ -298,9 +311,22 @@
           firstFrameReported = true;
         }
 
-        events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
-            captureFormat.height, getFrameOrientation(), captureTimeNs);
-        camera.addCallbackBuffer(data);
+        if (videoFrameEmitTrialEnabled) {
+          VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
+              captureFormat.height, () -> cameraThreadHandler.post(() -> {
+                if (state == SessionState.RUNNING) {
+                  camera.addCallbackBuffer(data);
+                }
+              }));
+          final VideoFrame frame =
+              new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+          events.onFrameCaptured(Camera1Session.this, frame);
+          frame.release();
+        } else {
+          events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
+              captureFormat.height, getFrameOrientation(), captureTimeNs);
+          camera.addCallbackBuffer(data);
+        }
       }
     });
   }
diff --git a/sdk/android/src/java/org/webrtc/Camera2Session.java b/sdk/android/src/java/org/webrtc/Camera2Session.java
index 8de5be0..b74aaa0 100644
--- a/sdk/android/src/java/org/webrtc/Camera2Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -12,6 +12,7 @@
 
 import android.annotation.TargetApi;
 import android.content.Context;
+import android.graphics.Matrix;
 import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
@@ -44,6 +45,8 @@
 
   private static enum SessionState { RUNNING, STOPPED }
 
+  private final boolean videoFrameEmitTrialEnabled;
+
   private final Handler cameraThreadHandler;
   private final CreateSessionCallback callback;
   private final Events events;
@@ -225,8 +228,17 @@
               transformMatrix =
                   RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
 
-              events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
-                  captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+              if (videoFrameEmitTrialEnabled) {
+                VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
+                    captureFormat.width, captureFormat.height,
+                    RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+                final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+                events.onFrameCaptured(Camera2Session.this, frame);
+                frame.release();
+              } else {
+                events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
+                    captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+              }
             }
           });
       Logging.d(TAG, "Camera device successfully started.");
@@ -301,6 +313,9 @@
       CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
       MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+    videoFrameEmitTrialEnabled =
+        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
+            .equals(PeerConnectionFactory.TRIAL_ENABLED);
 
     constructionTimeNs = System.nanoTime();
 
diff --git a/sdk/android/src/java/org/webrtc/CameraCapturer.java b/sdk/android/src/java/org/webrtc/CameraCapturer.java
index 859b7f7..3b442fb 100644
--- a/sdk/android/src/java/org/webrtc/CameraCapturer.java
+++ b/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -177,6 +177,23 @@
     }
 
     @Override
+    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+      checkIsOnCameraThread();
+      synchronized (stateLock) {
+        if (session != currentSession) {
+          Logging.w(TAG, "onFrameCaptured from another session.");
+          return;
+        }
+        if (!firstFrameObserved) {
+          eventsHandler.onFirstFrameAvailable();
+          firstFrameObserved = true;
+        }
+        cameraStatistics.addFrame();
+        capturerObserver.onFrameCaptured(frame);
+      }
+    }
+
+    @Override
     public void onByteBufferFrameCaptured(
         CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
       checkIsOnCameraThread();
diff --git a/sdk/android/src/java/org/webrtc/CameraSession.java b/sdk/android/src/java/org/webrtc/CameraSession.java
index 85be38e..a560378 100644
--- a/sdk/android/src/java/org/webrtc/CameraSession.java
+++ b/sdk/android/src/java/org/webrtc/CameraSession.java
@@ -25,6 +25,9 @@
     void onCameraError(CameraSession session, String error);
     void onCameraDisconnected(CameraSession session);
     void onCameraClosed(CameraSession session);
+    void onFrameCaptured(CameraSession session, VideoFrame frame);
+
+    // The old way of passing frames. Will be removed eventually.
     void onByteBufferFrameCaptured(
         CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
     void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
diff --git a/sdk/android/src/java/org/webrtc/NV21Buffer.java b/sdk/android/src/java/org/webrtc/NV21Buffer.java
new file mode 100644
index 0000000..defcc1b
--- /dev/null
+++ b/sdk/android/src/java/org/webrtc/NV21Buffer.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.  All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+public class NV21Buffer implements VideoFrame.Buffer {
+  private final byte[] data;
+  private final int width;
+  private final int height;
+  private final Runnable releaseCallback;
+  private final Object refCountLock = new Object();
+
+  private int refCount = 1;
+
+  public NV21Buffer(byte[] data, int width, int height, Runnable releaseCallback) {
+    this.data = data;
+    this.width = width;
+    this.height = height;
+    this.releaseCallback = releaseCallback;
+  }
+
+  @Override
+  public int getWidth() {
+    return width;
+  }
+
+  @Override
+  public int getHeight() {
+    return height;
+  }
+
+  @Override
+  public VideoFrame.I420Buffer toI420() {
+    // cropAndScale() always returns an I420 buffer; crop and scale over the full image.
+    return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
+        height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
+  }
+
+  @Override
+  public void retain() {
+    synchronized (refCountLock) {
+      ++refCount;
+    }
+  }
+
+  @Override
+  public void release() {
+    synchronized (refCountLock) {
+      if (--refCount == 0 && releaseCallback != null) {
+        releaseCallback.run();
+      }
+    }
+  }
+
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
+    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
+        height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
+    return newBuffer;
+  }
+
+  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
+      int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
+      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
+}
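
For reference, a minimal sketch of the reference-counting contract a
producer is expected to follow with NV21Buffer (the names mirror the
Camera1Session change above):

  VideoFrame.Buffer buffer = new NV21Buffer(data, width, height,
      () -> camera.addCallbackBuffer(data));  // Runs once refCount reaches 0.
  VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
  events.onFrameCaptured(session, frame);  // Consumers retain() frames they keep.
  frame.release();  // Drops the producer's reference.
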
diff --git a/sdk/android/src/jni/androidmediaencoder_jni.cc b/sdk/android/src/jni/androidmediaencoder_jni.cc
index efbf2c2..78fd075 100644
--- a/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -1479,6 +1479,7 @@
 
 JOW(void, MediaCodecVideoEncoder_nativeFillBuffer)
 (JNIEnv* jni,
+ jclass,
  jlong native_encoder,
  jint input_buffer,
  jobject j_buffer_y,
diff --git a/sdk/android/src/jni/nv21buffer_jni.cc b/sdk/android/src/jni/nv21buffer_jni.cc
new file mode 100644
index 0000000..d013edc
--- /dev/null
+++ b/sdk/android/src/jni/nv21buffer_jni.cc
@@ -0,0 +1,70 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/rtc_base/checks.h"
+
+namespace webrtc_jni {
+
+extern "C" JNIEXPORT void JNICALL
+Java_org_webrtc_NV21Buffer_nativeCropAndScale(JNIEnv* jni,
+                                              jclass,
+                                              jint crop_x,
+                                              jint crop_y,
+                                              jint crop_width,
+                                              jint crop_height,
+                                              jint scale_width,
+                                              jint scale_height,
+                                              jbyteArray j_src,
+                                              jint src_width,
+                                              jint src_height,
+                                              jobject j_dst_y,
+                                              jint dst_stride_y,
+                                              jobject j_dst_u,
+                                              jint dst_stride_u,
+                                              jobject j_dst_v,
+                                              jint dst_stride_v) {
+  const int src_stride_y = src_width;
+  const int src_stride_uv = src_width;
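+  // In NV21, a full-resolution Y plane is followed by a single interleaved
+  // VU plane at half vertical resolution; each chroma row holds width / 2
+  // VU byte pairs, so both strides above equal src_width.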
+  const int crop_chroma_x = crop_x / 2;
+  const int crop_chroma_y = crop_y / 2;
+
+  jboolean was_copy;
+  jbyte* src_bytes = jni->GetByteArrayElements(j_src, &was_copy);
+  RTC_DCHECK(!was_copy);
+  uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
+  uint8_t const* src_uv = src_y + src_height * src_stride_y;
+
+  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
+  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
+  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
+
+  // Crop using pointer arithmetic.
+  src_y += crop_x + crop_y * src_stride_y;
+  src_uv += crop_chroma_x + crop_chroma_y * src_stride_uv;
+
+  webrtc::NV12ToI420Scaler scaler;
+  // U- and V-planes are swapped because this is NV21 not NV12.
+  scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
+                         crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
+                         dst_u, dst_stride_u, scale_width, scale_height);
+
+  jni->ReleaseByteArrayElements(j_src, src_bytes, JNI_ABORT);
+}
+
+}  // namespace webrtc_jni