Remove the video capture module on Android.
Video capture for Android is now implemented in talk/app/webrtc/androidvideocapturer.h.

BUG=webrtc:4475

Review URL: https://codereview.webrtc.org/1347083003

Cr-Original-Commit-Position: refs/heads/master@{#9995}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 35d1767cc3ae1fd48e8fd01b0b8ed9061734538e
diff --git a/build/apk_tests.gyp b/build/apk_tests.gyp
index b56c11d..a9481d2 100644
--- a/build/apk_tests.gyp
+++ b/build/apk_tests.gyp
@@ -216,36 +216,6 @@
       ],
     },
     {
-      'target_name': 'video_capture_tests_apk',
-      'type': 'none',
-       'variables': {
-         'test_suite_name': 'video_capture_tests',
-         'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_capture_tests<(SHARED_LIB_SUFFIX)',
-       },
-       'dependencies': [
-         '<(webrtc_root)/modules/modules.gyp:video_capture_tests',
-         'video_capture_java',
-       ],
-       'includes': [
-         '../../build/apk_test.gypi',
-       ],
-    },
-    {
-      # Used only by video_capture_tests_apk above, and impossible to use in the
-      # standalone build, which is why it's declared here instead of under
-      # modules/video_capture/ (to avoid the need for a forked _noop.gyp file
-      # like this file has; see comment at the top of this file).
-      'target_name': 'video_capture_java',
-      'type': 'none',
-      'variables': {
-        'java_in_dir': '<(webrtc_root)/modules/video_capture/android/java',
-        'additional_src_dirs': [ '<(webrtc_root)/base/java/src', ],
-      },
-      'includes': [
-        '../../build/java.gypi',
-      ],
-    },
-    {
       'target_name': 'audio_device_java',
       'type': 'none',
       'variables': {
diff --git a/build/apk_tests_noop.gyp b/build/apk_tests_noop.gyp
index 7c1a6aa..9a4789f 100644
--- a/build/apk_tests_noop.gyp
+++ b/build/apk_tests_noop.gyp
@@ -65,9 +65,5 @@
       'target_name': 'audio_codec_speed_tests_apk',
       'type': 'none',
     },
-    {
-      'target_name': 'video_capture_tests_apk',
-      'type': 'none',
-    },
   ],
 }
diff --git a/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java b/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java
deleted file mode 100644
index ba811d0..0000000
--- a/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.webrtcdemo;
-
-import android.app.AlertDialog;
-import android.content.Context;
-import android.content.DialogInterface;
-import android.media.MediaCodec;
-import android.media.MediaCrypto;
-import android.media.MediaExtractor;
-import android.media.MediaFormat;
-import android.os.Handler;
-import android.os.Looper;
-import android.os.Message;
-import android.util.Log;
-import android.view.Surface;
-import android.view.SurfaceView;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.LinkedList;
-
-class MediaCodecVideoDecoder {
-  public static final int DECODE = 0;
-  private enum CodecName { ON2_VP8, GOOGLE_VPX, EXYNOX_VP8 }
-
-  private void check(boolean value, String message) {
-    if (value) {
-      return;
-    }
-    Log.e("WEBRTC-CHECK", message);
-    AlertDialog alertDialog = new AlertDialog.Builder(context).create();
-    alertDialog.setTitle("WebRTC Error");
-    alertDialog.setMessage(message);
-    alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
-        "OK",
-        new DialogInterface.OnClickListener() {
-          public void onClick(DialogInterface dialog, int which) {
-            return;
-          }
-        }
-                          );
-    alertDialog.show();
-  }
-
-  class Frame {
-    public ByteBuffer buffer;
-    public long timestampUs;
-
-    Frame(ByteBuffer buffer, long timestampUs) {
-      this.buffer = buffer;
-      this.timestampUs = timestampUs;
-    }
-  }
-
-  // This class enables decoding being run on a separate thread.
-  class DecodeHandler extends Handler {
-    @Override
-    public void handleMessage(Message msg) {
-      // TODO(dwkang): figure out exceptions just make this thread finish.
-      try {
-        switch (msg.what) {
-          case DECODE:
-            decodePendingBuffers();
-            long delayMillis = 5;  // Don't busy wait.
-            handler.sendMessageDelayed(
-                handler.obtainMessage(DECODE), delayMillis);
-            break;
-          default:
-            break;
-        }
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-  }
-
-  private static String TAG;
-  private Context context;
-  private SurfaceView surfaceView;
-
-  private DecodeHandler handler;
-  private Thread looperThread;
-
-  MediaCodec codec;
-  MediaFormat format;
-
-  // Buffers supplied by MediaCodec for pushing encoded data to and pulling
-  // decoded data from.
-  private ByteBuffer[] codecInputBuffers;
-  private ByteBuffer[] codecOutputBuffers;
-
-  // Frames from the native layer.
-  private LinkedList<Frame> frameQueue;
-  // Indexes to MediaCodec buffers
-  private LinkedList<Integer> availableInputBufferIndices;
-  private LinkedList<Integer> availableOutputBufferIndices;
-  private LinkedList<MediaCodec.BufferInfo> availableOutputBufferInfos;
-
-  // Offset between system time and media time.
-  private long deltaTimeUs;
-
-  public MediaCodecVideoDecoder(Context context) {
-    TAG = context.getString(R.string.tag);
-    this.context = context;
-    surfaceView = new SurfaceView(context);
-    frameQueue = new LinkedList<Frame>();
-    availableInputBufferIndices = new LinkedList<Integer>();
-    availableOutputBufferIndices = new LinkedList<Integer>();
-    availableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
-  }
-
-  public void dispose() {
-    codec.stop();
-    codec.release();
-  }
-
-  // Return view that is written to by MediaCodec.
-  public SurfaceView getView() { return surfaceView; }
-
-  // Entry point from the native layer. Called when the class should be ready
-  // to start receiving raw frames.
-  private boolean start(int width, int height) {
-    deltaTimeUs = -1;
-    if (!setCodecState(width, height, CodecName.ON2_VP8)) {
-      return false;
-    }
-    startLooperThread();
-    // The decoding must happen on |looperThread| thread.
-    handler.sendMessage(handler.obtainMessage(DECODE));
-    return true;
-  }
-
-  private boolean setCodecState(int width, int height, CodecName codecName) {
-    // TODO(henrike): enable more than ON2_VP8 codec.
-    format = new MediaFormat();
-    format.setInteger(MediaFormat.KEY_WIDTH, width);
-    format.setInteger(MediaFormat.KEY_HEIGHT, height);
-    try {
-      switch (codecName) {
-        case ON2_VP8:
-          format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
-          codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
-          break;
-        case GOOGLE_VPX:
-          // SW VP8 decoder
-          codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
-          break;
-        case EXYNOX_VP8:
-          // Nexus10 HW VP8 decoder
-          codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
-          break;
-        default:
-          return false;
-      }
-    } catch  (Exception e) {
-      // TODO(dwkang): replace this instanceof/throw with a narrower catch
-      // clause once the SDK advances.
-      if (e instanceof IOException) {
-        Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
-        return false;
-      }
-      throw new RuntimeException(e);
-    }
-    Surface surface = surfaceView.getHolder().getSurface();
-    MediaCrypto crypto = null;  // No crypto.
-    int flags = 0;  // Decoder (1 for encoder)
-    codec.configure(format, surface, crypto, flags);
-    codec.start();
-    codecInputBuffers = codec.getInputBuffers();
-    codecOutputBuffers = codec.getOutputBuffers();
-    return true;
-  }
-
-  private void startLooperThread() {
-    looperThread = new Thread() {
-        @Override
-        public void run() {
-          Looper.prepare();
-          // Handler that is run by this thread.
-          handler = new DecodeHandler();
-          // Notify that the thread has created a handler.
-          synchronized(MediaCodecVideoDecoder.this) {
-            MediaCodecVideoDecoder.this.notify();
-          }
-          Looper.loop();
-        }
-      };
-    looperThread.start();
-    // Wait for thread to notify that Handler has been set up.
-    synchronized(this) {
-      try {
-        wait();
-      } catch (InterruptedException e) {
-        e.printStackTrace();
-      }
-    }
-  }
-
-  // Entry point from the native layer. It pushes the raw buffer to this class.
-  private void pushBuffer(ByteBuffer buffer, long renderTimeMs) {
-    // TODO(dwkang): figure out why exceptions just make this thread finish.
-    try {
-      final long renderTimeUs = renderTimeMs * 1000;
-      synchronized(frameQueue) {
-        frameQueue.add(new Frame(buffer, renderTimeUs));
-      }
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  private boolean hasFrame() {
-    synchronized(frameQueue) {
-      return !frameQueue.isEmpty();
-    }
-  }
-
-  private Frame dequeueFrame() {
-    synchronized(frameQueue) {
-      return frameQueue.removeFirst();
-    }
-  }
-
-  private void flush() {
-    availableInputBufferIndices.clear();
-    availableOutputBufferIndices.clear();
-    availableOutputBufferInfos.clear();
-
-    codec.flush();
-  }
-
-  // Media time is relative to previous frame.
-  private long mediaTimeToSystemTime(long mediaTimeUs) {
-    if (deltaTimeUs == -1) {
-      long nowUs = System.currentTimeMillis() * 1000;
-      deltaTimeUs = nowUs - mediaTimeUs;
-    }
-    return deltaTimeUs + mediaTimeUs;
-  }
-
-  private void decodePendingBuffers() {
-    int timeoutUs = 0;  // Don't block on dequeuing input buffer.
-
-    int index = codec.dequeueInputBuffer(timeoutUs);
-    if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
-      availableInputBufferIndices.add(index);
-    }
-    while (feedInputBuffer()) {}
-
-    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
-    index = codec.dequeueOutputBuffer(info, timeoutUs);
-    if (index > 0) {
-      availableOutputBufferIndices.add(index);
-      availableOutputBufferInfos.add(info);
-    }
-    if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
-      codecOutputBuffers = codec.getOutputBuffers();
-    }
-
-    while (drainOutputBuffer()) {}
-  }
-
-  // Returns true if MediaCodec is ready for more data and there was data
-  // available from the native layer.
-  private boolean feedInputBuffer() {
-    if (availableInputBufferIndices.isEmpty()) {
-      return false;
-    }
-    if (!hasFrame()) {
-      return false;
-    }
-    Frame frame = dequeueFrame();
-    ByteBuffer buffer = frame.buffer;
-
-    int index = availableInputBufferIndices.pollFirst();
-    ByteBuffer codecData = codecInputBuffers[index];
-    check(codecData.capacity() >= buffer.capacity(),
-        "Buffer is too small to copy a frame.");
-    buffer.rewind();
-    codecData.rewind();
-    codecData.put(buffer);
-
-    try {
-      int offset = 0;
-      int flags = 0;
-      codec.queueInputBuffer(index, offset, buffer.capacity(),
-          frame.timestampUs, flags);
-    } catch (MediaCodec.CryptoException e) {
-      check(false, "CryptoException w/ errorCode " + e.getErrorCode() +
-          ", '" + e.getMessage() + "'");
-    }
-    return true;
-  }
-
-  // Returns true if more output data could be drained.MediaCodec has more data
-  // to deliver.
-  private boolean drainOutputBuffer() {
-    if (availableOutputBufferIndices.isEmpty()) {
-      return false;
-    }
-
-    int index = availableOutputBufferIndices.peekFirst();
-    MediaCodec.BufferInfo info = availableOutputBufferInfos.peekFirst();
-    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
-      // End of stream is unexpected with streamed video.
-      check(false, "Saw output end of stream.");
-      return false;
-    }
-    long realTimeUs = mediaTimeToSystemTime(info.presentationTimeUs);
-    long nowUs = System.currentTimeMillis() * 1000;
-    long lateUs = nowUs - realTimeUs;
-    if (lateUs < -10000) {
-      // Frame should not be presented yet.
-      return false;
-    }
-
-    // TODO(dwkang): For some extreme cases, just not doing rendering is not
-    // enough. Need to seek to the next key frame.
-    boolean render = lateUs <= 30000;
-    if (!render) {
-      Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
-    }
-    // Decode and render to surface if desired.
-    codec.releaseOutputBuffer(index, render);
-    availableOutputBufferIndices.removeFirst();
-    availableOutputBufferInfos.removeFirst();
-    return true;
-  }
-}
diff --git a/modules/modules_java.gyp b/modules/modules_java.gyp
index 400cd11..060de2a 100644
--- a/modules/modules_java.gyp
+++ b/modules/modules_java.gyp
@@ -19,18 +19,6 @@
       'includes': [ '../../build/java.gypi' ],
     }, # audio_device_module_java
     {
-      'target_name': 'video_capture_module_java',
-      'type': 'none',
-      'dependencies': [
-        'video_render_module_java',
-      ],
-      'variables': {
-        'java_in_dir': 'video_capture/android/java',
-        'additional_src_dirs': [ '../base/java/src', ],
-      },
-      'includes': [ '../../build/java.gypi' ],
-    }, # video_capture_module_java
-    {
       'target_name': 'video_render_module_java',
       'type': 'none',
       'variables': {
diff --git a/modules/modules_java_chromium.gyp b/modules/modules_java_chromium.gyp
index 247a81d..32d2d8d 100644
--- a/modules/modules_java_chromium.gyp
+++ b/modules/modules_java_chromium.gyp
@@ -17,17 +17,6 @@
       'includes': [ '../../../build/java.gypi' ],
     }, # audio_device_module_java
     {
-      'target_name': 'video_capture_module_java',
-      'type': 'none',
-      'dependencies': [
-        'video_render_module_java',
-      ],
-      'variables': {
-        'java_in_dir': 'video_capture/android/java',
-      },
-      'includes': [ '../../../build/java.gypi' ],
-    }, # video_capture_module_java
-    {
       'target_name': 'video_render_module_java',
       'type': 'none',
       'variables': {
diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn
index f29e5b6..b0ed6f4 100644
--- a/modules/video_capture/BUILD.gn
+++ b/modules/video_capture/BUILD.gn
@@ -130,21 +130,6 @@
 
       deps += [ "//third_party/winsdk_samples" ]
     }
-    if (is_android) {
-      sources = [
-        "android/device_info_android.cc",
-        "android/device_info_android.h",
-        "android/video_capture_android.cc",
-        "android/video_capture_android.h",
-      ]
-
-      if (rtc_build_json) {
-        deps += [ "//third_party/jsoncpp" ]
-      }
-      if (rtc_build_icu) {
-        deps += [ "//third_party/icu:icuuc" ]
-      }
-    }
     if (is_ios) {
       sources = [
         "ios/device_info_ios.h",
diff --git a/modules/video_capture/android/device_info_android.cc b/modules/video_capture/android/device_info_android.cc
deleted file mode 100644
index 974717c..0000000
--- a/modules/video_capture/android/device_info_android.cc
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_capture/android/device_info_android.h"
-
-#include <algorithm>
-#include <sstream>
-#include <vector>
-
-#include "unicode/unistr.h"
-#include "webrtc/base/json.h"
-#include "webrtc/modules/video_capture/android/video_capture_android.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/ref_count.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace videocapturemodule {
-
-// Helper for storing lists of pairs of ints.  Used e.g. for resolutions & FPS
-// ranges.
-typedef std::pair<int, int> IntPair;
-typedef std::vector<IntPair> IntPairs;
-
-static std::string IntPairsToString(const IntPairs& pairs, char separator) {
-  std::stringstream stream;
-  for (size_t i = 0; i < pairs.size(); ++i) {
-    if (i > 0)
-      stream << ", ";
-    stream << "(" << pairs[i].first << separator << pairs[i].second << ")";
-  }
-  return stream.str();
-}
-
-struct AndroidCameraInfo {
-  std::string name;
-  bool front_facing;
-  int orientation;
-  IntPairs resolutions;  // Pairs are: (width,height).
-  // Pairs are (min,max) in units of FPS*1000 ("milli-frame-per-second").
-  IntPairs mfpsRanges;
-
-  std::string ToString() {
-    std::stringstream stream;
-    stream << "Name: [" << name << "], MFPS ranges: ["
-           << IntPairsToString(mfpsRanges, ':')
-           << "], front_facing: " << front_facing
-           << ", orientation: " << orientation << ", resolutions: ["
-           << IntPairsToString(resolutions, 'x') << "]";
-    return stream.str();
-  }
-};
-
-// Camera info; populated during DeviceInfoAndroid::Initialize() and immutable
-// thereafter.
-static std::vector<AndroidCameraInfo>* g_camera_info = NULL;
-
-// Set |*index| to the index of |name| in g_camera_info or return false if no
-// match found.
-static bool FindCameraIndexByName(const std::string& name, size_t* index) {
-  for (size_t i = 0; i < g_camera_info->size(); ++i) {
-    if (g_camera_info->at(i).name == name) {
-      *index = i;
-      return true;
-    }
-  }
-  return false;
-}
-
-// Returns a pointer to the named member of g_camera_info, or NULL if no match
-// is found.
-static AndroidCameraInfo* FindCameraInfoByName(const std::string& name) {
-  size_t index = 0;
-  if (FindCameraIndexByName(name, &index))
-    return &g_camera_info->at(index);
-  return NULL;
-}
-
-// static
-void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
-  // TODO(henrike): this "if" would make a lot more sense as an assert, but
-  // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine() and
-  // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate() conspire to
-  // prevent this.  Once that code is made to only
-  // VideoEngine::SetAndroidObjects() once per process, this can turn into an
-  // assert.
-  if (g_camera_info)
-    return;
-
-  g_camera_info = new std::vector<AndroidCameraInfo>();
-  jclass j_info_class =
-      jni->FindClass("org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
-  assert(j_info_class);
-  jmethodID j_initialize = jni->GetStaticMethodID(
-      j_info_class, "getDeviceInfo", "()Ljava/lang/String;");
-  jstring j_json_info = static_cast<jstring>(
-      jni->CallStaticObjectMethod(j_info_class, j_initialize));
-
-  const jchar* jchars = jni->GetStringChars(j_json_info, NULL);
-  icu::UnicodeString ustr(jchars, jni->GetStringLength(j_json_info));
-  jni->ReleaseStringChars(j_json_info, jchars);
-  std::string json_info;
-  ustr.toUTF8String(json_info);
-
-  Json::Value cameras;
-  Json::Reader reader(Json::Features::strictMode());
-  bool parsed = reader.parse(json_info, cameras);
-  if (!parsed) {
-    std::stringstream stream;
-    stream << "Failed to parse configuration:\n"
-           << reader.getFormattedErrorMessages();
-    assert(false);
-    return;
-  }
-  for (Json::ArrayIndex i = 0; i < cameras.size(); ++i) {
-    const Json::Value& camera = cameras[i];
-    AndroidCameraInfo info;
-    info.name = camera["name"].asString();
-    info.front_facing = camera["front_facing"].asBool();
-    info.orientation = camera["orientation"].asInt();
-    Json::Value sizes = camera["sizes"];
-    for (Json::ArrayIndex j = 0; j < sizes.size(); ++j) {
-      const Json::Value& size = sizes[j];
-      info.resolutions.push_back(std::make_pair(
-          size["width"].asInt(), size["height"].asInt()));
-    }
-    Json::Value mfpsRanges = camera["mfpsRanges"];
-    for (Json::ArrayIndex j = 0; j < mfpsRanges.size(); ++j) {
-      const Json::Value& mfpsRange = mfpsRanges[j];
-      info.mfpsRanges.push_back(std::make_pair(mfpsRange["min_mfps"].asInt(),
-                                               mfpsRange["max_mfps"].asInt()));
-    }
-    g_camera_info->push_back(info);
-  }
-}
-
-void DeviceInfoAndroid::DeInitialize() {
-  if (g_camera_info) {
-    delete g_camera_info;
-    g_camera_info = NULL;
-  }
-}
-
-VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
-    const int32_t id) {
-  return new videocapturemodule::DeviceInfoAndroid(id);
-}
-
-DeviceInfoAndroid::DeviceInfoAndroid(const int32_t id) :
-    DeviceInfoImpl(id) {
-}
-
-DeviceInfoAndroid::~DeviceInfoAndroid() {
-}
-
-bool DeviceInfoAndroid::FindCameraIndex(const char* deviceUniqueIdUTF8,
-                                        size_t* index) {
-  return FindCameraIndexByName(deviceUniqueIdUTF8, index);
-}
-
-int32_t DeviceInfoAndroid::Init() {
-  return 0;
-}
-
-uint32_t DeviceInfoAndroid::NumberOfDevices() {
-  return g_camera_info->size();
-}
-
-int32_t DeviceInfoAndroid::GetDeviceName(
-    uint32_t deviceNumber,
-    char* deviceNameUTF8,
-    uint32_t deviceNameLength,
-    char* deviceUniqueIdUTF8,
-    uint32_t deviceUniqueIdUTF8Length,
-    char* /*productUniqueIdUTF8*/,
-    uint32_t /*productUniqueIdUTF8Length*/) {
-  if (deviceNumber >= g_camera_info->size())
-    return -1;
-  const AndroidCameraInfo& info = g_camera_info->at(deviceNumber);
-  if (info.name.length() + 1 > deviceNameLength ||
-      info.name.length() + 1 > deviceUniqueIdUTF8Length) {
-    return -1;
-  }
-  memcpy(deviceNameUTF8, info.name.c_str(), info.name.length() + 1);
-  memcpy(deviceUniqueIdUTF8, info.name.c_str(), info.name.length() + 1);
-  return 0;
-}
-
-int32_t DeviceInfoAndroid::CreateCapabilityMap(
-    const char* deviceUniqueIdUTF8) {
-  _captureCapabilities.clear();
-  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
-  if (info == NULL)
-    return -1;
-
-  for (size_t i = 0; i < info->resolutions.size(); ++i) {
-    for (size_t j = 0; j < info->mfpsRanges.size(); ++j) {
-      const IntPair& size = info->resolutions[i];
-      const IntPair& mfpsRange = info->mfpsRanges[j];
-      VideoCaptureCapability cap;
-      cap.width = size.first;
-      cap.height = size.second;
-      cap.maxFPS = mfpsRange.second / 1000;
-      cap.expectedCaptureDelay = kExpectedCaptureDelay;
-      cap.rawType = kVideoNV21;
-      _captureCapabilities.push_back(cap);
-    }
-  }
-  return _captureCapabilities.size();
-}
-
-int32_t DeviceInfoAndroid::GetOrientation(const char* deviceUniqueIdUTF8,
-                                          VideoRotation& orientation) {
-  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
-  if (info == NULL ||
-      VideoCaptureImpl::RotationFromDegrees(info->orientation,
-                                            &orientation) != 0) {
-    return -1;
-  }
-  return 0;
-}
-
-void DeviceInfoAndroid::GetMFpsRange(const char* deviceUniqueIdUTF8,
-                                     int max_fps_to_match,
-                                     int* min_mfps, int* max_mfps) {
-  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
-  if (info == NULL)
-    return;
-  int desired_mfps = max_fps_to_match * 1000;
-  int best_diff_mfps = 0;
-  LOG(LS_INFO) << "Search for best target mfps " << desired_mfps;
-  // Search for best fps range with preference shifted to constant fps modes.
-  for (size_t i = 0; i < info->mfpsRanges.size(); ++i) {
-    int diff_mfps = abs(info->mfpsRanges[i].first - desired_mfps) +
-        abs(info->mfpsRanges[i].second - desired_mfps) +
-        (info->mfpsRanges[i].second - info->mfpsRanges[i].first) / 2;
-    LOG(LS_INFO) << "Fps range " << info->mfpsRanges[i].first << ":" <<
-        info->mfpsRanges[i].second << ". Distance: " << diff_mfps;
-    if (i == 0 || diff_mfps < best_diff_mfps) {
-      best_diff_mfps = diff_mfps;
-      *min_mfps = info->mfpsRanges[i].first;
-      *max_mfps = info->mfpsRanges[i].second;
-    }
-  }
-}
-
-}  // namespace videocapturemodule
-}  // namespace webrtc
diff --git a/modules/video_capture/android/device_info_android.h b/modules/video_capture/android/device_info_android.h
deleted file mode 100644
index 581312b..0000000
--- a/modules/video_capture/android/device_info_android.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_capture/device_info_impl.h"
-#include "webrtc/modules/video_capture/video_capture_impl.h"
-
-namespace webrtc
-{
-namespace videocapturemodule
-{
-
-class DeviceInfoAndroid : public DeviceInfoImpl {
- public:
-  static void Initialize(JNIEnv* env);
-  static void DeInitialize();
-
-  DeviceInfoAndroid(int32_t id);
-  virtual ~DeviceInfoAndroid();
-
-  // Set |*index| to the index of the camera matching |deviceUniqueIdUTF8|, or
-  // return false if no match.
-  bool FindCameraIndex(const char* deviceUniqueIdUTF8, size_t* index);
-
-  virtual int32_t Init();
-  virtual uint32_t NumberOfDevices();
-  virtual int32_t GetDeviceName(
-      uint32_t deviceNumber,
-      char* deviceNameUTF8,
-      uint32_t deviceNameLength,
-      char* deviceUniqueIdUTF8,
-      uint32_t deviceUniqueIdUTF8Length,
-      char* productUniqueIdUTF8 = 0,
-      uint32_t productUniqueIdUTF8Length = 0);
-  virtual int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8);
-
-  virtual int32_t DisplayCaptureSettingsDialogBox(
-      const char* /*deviceUniqueIdUTF8*/,
-      const char* /*dialogTitleUTF8*/,
-      void* /*parentWindow*/,
-      uint32_t /*positionX*/,
-      uint32_t /*positionY*/) { return -1; }
-  virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
-                                 VideoRotation& orientation);
-
-  // Populate |min_mfps| and |max_mfps| with the closest supported range of the
-  // device to |max_fps_to_match|.
-  void GetMFpsRange(const char* deviceUniqueIdUTF8,
-                    int max_fps_to_match,
-                    int* min_mfps,
-                    int* max_mfps);
-
- private:
-  enum { kExpectedCaptureDelay = 190};
-};
-
-}  // namespace videocapturemodule
-}  // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
diff --git a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
deleted file mode 100644
index 15c86c9..0000000
--- a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ /dev/null
@@ -1,463 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengine;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.Exchanger;
-
-import android.content.Context;
-import android.graphics.ImageFormat;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.PreviewCallback;
-import android.hardware.Camera;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
-import android.os.Handler;
-import android.os.Looper;
-import android.os.SystemClock;
-import android.view.Surface;
-import android.view.SurfaceHolder.Callback;
-import android.view.SurfaceHolder;
-import android.view.WindowManager;
-
-import org.webrtc.Logging;
-
-// Wrapper for android Camera, with support for direct local preview rendering.
-// Threading notes: this class is called from ViE C++ code, and from Camera &
-// SurfaceHolder Java callbacks.  Since these calls happen on different threads,
-// the entry points to this class are all synchronized.  This shouldn't present
-// a performance bottleneck because only onPreviewFrame() is called more than
-// once (and is called serially on a single thread), so the lock should be
-// uncontended.  Note that each of these synchronized methods must check
-// |camera| for null to account for having possibly waited for stopCapture() to
-// complete.
-public class VideoCaptureAndroid implements PreviewCallback, Callback {
-  private final static String TAG = "WEBRTC-JC";
-
-  private static SurfaceHolder localPreview;
-  private Camera camera;  // Only non-null while capturing.
-  private CameraThread cameraThread;
-  private Handler cameraThreadHandler;
-  private Context context;
-  private final int id;
-  private final Camera.CameraInfo info;
-  private final long native_capturer;  // |VideoCaptureAndroid*| in C++.
-  private SurfaceTexture cameraSurfaceTexture;
-  private int[] cameraGlTextures = null;
-  // Arbitrary queue depth.  Higher number means more memory allocated & held,
-  // lower number means more sensitivity to processing time in the client (and
-  // potentially stalling the capturer if it runs out of buffers to write to).
-  private final int numCaptureBuffers = 3;
-  private double averageDurationMs;
-  private long lastCaptureTimeMs;
-  private int frameCount;
-  private int frameDropRatio;
-
-  // Requests future capturers to send their frames to |localPreview| directly.
-  public static void setLocalPreview(SurfaceHolder localPreview) {
-    // It is a gross hack that this is a class-static.  Doing it right would
-    // mean plumbing this through the C++ API and using it from
-    // webrtc/examples/android/media_demo's MediaEngine class.
-    VideoCaptureAndroid.localPreview = localPreview;
-  }
-
-  public VideoCaptureAndroid(int id, long native_capturer) {
-    this.id = id;
-    this.native_capturer = native_capturer;
-    this.context = GetContext();
-    this.info = new Camera.CameraInfo();
-    Camera.getCameraInfo(id, info);
-  }
-
-  // Return the global application context.
-  private static native Context GetContext();
-
-  private class CameraThread extends Thread {
-    private Exchanger<Handler> handlerExchanger;
-    public CameraThread(Exchanger<Handler> handlerExchanger) {
-      this.handlerExchanger = handlerExchanger;
-    }
-
-    @Override public void run() {
-      Looper.prepare();
-      exchange(handlerExchanger, new Handler());
-      Looper.loop();
-    }
-  }
-
-  // Called by native code.  Returns true if capturer is started.
-  //
-  // Note that this actually opens the camera, and Camera callbacks run on the
-  // thread that calls open(), so this is done on the CameraThread.  Since ViE
-  // API needs a synchronous success return value we wait for the result.
-  private synchronized boolean startCapture(
-      final int width, final int height,
-      final int min_mfps, final int max_mfps) {
-    Logging.d(TAG, "startCapture: " + width + "x" + height + "@" +
-        min_mfps + ":" + max_mfps);
-    if (cameraThread != null || cameraThreadHandler != null) {
-      throw new RuntimeException("Camera thread already started!");
-    }
-    Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
-    cameraThread = new CameraThread(handlerExchanger);
-    cameraThread.start();
-    cameraThreadHandler = exchange(handlerExchanger, null);
-
-    final Exchanger<Boolean> result = new Exchanger<Boolean>();
-    cameraThreadHandler.post(new Runnable() {
-        @Override public void run() {
-          startCaptureOnCameraThread(width, height, min_mfps, max_mfps, result);
-        }
-      });
-    boolean startResult = exchange(result, false); // |false| is a dummy value.
-    return startResult;
-  }
-
-  private void startCaptureOnCameraThread(
-      int width, int height, int min_mfps, int max_mfps,
-      Exchanger<Boolean> result) {
-    Throwable error = null;
-    try {
-      camera = Camera.open(id);
-
-      if (localPreview != null) {
-        localPreview.addCallback(this);
-        if (localPreview.getSurface() != null &&
-            localPreview.getSurface().isValid()) {
-          camera.setPreviewDisplay(localPreview);
-        }
-      } else {
-        // No local renderer (we only care about onPreviewFrame() buffers, not a
-        // directly-displayed UI element).  Camera won't capture without
-        // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
-        // it over to Camera, but never listen for frame-ready callbacks,
-        // and never call updateTexImage on it.
-        try {
-          cameraGlTextures = new int[1];
-          // Generate one texture pointer and bind it as an external texture.
-          GLES20.glGenTextures(1, cameraGlTextures, 0);
-          GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              cameraGlTextures[0]);
-          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
-          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
-          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
-          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
-
-          cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
-          cameraSurfaceTexture.setOnFrameAvailableListener(null);
-          camera.setPreviewTexture(cameraSurfaceTexture);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      }
-
-      Logging.d(TAG, "Camera orientation: " + info.orientation +
-          " .Device orientation: " + getDeviceOrientation());
-      Camera.Parameters parameters = camera.getParameters();
-      Logging.d(TAG, "isVideoStabilizationSupported: " +
-          parameters.isVideoStabilizationSupported());
-      if (parameters.isVideoStabilizationSupported()) {
-        parameters.setVideoStabilization(true);
-      }
-      parameters.setPictureSize(width, height);
-      parameters.setPreviewSize(width, height);
-
-      // Check if requested fps range is supported by camera,
-      // otherwise calculate frame drop ratio.
-      List<int[]> supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
-      frameDropRatio = Integer.MAX_VALUE;
-      for (int i = 0; i < supportedFpsRanges.size(); i++) {
-        int[] range = supportedFpsRanges.get(i);
-        if (range[Parameters.PREVIEW_FPS_MIN_INDEX] == min_mfps &&
-            range[Parameters.PREVIEW_FPS_MAX_INDEX] == max_mfps) {
-          frameDropRatio = 1;
-          break;
-        }
-        if (range[Parameters.PREVIEW_FPS_MIN_INDEX] % min_mfps == 0 &&
-            range[Parameters.PREVIEW_FPS_MAX_INDEX] % max_mfps == 0) {
-          int dropRatio = range[Parameters.PREVIEW_FPS_MAX_INDEX] / max_mfps;
-          frameDropRatio = Math.min(dropRatio, frameDropRatio);
-        }
-      }
-      if (frameDropRatio == Integer.MAX_VALUE) {
-        Logging.e(TAG, "Can not find camera fps range");
-        error = new RuntimeException("Can not find camera fps range");
-        exchange(result, false);
-        return;
-      }
-      if (frameDropRatio > 1) {
-        Logging.d(TAG, "Frame dropper is enabled. Ratio: " + frameDropRatio);
-      }
-      min_mfps *= frameDropRatio;
-      max_mfps *= frameDropRatio;
-      Logging.d(TAG, "Camera preview mfps range: " + min_mfps + " - " + max_mfps);
-      parameters.setPreviewFpsRange(min_mfps, max_mfps);
-
-      int format = ImageFormat.NV21;
-      parameters.setPreviewFormat(format);
-      camera.setParameters(parameters);
-      int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
-      for (int i = 0; i < numCaptureBuffers; i++) {
-        camera.addCallbackBuffer(new byte[bufSize]);
-      }
-      camera.setPreviewCallbackWithBuffer(this);
-      frameCount = 0;
-      averageDurationMs = 1000000.0f / (max_mfps / frameDropRatio);
-      camera.startPreview();
-      exchange(result, true);
-      return;
-    } catch (IOException e) {
-      error = e;
-    } catch (RuntimeException e) {
-      error = e;
-    }
-    Logging.e(TAG, "startCapture failed", error);
-    if (camera != null) {
-      Exchanger<Boolean> resultDropper = new Exchanger<Boolean>();
-      stopCaptureOnCameraThread(resultDropper);
-      exchange(resultDropper, false);
-    }
-    exchange(result, false);
-    return;
-  }
-
-  // Called by native code.  Returns true when camera is known to be stopped.
-  private synchronized boolean stopCapture() {
-    Logging.d(TAG, "stopCapture");
-    final Exchanger<Boolean> result = new Exchanger<Boolean>();
-    cameraThreadHandler.post(new Runnable() {
-        @Override public void run() {
-          stopCaptureOnCameraThread(result);
-        }
-      });
-    boolean status = exchange(result, false);  // |false| is a dummy value here.
-    try {
-      cameraThread.join();
-    } catch (InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-    cameraThreadHandler = null;
-    cameraThread = null;
-    Logging.d(TAG, "stopCapture done");
-    return status;
-  }
-
-  private void stopCaptureOnCameraThread(
-      Exchanger<Boolean> result) {
-    if (camera == null) {
-      throw new RuntimeException("Camera is already stopped!");
-    }
-    Throwable error = null;
-    try {
-      camera.stopPreview();
-      camera.setPreviewCallbackWithBuffer(null);
-      if (localPreview != null) {
-        localPreview.removeCallback(this);
-        camera.setPreviewDisplay(null);
-      } else {
-        camera.setPreviewTexture(null);
-        cameraSurfaceTexture = null;
-        if (cameraGlTextures != null) {
-          GLES20.glDeleteTextures(1, cameraGlTextures, 0);
-          cameraGlTextures = null;
-        }
-      }
-      camera.release();
-      camera = null;
-      exchange(result, true);
-      Looper.myLooper().quit();
-      return;
-    } catch (IOException e) {
-      error = e;
-    } catch (RuntimeException e) {
-      error = e;
-    }
-    Logging.e(TAG, "Failed to stop camera", error);
-    exchange(result, false);
-    Looper.myLooper().quit();
-    return;
-  }
-
-  private int getDeviceOrientation() {
-    int orientation = 0;
-    if (context != null) {
-      WindowManager wm = (WindowManager) context.getSystemService(
-          Context.WINDOW_SERVICE);
-      switch(wm.getDefaultDisplay().getRotation()) {
-        case Surface.ROTATION_90:
-          orientation = 90;
-          break;
-        case Surface.ROTATION_180:
-          orientation = 180;
-          break;
-        case Surface.ROTATION_270:
-          orientation = 270;
-          break;
-        case Surface.ROTATION_0:
-        default:
-          orientation = 0;
-          break;
-      }
-    }
-    return orientation;
-  }
-
-  private native void ProvideCameraFrame(
-      byte[] data, int length, int rotation, long timeStamp, long captureObject);
-
-  // Called on cameraThread so must not "synchronized".
-  @Override
-  public void onPreviewFrame(byte[] data, Camera callbackCamera) {
-    if (Thread.currentThread() != cameraThread) {
-      throw new RuntimeException("Camera callback not on camera thread?!?");
-    }
-    if (camera == null) {
-      return;
-    }
-    if (camera != callbackCamera) {
-      throw new RuntimeException("Unexpected camera in callback!");
-    }
-    frameCount++;
-    // Check if frame needs to be dropped.
-    if ((frameDropRatio > 1) && (frameCount % frameDropRatio) > 0) {
-      camera.addCallbackBuffer(data);
-      return;
-    }
-    long captureTimeMs = SystemClock.elapsedRealtime();
-    if (frameCount > frameDropRatio) {
-      double durationMs = captureTimeMs - lastCaptureTimeMs;
-      averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
-      if ((frameCount % 30) == 0) {
-        Logging.d(TAG, "Camera TS " + captureTimeMs +
-            ". Duration: " + (int)durationMs + " ms. FPS: " +
-            (int) (1000 / averageDurationMs + 0.5));
-      }
-    }
-    lastCaptureTimeMs = captureTimeMs;
-
-    int rotation = getDeviceOrientation();
-    if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
-      rotation = 360 - rotation;
-    }
-    rotation = (info.orientation + rotation) % 360;
-
-    ProvideCameraFrame(data, data.length, rotation,
-        captureTimeMs, native_capturer);
-    camera.addCallbackBuffer(data);
-  }
-
-  // Sets the rotation of the preview render window.
-  // Does not affect the captured video image.
-  // Called by native code.
-  private synchronized void setPreviewRotation(final int rotation) {
-    if (camera == null || cameraThreadHandler == null) {
-      return;
-    }
-    final Exchanger<IOException> result = new Exchanger<IOException>();
-    cameraThreadHandler.post(new Runnable() {
-        @Override public void run() {
-          setPreviewRotationOnCameraThread(rotation, result);
-        }
-      });
-    // Use the exchanger below to block this function until
-    // setPreviewRotationOnCameraThread() completes, holding the synchronized
-    // lock for the duration.  The exchanged value itself is ignored.
-    exchange(result, null);
-  }
-
-  private void setPreviewRotationOnCameraThread(
-      int rotation, Exchanger<IOException> result) {
-    Logging.v(TAG, "setPreviewRotation:" + rotation);
-
-    int resultRotation = 0;
-    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
-      // This is a front facing camera.  SetDisplayOrientation will flip
-      // the image horizontally before doing the rotation.
-      resultRotation = ( 360 - rotation ) % 360; // Compensate for the mirror.
-    } else {
-      // Back-facing camera.
-      resultRotation = rotation;
-    }
-    camera.setDisplayOrientation(resultRotation);
-    exchange(result, null);
-  }
-
-  @Override
-  public synchronized void surfaceChanged(
-      SurfaceHolder holder, int format, int width, int height) {
-    Logging.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
-        format + ": " + width + "x" + height);
-  }
-
-  @Override
-  public synchronized void surfaceCreated(final SurfaceHolder holder) {
-    Logging.d(TAG, "VideoCaptureAndroid::surfaceCreated");
-    if (camera == null || cameraThreadHandler == null) {
-      return;
-    }
-    final Exchanger<IOException> result = new Exchanger<IOException>();
-    cameraThreadHandler.post(new Runnable() {
-        @Override public void run() {
-          setPreviewDisplayOnCameraThread(holder, result);
-        }
-      });
-    IOException e = exchange(result, null);  // |null| is a dummy value here.
-    if (e != null) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public synchronized void surfaceDestroyed(SurfaceHolder holder) {
-    Logging.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
-    if (camera == null || cameraThreadHandler == null) {
-      return;
-    }
-    final Exchanger<IOException> result = new Exchanger<IOException>();
-    cameraThreadHandler.post(new Runnable() {
-        @Override public void run() {
-          setPreviewDisplayOnCameraThread(null, result);
-        }
-      });
-    IOException e = exchange(result, null);  // |null| is a dummy value here.
-    if (e != null) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void setPreviewDisplayOnCameraThread(
-      SurfaceHolder holder, Exchanger<IOException> result) {
-    try {
-      camera.setPreviewDisplay(holder);
-    } catch (IOException e) {
-      exchange(result, e);
-      return;
-    }
-    exchange(result, null);
-    return;
-  }
-
-  // Exchanges |value| with |exchanger|, converting InterruptedExceptions to
-  // RuntimeExceptions (since we expect never to see these).
-  private static <T> T exchange(Exchanger<T> exchanger, T value) {
-    try {
-      return exchanger.exchange(value);
-    } catch (InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
diff --git a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
deleted file mode 100644
index 72d7985..0000000
--- a/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengine;
-
-import java.util.List;
-
-import android.hardware.Camera.CameraInfo;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.Size;
-import android.hardware.Camera;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.webrtc.Logging;
-
-public class VideoCaptureDeviceInfoAndroid {
-  private final static String TAG = "WEBRTC-JC";
-
-  private static boolean isFrontFacing(CameraInfo info) {
-    return info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
-  }
-
-  private static String deviceUniqueName(int index, CameraInfo info) {
-    return "Camera " + index +", Facing " +
-        (isFrontFacing(info) ? "front" : "back") +
-        ", Orientation "+ info.orientation;
-  }
-
-  // Returns information about all cameras on the device as a serialized JSON
-  // array of dictionaries encoding information about a single device.  Since
-  // this reflects static information about the hardware present, there is no
-  // need to call this function more than once in a single process.  It is
-  // marked "private" as it is only called by native code.
-  private static String getDeviceInfo() {
-    try {
-      JSONArray devices = new JSONArray();
-      for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
-        CameraInfo info = new CameraInfo();
-        Camera.getCameraInfo(i, info);
-        String uniqueName = deviceUniqueName(i, info);
-        JSONObject cameraDict = new JSONObject();
-        devices.put(cameraDict);
-        List<Size> supportedSizes;
-        List<int[]> supportedFpsRanges;
-        Camera camera = null;
-        try {
-          camera = Camera.open(i);
-          Parameters parameters = camera.getParameters();
-          supportedSizes = parameters.getSupportedPreviewSizes();
-          supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
-          Logging.d(TAG, uniqueName);
-        } catch (RuntimeException e) {
-          Logging.e(TAG, "Failed to open " + uniqueName + ", skipping", e);
-          continue;
-        } finally {
-          if (camera != null) {
-            camera.release();
-          }
-        }
-
-        JSONArray sizes = new JSONArray();
-        for (Size supportedSize : supportedSizes) {
-          JSONObject size = new JSONObject();
-          size.put("width", supportedSize.width);
-          size.put("height", supportedSize.height);
-          sizes.put(size);
-        }
-
-        boolean is30fpsRange = false;
-        boolean is15fpsRange = false;
-        // If there is constant 30 fps mode, but no 15 fps - add 15 fps
-        // mode to the list of supported ranges. Frame drop will be done
-        // in software.
-        for (int[] range : supportedFpsRanges) {
-          if (range[Parameters.PREVIEW_FPS_MIN_INDEX] == 30000 &&
-              range[Parameters.PREVIEW_FPS_MAX_INDEX] == 30000) {
-            is30fpsRange = true;
-          }
-          if (range[Parameters.PREVIEW_FPS_MIN_INDEX] == 15000 &&
-              range[Parameters.PREVIEW_FPS_MAX_INDEX] == 15000) {
-            is15fpsRange = true;
-          }
-        }
-        if (is30fpsRange && !is15fpsRange) {
-          Logging.d(TAG, "Adding 15 fps support");
-          int[] newRange = new int [Parameters.PREVIEW_FPS_MAX_INDEX + 1];
-          newRange[Parameters.PREVIEW_FPS_MIN_INDEX] = 15000;
-          newRange[Parameters.PREVIEW_FPS_MAX_INDEX] = 15000;
-          for (int j = 0; j < supportedFpsRanges.size(); j++ ) {
-            int[] range = supportedFpsRanges.get(j);
-            if (range[Parameters.PREVIEW_FPS_MAX_INDEX] >
-                newRange[Parameters.PREVIEW_FPS_MAX_INDEX]) {
-              supportedFpsRanges.add(j, newRange);
-              break;
-            }
-          }
-        }
-
-        JSONArray mfpsRanges = new JSONArray();
-        for (int[] range : supportedFpsRanges) {
-          JSONObject mfpsRange = new JSONObject();
-          // Android SDK deals in integral "milliframes per second"
-          // (i.e. fps*1000, instead of floating-point frames-per-second) so we
-          // preserve that through the Java->C++->Java round-trip.
-          mfpsRange.put("min_mfps", range[Parameters.PREVIEW_FPS_MIN_INDEX]);
-          mfpsRange.put("max_mfps", range[Parameters.PREVIEW_FPS_MAX_INDEX]);
-          mfpsRanges.put(mfpsRange);
-        }
-
-        cameraDict.put("name", uniqueName);
-        cameraDict.put("front_facing", isFrontFacing(info))
-            .put("orientation", info.orientation)
-            .put("sizes", sizes)
-            .put("mfpsRanges", mfpsRanges);
-      }
-      String ret = devices.toString(2);
-      Logging.d(TAG, ret);
-      return ret;
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
diff --git a/modules/video_capture/android/video_capture_android.cc b/modules/video_capture/android/video_capture_android.cc
deleted file mode 100644
index 272cec4..0000000
--- a/modules/video_capture/android/video_capture_android.cc
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_capture/android/video_capture_android.h"
-
-#include "webrtc/base/common.h"
-#include "webrtc/modules/utility/interface/helpers_android.h"
-#include "webrtc/modules/video_capture/android/device_info_android.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/ref_count.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-static JavaVM* g_jvm = NULL;
-static jclass g_java_capturer_class = NULL;  // VideoCaptureAndroid.class.
-static jobject g_context = NULL;  // Owned android.content.Context.
-
-namespace webrtc {
-
-// Called by Java to get the global application context.
-jobject JNICALL GetContext(JNIEnv* env, jclass) {
-  assert(g_context);
-  return g_context;
-}
-
-// Called by Java when the camera has a new frame to deliver.
-void JNICALL ProvideCameraFrame(
-    JNIEnv* env,
-    jobject,
-    jbyteArray javaCameraFrame,
-    jint length,
-    jint rotation,
-    jlong timeStamp,
-    jlong context) {
-  webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
-      reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
-          context);
-  jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
-  captureModule->OnIncomingFrame(
-      reinterpret_cast<uint8_t*>(cameraFrame), length, rotation, 0);
-  env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
-}
-
-int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) {
-  if (javaVM) {
-    assert(!g_jvm);
-    g_jvm = javaVM;
-    AttachThreadScoped ats(g_jvm);
-    g_context = ats.env()->NewGlobalRef(context);
-
-    videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
-
-    jclass j_capture_class =
-        ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
-    assert(j_capture_class);
-    g_java_capturer_class =
-        reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
-    assert(g_java_capturer_class);
-
-    JNINativeMethod native_methods[] = {
-        {"GetContext",
-         "()Landroid/content/Context;",
-         reinterpret_cast<void*>(&GetContext)},
-        {"ProvideCameraFrame",
-         "([BIIJJ)V",
-         reinterpret_cast<void*>(&ProvideCameraFrame)}};
-    if (ats.env()->RegisterNatives(g_java_capturer_class,
-                                   native_methods, 2) != 0)
-      assert(false);
-  } else {
-    if (g_jvm) {
-      AttachThreadScoped ats(g_jvm);
-      ats.env()->UnregisterNatives(g_java_capturer_class);
-      ats.env()->DeleteGlobalRef(g_java_capturer_class);
-      g_java_capturer_class = NULL;
-      ats.env()->DeleteGlobalRef(g_context);
-      g_context = NULL;
-      videocapturemodule::DeviceInfoAndroid::DeInitialize();
-      g_jvm = NULL;
-    }
-  }
-
-  return 0;
-}
-
-namespace videocapturemodule {
-
-VideoCaptureModule* VideoCaptureImpl::Create(
-    const int32_t id,
-    const char* deviceUniqueIdUTF8) {
-  RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
-      new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
-  if (implementation->Init(id, deviceUniqueIdUTF8) != 0) {
-    delete implementation;
-    implementation = NULL;
-  }
-  return implementation;
-}
-
-int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
-                                             size_t videoFrameLength,
-                                             int32_t degrees,
-                                             int64_t captureTime) {
-  if (!_captureStarted)
-    return 0;
-  VideoRotation current_rotation =
-      (degrees <= 45 || degrees > 315) ? kVideoRotation_0 :
-      (degrees > 45 && degrees <= 135) ? kVideoRotation_90 :
-      (degrees > 135 && degrees <= 225) ? kVideoRotation_180 :
-      (degrees > 225 && degrees <= 315) ? kVideoRotation_270 :
-      kVideoRotation_0;  // Impossible.
-  if (_rotation != current_rotation) {
-    LOG(LS_INFO) << "New camera rotation: " << degrees;
-    _rotation = current_rotation;
-    int32_t status = VideoCaptureImpl::SetCaptureRotation(_rotation);
-    if (status != 0)
-      return status;
-  }
-  return IncomingFrame(
-      videoFrame, videoFrameLength, _captureCapability, captureTime);
-}
-
-VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id)
-    : VideoCaptureImpl(id),
-      _deviceInfo(id),
-      _jCapturer(NULL),
-      _captureStarted(false) {
-}
-
-int32_t VideoCaptureAndroid::Init(const int32_t id,
-                                  const char* deviceUniqueIdUTF8) {
-  const int nameLength = strlen(deviceUniqueIdUTF8);
-  if (nameLength >= kVideoCaptureUniqueNameLength)
-    return -1;
-
-  // Store the device name
-  LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
-  size_t camera_id = 0;
-  if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
-    return -1;
-  _deviceUniqueId = new char[nameLength + 1];
-  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
-
-  AttachThreadScoped ats(g_jvm);
-  JNIEnv* env = ats.env();
-  jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
-  assert(ctor);
-  jlong j_this = reinterpret_cast<intptr_t>(this);
-  _jCapturer = env->NewGlobalRef(
-      env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
-  assert(_jCapturer);
-  _rotation = kVideoRotation_0;
-  return 0;
-}
-
-VideoCaptureAndroid::~VideoCaptureAndroid() {
-  // Ensure Java camera is released even if our caller didn't explicitly Stop.
-  if (_captureStarted)
-    StopCapture();
-  AttachThreadScoped ats(g_jvm);
-  ats.env()->DeleteGlobalRef(_jCapturer);
-}
-
-int32_t VideoCaptureAndroid::StartCapture(
-    const VideoCaptureCapability& capability) {
-  CriticalSectionScoped cs(&_apiCs);
-  AttachThreadScoped ats(g_jvm);
-  JNIEnv* env = ats.env();
-
-  if (_deviceInfo.GetBestMatchedCapability(
-          _deviceUniqueId, capability, _captureCapability) < 0) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                 "%s: GetBestMatchedCapability failed: %dx%d",
-                 __FUNCTION__, capability.width, capability.height);
-    return -1;
-  }
-
-  _captureDelay = _captureCapability.expectedCaptureDelay;
-
-  jmethodID j_start =
-      env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
-  assert(j_start);
-  int min_mfps = 0;
-  int max_mfps = 0;
-  _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
-                           &min_mfps, &max_mfps);
-  bool started = env->CallBooleanMethod(_jCapturer, j_start,
-                                        _captureCapability.width,
-                                        _captureCapability.height,
-                                        min_mfps, max_mfps);
-  if (started) {
-    _requestedCapability = capability;
-    _captureStarted = true;
-  }
-  return started ? 0 : -1;
-}
-
-int32_t VideoCaptureAndroid::StopCapture() {
-  _apiCs.Enter();
-  AttachThreadScoped ats(g_jvm);
-  JNIEnv* env = ats.env();
-
-  memset(&_requestedCapability, 0, sizeof(_requestedCapability));
-  memset(&_captureCapability, 0, sizeof(_captureCapability));
-  _captureStarted = false;
-  // Exit critical section to avoid blocking camera thread inside
-  // onIncomingFrame() call.
-  _apiCs.Leave();
-
-  jmethodID j_stop =
-      env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
-  return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
-}
-
-bool VideoCaptureAndroid::CaptureStarted() {
-  CriticalSectionScoped cs(&_apiCs);
-  return _captureStarted;
-}
-
-int32_t VideoCaptureAndroid::CaptureSettings(
-    VideoCaptureCapability& settings) {
-  CriticalSectionScoped cs(&_apiCs);
-  settings = _requestedCapability;
-  return 0;
-}
-
-int32_t VideoCaptureAndroid::SetCaptureRotation(VideoRotation rotation) {
-  int32_t status = VideoCaptureImpl::SetCaptureRotation(rotation);
-  if (status != 0)
-    return status;
-
-  AttachThreadScoped ats(g_jvm);
-  JNIEnv* env = ats.env();
-
-  jmethodID j_spr =
-      env->GetMethodID(g_java_capturer_class, "setPreviewRotation", "(I)V");
-  assert(j_spr);
-  int rotation_degrees;
-  if (RotationInDegrees(rotation, &rotation_degrees) != 0) {
-    assert(false);
-  }
-  env->CallVoidMethod(_jCapturer, j_spr, rotation_degrees);
-  return 0;
-}
-
-}  // namespace videocapturemodule
-}  // namespace webrtc
diff --git a/modules/video_capture/android/video_capture_android.h b/modules/video_capture/android/video_capture_android.h
deleted file mode 100644
index 8c1e7d3..0000000
--- a/modules/video_capture/android/video_capture_android.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
-#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_capture/android/device_info_android.h"
-#include "webrtc/modules/video_capture/video_capture_impl.h"
-
-namespace webrtc {
-namespace videocapturemodule {
-
-class VideoCaptureAndroid : public VideoCaptureImpl {
- public:
-  VideoCaptureAndroid(const int32_t id);
-  virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
-
-  virtual int32_t StartCapture(const VideoCaptureCapability& capability);
-  virtual int32_t StopCapture();
-  virtual bool CaptureStarted();
-  virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
-  virtual int32_t SetCaptureRotation(VideoRotation rotation);
-
-  int32_t OnIncomingFrame(uint8_t* videoFrame,
-                          size_t videoFrameLength,
-                          int32_t degrees,
-                          int64_t captureTime = 0);
-
- protected:
-  virtual ~VideoCaptureAndroid();
-
-  DeviceInfoAndroid _deviceInfo;
-  jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object.
-  VideoCaptureCapability _captureCapability;
-  VideoRotation _rotation;
-  bool _captureStarted;
-};
-
-}  // namespace videocapturemodule
-}  // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
diff --git a/modules/video_capture/ensure_initialized.cc b/modules/video_capture/ensure_initialized.cc
deleted file mode 100644
index bc606bb..0000000
--- a/modules/video_capture/ensure_initialized.cc
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-// Platform-specific initialization bits, if any, go here.
-
-#ifndef ANDROID
-
-namespace webrtc {
-namespace videocapturemodule {
-void EnsureInitialized() {}
-}  // namespace videocapturemodule
-}  // namespace webrtc
-
-#else
-
-#include <pthread.h>
-
-// Note: this dependency is dangerous since it reaches into Chromium's base.
-// There's a risk of e.g. macro clashes. This file may only be used in tests.
-#include "base/android/jni_android.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/modules/video_capture/video_capture_internal.h"
-
-namespace webrtc {
-namespace videocapturemodule {
-
-static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
-
-void EnsureInitializedOnce() {
-  JNIEnv* jni = ::base::android::AttachCurrentThread();
-  jobject context = ::base::android::GetApplicationContext();
-  JavaVM* jvm = NULL;
-  RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm));
-  RTC_CHECK_EQ(0, webrtc::SetCaptureAndroidVM(jvm, context));
-}
-
-void EnsureInitialized() {
-  RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
-}
-
-}  // namespace videocapturemodule
-}  // namespace webrtc
-
-#endif  // !ANDROID
diff --git a/modules/video_capture/ensure_initialized.h b/modules/video_capture/ensure_initialized.h
deleted file mode 100644
index 4298795..0000000
--- a/modules/video_capture/ensure_initialized.h
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-namespace webrtc {
-namespace videocapturemodule {
-
-// Ensure any necessary initialization of webrtc::videocapturemodule has
-// completed.
-void EnsureInitialized();
-
-}  // namespace videocapturemodule.
-}  // namespace webrtc.
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index 87794bb..81380f4 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -18,7 +18,6 @@
 #include "webrtc/base/scoped_ref_ptr.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/utility/interface/process_thread.h"
-#include "webrtc/modules/video_capture/ensure_initialized.h"
 #include "webrtc/modules/video_capture/include/video_capture.h"
 #include "webrtc/modules/video_capture/include/video_capture_factory.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -235,7 +234,6 @@
   VideoCaptureTest() : number_of_devices_(0) {}
 
   void SetUp() {
-    webrtc::videocapturemodule::EnsureInitialized();
     device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(0));
     assert(device_info_.get());
     number_of_devices_ = device_info_->NumberOfDevices();
diff --git a/modules/video_capture/video_capture.gypi b/modules/video_capture/video_capture.gypi
index 9163c1c..f552df7 100644
--- a/modules/video_capture/video_capture.gypi
+++ b/modules/video_capture/video_capture.gypi
@@ -17,7 +17,6 @@
       'type': 'static_library',
       'dependencies': [
         'webrtc_utility',
-        '<(webrtc_root)/common.gyp:webrtc_common',
         '<(webrtc_root)/common_video/common_video.gyp:common_video',
         '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
       ],
@@ -54,11 +53,13 @@
         {
           'target_name': 'video_capture_module_internal_impl',
           'type': 'static_library',
-          'dependencies': [
-            'video_capture_module',
-            '<(webrtc_root)/common.gyp:webrtc_common',
-          ],
           'conditions': [
+            ['OS!="android"', {
+              'dependencies': [
+                'video_capture_module',
+                '<(webrtc_root)/common.gyp:webrtc_common',
+              ],
+            }],
             ['OS=="linux"', {
               'sources': [
                 'linux/device_info_linux.cc',
@@ -115,26 +116,6 @@
                 ],
               },
             }],  # win
-            ['OS=="android"', {
-              'sources': [
-                'android/device_info_android.cc',
-                'android/device_info_android.h',
-                'android/video_capture_android.cc',
-                'android/video_capture_android.h',
-              ],
-              'conditions': [
-                ['build_json==1', {
-                  'dependencies': [
-                    '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
-                  ],
-                }],
-                ['build_icu==1', {
-                  'dependencies': [
-                    '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
-                  ],
-                }],
-              ],
-            }],  # android
             ['OS=="ios"', {
               'sources': [
                 'ios/device_info_ios.h',
@@ -164,7 +145,7 @@
         },
       ],
     }], # build_with_chromium==0
-    ['include_tests==1', {
+    ['include_tests==1 and OS!="android"', {
       'targets': [
         {
           'target_name': 'video_capture_tests',
@@ -177,8 +158,6 @@
             '<(DEPTH)/testing/gtest.gyp:gtest',
           ],
           'sources': [
-            'ensure_initialized.cc',
-            'ensure_initialized.h',
             'test/video_capture_unittest.cc',
             'test/video_capture_main_mac.mm',
           ],
@@ -198,18 +177,6 @@
                 '-lX11',
               ],
             }],
-            ['OS=="android"', {
-              'dependencies': [
-                '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
-              ],
-              # Need to disable error due to the line in
-              # base/android/jni_android.h triggering it:
-              # const BASE_EXPORT jobject GetApplicationContext()
-              # error: type qualifiers ignored on function return type
-              'cflags': [
-                '-Wno-ignored-qualifiers',
-              ],
-            }],
             ['OS=="mac"', {
               'dependencies': [
                 # Link with a special main for mac so we can use the webcam.
@@ -231,36 +198,6 @@
           ] # conditions
         },
       ], # targets
-      'conditions': [
-        ['OS=="android"', {
-          'targets': [
-            {
-              'target_name': 'video_capture_tests_apk_target',
-              'type': 'none',
-              'dependencies': [
-                '<(apk_tests_path):video_capture_tests_apk',
-              ],
-            },
-          ],
-        }],
-        ['test_isolation_mode != "noop"', {
-          'targets': [
-            {
-              'target_name': 'video_capture_tests_run',
-              'type': 'none',
-              'dependencies': [
-                'video_capture_tests',
-              ],
-              'includes': [
-                '../../build/isolate.gypi',
-              ],
-              'sources': [
-                'video_capture_tests.isolate',
-              ],
-            },
-          ],
-        }],
-      ],
     }],
   ],
 }
diff --git a/modules/video_capture/video_capture_factory.cc b/modules/video_capture/video_capture_factory.cc
index 5b44a6c..f88f916 100644
--- a/modules/video_capture/video_capture_factory.cc
+++ b/modules/video_capture/video_capture_factory.cc
@@ -17,7 +17,11 @@
 
 VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
     const char* deviceUniqueIdUTF8) {
+#if defined(ANDROID)
+  return nullptr;
+#else
   return videocapturemodule::VideoCaptureImpl::Create(id, deviceUniqueIdUTF8);
+#endif
 }
 
 VideoCaptureModule* VideoCaptureFactory::Create(const int32_t id,
@@ -27,7 +31,11 @@
 
 VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
     const int32_t id) {
+#if defined(ANDROID)
+  return nullptr;
+#else
   return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
+#endif
 }
 
 }  // namespace webrtc