Misc cleanups to webrtc/android code:
- Replace some deprecated calls/enums with their more modern equivalents (see
  the AudioRecord sketch below).
- Clean up some usage of global data and/or hide it better.
- Catch specific exceptions instead of Exception, and log the exception itself
  instead of just its message (see the try/catch sketch below).
- Assorted log message cleanups.
- Add a build_with_libjingle gyp variable to mimic build_with_chromium for
  when webrtc is built as part of a libjingle project but not as part of
  chromium.
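
A rough sketch of the deprecated-API replacements, drawn from the
WebRTCAudioDevice changes below (not a complete method):

    int minRecBufSize = AudioRecord.getMinBufferSize(
        sampleRate,
        AudioFormat.CHANNEL_IN_MONO,  // was AudioFormat.CHANNEL_CONFIGURATION_MONO
        AudioFormat.ENCODING_PCM_16BIT);
    // was Integer.parseInt(android.os.Build.VERSION.SDK)
    int apiLevel = android.os.Build.VERSION.SDK_INT;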
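
A rough sketch of the try/catch pattern, as applied in e.g.
VideoCaptureAndroid.StopCapture (camera and TAG are assumed from that class):

    try {
        camera.stopPreview();
        camera.setPreviewCallbackWithBuffer(null);
    } catch (RuntimeException e) {
        // Catch the specific runtime failure and log the throwable itself so
        // the stack trace is preserved, instead of logging only its message.
        Log.e(TAG, "Failed to stop camera", e);
        return -1;
    }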

BUG=webrtc:1169
TEST=none

Review URL: https://webrtc-codereview.appspot.com/1105010

git-svn-id: http://webrtc.googlecode.com/svn/trunk/webrtc@3554 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/build/common.gypi b/build/common.gypi
index 3bcf782..f340c18 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -17,11 +17,13 @@
           # This will be set to zero in the supplement.gypi triggered by a
           # gclient hook in the standalone build.
           'build_with_chromium%': 1,
+          'build_with_libjingle%': 0,
         },
         'build_with_chromium%': '<(build_with_chromium)',
+        'build_with_libjingle%': '<(build_with_libjingle)',
 
         'conditions': [
-          ['build_with_chromium==1', {
+          ['build_with_chromium==1 or build_with_libjingle==1', {
             'webrtc_root%': '<(DEPTH)/third_party/webrtc',
           }, {
             'webrtc_root%': '<(DEPTH)/webrtc',
@@ -29,12 +31,14 @@
         ],
       },
       'build_with_chromium%': '<(build_with_chromium)',
+      'build_with_libjingle%': '<(build_with_libjingle)',
       'webrtc_root%': '<(webrtc_root)',
 
       'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
       'include_opus%': 1,
     },
     'build_with_chromium%': '<(build_with_chromium)',
+    'build_with_libjingle%': '<(build_with_libjingle)',
     'webrtc_root%': '<(webrtc_root)',
     'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
     'include_opus%': '<(include_opus)',
@@ -121,6 +125,9 @@
         'build_libvpx%': 0,
         'include_tests%': 0,
       }],
+      ['build_with_libjingle==1', {
+        'include_tests%': 0,
+      }],
       ['target_arch=="arm"', {
         'prefer_fixed_point%': 1,
       }],
diff --git a/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java b/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
index 3bdfc94..c324b9c 100644
--- a/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
+++ b/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
@@ -61,10 +61,10 @@
     @SuppressWarnings("unused")
     private int InitRecording(int audioSource, int sampleRate) {
         // get the minimum buffer size that can be used
-        int minRecBufSize =
-                        AudioRecord.getMinBufferSize(sampleRate,
-                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,
-                                        AudioFormat.ENCODING_PCM_16BIT);
+        int minRecBufSize = AudioRecord.getMinBufferSize(
+            sampleRate,
+            AudioFormat.CHANNEL_IN_MONO,
+            AudioFormat.ENCODING_PCM_16BIT);
 
         // DoLog("min rec buf size is " + minRecBufSize);
 
@@ -83,7 +83,7 @@
             _audioRecord = new AudioRecord(
                             audioSource,
                             sampleRate,
-                            AudioFormat.CHANNEL_CONFIGURATION_MONO,
+                            AudioFormat.CHANNEL_IN_MONO,
                             AudioFormat.ENCODING_PCM_16BIT,
                             recBufSize);
 
@@ -127,7 +127,7 @@
         // get the minimum buffer size that can be used
         int minPlayBufSize = AudioTrack.getMinBufferSize(
             sampleRate,
-            AudioFormat.CHANNEL_CONFIGURATION_MONO,
+            AudioFormat.CHANNEL_OUT_MONO,
             AudioFormat.ENCODING_PCM_16BIT);
 
         // DoLog("min play buf size is " + minPlayBufSize);
@@ -149,7 +149,7 @@
             _audioTrack = new AudioTrack(
                             AudioManager.STREAM_VOICE_CALL,
                             sampleRate,
-                            AudioFormat.CHANNEL_CONFIGURATION_MONO,
+                            AudioFormat.CHANNEL_OUT_MONO,
                             AudioFormat.ENCODING_PCM_16BIT,
                             playBufSize, AudioTrack.MODE_STREAM);
         } catch (Exception e) {
@@ -388,7 +388,7 @@
             return -1;
         }
 
-        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);
+        int apiLevel = android.os.Build.VERSION.SDK_INT;
 
         if ((3 == apiLevel) || (4 == apiLevel)) {
             // 1.5 and 1.6 devices
@@ -464,7 +464,7 @@
     }
 
     private void SetAudioMode(boolean startCall) {
-        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);
+        int apiLevel = android.os.Build.VERSION.SDK_INT;
 
         if (_audioManager == null && _context != null) {
             _audioManager = (AudioManager)
diff --git a/modules/audio_device/linux/latebindingsymboltable_linux.cc b/modules/audio_device/linux/latebindingsymboltable_linux.cc
index 8f3c7c8..156ae18 100644
--- a/modules/audio_device/linux/latebindingsymboltable_linux.cc
+++ b/modules/audio_device/linux/latebindingsymboltable_linux.cc
@@ -57,7 +57,7 @@
 #endif
   if (handle == kInvalidDllHandle) {
     WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
-               "Can't load %s : %d", dll_name, GetDllError());
+               "Can't load %s : %s", dll_name, GetDllError());
   }
   return handle;
 }
@@ -66,7 +66,7 @@
 #ifdef WEBRTC_LINUX
   if (dlclose(handle) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-               "%d", GetDllError());
+               "%s", GetDllError());
   }
 #else
 #error Not implemented
diff --git a/modules/video_capture/android/device_info_android.cc b/modules/video_capture/android/device_info_android.cc
index d119531..ca16ebf 100644
--- a/modules/video_capture/android/device_info_android.cc
+++ b/modules/video_capture/android/device_info_android.cc
@@ -22,6 +22,13 @@
 namespace videocapturemodule
 {
 
+static jclass g_capabilityClass = NULL;
+
+// static
+void DeviceInfoAndroid::SetAndroidCaptureClasses(jclass capabilityClass) {
+  g_capabilityClass = capabilityClass;
+}
+
 VideoCaptureModule::DeviceInfo*
 VideoCaptureImpl::CreateDeviceInfo (const WebRtc_Word32 id) {
   videocapturemodule::DeviceInfoAndroid *deviceInfo =
@@ -172,23 +179,20 @@
     return -1;
 
   // Find the capability class
-  jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
-  if (javaCapClassLocal == NULL) {
+  jclass javaCapClass = g_capabilityClass;
+  if (javaCapClass == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't find java class VideoCaptureCapabilityAndroid.",
+                 "%s: SetAndroidCaptureClasses must be called first!",
                  __FUNCTION__);
     return -1;
   }
 
   // get the method ID for the Android Java GetCapabilityArray .
-  char signature[256];
-  sprintf(signature,
-          "(Ljava/lang/String;)[L%s;",
-          AndroidJavaCaptureCapabilityClass);
-  jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
-                                   "GetCapabilityArray",
-                                   signature);
+  jmethodID cid = env->GetMethodID(
+      javaCmDevInfoClass,
+      "GetCapabilityArray",
+      "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
   if (cid == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -216,9 +220,9 @@
     return -1;
   }
 
-  jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
-  jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
-  jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
+  jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I");
+  jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
+  jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
   if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -240,7 +244,7 @@
     cap->expectedCaptureDelay = _expectedCaptureDelay;
     cap->rawType = kVideoNV21;
     cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                  "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
                  cap->width, cap->height, cap->maxFPS);
     _captureCapabilities.Insert(i, cap);
diff --git a/modules/video_capture/android/device_info_android.h b/modules/video_capture/android/device_info_android.h
index 855a291..4db3445 100644
--- a/modules/video_capture/android/device_info_android.h
+++ b/modules/video_capture/android/device_info_android.h
@@ -15,9 +15,6 @@
 #include "../video_capture_impl.h"
 #include "../device_info_impl.h"
 
-#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
-#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"
-
 namespace webrtc
 {
 namespace videocapturemodule
@@ -32,6 +29,7 @@
 class DeviceInfoAndroid : public DeviceInfoImpl {
 
  public:
+  static void SetAndroidCaptureClasses(jclass capabilityClass);
   DeviceInfoAndroid(const WebRtc_Word32 id);
   WebRtc_Word32 Init();
   virtual ~DeviceInfoAndroid();
diff --git a/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
index ef7fc7b..9999d23 100644
--- a/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -42,7 +42,6 @@
     private boolean isCaptureStarted = false;
     private boolean isCaptureRunning = false;
     private boolean isSurfaceReady = false;
-    private SurfaceHolder surfaceHolder = null;
 
     private final int numCaptureBuffers = 3;
     private int expectedFrameSize = 0;
@@ -61,6 +60,9 @@
     public static
     void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
         Log.d(TAG, "DeleteVideoCaptureAndroid");
+        if (captureAndroid.camera == null) {
+            return;
+        }
 
         captureAndroid.StopCapture();
         captureAndroid.camera.release();
@@ -82,53 +84,44 @@
             return -1;
         }
 
-        Log.d(TAG, "tryStartCapture " + width +
-                " height " + height +" frame rate " + frameRate +
-                "isCaptureRunning " + isCaptureRunning +
-                "isSurfaceReady " + isSurfaceReady +
-                "isCaptureStarted " + isCaptureStarted);
+        Log.d(TAG, "tryStartCapture: " + width +
+            "x" + height +", frameRate: " + frameRate +
+            ", isCaptureRunning: " + isCaptureRunning +
+            ", isSurfaceReady: " + isSurfaceReady +
+            ", isCaptureStarted: " + isCaptureStarted);
 
-        if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) {
+        if (isCaptureRunning || !isCaptureStarted) {
             return 0;
         }
 
-        try {
-            camera.setPreviewDisplay(surfaceHolder);
+        CaptureCapabilityAndroid currentCapability =
+                new CaptureCapabilityAndroid();
+        currentCapability.width = width;
+        currentCapability.height = height;
+        currentCapability.maxFPS = frameRate;
+        PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
 
-            CaptureCapabilityAndroid currentCapability =
-                    new CaptureCapabilityAndroid();
-            currentCapability.width = width;
-            currentCapability.height = height;
-            currentCapability.maxFPS = frameRate;
-            PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
+        Camera.Parameters parameters = camera.getParameters();
+        parameters.setPreviewSize(currentCapability.width,
+                currentCapability.height);
+        parameters.setPreviewFormat(PIXEL_FORMAT);
+        parameters.setPreviewFrameRate(currentCapability.maxFPS);
+        camera.setParameters(parameters);
 
-            Camera.Parameters parameters = camera.getParameters();
-            parameters.setPreviewSize(currentCapability.width,
-                    currentCapability.height);
-            parameters.setPreviewFormat(PIXEL_FORMAT);
-            parameters.setPreviewFrameRate(currentCapability.maxFPS);
-            camera.setParameters(parameters);
-
-            int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
-            byte[] buffer = null;
-            for (int i = 0; i < numCaptureBuffers; i++) {
-                buffer = new byte[bufSize];
-                camera.addCallbackBuffer(buffer);
-            }
-            camera.setPreviewCallbackWithBuffer(this);
-            ownsBuffers = true;
-
-            camera.startPreview();
-            previewBufferLock.lock();
-            expectedFrameSize = bufSize;
-            isCaptureRunning = true;
-            previewBufferLock.unlock();
-
+        int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
+        byte[] buffer = null;
+        for (int i = 0; i < numCaptureBuffers; i++) {
+            buffer = new byte[bufSize];
+            camera.addCallbackBuffer(buffer);
         }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to start camera");
-            return -1;
-        }
+        camera.setPreviewCallbackWithBuffer(this);
+        ownsBuffers = true;
+
+        camera.startPreview();
+        previewBufferLock.lock();
+        expectedFrameSize = bufSize;
+        isCaptureRunning = true;
+        previewBufferLock.unlock();
 
         isCaptureRunning = true;
         return 0;
@@ -140,6 +133,9 @@
         // Get the local preview SurfaceHolder from the static render class
         localPreview = ViERenderer.GetLocalRenderer();
         if (localPreview != null) {
+            if (localPreview.getSurface() != null) {
+                surfaceCreated(localPreview);
+            }
             localPreview.addCallback(this);
         }
 
@@ -163,9 +159,8 @@
             previewBufferLock.unlock();
             camera.stopPreview();
             camera.setPreviewCallbackWithBuffer(null);
-        }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to stop camera");
+        } catch (RuntimeException e) {
+            Log.e(TAG, "Failed to stop camera", e);
             return -1;
         }
 
@@ -237,22 +232,27 @@
     public void surfaceChanged(SurfaceHolder holder,
                                int format, int width, int height) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
-
-        captureLock.lock();
-        isSurfaceReady = true;
-        surfaceHolder = holder;
-
-        tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
-        captureLock.unlock();
-        return;
     }
 
     public void surfaceCreated(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
+        captureLock.lock();
+        try {
+            camera.setPreviewDisplay(holder);
+        } catch (IOException e) {
+            Log.e(TAG, "Failed to set preview surface!", e);
+        }
+        captureLock.unlock();
     }
 
     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
-        isSurfaceReady = false;
+        captureLock.lock();
+        try {
+            camera.setPreviewDisplay(null);
+        } catch (IOException e) {
+            Log.e(TAG, "Failed to clear preview surface!", e);
+        }
+        captureLock.unlock();
     }
 }
diff --git a/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
index b0e75cc..af227cd 100644
--- a/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ b/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -88,43 +88,36 @@
     private int Init() {
         // Populate the deviceList with available cameras and their capabilities.
         Camera camera = null;
-        try{
-            if(android.os.Build.VERSION.SDK_INT > 8) {
-                // From Android 2.3 and onwards
-                for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
-                    AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+        if(android.os.Build.VERSION.SDK_INT > 8) {
+            // From Android 2.3 and onwards
+            for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+                AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
 
-                    Camera.CameraInfo info = new Camera.CameraInfo();
-                    Camera.getCameraInfo(i, info);
-                    newDevice.index = i;
-                    newDevice.orientation=info.orientation;
-                    if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
-                        newDevice.deviceUniqueName =
-                                "Camera " + i +", Facing back, Orientation "+ info.orientation;
-                        Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
+                Camera.CameraInfo info = new Camera.CameraInfo();
+                Camera.getCameraInfo(i, info);
+                newDevice.index = i;
+                newDevice.orientation=info.orientation;
+                if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+                    newDevice.deviceUniqueName =
+                            "Camera " + i +", Facing back, Orientation "+ info.orientation;
+                    Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
 
-                    }
-                    else {
-                        newDevice.deviceUniqueName =
-                                "Camera " + i +", Facing front, Orientation "+ info.orientation;
-                        newDevice.frontCameraType = FrontFacingCameraType.Android23;
-                        Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
-                    }
-
-                    camera = Camera.open(i);
-                    Camera.Parameters parameters = camera.getParameters();
-                    AddDeviceInfo(newDevice, parameters);
-                    camera.release();
-                    camera = null;
-                    deviceList.add(newDevice);
                 }
+                else {
+                    newDevice.deviceUniqueName =
+                            "Camera " + i +", Facing front, Orientation "+ info.orientation;
+                    newDevice.frontCameraType = FrontFacingCameraType.Android23;
+                    Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
+                }
+
+                camera = Camera.open(i);
+                Camera.Parameters parameters = camera.getParameters();
+                AddDeviceInfo(newDevice, parameters);
+                camera.release();
+                camera = null;
+                deviceList.add(newDevice);
             }
         }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to init VideoCaptureDeviceInfo ex" +
-                    ex.getLocalizedMessage());
-            return -1;
-        }
         VerifyCapabilities();
         return 0;
     }
@@ -149,9 +142,8 @@
             newDevice.captureCapabilies[i].height = s.height;
             newDevice.captureCapabilies[i].width = s.width;
             newDevice.captureCapabilies[i].maxFPS = maxFPS;
-            Log.v(TAG,
-                    "VideoCaptureDeviceInfo " + "maxFPS:" + maxFPS +
-                    " width:" + s.width + " height:" + s.height);
+            Log.v(TAG, "VideoCaptureDeviceInfo " + ", maxFPS: " + maxFPS +
+                ", width: " + s.width + ", height: " + s.height);
         }
     }
 
@@ -187,9 +179,9 @@
         // even though it reports that it can
         if(android.os.Build.MANUFACTURER.equals("motorola") &&
                 android.os.Build.DEVICE.equals("umts_sholes")) {
-            for(AndroidVideoCaptureDevice device:deviceList) {
-                for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
-                    capability.maxFPS=15;
+            for (AndroidVideoCaptureDevice device : deviceList) {
+                for (CaptureCapabilityAndroid capability : device.captureCapabilies) {
+                    capability.maxFPS = 15;
                 }
             }
         }
@@ -286,10 +278,14 @@
             Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
 
             return new VideoCaptureAndroid(id, context, camera, deviceToUse);
-
-        }catch (Exception ex) {
-            Log.e(TAG, "AllocateCamera Failed to open camera- ex " +
-                    ex.getLocalizedMessage());
+        } catch (NoSuchMethodException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (ClassNotFoundException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (InvocationTargetException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (IllegalAccessException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
         }
         return null;
     }
@@ -307,20 +303,13 @@
         String cameraId = parameters.get("camera-id");
         if(cameraId != null && cameraId.equals("1")) {
             // This might be a Samsung Galaxy S with a front facing camera.
-            try {
-                parameters.set("camera-id", 2);
-                camera.setParameters(parameters);
-                parameters = camera.getParameters();
-                newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
-                newDevice.orientation = 0;
-                camera.release();
-                return parameters;
-            }
-            catch (Exception ex) {
-                // Nope - it did not work.
-                Log.e(TAG, "Init Failed to open front camera camera - ex " +
-                        ex.getLocalizedMessage());
-            }
+            parameters.set("camera-id", 2);
+            camera.setParameters(parameters);
+            parameters = camera.getParameters();
+            newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
+            newDevice.orientation = 0;
+            camera.release();
+            return parameters;
         }
         camera.release();
 
diff --git a/modules/video_capture/android/video_capture_android.cc b/modules/video_capture/android/video_capture_android.cc
index f5abbc9..7321495 100644
--- a/modules/video_capture/android/video_capture_android.cc
+++ b/modules/video_capture/android/video_capture_android.cc
@@ -112,9 +112,20 @@
       return -1;
     }
 
+    jclass capabilityClassLocal = env->FindClass(
+        "org/webrtc/videoengine/CaptureCapabilityAndroid");
+    if (!capabilityClassLocal) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not find java class", __FUNCTION__);
+      return -1;
+    }
+    jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
+        capabilityClassLocal));
+    DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);
+
     // get java capture class type (note path to class packet)
     jclass javaCmDevInfoClassLocal = env->FindClass(
-        AndroidJavaCaptureDeviceInfoClass);
+        "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
     if (!javaCmDevInfoClassLocal) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                    "%s: could not find java class", __FUNCTION__);
diff --git a/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java b/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java
index d45fb81..e2c9921 100644
--- a/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java
+++ b/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java
@@ -20,7 +20,7 @@
     private static SurfaceHolder g_localRenderer;
 
     public static SurfaceView CreateRenderer(Context context) {
-        return  CreateRenderer(context,false);
+        return CreateRenderer(context, false);
     }
 
     public static SurfaceView CreateRenderer(Context context,
@@ -48,7 +48,7 @@
         SurfaceView localRender = new SurfaceView(context);
         g_localRenderer = localRender.getHolder();
         g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
-        return  localRender;
+        return localRender;
     }
 
     public static SurfaceHolder GetLocalRenderer() {
diff --git a/modules/video_render/android/video_render_android_impl.cc b/modules/video_render/android/video_render_android_impl.cc
index 0107afe..d6f7c5a 100644
--- a/modules/video_render/android/video_render_android_impl.cc
+++ b/modules/video_render/android/video_render_android_impl.cc
@@ -15,7 +15,7 @@
 #include "thread_wrapper.h"
 #include "tick_util.h"
 
-#ifdef ANDROID_LOG
+#ifdef ANDROID
 #include <stdio.h>
 #include <android/log.h>
 
diff --git a/voice_engine/voe_video_sync_impl.cc b/voice_engine/voe_video_sync_impl.cc
index 7df4603..59f5218 100644
--- a/voice_engine/voe_video_sync_impl.cc
+++ b/voice_engine/voe_video_sync_impl.cc
@@ -51,7 +51,6 @@
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "GetPlayoutTimestamp(channel=%d, timestamp=?)", channel);
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -76,7 +75,6 @@
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "SetInitTimestamp(channel=%d, timestamp=%lu)",
                  channel, timestamp);
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -101,7 +99,6 @@
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)",
                  channel, sequenceNumber);
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -125,7 +122,6 @@
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "SetMinimumPlayoutDelay(channel=%d, delayMs=%d)",
                  channel, delayMs);
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -172,7 +168,6 @@
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "GetDelayEstimate(channel=%d, delayMs=?)", channel);
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -195,7 +190,6 @@
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                "GetPlayoutBufferSize(bufferMs=?)");
-    ANDROID_NOT_SUPPORTED(_shared->statistics());
     IPHONE_NOT_SUPPORTED(_shared->statistics());
 
     if (!_shared->statistics().Initialized())
@@ -223,7 +217,7 @@
 {
     WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                  "GetRtpRtcp(channel=%i)", channel);
-    
+
     if (!_shared->statistics().Initialized())
     {
         _shared->SetLastError(VE_NOT_INITED, kTraceError);