Android: Add initialize() function to VideoCapturer interface

This CL moves some arguments, e.g. the camera thread, from the startCapture() function to a new initialize() function. These arguments are constant over the lifetime of the VideoCapturer and do not change between startCapture() calls, so setting them once allows the code to be simplified.
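
For reference, the intended call order with the new interface looks roughly like this
(a sketch; creation of the capturer, SurfaceTextureHelper and observer is elided):

  // One-time setup of the constant dependencies.
  capturer.initialize(surfaceTextureHelper, applicationContext, capturerObserver);
  // startCapture()/stopCapture() can then be called repeatedly without re-passing them.
  capturer.startCapture(width, height, framerate);
  capturer.stopCapture();
  capturer.startCapture(otherWidth, otherHeight, otherFramerate);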

This CL also fixes a camera2 bug where pendingCameraSwitchSemaphore might not be released when switchEventsHandler is null.
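
The fix moves the release out of the 'switchEventsHandler != null' check in reportError()
and onConfigured() (the semaphore itself is replaced by an AtomicBoolean), roughly:

  if (switchEventsHandler != null) {
    switchEventsHandler.onCameraSwitchError(errorDescription);
    switchEventsHandler = null;
  }
  // Always clear the pending-switch flag, even when no switch handler is registered.
  isPendingCameraSwitch.set(false);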

In camera1, the handler lock and the 'cameraThreadHandler == null' checks are replaced with an atomic boolean that tracks whether the camera is stopped.
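
In VideoCapturerAndroid the stopped-camera checks then become plain lock-free reads,
roughly:

  private final AtomicBoolean isCameraRunning = new AtomicBoolean();

  // startCapture(): claim the running state without taking a lock.
  if (isCameraRunning.getAndSet(true)) {
    Logging.e(TAG, "Camera has already been started.");
    return;
  }

  // Camera-thread callbacks bail out once stopCapture() has cleared the flag.
  if (!isCameraRunning.get()) {
    Logging.e(TAG, "onPreviewFrame: Camera is stopped");
    return;
  }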

BUG=webrtc:5519
R=sakal@webrtc.org

Review URL: https://codereview.webrtc.org/2122693002 .

Cr-Original-Commit-Position: refs/heads/master@{#13404}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 27dcacdbc137e4b49c3e75f17b5a36cdd1ad73ba
diff --git a/api/android/java/src/org/webrtc/Camera2Capturer.java b/api/android/java/src/org/webrtc/Camera2Capturer.java
index 8e44d69..8110945 100644
--- a/api/android/java/src/org/webrtc/Camera2Capturer.java
+++ b/api/android/java/src/org/webrtc/Camera2Capturer.java
@@ -35,7 +35,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Semaphore;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 @TargetApi(21)
 public class Camera2Capturer implements
@@ -58,17 +58,20 @@
   private final CameraManager cameraManager;
   private final CameraEventsHandler eventsHandler;
 
+  // Set once in initialize(), before any other calls, and therefore thread safe.
+  // ---------------------------------------------------------------------------------------------
+  private SurfaceTextureHelper surfaceTextureHelper;
+  private Context applicationContext;
+  private CapturerObserver capturerObserver;
+  // Use postOnCameraThread() instead of posting directly to the handler - this way all callbacks
+  // with a specified token can be removed at once.
+  private Handler cameraThreadHandler;
 
   // Shared state - guarded by cameraStateLock. Will only be edited from camera thread (when it is
   // running).
   // ---------------------------------------------------------------------------------------------
   private final Object cameraStateLock = new Object();
-  private CameraState cameraState = CameraState.IDLE;
-  // |cameraThreadHandler| must be synchronized on |cameraStateLock| when not on the camera thread,
-  // or when modifying the reference. Use postOnCameraThread() instead of posting directly to
-  // the handler - this way all callbacks with a specifed token can be removed at once.
-  // |cameraThreadHandler| must be null if and only if CameraState is IDLE.
-  private Handler cameraThreadHandler;
+  private volatile CameraState cameraState = CameraState.IDLE;
   // Remember the requested format in case we want to switch cameras.
   private int requestedWidth;
   private int requestedHeight;
@@ -79,22 +82,18 @@
   private boolean isFrontCamera;
   private int cameraOrientation;
 
-  // Semaphore for allowing only one switch at a time.
-  private final Semaphore pendingCameraSwitchSemaphore = new Semaphore(1);
-  // Guarded by pendingCameraSwitchSemaphore
+  // Atomic boolean for allowing only one switch at a time.
+  private final AtomicBoolean isPendingCameraSwitch = new AtomicBoolean();
+  // Guarded by isPendingCameraSwitch.
   private CameraSwitchHandler switchEventsHandler;
 
   // Internal state - must only be modified from camera thread
   // ---------------------------------------------------------
   private CaptureFormat captureFormat;
-  private Context applicationContext;
-  private CapturerObserver capturerObserver;
   private CameraStatistics cameraStatistics;
-  private SurfaceTextureHelper surfaceTextureHelper;
   private CameraCaptureSession captureSession;
   private Surface surface;
   private CameraDevice cameraDevice;
-  private CameraStateCallback cameraStateCallback;
 
   // Factor to convert between Android framerates and CaptureFormat.FramerateRange. It will be
   // either 1 or 1000.
@@ -111,28 +110,16 @@
     setCameraName(cameraName);
   }
 
-  /**
-   * Helper method for checking method is executed on camera thread. Also allows calls from other
-   * threads if camera is closed.
-   */
-  private void checkIsOnCameraThread() {
-    if (cameraState == CameraState.IDLE) {
-      return;
-    }
-
-    checkIsStrictlyOnCameraThread();
+  private boolean isOnCameraThread() {
+    return Thread.currentThread() == cameraThreadHandler.getLooper().getThread();
   }
 
   /**
-   * Like checkIsOnCameraThread but doesn't allow the camera to be stopped.
+   * Helper method for checking that a method is executed on the camera thread.
    */
-  private void checkIsStrictlyOnCameraThread() {
-    if (cameraThreadHandler == null) {
-      throw new IllegalStateException("Camera is closed.");
-    }
-
-    if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
-      throw new IllegalStateException("Wrong thread");
+  private void checkIsOnCameraThread() {
+    if (!isOnCameraThread()) {
+      throw new IllegalStateException("Not on camera thread");
     }
   }
 
@@ -247,14 +234,14 @@
    * thread and camera must not be stopped.
    */
   private void reportError(String errorDescription) {
-    checkIsStrictlyOnCameraThread();
+    checkIsOnCameraThread();
     Logging.e(TAG, "Error in camera at state " + cameraState + ": " + errorDescription);
 
     if (switchEventsHandler != null) {
       switchEventsHandler.onCameraSwitchError(errorDescription);
       switchEventsHandler = null;
-      pendingCameraSwitchSemaphore.release();
     }
+    isPendingCameraSwitch.set(false);
 
     switch (cameraState) {
       case STARTING:
@@ -276,22 +263,19 @@
   }
 
   private void closeAndRelease() {
-    checkIsStrictlyOnCameraThread();
+    checkIsOnCameraThread();
 
     Logging.d(TAG, "Close and release.");
     setCameraState(CameraState.STOPPING);
 
     // Remove all pending Runnables posted from |this|.
     cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
-    applicationContext = null;
-    capturerObserver = null;
     if (cameraStatistics != null) {
       cameraStatistics.release();
       cameraStatistics = null;
     }
     if (surfaceTextureHelper != null) {
       surfaceTextureHelper.stopListening();
-      surfaceTextureHelper = null;
     }
     if (captureSession != null) {
       captureSession.close();
@@ -320,7 +304,6 @@
       Logging.w(TAG, "closeAndRelease called while cameraDevice is null");
       setCameraState(CameraState.IDLE);
     }
-    this.cameraStateCallback = null;
   }
 
   /**
@@ -328,16 +311,9 @@
    */
   private void setCameraState(CameraState newState) {
     // State must only be modified on the camera thread. It can be edited from other threads
-    // if cameraState is IDLE since there is no camera thread.
-    checkIsOnCameraThread();
-
-    if (newState != CameraState.IDLE) {
-      if (cameraThreadHandler == null) {
-        throw new IllegalStateException(
-            "cameraThreadHandler must be null if and only if CameraState is IDLE.");
-      }
-    } else {
-      cameraThreadHandler = null;
+    // if cameraState is IDLE since the camera thread is idle and not modifying the state.
+    if (cameraState != CameraState.IDLE) {
+      checkIsOnCameraThread();
     }
 
     switch (newState) {
@@ -376,37 +352,49 @@
    */
   private void openCamera() {
     try {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
 
       if (cameraState != CameraState.STARTING) {
         throw new IllegalStateException("Camera should be in state STARTING in openCamera.");
       }
 
-      if (cameraThreadHandler == null) {
-        throw new RuntimeException("Someone set cameraThreadHandler to null while the camera "
-            + "state was STARTING. This should never happen");
-      }
-
       // Camera is in state STARTING so cameraName will not be edited.
-      cameraManager.openCamera(cameraName, cameraStateCallback, cameraThreadHandler);
+      cameraManager.openCamera(cameraName, new CameraStateCallback(), cameraThreadHandler);
     } catch (CameraAccessException e) {
       reportError("Failed to open camera: " + e);
     }
   }
 
-  private void startCaptureOnCameraThread(
-      final int requestedWidth, final int requestedHeight, final int requestedFramerate,
-      final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
-      final CapturerObserver capturerObserver) {
-    checkIsStrictlyOnCameraThread();
+  private boolean isInitialized() {
+    return applicationContext != null && capturerObserver != null;
+  }
 
-    firstFrameReported = false;
-    consecutiveCameraOpenFailures = 0;
-
+  @Override
+  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+      CapturerObserver capturerObserver) {
+    Logging.d(TAG, "initialize");
+    if (applicationContext == null) {
+      throw new IllegalArgumentException("applicationContext not set.");
+    }
+    if (capturerObserver == null) {
+      throw new IllegalArgumentException("capturerObserver not set.");
+    }
+    if (isInitialized()) {
+      throw new IllegalStateException("Already initialized");
+    }
     this.applicationContext = applicationContext;
     this.capturerObserver = capturerObserver;
     this.surfaceTextureHelper = surfaceTextureHelper;
-    this.cameraStateCallback = new CameraStateCallback();
+    this.cameraThreadHandler =
+        surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
+  }
+
+  private void startCaptureOnCameraThread(
+      final int requestedWidth, final int requestedHeight, final int requestedFramerate) {
+    checkIsOnCameraThread();
+
+    firstFrameReported = false;
+    consecutiveCameraOpenFailures = 0;
 
     synchronized (cameraStateLock) {
       // Remember the requested format in case we want to switch cameras.
@@ -466,36 +454,32 @@
    */
   @Override
   public void startCapture(
-      final int requestedWidth, final int requestedHeight, final int requestedFramerate,
-      final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
-      final CapturerObserver capturerObserver) {
+      final int requestedWidth, final int requestedHeight, final int requestedFramerate) {
     Logging.d(TAG, "startCapture requested: " + requestedWidth + "x" + requestedHeight
         + "@" + requestedFramerate);
+    if (!isInitialized()) {
+      throw new IllegalStateException("startCapture called in uninitialized state");
+    }
     if (surfaceTextureHelper == null) {
-      throw new IllegalArgumentException("surfaceTextureHelper not set.");
+      capturerObserver.onCapturerStarted(false /* success */);
+      if (eventsHandler != null) {
+        eventsHandler.onCameraError("No SurfaceTexture created.");
+      }
+      return;
     }
-    if (applicationContext == null) {
-      throw new IllegalArgumentException("applicationContext not set.");
-    }
-    if (capturerObserver == null) {
-      throw new IllegalArgumentException("capturerObserver not set.");
-    }
-
     synchronized (cameraStateLock) {
       waitForCameraToStopIfStopping();
       if (cameraState != CameraState.IDLE) {
         Logging.e(TAG, "Unexpected camera state for startCapture: " + cameraState);
         return;
       }
-      this.cameraThreadHandler = surfaceTextureHelper.getHandler();
       setCameraState(CameraState.STARTING);
     }
 
     postOnCameraThread(new Runnable() {
       @Override
       public void run() {
-        startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate,
-            surfaceTextureHelper, applicationContext, capturerObserver);
+        startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate);
       }
     });
   }
@@ -521,14 +505,14 @@
 
     @Override
     public void onDisconnected(CameraDevice camera) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
       cameraDevice = camera;
       reportError("Camera disconnected.");
     }
 
     @Override
     public void onError(CameraDevice camera, int errorCode) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
       cameraDevice = camera;
 
       if (cameraState == CameraState.STARTING && (
@@ -555,7 +539,7 @@
 
     @Override
     public void onOpened(CameraDevice camera) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
 
       Logging.d(TAG, "Camera opened.");
       if (cameraState != CameraState.STARTING) {
@@ -576,7 +560,7 @@
 
     @Override
     public void onClosed(CameraDevice camera) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
 
       Logging.d(TAG, "Camera device closed.");
 
@@ -597,14 +581,14 @@
   final class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
     @Override
     public void onConfigureFailed(CameraCaptureSession session) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
       captureSession = session;
       reportError("Failed to configure capture session.");
     }
 
     @Override
     public void onConfigured(CameraCaptureSession session) {
-      checkIsStrictlyOnCameraThread();
+      checkIsOnCameraThread();
       Logging.d(TAG, "Camera capture session configured.");
       captureSession = session;
       try {
@@ -642,8 +626,8 @@
       if (switchEventsHandler != null) {
         switchEventsHandler.onCameraSwitchDone(isFrontCamera);
         switchEventsHandler = null;
-        pendingCameraSwitchSemaphore.release();
       }
+      isPendingCameraSwitch.set(false);
     }
   }
 
@@ -692,8 +676,9 @@
       return;
     }
     // Do not handle multiple camera switch request to avoid blocking camera thread by handling too
-    // many switch request from a queue. We have to be careful to always release this.
-    if (!pendingCameraSwitchSemaphore.tryAcquire()) {
+    // many switch requests from a queue. We have to be careful to always release
+    // |isPendingCameraSwitch| by setting it to false when done.
+    if (isPendingCameraSwitch.getAndSet(true)) {
       Logging.w(TAG, "Ignoring camera switch request.");
       if (switchEventsHandler != null) {
         switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
@@ -702,9 +687,6 @@
     }
 
     final String newCameraId;
-    final SurfaceTextureHelper surfaceTextureHelper;
-    final Context applicationContext;
-    final CapturerObserver capturerObserver;
     final int requestedWidth;
     final int requestedHeight;
     final int requestedFramerate;
@@ -717,7 +699,7 @@
         if (switchEventsHandler != null) {
           switchEventsHandler.onCameraSwitchError("Camera is stopped.");
         }
-        pendingCameraSwitchSemaphore.release();
+        isPendingCameraSwitch.set(false);
         return;
       }
 
@@ -731,11 +713,6 @@
       final int newCameraIndex = (currentCameraIndex + 1) % cameraIds.length;
       newCameraId = cameraIds[newCameraIndex];
 
-      // Remember parameters. These are not null since camera is in RUNNING state. They aren't
-      // edited either while camera is in RUNNING state.
-      surfaceTextureHelper = this.surfaceTextureHelper;
-      applicationContext = this.applicationContext;
-      capturerObserver = this.capturerObserver;
       requestedWidth = this.requestedWidth;
       requestedHeight = this.requestedHeight;
       requestedFramerate = this.requestedFramerate;
@@ -745,8 +722,7 @@
     // Make the switch.
     stopCapture();
     setCameraName(newCameraId);
-    startCapture(requestedWidth, requestedHeight, requestedFramerate, surfaceTextureHelper,
-        applicationContext, capturerObserver);
+    startCapture(requestedWidth, requestedHeight, requestedFramerate);
 
     // Note: switchEventsHandler will be called from onConfigured / reportError.
   }
@@ -761,10 +737,6 @@
     postOnCameraThread(new Runnable() {
       @Override
       public void run() {
-        if (capturerObserver == null) {
-          Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
-          return;
-        }
         Logging.d(TAG,
             "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate);
         capturerObserver.onOutputFormatRequest(width, height, framerate);
@@ -776,10 +748,6 @@
   // is running.
   @Override
   public void changeCaptureFormat(final int width, final int height, final int framerate) {
-    final SurfaceTextureHelper surfaceTextureHelper;
-    final Context applicationContext;
-    final CapturerObserver capturerObserver;
-
     synchronized (cameraStateLock) {
       waitForCameraToStartIfStarting();
 
@@ -791,17 +759,12 @@
       requestedWidth = width;
       requestedHeight = height;
       requestedFramerate = framerate;
-
-      surfaceTextureHelper = this.surfaceTextureHelper;
-      applicationContext = this.applicationContext;
-      capturerObserver = this.capturerObserver;
     }
 
     // Make the switch.
     stopCapture();
     // TODO(magjed/sakal): Just recreate session.
-    startCapture(width, height, framerate,
-        surfaceTextureHelper, applicationContext, capturerObserver);
+    startCapture(width, height, framerate);
   }
 
   @Override
@@ -896,7 +859,7 @@
   @Override
   public void onTextureFrameAvailable(
       int oesTextureId, float[] transformMatrix, long timestampNs) {
-    checkIsStrictlyOnCameraThread();
+    checkIsOnCameraThread();
 
     if (eventsHandler != null && !firstFrameReported) {
       eventsHandler.onFirstFrameAvailable();
diff --git a/api/android/java/src/org/webrtc/VideoCapturer.java b/api/android/java/src/org/webrtc/VideoCapturer.java
index 452009b..afa3a05 100644
--- a/api/android/java/src/org/webrtc/VideoCapturer.java
+++ b/api/android/java/src/org/webrtc/VideoCapturer.java
@@ -88,14 +88,21 @@
   List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats();
 
   /**
-   * Start capturing frames in a format that is as close as possible to |width| x |height| and
-   * |framerate|. If the VideoCapturer wants to deliver texture frames, it should do this by
-   * rendering on the SurfaceTexture in |surfaceTextureHelper|, register itself as a listener,
-   * and forward the texture frames to CapturerObserver.onTextureFrameCaptured().
+   * This function is used to initialize the camera thread, the Android application context, and
+   * the capturer observer. It will be called only once and before any startCapture() request. The
+   * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
+   * to deliver texture frames, it should do this by rendering on the SurfaceTexture in
+   * |surfaceTextureHelper|, registering itself as a listener, and forwarding the texture frames to
+   * CapturerObserver.onTextureFrameCaptured().
    */
-  void startCapture(
-      int width, int height, int framerate, SurfaceTextureHelper surfaceTextureHelper,
-      Context applicationContext, CapturerObserver frameObserver);
+  void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+      CapturerObserver capturerObserver);
+
+  /**
+   * Start capturing frames in a format that is as close as possible to |width| x |height| and
+   * |framerate|.
+   */
+  void startCapture(int width, int height, int framerate);
 
   /**
    * Stop capturing. This function should block until capture is actually stopped.
diff --git a/api/android/java/src/org/webrtc/VideoCapturerAndroid.java b/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
index 8f6f911..1e9c7ee 100644
--- a/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
+++ b/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
@@ -23,6 +23,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -43,15 +44,14 @@
     CameraVideoCapturer,
     android.hardware.Camera.PreviewCallback,
     SurfaceTextureHelper.OnTextureFrameAvailableListener {
-  private final static String TAG = "VideoCapturerAndroid";
+  private static final String TAG = "VideoCapturerAndroid";
   private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
 
   private android.hardware.Camera camera;  // Only non-null while capturing.
-  private final Object handlerLock = new Object();
-  // |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
-  // or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
-  // the handler - this way all callbacks with a specifed token can be removed at once.
-  private Handler cameraThreadHandler;
+  private final AtomicBoolean isCameraRunning = new AtomicBoolean();
+  // Use maybePostOnCameraThread() instead of posting directly to the handler - this way all
+  // callbacks with a specified token can be removed at once.
+  private volatile Handler cameraThreadHandler;
   private Context applicationContext;
   // Synchronization lock for |id|.
   private final Object cameraIdLock = new Object();
@@ -117,10 +117,8 @@
 
   public void printStackTrace() {
     Thread cameraThread = null;
-    synchronized (handlerLock) {
-      if (cameraThreadHandler != null) {
-        cameraThread = cameraThreadHandler.getLooper().getThread();
-      }
+    if (cameraThreadHandler != null) {
+      cameraThread = cameraThreadHandler.getLooper().getThread();
     }
     if (cameraThread != null) {
       StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
@@ -232,12 +230,10 @@
   }
 
   private void checkIsOnCameraThread() {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "Camera is stopped - can't check thread.");
-      } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
-        throw new IllegalStateException("Wrong thread");
-      }
+    if (cameraThreadHandler == null) {
+      Logging.e(TAG, "Camera is not initialized - can't check thread.");
+    } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+      throw new IllegalStateException("Wrong thread");
     }
   }
 
@@ -246,11 +242,9 @@
   }
 
   private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
-    synchronized (handlerLock) {
-      return cameraThreadHandler != null
-          && cameraThreadHandler.postAtTime(
-              runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
-    }
+    return cameraThreadHandler != null && isCameraRunning.get()
+        && cameraThreadHandler.postAtTime(
+            runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
   }
 
   @Override
@@ -258,67 +252,75 @@
     Logging.d(TAG, "dispose");
   }
 
-  // Note that this actually opens the camera, and Camera callbacks run on the
-  // thread that calls open(), so this is done on the CameraThread.
+  private boolean isInitialized() {
+    return applicationContext != null && frameObserver != null;
+  }
+
   @Override
-  public void startCapture(
-      final int width, final int height, final int framerate,
-      final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
-      final CapturerObserver frameObserver) {
-    Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
-    if (surfaceTextureHelper == null) {
-      frameObserver.onCapturerStarted(false /* success */);
-      if (eventsHandler != null) {
-        eventsHandler.onCameraError("No SurfaceTexture created.");
-      }
-      return;
-    }
+  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+      CapturerObserver frameObserver) {
+    Logging.d(TAG, "initialize");
     if (applicationContext == null) {
       throw new IllegalArgumentException("applicationContext not set.");
     }
     if (frameObserver == null) {
       throw new IllegalArgumentException("frameObserver not set.");
     }
-    synchronized (handlerLock) {
-      if (this.cameraThreadHandler != null) {
-        throw new RuntimeException("Camera has already been started.");
+    if (isInitialized()) {
+      throw new IllegalStateException("Already initialized");
+    }
+    this.applicationContext = applicationContext;
+    this.frameObserver = frameObserver;
+    this.surfaceHelper = surfaceTextureHelper;
+    this.cameraThreadHandler =
+        surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
+  }
+
+  // Note that this actually opens the camera, and Camera callbacks run on the
+  // thread that calls open(), so this is done on the CameraThread.
+  @Override
+  public void startCapture(final int width, final int height, final int framerate) {
+    Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
+    if (!isInitialized()) {
+      throw new IllegalStateException("startCapture called in uninitialized state");
+    }
+    if (surfaceHelper == null) {
+      frameObserver.onCapturerStarted(false /* success */);
+      if (eventsHandler != null) {
+        eventsHandler.onCameraError("No SurfaceTexture created.");
       }
-      this.cameraThreadHandler = surfaceTextureHelper.getHandler();
-      this.surfaceHelper = surfaceTextureHelper;
-      final boolean didPost = maybePostOnCameraThread(new Runnable() {
-        @Override
-        public void run() {
-          openCameraAttempts = 0;
-          startCaptureOnCameraThread(width, height, framerate, frameObserver,
-              applicationContext);
-        }
-      });
-      if (!didPost) {
-        frameObserver.onCapturerStarted(false);
-        if (eventsHandler != null) {
-          eventsHandler.onCameraError("Could not post task to camera thread.");
-        }
+      return;
+    }
+    if (isCameraRunning.getAndSet(true)) {
+      Logging.e(TAG, "Camera has already been started.");
+      return;
+    }
+    final boolean didPost = maybePostOnCameraThread(new Runnable() {
+      @Override
+      public void run() {
+        openCameraAttempts = 0;
+        startCaptureOnCameraThread(width, height, framerate);
       }
+    });
+    if (!didPost) {
+      frameObserver.onCapturerStarted(false);
+      if (eventsHandler != null) {
+        eventsHandler.onCameraError("Could not post task to camera thread.");
+      }
+      isCameraRunning.set(false);
     }
   }
 
-  private void startCaptureOnCameraThread(
-      final int width, final int height, final int framerate, final CapturerObserver frameObserver,
-      final Context applicationContext) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
+  private void startCaptureOnCameraThread(final int width, final int height, final int framerate) {
+    checkIsOnCameraThread();
+    if (!isCameraRunning.get()) {
+      Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
+      return;
     }
     if (camera != null) {
       Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
       return;
     }
-    this.applicationContext = applicationContext;
-    this.frameObserver = frameObserver;
     this.firstFrameReported = false;
 
     try {
@@ -337,9 +339,9 @@
         if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
           Logging.e(TAG, "Camera.open failed, retrying", e);
           maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
-            @Override public void run() {
-              startCaptureOnCameraThread(width, height, framerate, frameObserver,
-                  applicationContext);
+            @Override
+            public void run() {
+              startCaptureOnCameraThread(width, height, framerate);
             }
           });
           return;
@@ -373,13 +375,10 @@
 
   // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
   private void startPreviewOnCameraThread(int width, int height, int framerate) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null || camera == null) {
-        Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
+    checkIsOnCameraThread();
+    if (!isCameraRunning.get() || camera == null) {
+      Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
+      return;
     }
     Logging.d(
         TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
@@ -489,13 +488,7 @@
   }
 
   private void stopCaptureOnCameraThread(boolean stopHandler) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
-      } else {
-        checkIsOnCameraThread();
-      }
-    }
+    checkIsOnCameraThread();
     Logging.d(TAG, "stopCaptureOnCameraThread");
     // Note that the camera might still not be started here if startCaptureOnCameraThread failed
     // and we posted a retry.
@@ -505,21 +498,15 @@
       surfaceHelper.stopListening();
     }
     if (stopHandler) {
-      synchronized (handlerLock) {
-        // Clear the cameraThreadHandler first, in case stopPreview or
-        // other driver code deadlocks. Deadlock in
-        // android.hardware.Camera._stopPreview(Native Method) has
-        // been observed on Nexus 5 (hammerhead), OS version LMY48I.
-        // The camera might post another one or two preview frames
-        // before stopped, so we have to check for a null
-        // cameraThreadHandler in our handler. Remove all pending
-        // Runnables posted from |this|.
-        if (cameraThreadHandler != null) {
-          cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
-          cameraThreadHandler = null;
-        }
-        surfaceHelper = null;
-      }
+      // Clear the cameraThreadHandler first, in case stopPreview or
+      // other driver code deadlocks. Deadlock in
+      // android.hardware.Camera._stopPreview(Native Method) has
+      // been observed on Nexus 5 (hammerhead), OS version LMY48I.
+      // The camera might post another one or two preview frames
+      // before it is stopped, so we have to check |isCameraRunning|.
+      // Remove all pending Runnables posted from |this|.
+      isCameraRunning.set(false);
+      cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
     }
     if (cameraStatistics != null) {
       cameraStatistics.release();
@@ -545,33 +532,22 @@
   }
 
   private void switchCameraOnCameraThread() {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
+    checkIsOnCameraThread();
+    if (!isCameraRunning.get()) {
+      Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
+      return;
     }
     Logging.d(TAG, "switchCameraOnCameraThread");
     stopCaptureOnCameraThread(false /* stopHandler */);
     synchronized (cameraIdLock) {
       id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
     }
-    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
-        applicationContext);
+    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate);
     Logging.d(TAG, "switchCameraOnCameraThread done");
   }
 
   private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null || camera == null) {
-        Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
-    }
+    checkIsOnCameraThread();
     Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
         "@" + framerate);
     frameObserver.onOutputFormatRequest(width, height, framerate);
@@ -611,13 +587,10 @@
   // Called on cameraThread so must not "synchronized".
   @Override
   public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "onPreviewFrame: Camera is stopped");
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
+    checkIsOnCameraThread();
+    if (!isCameraRunning.get()) {
+      Logging.e(TAG, "onPreviewFrame: Camera is stopped");
+      return;
     }
     if (!queuedBuffers.contains(data)) {
       // |data| is an old invalid buffer.
@@ -644,14 +617,11 @@
   @Override
   public void onTextureFrameAvailable(
       int oesTextureId, float[] transformMatrix, long timestampNs) {
-    synchronized (handlerLock) {
-      if (cameraThreadHandler == null) {
-        Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
-        surfaceHelper.returnTextureFrame();
-        return;
-      } else {
-        checkIsOnCameraThread();
-      }
+    checkIsOnCameraThread();
+    if (!isCameraRunning.get()) {
+      Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
+      surfaceHelper.returnTextureFrame();
+      return;
     }
     if (eventsHandler != null && !firstFrameReported) {
       eventsHandler.onFirstFrameAvailable();
diff --git a/api/android/jni/androidvideocapturer_jni.cc b/api/android/jni/androidvideocapturer_jni.cc
index 150f1c4..f9c334b 100644
--- a/api/android/jni/androidvideocapturer_jni.cc
+++ b/api/android/jni/androidvideocapturer_jni.cc
@@ -43,6 +43,21 @@
           jni, "Camera SurfaceTextureHelper", j_egl_context)),
       capturer_(nullptr) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+  jobject j_frame_observer =
+      jni->NewObject(*j_observer_class_,
+                     GetMethodID(jni, *j_observer_class_, "<init>", "(J)V"),
+                     jlongFromPointer(this));
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  jni->CallVoidMethod(
+      *j_video_capturer_,
+      GetMethodID(jni, *j_video_capturer_class_, "initialize",
+                  "(Lorg/webrtc/SurfaceTextureHelper;Landroid/content/"
+                  "Context;Lorg/webrtc/VideoCapturer$CapturerObserver;)V"),
+      surface_texture_helper_
+          ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+          : nullptr,
+      application_context_, j_frame_observer);
+  CHECK_EXCEPTION(jni) << "error during VideoCapturer.initialize()";
   thread_checker_.DetachFromThread();
 }
 
@@ -65,22 +80,9 @@
     capturer_ = capturer;
     invoker_.reset(new rtc::GuardedAsyncInvoker());
   }
-  jobject j_frame_observer =
-      jni()->NewObject(*j_observer_class_,
-                       GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
-                       jlongFromPointer(this));
-  CHECK_EXCEPTION(jni()) << "error during NewObject";
-
-  jmethodID m = GetMethodID(
-      jni(), *j_video_capturer_class_, "startCapture",
-      "(IIILorg/webrtc/SurfaceTextureHelper;Landroid/content/Context;"
-      "Lorg/webrtc/VideoCapturer$CapturerObserver;)V");
-  jni()->CallVoidMethod(
-      *j_video_capturer_, m, width, height, framerate,
-      surface_texture_helper_
-          ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
-          : nullptr,
-      application_context_, j_frame_observer);
+  jmethodID m =
+      GetMethodID(jni(), *j_video_capturer_class_, "startCapture", "(III)V");
+  jni()->CallVoidMethod(*j_video_capturer_, m, width, height, framerate);
   CHECK_EXCEPTION(jni()) << "error during VideoCapturer.startCapture";
 }
 
diff --git a/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
index 43ee88d..729e99a 100644
--- a/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
+++ b/api/androidtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -307,19 +307,23 @@
   }
 
   // Internal helper methods
-  private CapturerInstance createCapturer(String name) {
+  private CapturerInstance createCapturer(String name, boolean initialize) {
     CapturerInstance instance = new CapturerInstance();
     instance.cameraEvents = new CameraEvents();
     instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
     instance.surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     instance.observer = new FakeCapturerObserver();
+    if (initialize) {
+      instance.capturer.initialize(
+          instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
+    }
     instance.supportedFormats = instance.capturer.getSupportedFormats();
     return instance;
   }
 
-  private CapturerInstance createCapturer() {
-    return createCapturer("");
+  private CapturerInstance createCapturer(boolean initialize) {
+    return createCapturer("", initialize);
   }
 
   private void startCapture(CapturerInstance instance) {
@@ -330,8 +334,7 @@
     final CameraEnumerationAndroid.CaptureFormat format =
         instance.supportedFormats.get(formatIndex);
 
-    instance.capturer.startCapture(format.width, format.height, format.framerate.max,
-        instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
+    instance.capturer.startCapture(format.width, format.height, format.framerate.max);
     instance.format = format;
   }
 
@@ -390,7 +393,7 @@
       return;
     }
 
-    final CapturerInstance capturerInstance = createCapturer(name);
+    final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithRenderer(capturerInstance.capturer);
     assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
@@ -400,12 +403,12 @@
 
   // Test methods
   public void createCapturerAndDispose() {
-    disposeCapturer(createCapturer());
+    disposeCapturer(createCapturer(true /* initialize */));
   }
 
   public void createNonExistingCamera() {
     try {
-      disposeCapturer(createCapturer("non-existing camera"));
+      disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
     } catch (IllegalArgumentException e) {
       return;
     }
@@ -432,7 +435,7 @@
       return;
     }
 
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithRenderer(capturerInstance.capturer);
 
@@ -463,7 +466,7 @@
   }
 
   public void cameraEventsInvoked() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);
     // Make sure camera is started and first frame is received and then stop it.
     assertTrue(capturerInstance.observer.waitForCapturerToStart());
@@ -476,7 +479,7 @@
   }
 
   public void cameraCallsAfterStop() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);
     // Make sure camera is started and then stop it.
     assertTrue(capturerInstance.observer.waitForCapturerToStart());
@@ -492,7 +495,7 @@
   }
 
   public void stopRestartVideoSource() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithRenderer(capturerInstance.capturer);
 
@@ -511,7 +514,7 @@
   }
 
   public void startStopWithDifferentResolutions() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
 
     for(int i = 0; i < 3 ; ++i) {
       startCapture(capturerInstance, i);
@@ -544,7 +547,7 @@
   }
 
   public void returnBufferLate() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);
     assertTrue(capturerInstance.observer.waitForCapturerToStart());
 
@@ -568,7 +571,7 @@
 
   public void returnBufferLateEndToEnd()
       throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
     // Wait for at least one frame that has not been returned.
@@ -596,7 +599,7 @@
   }
 
   public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);
     // Make sure camera is started.
     assertTrue(capturerInstance.observer.waitForCapturerToStart());
@@ -610,7 +613,7 @@
   }
 
   public void scaleCameraOutput() throws InterruptedException {
-    final CapturerInstance capturerInstance = createCapturer();
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
     final VideoTrackWithRenderer videoTrackWithRenderer =
         createVideoTrackWithRenderer(capturerInstance.capturer);
     assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
@@ -644,7 +647,7 @@
   public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
     final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
     // At this point camera is not actually opened.
-    final CapturerInstance capturerInstance = createCapturer(cameraName);
+    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
 
     final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
 
@@ -665,7 +668,7 @@
   public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
     final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
     // At this point camera is not actually opened.
-    final CapturerInstance capturerInstance = createCapturer(cameraName);
+    final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);
 
     Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
     final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
@@ -689,7 +692,7 @@
   public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
     final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
     // At this point camera is not actually opened.
-    final CapturerInstance capturerInstance = createCapturer(cameraName);
+    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
 
     final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);