/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;

import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Metrics.Histogram;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
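/**
 * A {@link CameraSession} backed by the deprecated android.hardware.Camera
 * API. Sessions are created asynchronously through {@link #create} and
 * deliver frames either as OES textures or as preview byte buffers,
 * depending on |captureToTexture|. Apart from {@link #stop}, which may be
 * called from any thread, all methods must run on the thread the session was
 * created on (the camera thread).
 *
 * <p>Minimal usage sketch; the callback, events, and helper instances are
 * placeholders the caller is assumed to provide:
 *
 * <pre>{@code
 * Camera1Session.create(
 *     sessionCallback, cameraEvents, true, applicationContext,
 *     surfaceTextureHelper, cameraId, 1280, 720, 30);
 * }</pre>
 */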
@SuppressWarnings("deprecation")
public class Camera1Session implements CameraSession {
private static final String TAG = "Camera1Session";
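  // Number of preview buffers rotated through Camera.addCallbackBuffer() in
  // byte buffer capture mode; the extra buffers let the camera keep capturing
  // while an already delivered frame is still being consumed.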
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
private static final Histogram camera1StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
  private enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final Events events;
private final boolean captureToTexture;
private final Context applicationContext;
private final SurfaceTextureHelper surfaceTextureHelper;
private final int cameraId;
private final int width;
private final int height;
private final int framerate;
private final android.hardware.Camera camera;
private final android.hardware.Camera.CameraInfo info;
private final CaptureFormat captureFormat;
  // Used only for stats; accessed on the camera thread only.
  private final long constructionTimeNs; // Construction time of this class.
private SessionState state;
private boolean firstFrameReported = false;
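  /**
   * Factory method: opens |cameraId|, configures the closest supported
   * capture format and picture size, and reports the outcome through
   * |callback| - onDone() with a running session on success, onFailure() if
   * the camera cannot be opened or the preview texture cannot be set.
   */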
public static void create(
final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper,
final int cameraId, final int width, final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
final android.hardware.Camera camera;
try {
camera = android.hardware.Camera.open(cameraId);
} catch (RuntimeException e) {
callback.onFailure(e.getMessage());
return;
}
try {
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
} catch (IOException e) {
camera.release();
callback.onFailure(e.getMessage());
return;
}
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
final android.hardware.Camera.Parameters parameters = camera.getParameters();
final CaptureFormat captureFormat = findClosestCaptureFormat(
parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
// Initialize the capture buffers.
if (!captureToTexture) {
final int frameSize = captureFormat.frameSize();
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
camera.addCallbackBuffer(buffer.array());
}
}
    // Calculate orientation manually and send it as CVO instead.
camera.setDisplayOrientation(0 /* degrees */);
callback.onDone(new Camera1Session(
events, captureToTexture, applicationContext, surfaceTextureHelper,
cameraId, width, height, framerate,
camera, info, captureFormat, constructionTimeNs));
}
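  // Applies the chosen fps range, preview format, and picture size, and
  // enables video stabilization and continuous video autofocus when the
  // device supports them.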
private static void updateCameraParameters(android.hardware.Camera camera,
android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
boolean captureToTexture) {
final List<String> focusModes = parameters.getSupportedFocusModes();
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
if (!captureToTexture) {
parameters.setPreviewFormat(captureFormat.imageFormat);
}
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
}
private static CaptureFormat findClosestCaptureFormat(
android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
// Find closest supported format for |width| x |height| @ |framerate|.
final List<CaptureFormat.FramerateRange> supportedFramerates =
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(
supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()),
width, height);
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
private static Size findClosestPictureSize(android.hardware.Camera.Parameters parameters,
int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()),
width, height);
}
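  // Must be invoked on the camera thread: the constructor binds
  // |cameraThreadHandler| to the current thread's Looper and starts
  // capturing immediately.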
private Camera1Session(
Events events, boolean captureToTexture,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
int cameraId, int width, int height, int framerate,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
this.cameraThreadHandler = new Handler();
this.events = events;
this.captureToTexture = captureToTexture;
this.applicationContext = applicationContext;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.width = width;
this.height = height;
this.framerate = framerate;
this.camera = camera;
this.info = info;
this.captureFormat = captureFormat;
this.constructionTimeNs = constructionTimeNs;
startCapturing();
}
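  // May be called from any thread. From the camera thread the teardown is
  // posted so the call returns quickly; from other threads the call blocks
  // until the camera thread has marked the session stopped.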
@Override
public void stop() {
final long stopStartTime = System.nanoTime();
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) {
if (state != SessionState.STOPPED) {
state = SessionState.STOPPED;
      // Post stopInternal() so this call can return without waiting for the teardown.
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
stopInternal();
final int stopTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
});
}
} else {
final CountDownLatch stopLatch = new CountDownLatch(1);
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
          if (state != SessionState.STOPPED) {
            state = SessionState.STOPPED;
            stopLatch.countDown();
            stopInternal();
            final int stopTimeMs =
                (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
            camera1StopTimeMsHistogram.addSample(stopTimeMs);
          } else {
            // Already stopped on a previous call; release the latch anyway so
            // the caller cannot block forever.
            stopLatch.countDown();
          }
        }
});
ThreadUtils.awaitUninterruptibly(stopLatch);
}
}
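  // Installs the error callback and the appropriate frame listener, then
  // starts the camera preview. Failures stop the session and are reported
  // through Events.onCameraError().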
private void startCapturing() {
Logging.d(TAG, "Start capturing");
checkIsOnCameraThread();
state = SessionState.RUNNING;
camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
state = SessionState.STOPPED;
stopInternal();
events.onCameraError(Camera1Session.this, errorMessage);
}
});
if (captureToTexture) {
listenForTextureFrames();
} else {
listenForBytebufferFrames();
}
try {
camera.startPreview();
} catch (RuntimeException e) {
state = SessionState.STOPPED;
stopInternal();
events.onCameraError(this, e.getMessage());
}
}
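  // Releases the camera and notifies Events.onCameraClosed(). Must run on
  // the camera thread, after |state| has been set to STOPPED.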
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
surfaceTextureHelper.stopListening();
// Note: stopPreview or other driver code might deadlock. Deadlock in
// android.hardware.Camera._stopPreview(Native Method) has been observed on
// Nexus 5 (hammerhead), OS version LMY48I.
camera.stopPreview();
camera.release();
events.onCameraClosed(this);
Logging.d(TAG, "Stop done");
}
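  // Delivers frames as OES textures through the SurfaceTextureHelper. For
  // front-facing cameras the transform matrix is flipped horizontally to
  // undo the mirroring applied by the OS.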
private void listenForTextureFrames() {
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame();
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
int rotation = getFrameOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix());
}
events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
});
}
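  // Delivers frames as byte arrays via the preview callback, returning each
  // buffer to the camera with addCallbackBuffer() once the frame has been
  // dispatched.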
private void listenForBytebufferFrames() {
camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
checkIsOnCameraThread();
if (callbackCamera != camera) {
Logging.e(TAG, "Callback from a different camera. This should never happen.");
return;
}
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
return;
}
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
});
}
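  // Returns the rotation of the default display in degrees: 0, 90, 180 or 270.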
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(
Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
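  // Combines the fixed sensor mounting orientation with the current device
  // rotation. The rotation is negated for back-facing cameras because their
  // sensors rotate against the display. Example: a back-facing sensor mounted
  // at 90 degrees on a device rotated to 270 yields
  // (90 + (360 - 270)) % 360 = 180.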
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}