Delete AndroidVideoCapturer::FrameFactory.
Splits VideoCapturer::OnFrameCaptured into helper methods,
which enables use of the VideoAdapter logic without
using a frame factory.
Refactors AndroidVideoCapturer to make the adaptation
decision earlier, so that we can crop and rotate using
NV12ToI420Rotate.
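In sketch form, the memory-buffer path can now crop and rotate inside
the single NV21-to-I420 conversion (illustrative fragment, not verbatim
code from this CL; variable names follow the new OnMemoryBufferFrame,
and crop_x/crop_y are assumed already rounded down to even values):

    // uv_width is (width + 1) / 2. NV21 interleaves chroma as VU, so the
    // U and V destination planes are swapped relative to NV12.
    libyuv::NV12ToI420Rotate(
        y_plane + width * crop_y + crop_x, width,
        uv_plane + uv_width * crop_y + crop_x, width,
        dst_y, dst_stride_y,
        dst_v, dst_stride_v,  // V where U would go: the input is NV21.
        dst_u, dst_stride_u,
        crop_width, crop_height, libyuv::kRotate90);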
BUG=webrtc:5682
Review-Url: https://codereview.webrtc.org/1973873003
Cr-Commit-Position: refs/heads/master@{#12895}
diff --git a/webrtc/api/androidvideocapturer.cc b/webrtc/api/androidvideocapturer.cc
index ae81a8d..3031185 100644
--- a/webrtc/api/androidvideocapturer.cc
+++ b/webrtc/api/androidvideocapturer.cc
@@ -19,103 +19,10 @@
namespace webrtc {
-// A hack for avoiding deep frame copies in
-// cricket::VideoCapturer.SignalFrameCaptured() using a custom FrameFactory.
-// A frame is injected using UpdateCapturedFrame(), and converted into a
-// cricket::VideoFrame with CreateAliasedFrame(). UpdateCapturedFrame() should
-// be called before CreateAliasedFrame() for every frame.
-// TODO(magjed): Add an interface cricket::VideoCapturer::OnFrameCaptured()
-// for ref counted I420 frames instead of this hack.
-class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
- public:
- explicit FrameFactory(
- const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
- : delegate_(delegate) {
- // Create a CapturedFrame that only contains header information, not the
- // actual pixel data.
- captured_frame_.pixel_height = 1;
- captured_frame_.pixel_width = 1;
- captured_frame_.data = nullptr;
- captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
- captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY);
- }
-
- void UpdateCapturedFrame(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
- int rotation,
- int64_t time_stamp_in_ns) {
- RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
- rotation == 270);
- buffer_ = buffer;
- captured_frame_.width = buffer->width();
- captured_frame_.height = buffer->height();
- captured_frame_.time_stamp = time_stamp_in_ns;
- captured_frame_.rotation = static_cast<webrtc::VideoRotation>(rotation);
- }
-
- void ClearCapturedFrame() {
- buffer_ = nullptr;
- captured_frame_.width = 0;
- captured_frame_.height = 0;
- captured_frame_.time_stamp = 0;
- }
-
- const cricket::CapturedFrame* GetCapturedFrame() const {
- return &captured_frame_;
- }
-
- cricket::VideoFrame* CreateAliasedFrame(
- const cricket::CapturedFrame* captured_frame,
- int dst_width,
- int dst_height) const override {
- // Check that captured_frame is actually our frame.
- RTC_CHECK(captured_frame == &captured_frame_);
- RTC_CHECK(buffer_->native_handle() == nullptr);
-
- std::unique_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
- ShallowCenterCrop(buffer_, dst_width, dst_height),
- captured_frame->time_stamp, captured_frame->rotation));
- // Caller takes ownership.
- // TODO(magjed): Change CreateAliasedFrame() to return a std::unique_ptr.
- return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
- : frame.release();
- }
-
- cricket::VideoFrame* CreateAliasedFrame(
- const cricket::CapturedFrame* input_frame,
- int cropped_input_width,
- int cropped_input_height,
- int output_width,
- int output_height) const override {
- if (buffer_->native_handle() != nullptr) {
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
- static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
- ->CropScaleAndRotate(cropped_input_width, cropped_input_height,
- output_width, output_height,
- apply_rotation_ ? input_frame->rotation
- : webrtc::kVideoRotation_0));
- return new cricket::WebRtcVideoFrame(
- scaled_buffer, input_frame->time_stamp,
- apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
- }
- return VideoFrameFactory::CreateAliasedFrame(input_frame,
- cropped_input_width,
- cropped_input_height,
- output_width,
- output_height);
- }
-
- private:
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
- cricket::CapturedFrame captured_frame_;
- rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
-};
-
AndroidVideoCapturer::AndroidVideoCapturer(
const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
: running_(false),
delegate_(delegate),
- frame_factory_(NULL),
current_state_(cricket::CS_STOPPED) {
thread_checker_.DetachFromThread();
SetSupportedFormats(delegate_->GetSupportedFormats());
@@ -133,9 +40,6 @@
LOG(LS_INFO) << " AndroidVideoCapturer::Start " << capture_format.width << "x"
<< capture_format.height << "@" << fps;
- frame_factory_ = new AndroidVideoCapturer::FrameFactory(delegate_.get());
- set_frame_factory(frame_factory_);
-
running_ = true;
delegate_->Start(capture_format.width, capture_format.height, fps, this);
SetCaptureFormat(&capture_format);
@@ -176,19 +80,6 @@
SetCaptureState(new_state);
}
-void AndroidVideoCapturer::OnIncomingFrame(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
- int rotation,
- int64_t time_stamp) {
- // NOTE: The frame_factory hack isn't thread safe. It works because
- // all calls to this method are from the same Java thread. In
- // addition, calls are currently synchronized on the caller's
- // AndroidVideoCapturerJni:capturer_lock_.
- frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
- SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
- frame_factory_->ClearCapturedFrame();
-}
-
void AndroidVideoCapturer::OnOutputFormatRequest(
int width, int height, int fps) {
RTC_CHECK(thread_checker_.CalledOnValidThread());
diff --git a/webrtc/api/androidvideocapturer.h b/webrtc/api/androidvideocapturer.h
index 4c73f31..bd0dd7c 100644
--- a/webrtc/api/androidvideocapturer.h
+++ b/webrtc/api/androidvideocapturer.h
@@ -48,13 +48,6 @@
// Called from JNI when the capturer has been started.
void OnCapturerStarted(bool success);
- // Called from JNI when a new frame has been captured.
- // Argument |buffer| is intentionally by value, for use with rtc::Bind.
- void OnIncomingFrame(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
- int rotation,
- int64_t time_stamp);
-
// Called from JNI to request a new video format.
void OnOutputFormatRequest(int width, int height, int fps);
@@ -64,6 +57,11 @@
bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
cricket::VideoFormat* best_format) override;
+ // Expose these protected methods as public, to be used by the
+ // AndroidVideoCapturerJni.
+ using VideoCapturer::AdaptFrame;
+ using VideoCapturer::OnFrame;
+
private:
// cricket::VideoCapturer implementation.
// Video frames will be delivered using
@@ -80,9 +78,6 @@
rtc::ThreadChecker thread_checker_;
- class FrameFactory;
- FrameFactory* frame_factory_; // Owned by cricket::VideoCapturer.
-
cricket::CaptureState current_state_;
};
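The using-declarations added above rely on a standard C++
access-widening idiom; a self-contained illustration with generic
names, not WebRTC code:

    class Base {
     protected:
      void Helper() {}
    };

    class Derived : public Base {
     public:
      using Base::Helper;  // Re-publish the protected member as public.
    };

    void Caller() {
      Derived d;
      d.Helper();  // OK: public via the using-declaration.
      // Calling Helper() through a plain Base remains ill-formed.
    }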
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
index da0f2e6..0e36aa1 100644
--- a/webrtc/api/java/jni/androidmediaencoder_jni.cc
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -683,7 +683,7 @@
rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
static_cast<AndroidTextureBuffer*>(
frame.video_frame_buffer().get())->CropScaleAndRotate(
- frame.width(), frame.height(),
+ frame.width(), frame.height(), 0, 0,
scaled_resolution.width, scaled_resolution.height,
webrtc::kVideoRotation_0));
input_frame.set_video_frame_buffer(scaled_buffer);
@@ -824,9 +824,7 @@
RTC_CHECK(use_surface_);
NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
frame.video_frame_buffer()->native_handle());
- jfloatArray sampling_matrix = jni->NewFloatArray(16);
- jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
-
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_texture_method_,
key_frame,
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 5b4a92c..0e8e867 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -13,6 +13,7 @@
#include "webrtc/api/java/jni/native_handle_impl.h"
#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/base/bind.h"
namespace webrtc_jni {
@@ -169,25 +170,79 @@
int height,
int rotation,
int64_t timestamp_ns) {
- const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
- const uint8_t* vu_plane = y_plane + width * height;
-
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
- buffer_pool_.CreateBuffer(width, height);
- libyuv::NV21ToI420(
- y_plane, width,
- vu_plane, width,
- buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
- buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
- buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
- width, height);
-
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
rtc::CritScope cs(&capturer_lock_);
- if (!capturer_) {
- LOG(LS_WARNING) << "OnMemoryBufferFrame() called for closed capturer.";
+
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+
+ if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y)) {
return;
}
- capturer_->OnIncomingFrame(buffer, rotation, timestamp_ns);
+
+ int rotated_width = crop_width;
+ int rotated_height = crop_height;
+
+ if (capturer_->apply_rotation() && (rotation == 90 || rotation == 270)) {
+ std::swap(adapted_width, adapted_height);
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);
+
+ const uint8_t* y_plane = static_cast<const uint8_t*>(video_frame);
+ const uint8_t* uv_plane = y_plane + width * height;
+
+ // Can only crop at even pixels.
+ crop_x &= ~1;
+ crop_y &= ~1;
+ int uv_width = (width + 1) / 2;
+
+ libyuv::NV12ToI420Rotate(
+ y_plane + width * crop_y + crop_x, width,
+ uv_plane + uv_width * crop_y + crop_x, width,
+ buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ // Swap U and V, since we have NV21, not NV12.
+ buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ crop_width, crop_height, static_cast<libyuv::RotationMode>(
+ capturer_->apply_rotation() ? rotation : 0));
+
+ if (adapted_width != rotated_width || adapted_height != rotated_height) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
+ post_scale_pool_.CreateBuffer(adapted_width, adapted_height);
+ // TODO(nisse): This should be done by some Scale method in
+ // I420Buffer, but we can't do that right now, since
+ // I420BufferPool uses a wrapper object.
+ if (libyuv::I420Scale(buffer->DataY(), buffer->StrideY(),
+ buffer->DataU(), buffer->StrideU(),
+ buffer->DataV(), buffer->StrideV(),
+ rotated_width, rotated_height,
+ scaled->MutableDataY(), scaled->StrideY(),
+ scaled->MutableDataU(), scaled->StrideU(),
+ scaled->MutableDataV(), scaled->StrideV(),
+ adapted_width, adapted_height,
+ libyuv::kFilterBox) < 0) {
+ LOG(LS_WARNING) << "I420Scale failed";
+ return;
+ }
+ buffer = scaled;
+ }
+ // TODO(nisse): Use microsecond time instead.
+ capturer_->OnFrame(cricket::WebRtcVideoFrame(
+ buffer, timestamp_ns,
+ capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation)),
+ width, height);
}
void AndroidVideoCapturerJni::OnTextureFrame(int width,
@@ -195,15 +250,48 @@
int rotation,
int64_t timestamp_ns,
const NativeHandleImpl& handle) {
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
- surface_texture_helper_->CreateTextureFrame(width, height, handle));
-
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
rtc::CritScope cs(&capturer_lock_);
- if (!capturer_) {
- LOG(LS_WARNING) << "OnTextureFrame() called for closed capturer.";
+
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+
+ if (!capturer_->AdaptFrame(width, height, timestamp_ns,
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y)) {
return;
}
- capturer_->OnIncomingFrame(buffer, rotation, timestamp_ns);
+
+ Matrix matrix = handle.sampling_matrix;
+
+ matrix.Crop(crop_width / static_cast<float>(width),
+ crop_height / static_cast<float>(height),
+ crop_x / static_cast<float>(width),
+ crop_y / static_cast<float>(height));
+
+ if (capturer_->apply_rotation()) {
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(adapted_width, adapted_height);
+ }
+ matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
+ }
+
+ // TODO(nisse): Use microsecond time instead.
+ capturer_->OnFrame(
+ cricket::WebRtcVideoFrame(
+ surface_texture_helper_->CreateTextureFrame(
+ adapted_width, adapted_height,
+ NativeHandleImpl(handle.oes_texture_id, matrix)),
+ timestamp_ns, capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation)),
+ width, height);
}
void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.h b/webrtc/api/java/jni/androidvideocapturer_jni.h
index eea56ad..4a803d9 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.h
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.h
@@ -77,7 +77,8 @@
const ScopedGlobalRef<jclass> j_observer_class_;
// Used on the Java thread running the camera.
- webrtc::I420BufferPool buffer_pool_;
+ webrtc::I420BufferPool pre_scale_pool_;
+ webrtc::I420BufferPool post_scale_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
rtc::ThreadChecker thread_checker_;
diff --git a/webrtc/api/java/jni/native_handle_impl.cc b/webrtc/api/java/jni/native_handle_impl.cc
index 1f180ade..eb71088 100644
--- a/webrtc/api/java/jni/native_handle_impl.cc
+++ b/webrtc/api/java/jni/native_handle_impl.cc
@@ -21,9 +21,24 @@
using webrtc::NativeHandleBuffer;
-namespace {
+namespace webrtc_jni {
-void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
+ RTC_CHECK_EQ(16, jni->GetArrayLength(a));
+ jfloat* ptr = jni->GetFloatArrayElements(a, nullptr);
+ for (int i = 0; i < 16; ++i) {
+ elem_[i] = ptr[i];
+ }
+ jni->ReleaseFloatArrayElements(a, ptr, 0);
+}
+
+jfloatArray Matrix::ToJava(JNIEnv* jni) {
+ jfloatArray matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
+ return matrix;
+}
+
+void Matrix::Rotate(webrtc::VideoRotation rotation) {
// Texture coordinates are in the range 0 to 1. The transformation of the last
// row in each rotation matrix is needed for proper translation, e.g., to
// mirror x, we don't replace x by -x, but by 1-x.
@@ -32,35 +47,36 @@
break;
case webrtc::kVideoRotation_90: {
const float ROTATE_90[16] =
- { a[4], a[5], a[6], a[7],
- -a[0], -a[1], -a[2], -a[3],
- a[8], a[9], a[10], a[11],
- a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
- memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+ { elem_[4], elem_[5], elem_[6], elem_[7],
+ -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[12], elem_[1] + elem_[13],
+ elem_[2] + elem_[14], elem_[3] + elem_[15]};
+ memcpy(elem_, ROTATE_90, sizeof(elem_));
} break;
case webrtc::kVideoRotation_180: {
const float ROTATE_180[16] =
- { -a[0], -a[1], -a[2], -a[3],
- -a[4], -a[5], -a[6], -a[7],
- a[8], a[9], a[10], a[11],
- a[0] + a[4] + a[12], a[1] +a[5] + a[13], a[2] + a[6] + a[14],
- a[3] + a[11]+ a[15]};
- memcpy(a, ROTATE_180, sizeof(ROTATE_180));
- }
- break;
+ { -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13],
+ elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[11]+ elem_[15]};
+ memcpy(elem_, ROTATE_180, sizeof(elem_));
+ } break;
case webrtc::kVideoRotation_270: {
const float ROTATE_270[16] =
- { -a[4], -a[5], -a[6], -a[7],
- a[0], a[1], a[2], a[3],
- a[8], a[9], a[10], a[11],
- a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
- memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+ { -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[0], elem_[1], elem_[2], elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[4] + elem_[12], elem_[5] + elem_[13],
+ elem_[6] + elem_[14], elem_[7] + elem_[15]};
+ memcpy(elem_, ROTATE_270, sizeof(elem_));
} break;
}
}
// Calculates result = a * b, in column-major order.
-void MultiplyMatrix(const float a[16], const float b[16], float result[16]) {
+void Matrix::Multiply(const float a[16], const float b[16], float result[16]) {
for (int i = 0; i < 4; ++i) {
for (int j = 0; j < 4; ++j) {
float sum = 0;
@@ -75,40 +91,30 @@
// Center crop by keeping xFraction of the width and yFraction of the height,
// so e.g. cropping from 640x480 to 640x360 would use
// xFraction=1, yFraction=360/480.
-void CropMatrix(float a[16], float xFraction, float yFraction) {
- // Move cropped area to the center of the frame by offsetting half the
- // removed area.
- const float xOffset = (1 - xFraction) / 2;
- const float yOffset = (1 - yFraction) / 2;
- const float crop_matrix[16] = {
- xFraction, 0, 0, 0,
- 0, yFraction, 0, 0,
- 0, 0, 1, 0,
- xOffset, yOffset, 0, 1};
- float mul_result[16];
- MultiplyMatrix(crop_matrix, a, mul_result);
- memcpy(a, mul_result, sizeof(mul_result));
+void Matrix::Crop(float xFraction,
+ float yFraction,
+ float xOffset,
+ float yOffset) {
+ const float crop_matrix[16] =
+ {xFraction, 0, 0, 0,
+ 0, yFraction, 0, 0,
+ 0, 0, 1, 0,
+ xOffset, yOffset, 0, 1};
+ const Matrix old = *this;
+ Multiply(crop_matrix, old.elem_, this->elem_);
}
-} // anonymous namespace
-
-namespace webrtc_jni {
-
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
static const int kBufferAlignment = 64;
+NativeHandleImpl::NativeHandleImpl(int id, const Matrix& matrix)
+ : oes_texture_id(id), sampling_matrix(matrix) {}
+
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
- : oes_texture_id(j_oes_texture_id) {
- RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
- jfloat* transform_matrix_ptr =
- jni->GetFloatArrayElements(j_transform_matrix, nullptr);
- for (int i = 0; i < 16; ++i) {
- sampling_matrix[i] = transform_matrix_ptr[i];
- }
- jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
-}
+ : oes_texture_id(j_oes_texture_id),
+ sampling_matrix(jni, j_transform_matrix) {}
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
@@ -162,11 +168,7 @@
jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
- // TODO(nisse): Keep java transform matrix around.
- jfloatArray sampling_matrix = jni->NewFloatArray(16);
- jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
- native_handle_.sampling_matrix);
-
+ jfloatArray sampling_matrix = native_handle_.sampling_matrix.ToJava(jni);
jni->CallVoidMethod(surface_texture_helper_,
transform_mid,
byte_buffer, width(), height(), stride,
@@ -179,6 +181,8 @@
rtc::scoped_refptr<AndroidTextureBuffer>
AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
int cropped_height,
+ int crop_x,
+ int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation) {
@@ -198,11 +202,13 @@
surface_texture_helper_, rtc::KeepRefUntilDone(this)));
if (cropped_width != width() || cropped_height != height()) {
- CropMatrix(buffer->native_handle_.sampling_matrix,
- cropped_width / static_cast<float>(width()),
- cropped_height / static_cast<float>(height()));
+ buffer->native_handle_.sampling_matrix.Crop(
+ cropped_width / static_cast<float>(width()),
+ cropped_height / static_cast<float>(height()),
+ crop_x / static_cast<float>(width()),
+ crop_y / static_cast<float>(height()));
}
- RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+ buffer->native_handle_.sampling_matrix.Rotate(rotation);
return buffer;
}
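For orientation, the net effect of Rotate() on a sampled coordinate
(u, v), reading the 16 floats as a 4x4 matrix applied to the row vector
(u, v, 0, 1); this is a derivation from the code above, not part of
this CL:

    kVideoRotation_90:  (u, v) -> (1 - v, u)
    kVideoRotation_180: (u, v) -> (1 - u, 1 - v)
    kVideoRotation_270: (u, v) -> (v, 1 - u)

The fourth-row sums (e.g. elem_[0] + elem_[12]) supply the constant
1 - x / 1 - y offsets; sign flips alone would push coordinates outside
the [0, 1] texture range.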
diff --git a/webrtc/api/java/jni/native_handle_impl.h b/webrtc/api/java/jni/native_handle_impl.h
index b781815..0d01532 100644
--- a/webrtc/api/java/jni/native_handle_impl.h
+++ b/webrtc/api/java/jni/native_handle_impl.h
@@ -18,14 +18,37 @@
namespace webrtc_jni {
+// OpenGL texture matrix, in column-major order. Operations are
+// in-place.
+class Matrix {
+ public:
+ Matrix(JNIEnv* jni, jfloatArray a);
+
+ jfloatArray ToJava(JNIEnv* jni);
+
+ // Crop arguments are relative to original size.
+ void Crop(float cropped_width,
+ float cropped_height,
+ float crop_x,
+ float crop_y);
+
+ void Rotate(webrtc::VideoRotation rotation);
+
+ private:
+ static void Multiply(const float a[16], const float b[16], float result[16]);
+ float elem_[16];
+};
+
// Wrapper for texture object.
struct NativeHandleImpl {
NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix);
+ NativeHandleImpl(int id, const Matrix& matrix);
+
const int oes_texture_id;
- float sampling_matrix[16];
+ Matrix sampling_matrix;
};
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
@@ -42,6 +65,8 @@
rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
int cropped_width,
int cropped_height,
+ int crop_x,
+ int crop_y,
int dst_width,
int dst_height,
webrtc::VideoRotation rotation);
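For reference, the intended call pattern for Matrix, condensed from the
OnTextureFrame changes earlier in this CL (crop_* and width/height come
from AdaptFrame; the rotation is an example value):

    webrtc_jni::Matrix matrix = handle.sampling_matrix;  // Copy; ops below
                                                         // are in-place.
    // Keep only the crop rectangle, expressed as fractions of the
    // original size, offset to (crop_x, crop_y).
    matrix.Crop(crop_width / static_cast<float>(width),
                crop_height / static_cast<float>(height),
                crop_x / static_cast<float>(width),
                crop_y / static_cast<float>(height));
    matrix.Rotate(webrtc::kVideoRotation_90);   // Bake in the rotation.
    jfloatArray j_matrix = matrix.ToJava(jni);  // New 16-float Java array.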
diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc
index 522ae75..a075805 100644
--- a/webrtc/api/java/jni/peerconnection_jni.cc
+++ b/webrtc/api/java/jni/peerconnection_jni.cc
@@ -794,8 +794,8 @@
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>(
frame->video_frame_buffer()->native_handle());
- jfloatArray sampling_matrix = jni()->NewFloatArray(16);
- jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni());
+
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->width(), frame->height(),
diff --git a/webrtc/media/base/videoadapter.cc b/webrtc/media/base/videoadapter.cc
index e1f506f..acb0e2c 100644
--- a/webrtc/media/base/videoadapter.cc
+++ b/webrtc/media/base/videoadapter.cc
@@ -153,7 +153,7 @@
return true;
}
-void VideoAdapter::AdaptFrameResolution(int in_width,
+bool VideoAdapter::AdaptFrameResolution(int in_width,
int in_height,
int64_t in_timestamp_ns,
int* cropped_width,
@@ -189,11 +189,7 @@
}
// Drop frame.
- *cropped_width = 0;
- *cropped_height = 0;
- *out_width = 0;
- *out_height = 0;
- return;
+ return false;
}
// Calculate how the input should be cropped.
@@ -250,6 +246,8 @@
previous_width_ = *out_width;
previous_height_ = *out_height;
+
+ return true;
}
void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
diff --git a/webrtc/media/base/videoadapter.h b/webrtc/media/base/videoadapter.h
index 45fd1fa..9d17f5c 100644
--- a/webrtc/media/base/videoadapter.h
+++ b/webrtc/media/base/videoadapter.h
@@ -27,11 +27,11 @@
VideoAdapter();
virtual ~VideoAdapter();
- // Return the adapted resolution given the input resolution. The input
- // resolution should first be cropped to the specified resolution, and then
- // scaled to the final output resolution. The output resolution will be 0x0 if
- // the frame should be dropped.
- void AdaptFrameResolution(int in_width,
+ // Return the adapted resolution and cropping parameters given the
+ // input resolution. The input frame should first be cropped, then
+ // scaled to the final output resolution. Returns true if the frame
+ // should be adapted, and false if it should be dropped.
+ bool AdaptFrameResolution(int in_width,
int in_height,
int64_t in_timestamp_ns,
int* cropped_width,
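The new calling convention, in a condensed example mirroring the updated
unit tests below (adapter is a cricket::VideoAdapter):

    int cropped_width, cropped_height, out_width, out_height;
    if (adapter.AdaptFrameResolution(1280, 720, /*in_timestamp_ns=*/0,
                                     &cropped_width, &cropped_height,
                                     &out_width, &out_height)) {
      // Center-crop the input to cropped_width x cropped_height, then
      // scale the result to out_width x out_height.
    } else {
      // Frame dropped. The output parameters are left unset, unlike the
      // old 0x0 convention.
    }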
diff --git a/webrtc/media/base/videoadapter_unittest.cc b/webrtc/media/base/videoadapter_unittest.cc
index d76bbd6..e805679 100644
--- a/webrtc/media/base/videoadapter_unittest.cc
+++ b/webrtc/media/base/videoadapter_unittest.cc
@@ -71,11 +71,10 @@
int cropped_height;
int out_width;
int out_height;
- video_adapter_->AdaptFrameResolution(in_width, in_height,
- captured_frame->time_stamp,
- &cropped_width, &cropped_height,
- &out_width, &out_height);
- if (out_width != 0 && out_height != 0) {
+ if (video_adapter_->AdaptFrameResolution(in_width, in_height,
+ captured_frame->time_stamp,
+ &cropped_width, &cropped_height,
+ &out_width, &out_height)) {
cropped_width_ = cropped_width;
cropped_height_ = cropped_height;
out_width_ = out_width;
@@ -390,9 +389,10 @@
output_format.width *= 10;
output_format.height *= 10;
adapter_.OnOutputFormatRequest(output_format);
- adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
- 0, &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(capture_format_.width, cropped_width_);
EXPECT_EQ(capture_format_.height, cropped_height_);
EXPECT_EQ(capture_format_.width, out_width_);
@@ -403,9 +403,10 @@
// cropping or resolution change.
TEST_F(VideoAdapterTest, AdaptFrameResolutionIdentical) {
adapter_.OnOutputFormatRequest(capture_format_);
- adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
- 0, &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(capture_format_.width, cropped_width_);
EXPECT_EQ(capture_format_.height, cropped_height_);
EXPECT_EQ(capture_format_.width, out_width_);
@@ -419,9 +420,10 @@
request_format.width /= 2;
request_format.height /= 2;
adapter_.OnOutputFormatRequest(request_format);
- adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
- 0, &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(capture_format_.width, cropped_width_);
EXPECT_EQ(capture_format_.height, cropped_height_);
EXPECT_EQ(request_format.width, out_width_);
@@ -434,11 +436,10 @@
output_format.width = 0;
output_format.height = 0;
adapter_.OnOutputFormatRequest(output_format);
- adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
- 0, &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
- EXPECT_EQ(0, out_width_);
- EXPECT_EQ(0, out_height_);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
}
// Adapt the frame resolution to be a quarter of the capture resolution at the
@@ -503,9 +504,9 @@
TEST_F(VideoAdapterTest, TestOnOutputFormatRequest) {
VideoFormat format(640, 400, 0, 0);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -514,9 +515,9 @@
// Format request 640x400.
format.height = 400;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -527,9 +528,9 @@
format.width = 1280;
format.height = 720;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -539,19 +540,17 @@
format.width = 0;
format.height = 0;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
- EXPECT_EQ(0, out_width_);
- EXPECT_EQ(0, out_height_);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
// Request 320x200. Expect scaling, but no cropping.
format.width = 320;
format.height = 200;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(320, out_width_);
@@ -563,9 +562,9 @@
format.width = 424;
format.height = 265;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(320, out_width_);
@@ -575,9 +574,9 @@
format.width = 640 * 3 / 8;
format.height = 400 * 3 / 8;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(640 * 3 / 8, out_width_);
@@ -587,9 +586,9 @@
format.width = 320;
format.height = 200;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(320, out_width_);
@@ -599,9 +598,9 @@
format.width = 480;
format.height = 300;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 400, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(400, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -611,9 +610,9 @@
TEST_F(VideoAdapterTest, TestViewRequestPlusCameraSwitch) {
// Start at HD.
VideoFormat format(1280, 720, 0, 0);
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -623,9 +622,9 @@
format.width = 640;
format.height = 360;
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -633,9 +632,9 @@
// Now, the camera reopens at VGA.
// Both the frame and the output format should be 640x360.
- adapter_.AdaptFrameResolution(640, 360, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -644,9 +643,9 @@
// And another view request comes in for 640x360, which should have no
// real impact.
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 360, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -658,9 +657,9 @@
VideoFormat format(640, 360, 0, FOURCC_I420);
adapter_.OnOutputFormatRequest(format);
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
// Expect cropping.
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
@@ -668,17 +667,17 @@
EXPECT_EQ(360, out_height_);
// But if frames come in at 640x360, we shouldn't adapt them down.
- adapter_.AdaptFrameResolution(640, 360, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
EXPECT_EQ(360, out_height_);
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -686,9 +685,9 @@
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -697,9 +696,9 @@
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(960, out_width_);
@@ -708,9 +707,9 @@
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -719,9 +718,9 @@
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -730,9 +729,9 @@
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270));
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -741,9 +740,9 @@
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360));
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(960, out_width_);
@@ -752,9 +751,9 @@
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -762,28 +761,26 @@
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(0), rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
- EXPECT_EQ(0, out_width_);
- EXPECT_EQ(0, out_height_);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -791,9 +788,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -803,9 +800,9 @@
TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -813,9 +810,9 @@
VideoFormat new_format(640, 360, 0, FOURCC_I420);
adapter_.OnOutputFormatRequest(new_format);
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -823,9 +820,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -833,9 +830,9 @@
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -843,18 +840,18 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(480, out_width_);
EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(), rtc::Optional<int>());
- adapter_.AdaptFrameResolution(1280, 720, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(1280, cropped_width_);
EXPECT_EQ(720, cropped_height_);
EXPECT_EQ(1280, out_width_);
@@ -865,9 +862,9 @@
// Ask for 640x360 (16:9 aspect).
adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
// Send 640x480 (4:3 aspect).
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
// Expect cropping to 16:9 format and no scaling.
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
@@ -878,9 +875,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
// Expect cropping to 16:9 format and 3/4 scaling.
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -890,9 +887,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(480 * 270 - 1),
rtc::Optional<int>());
// Expect cropping to 16:9 format and 1/2 scaling.
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(320, out_width_);
@@ -902,9 +899,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(320 * 180));
// Expect cropping to 16:9 format and 3/4 scaling.
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(480, out_width_);
@@ -914,9 +911,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270));
// Expect cropping to 16:9 format and no scaling.
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -926,9 +923,9 @@
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360));
// Expect cropping to 16:9 format and no scaling.
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
EXPECT_EQ(640, cropped_width_);
EXPECT_EQ(360, cropped_height_);
EXPECT_EQ(640, out_width_);
@@ -943,9 +940,9 @@
rtc::Optional<int>());
// Send 640x480 (4:3 aspect).
- adapter_.AdaptFrameResolution(640, 480, 0,
- &cropped_width_, &cropped_height_,
- &out_width_, &out_height_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
// Instead of getting the exact aspect ratio with cropped resolution 640x360,
// the resolution should be adjusted to get a perfect scale factor instead.
diff --git a/webrtc/media/base/videocapturer.cc b/webrtc/media/base/videocapturer.cc
index 27da97c..96a6055 100644
--- a/webrtc/media/base/videocapturer.cc
+++ b/webrtc/media/base/videocapturer.cc
@@ -214,26 +214,54 @@
}
}
-void VideoCapturer::OnFrameCaptured(VideoCapturer*,
- const CapturedFrame* captured_frame) {
+bool VideoCapturer::AdaptFrame(int width,
+ int height,
+ // TODO(nisse): Switch to us unit.
+ int64_t capture_time_ns,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y) {
if (!broadcaster_.frame_wanted()) {
- return;
+ return false;
}
- int cropped_width = captured_frame->width;
- int cropped_height = captured_frame->height;
- int out_width = captured_frame->width;
- int out_height = captured_frame->height;
if (enable_video_adapter_ && !IsScreencast()) {
- video_adapter_.AdaptFrameResolution(
- captured_frame->width, captured_frame->height,
- captured_frame->time_stamp,
- &cropped_width, &cropped_height,
- &out_width, &out_height);
- if (out_width == 0 || out_height == 0) {
+ if (!video_adapter_.AdaptFrameResolution(
+ width, height, capture_time_ns,
+ crop_width, crop_height, out_width, out_height)) {
// VideoAdapter dropped the frame.
- return;
+ return false;
}
+ *crop_x = (width - *crop_width) / 2;
+ *crop_y = (height - *crop_height) / 2;
+ } else {
+ *out_width = width;
+ *out_height = height;
+ *crop_width = width;
+ *crop_height = height;
+ *crop_x = 0;
+ *crop_y = 0;
+ }
+ return true;
+}
+
+void VideoCapturer::OnFrameCaptured(VideoCapturer*,
+ const CapturedFrame* captured_frame) {
+ int out_width;
+ int out_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+
+ if (!AdaptFrame(captured_frame->width, captured_frame->height,
+ captured_frame->time_stamp,
+ &out_width, &out_height,
+ &crop_width, &crop_height, &crop_x, &crop_y)) {
+ return;
}
if (!frame_factory_) {
@@ -241,9 +269,10 @@
return;
}
- // TODO(nisse): Reorganize frame factory methods.
+ // TODO(nisse): Reorganize frame factory methods. crop_x and crop_y
+ // are ignored for now.
std::unique_ptr<VideoFrame> adapted_frame(frame_factory_->CreateAliasedFrame(
- captured_frame, cropped_width, cropped_height, out_width, out_height));
+ captured_frame, crop_width, crop_height, out_width, out_height));
if (!adapted_frame) {
// TODO(fbarchard): LOG more information about captured frame attributes.
@@ -253,12 +282,14 @@
return;
}
- OnFrame(this, adapted_frame.get());
- UpdateInputSize(captured_frame);
+ OnFrame(*adapted_frame, captured_frame->width, captured_frame->height);
}
-void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) {
- broadcaster_.OnFrame(*frame);
+void VideoCapturer::OnFrame(const VideoFrame& frame,
+ int orig_width,
+ int orig_height) {
+ broadcaster_.OnFrame(frame);
+ UpdateInputSize(orig_width, orig_height);
}
void VideoCapturer::SetCaptureState(CaptureState state) {
@@ -399,13 +430,13 @@
format.height > max_format_->height;
}
-void VideoCapturer::UpdateInputSize(const CapturedFrame* captured_frame) {
+void VideoCapturer::UpdateInputSize(int width, int height) {
// Update stats protected from fetches from different thread.
rtc::CritScope cs(&frame_stats_crit_);
input_size_valid_ = true;
- input_width_ = captured_frame->width;
- input_height_ = captured_frame->height;
+ input_width_ = width;
+ input_height_ = height;
}
} // namespace cricket
diff --git a/webrtc/media/base/videocapturer.h b/webrtc/media/base/videocapturer.h
index 6a57331..329ba08 100644
--- a/webrtc/media/base/videocapturer.h
+++ b/webrtc/media/base/videocapturer.h
@@ -168,7 +168,7 @@
return capture_state_;
}
- virtual bool GetApplyRotation() { return apply_rotation_; }
+ virtual bool apply_rotation() { return apply_rotation_; }
// Returns true if the capturer is screencasting. This can be used to
// implement screencast specific behavior.
@@ -222,14 +222,28 @@
// when a sink changes its VideoSinkWants by calling AddOrUpdateSink.
virtual void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+ // Reports the appropriate frame size after adaptation. Returns true
+ // if a frame is wanted. Returns false if there are no interested
+ // sinks, or if the VideoAdapter decides to drop the frame.
+ bool AdaptFrame(int width,
+ int height,
+ int64_t capture_time_ns,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y);
+
// Callback attached to SignalFrameCaptured where SignalVideoFrames is called.
void OnFrameCaptured(VideoCapturer* video_capturer,
const CapturedFrame* captured_frame);
- // Called when a frame has been captured and converted to a VideoFrame.
- // OnFrame can be called directly by an implementation that does not use
- // SignalFrameCaptured or OnFrameCaptured.
- void OnFrame(VideoCapturer* capturer, const VideoFrame* frame);
+ // Called when a frame has been captured and converted to a
+ // VideoFrame. OnFrame can be called directly by an implementation
+ // that does not use SignalFrameCaptured or OnFrameCaptured. The
+ // orig_width and orig_height are used only to produce stats.
+ void OnFrame(const VideoFrame& frame, int orig_width, int orig_height);
VideoAdapter* video_adapter() { return &video_adapter_; }
@@ -268,7 +282,7 @@
// Returns true if format doesn't fulfill all applied restrictions.
bool ShouldFilterFormat(const VideoFormat& format) const;
- void UpdateInputSize(const CapturedFrame* captured_frame);
+ void UpdateInputSize(int width, int height);
rtc::ThreadChecker thread_checker_;
std::string id_;
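A sketch of how a capturer implementation can use the two helpers
directly, bypassing SignalFrameCaptured (hypothetical subclass and
method name; the real user is AndroidVideoCapturerJni above):

    // Hypothetical: called by the platform with a ready-made buffer.
    void MyCapturer::DeliverCapturedFrame(
        const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
        int64_t time_ns) {
      int out_width, out_height;
      int crop_width, crop_height, crop_x, crop_y;
      if (!AdaptFrame(buffer->width(), buffer->height(), time_ns,
                      &out_width, &out_height,
                      &crop_width, &crop_height, &crop_x, &crop_y)) {
        return;  // No interested sinks, or the adapter dropped the frame.
      }
      // Crop to (crop_x, crop_y, crop_width, crop_height), scale to
      // out_width x out_height, then deliver; the original dimensions
      // are used only for input-size stats.
      OnFrame(cricket::WebRtcVideoFrame(buffer, time_ns,
                                        webrtc::kVideoRotation_0),
              buffer->width(), buffer->height());
    }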
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index fd7a646..22e3f99 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -259,12 +259,12 @@
channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned off applying rotation.
- EXPECT_FALSE(capturer.GetApplyRotation());
+ EXPECT_FALSE(capturer.apply_rotation());
// Verify removing header extension turns on applying rotation.
parameters.extensions.clear();
EXPECT_TRUE(channel->SetSendParameters(parameters));
- EXPECT_TRUE(capturer.GetApplyRotation());
+ EXPECT_TRUE(capturer.apply_rotation());
}
TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionBeforeAddSendStream) {
@@ -290,7 +290,7 @@
channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned off applying rotation.
- EXPECT_FALSE(capturer.GetApplyRotation());
+ EXPECT_FALSE(capturer.apply_rotation());
}
TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionAfterCapturer) {
@@ -311,7 +311,7 @@
channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned on applying rotation.
- EXPECT_TRUE(capturer.GetApplyRotation());
+ EXPECT_TRUE(capturer.apply_rotation());
// Add CVO extension.
const int id = 1;
@@ -322,12 +322,12 @@
EXPECT_TRUE(channel->SetSendParameters(parameters));
// Verify capturer has turned off applying rotation.
- EXPECT_FALSE(capturer.GetApplyRotation());
+ EXPECT_FALSE(capturer.apply_rotation());
// Verify removing header extension turns on applying rotation.
parameters.extensions.clear();
EXPECT_TRUE(channel->SetSendParameters(parameters));
- EXPECT_TRUE(capturer.GetApplyRotation());
+ EXPECT_TRUE(capturer.apply_rotation());
}
TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {