New class AdaptedVideoTrackSource.

Factor the video adaptation and broadcasting logic out of
AndroidVideoTrackSource into a reusable base class,
rtc::AdaptedVideoTrackSource. The base class owns the VideoAdapter,
VideoBroadcaster and frame stats, implements AddOrUpdateSink,
RemoveSink and GetStats, and applies pending rotation to non-native
frames in OnFrame. Subclasses call AdaptFrame() before producing a
frame and OnFrame() to deliver it; timestamp translation is left to
the subclass. VideoBroadcaster::OnFrame now drops frames with pending
rotation for sinks that requested rotation_applied, since frame
delivery is not synchronized with changes to the sink wants, and the
thread check in VideoBroadcaster::wants() is removed because the
wants are now also read from the frame-delivery thread via
apply_rotation().

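For reference, a minimal sketch of how a capture source can be built
on the new base class. MyCaptureSource and OnCapturedFrame() are
illustrative names only (not part of this CL), include paths follow
the tree layout used by this CL, and the crop/scale step is omitted;
the AdaptFrame()/OnFrame() calls mirror the updated
AndroidVideoTrackSource.

    #include "webrtc/base/optional.h"
    #include "webrtc/base/timestampaligner.h"
    #include "webrtc/base/timeutils.h"
    #include "webrtc/common_video/include/video_frame_buffer.h"
    #include "webrtc/media/base/adaptedvideotracksource.h"
    #include "webrtc/media/engine/webrtcvideoframe.h"

    class MyCaptureSource : public rtc::AdaptedVideoTrackSource {
     public:
      // Hypothetical callback, invoked on the capture thread for each frame.
      void OnCapturedFrame(
          const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
          webrtc::VideoRotation rotation,
          int64_t camera_time_us) {
        // Translate the camera clock to the system monotonic clock, as
        // AndroidVideoTrackSource does; AdaptFrame() no longer does this.
        const int64_t translated_time_us =
            timestamp_aligner_.TranslateTimestamp(camera_time_us,
                                                  rtc::TimeMicros());

        int adapted_width;
        int adapted_height;
        int crop_width;
        int crop_height;
        int crop_x;
        int crop_y;
        if (!AdaptFrame(buffer->width(), buffer->height(), camera_time_us,
                        &adapted_width, &adapted_height,
                        &crop_width, &crop_height, &crop_x, &crop_y)) {
          // No sink wants a frame right now, or the VideoAdapter dropped it.
          return;
        }

        // Cropping and scaling to adapted_width x adapted_height is the
        // subclass's job (e.g. NV12ToI420Scaler in the Android source);
        // omitted here.

        // The base class applies pending rotation to non-native buffers
        // when a sink has requested wants.rotation_applied.
        OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                          translated_time_us, 0));
      }

      // Remaining VideoTrackSourceInterface methods.
      SourceState state() const override { return kLive; }
      bool remote() const override { return false; }
      bool is_screencast() const override { return false; }
      rtc::Optional<bool> needs_denoising() const override {
        return rtc::Optional<bool>();
      }

     private:
      rtc::TimestampAligner timestamp_aligner_;
    };
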
BUG=webrtc:6353

Review-Url: https://codereview.webrtc.org/2328333002
Cr-Original-Commit-Position: refs/heads/master@{#14345}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 6f5a6c318890bf544fc12fedd56db4bc29af69ef
diff --git a/api/androidvideotracksource.cc b/api/androidvideotracksource.cc
index 6fe8c95..5129454 100644
--- a/api/androidvideotracksource.cc
+++ b/api/androidvideotracksource.cc
@@ -12,8 +12,6 @@
 
 #include <utility>
 
-#include "third_party/libyuv/include/libyuv/rotate.h"
-
 namespace webrtc {
 
 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
@@ -27,21 +25,9 @@
           j_egl_context)),
       is_screencast_(is_screencast) {
   LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
-  worker_thread_checker_.DetachFromThread();
   camera_thread_checker_.DetachFromThread();
 }
 
-bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
-  rtc::CritScope lock(&stats_crit_);
-
-  if (!stats_) {
-    return false;
-  }
-
-  *stats = *stats_;
-  return true;
-}
-
 void AndroidVideoTrackSource::SetState(SourceState state) {
   if (rtc::Thread::Current() != signaling_thread_) {
     invoker_.AsyncInvoke<void>(
@@ -56,34 +42,6 @@
   }
 }
 
-void AndroidVideoTrackSource::AddOrUpdateSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
-    const rtc::VideoSinkWants& wants) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.AddOrUpdateSink(sink, wants);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::RemoveSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.RemoveSink(sink);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::OnSinkWantsChanged(
-    const rtc::VideoSinkWants& wants) {
-  {
-    rtc::CritScope lock(&apply_rotation_crit_);
-    apply_rotation_ = wants.rotation_applied;
-  }
-
-  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
-                                     wants.max_pixel_count_step_up);
-}
-
 void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                         int length,
                                                         int width,
@@ -94,17 +52,20 @@
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);
 
+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;
 
-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     return;
   }
 
@@ -134,33 +95,9 @@
       buffer->MutableDataU(), buffer->StrideU(),
       buffer->width(), buffer->height());
 
-  // Applying rotation is only supported for legacy reasons, and the performance
-  // for this path is not critical.
-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_ && rotation != 0) {
-    rtc::scoped_refptr<I420Buffer> rotated_buffer =
-        rotation == 180 ? I420Buffer::Create(buffer->width(), buffer->height())
-                        : I420Buffer::Create(buffer->height(), buffer->width());
-
-    libyuv::I420Rotate(
-        buffer->DataY(), buffer->StrideY(),
-        buffer->DataU(), buffer->StrideU(),
-        buffer->DataV(), buffer->StrideV(),
-        rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
-        rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
-        rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
-        buffer->width(), buffer->height(),
-        static_cast<libyuv::RotationMode>(rotation));
-
-    buffer = rotated_buffer;
-  }
-
   OnFrame(cricket::WebRtcVideoFrame(
-              buffer,
-              apply_rotation_ ? webrtc::kVideoRotation_0
-                              : static_cast<webrtc::VideoRotation>(rotation),
-              translated_camera_time_us, 0),
-          width, height);
+              buffer, static_cast<webrtc::VideoRotation>(rotation),
+              translated_camera_time_us, 0));
 }
 
 void AndroidVideoTrackSource::OnTextureFrameCaptured(
@@ -173,17 +110,20 @@
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);
 
+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;
 
-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     surface_texture_helper_->ReturnTextureFrame();
     return;
   }
@@ -195,8 +135,11 @@
               crop_x / static_cast<float>(width),
               crop_y / static_cast<float>(height));
 
-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_) {
+  // Make a local copy, since the value of apply_rotation() may
+  // change under our feet.
+  bool do_rotate = apply_rotation();
+
+  if (do_rotate) {
     if (rotation == webrtc::kVideoRotation_90 ||
         rotation == webrtc::kVideoRotation_270) {
       std::swap(adapted_width, adapted_height);
@@ -208,21 +151,9 @@
               surface_texture_helper_->CreateTextureFrame(
                   adapted_width, adapted_height,
                   webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
-              apply_rotation_ ? webrtc::kVideoRotation_0
-                              : static_cast<webrtc::VideoRotation>(rotation),
-              translated_camera_time_us, 0),
-          width, height);
-}
-
-void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
-                                      int width,
-                                      int height) {
-  {
-    rtc::CritScope lock(&stats_crit_);
-    stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
-  }
-
-  broadcaster_.OnFrame(frame);
+              do_rotate ? webrtc::kVideoRotation_0
+                        : static_cast<webrtc::VideoRotation>(rotation),
+              translated_camera_time_us, 0));
 }
 
 void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
@@ -230,39 +161,7 @@
                                                     int fps) {
   cricket::VideoFormat format(width, height,
                               cricket::VideoFormat::FpsToInterval(fps), 0);
-  video_adapter_.OnOutputFormatRequest(format);
-}
-
-bool AndroidVideoTrackSource::AdaptFrame(int width,
-                                         int height,
-                                         int64_t camera_time_us,
-                                         int* out_width,
-                                         int* out_height,
-                                         int* crop_width,
-                                         int* crop_height,
-                                         int* crop_x,
-                                         int* crop_y,
-                                         int64_t* translated_camera_time_us) {
-  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-
-  int64_t system_time_us = rtc::TimeMicros();
-  *translated_camera_time_us =
-      timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
-
-  if (!broadcaster_.frame_wanted()) {
-    return false;
-  }
-
-  if (!video_adapter_.AdaptFrameResolution(
-          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
-          crop_width, crop_height, out_width, out_height)) {
-    // VideoAdapter dropped the frame.
-    return false;
-  }
-  *crop_x = (width - *crop_width) / 2;
-  *crop_y = (height - *crop_height) / 2;
-
-  return true;
+  video_adapter()->OnOutputFormatRequest(format);
 }
 
 }  // namespace webrtc
diff --git a/api/androidvideotracksource.h b/api/androidvideotracksource.h
index 1b2c4b2..4dc921a 100644
--- a/api/androidvideotracksource.h
+++ b/api/androidvideotracksource.h
@@ -13,21 +13,17 @@
 
 #include "webrtc/api/android/jni/native_handle_impl.h"
 #include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
-#include "webrtc/api/mediastreaminterface.h"
-#include "webrtc/api/notifier.h"
 #include "webrtc/base/asyncinvoker.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/thread_checker.h"
 #include "webrtc/base/timestampaligner.h"
 #include "webrtc/common_video/include/i420_buffer_pool.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/media/base/videoadapter.h"
-#include "webrtc/media/base/videobroadcaster.h"
-#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/media/base/adaptedvideotracksource.h"
 
 namespace webrtc {
 
-class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
+class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
  public:
   AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                           JNIEnv* jni,
@@ -43,11 +39,6 @@
     return rtc::Optional<bool>(false);
   }
 
-  // Returns false if no stats are available, e.g, for a remote
-  // source, or a source which has not seen its first frame yet.
-  // Should avoid blocking.
-  bool GetStats(Stats* stats) override;
-
   // Called by the native capture observer
   void SetState(SourceState state);
 
@@ -55,10 +46,6 @@
 
   bool remote() const override { return false; }
 
-  void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
-                       const rtc::VideoSinkWants& wants) override;
-  void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
-
   void OnByteBufferFrameCaptured(const void* frame_data,
                                  int length,
                                  int width,
@@ -82,35 +69,14 @@
  private:
   rtc::Thread* signaling_thread_;
   rtc::AsyncInvoker invoker_;
-  rtc::ThreadChecker worker_thread_checker_;
   rtc::ThreadChecker camera_thread_checker_;
-  rtc::CriticalSection stats_crit_;
-  rtc::Optional<Stats> stats_ GUARDED_BY(stats_crit_);
   SourceState state_;
   rtc::VideoBroadcaster broadcaster_;
   rtc::TimestampAligner timestamp_aligner_;
-  cricket::VideoAdapter video_adapter_;
-  rtc::CriticalSection apply_rotation_crit_;
-  bool apply_rotation_ GUARDED_BY(apply_rotation_crit_);
   webrtc::NV12ToI420Scaler nv12toi420_scaler_;
   webrtc::I420BufferPool buffer_pool_;
   rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
   const bool is_screencast_;
-
-  void OnFrame(const cricket::VideoFrame& frame, int width, int height);
-
-  void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
-
-  bool AdaptFrame(int width,
-                  int height,
-                  int64_t camera_time_us,
-                  int* out_width,
-                  int* out_height,
-                  int* crop_width,
-                  int* crop_height,
-                  int* crop_x,
-                  int* crop_y,
-                  int64_t* translated_camera_time_us);
 };
 
 }  // namespace webrtc
diff --git a/media/BUILD.gn b/media/BUILD.gn
index 6179f67..fa68dad 100644
--- a/media/BUILD.gn
+++ b/media/BUILD.gn
@@ -47,6 +47,8 @@
   libs = []
   deps = []
   sources = [
+    "base/adaptedvideotracksource.cc",
+    "base/adaptedvideotracksource.h",
     "base/audiosource.h",
     "base/codec.cc",
     "base/codec.h",
diff --git a/media/base/adaptedvideotracksource.cc b/media/base/adaptedvideotracksource.cc
new file mode 100644
index 0000000..5f6144f
--- /dev/null
+++ b/media/base/adaptedvideotracksource.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/media/base/adaptedvideotracksource.h"
+
+namespace rtc {
+
+AdaptedVideoTrackSource::AdaptedVideoTrackSource() {
+  thread_checker_.DetachFromThread();
+}
+
+bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
+  rtc::CritScope lock(&stats_crit_);
+
+  if (!stats_) {
+    return false;
+  }
+
+  *stats = *stats_;
+  return true;
+}
+
+void AdaptedVideoTrackSource::OnFrame(const cricket::VideoFrame& frame) {
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      frame.video_frame_buffer());
+  /* Note that this is a "best effort" approach to
+     wants.rotation_applied; the value returned by apply_rotation()
+     can change from false to true between the check below and the
+     call to broadcaster_.OnFrame(), in which case we may deliver a
+     frame with pending rotation even though a sink with
+     wants.rotation_applied == true was just added. The
+     VideoBroadcaster enforces synchronization for us in this case,
+     by not passing the frame on to sinks which don't want it. */
+  if (apply_rotation() &&
+      frame.rotation() != webrtc::kVideoRotation_0 &&
+      !buffer->native_handle()) {
+    /* Apply pending rotation. */
+    broadcaster_.OnFrame(cricket::WebRtcVideoFrame(
+        webrtc::I420Buffer::Rotate(buffer, frame.rotation()),
+        webrtc::kVideoRotation_0, frame.timestamp_us()));
+  } else {
+    broadcaster_.OnFrame(frame);
+  }
+}
+
+void AdaptedVideoTrackSource::AddOrUpdateSink(
+    rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+  broadcaster_.AddOrUpdateSink(sink, wants);
+  OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void AdaptedVideoTrackSource::RemoveSink(
+    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+  broadcaster_.RemoveSink(sink);
+  OnSinkWantsChanged(broadcaster_.wants());
+}
+
+bool AdaptedVideoTrackSource::apply_rotation() {
+  return broadcaster_.wants().rotation_applied;
+}
+
+void AdaptedVideoTrackSource::OnSinkWantsChanged(
+    const rtc::VideoSinkWants& wants) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
+                                     wants.max_pixel_count_step_up);
+}
+
+bool AdaptedVideoTrackSource::AdaptFrame(int width,
+                                         int height,
+                                         int64_t time_us,
+                                         int* out_width,
+                                         int* out_height,
+                                         int* crop_width,
+                                         int* crop_height,
+                                         int* crop_x,
+                                         int* crop_y) {
+  {
+    rtc::CritScope lock(&stats_crit_);
+    stats_ = rtc::Optional<Stats>({width, height});
+  }
+
+  if (!broadcaster_.frame_wanted()) {
+    return false;
+  }
+
+  if (!video_adapter_.AdaptFrameResolution(
+          width, height, time_us * rtc::kNumNanosecsPerMicrosec,
+          crop_width, crop_height, out_width, out_height)) {
+    // VideoAdapter dropped the frame.
+    return false;
+  }
+
+  *crop_x = (width - *crop_width) / 2;
+  *crop_y = (height - *crop_height) / 2;
+  return true;
+}
+
+}  // namespace rtc
diff --git a/media/base/adaptedvideotracksource.h b/media/base/adaptedvideotracksource.h
new file mode 100644
index 0000000..dad24db
--- /dev/null
+++ b/media/base/adaptedvideotracksource.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+#define WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+#include "webrtc/media/base/videoadapter.h"
+#include "webrtc/media/base/videobroadcaster.h"
+
+namespace rtc {
+
+// Base class for sources which need video adaptation, e.g., video
+// capture sources. Sinks must be added and removed on one and only
+// one thread, while AdaptFrame and OnFrame may be called on any
+// thread.
+class AdaptedVideoTrackSource
+    : public webrtc::Notifier<webrtc::VideoTrackSourceInterface> {
+ public:
+  AdaptedVideoTrackSource();
+
+ protected:
+  // Checks the apply_rotation() flag. If the frame needs rotation, and it is a
+  // plain memory frame, it is rotated. Subclasses producing native frames must
+  // handle apply_rotation() themselves.
+  void OnFrame(const cricket::VideoFrame& frame);
+
+  // Returns, via the out parameters, the frame size and crop region to
+  // use after adaptation. Returns true if a frame is wanted, and false
+  // if there are no interested sinks or the VideoAdapter drops the frame.
+  bool AdaptFrame(int width,
+                  int height,
+                  int64_t time_us,
+                  int* out_width,
+                  int* out_height,
+                  int* crop_width,
+                  int* crop_height,
+                  int* crop_x,
+                  int* crop_y);
+
+  // Returns the current value of the apply_rotation flag, derived
+  // from the VideoSinkWants of registered sinks. The value is
+  // updated in AddOrUpdateSink and RemoveSink. Beware that when this
+  // method is called from a different thread, the value may become
+  // stale before it is used.
+  bool apply_rotation();
+
+  cricket::VideoAdapter* video_adapter() { return &video_adapter_; }
+
+ private:
+  // Implements rtc::VideoSourceInterface.
+  void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
+
+  // Part of VideoTrackSourceInterface.
+  bool GetStats(Stats* stats) override;
+
+  void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+
+  rtc::ThreadChecker thread_checker_;
+
+  cricket::VideoAdapter video_adapter_;
+
+  rtc::CriticalSection stats_crit_;
+  rtc::Optional<Stats> stats_ GUARDED_BY(stats_crit_);
+
+  VideoBroadcaster broadcaster_;
+};
+
+}  // namespace rtc
+
+#endif  // WEBRTC_MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
diff --git a/media/base/videobroadcaster.cc b/media/base/videobroadcaster.cc
index ffe9069..d3bc7a0 100644
--- a/media/base/videobroadcaster.cc
+++ b/media/base/videobroadcaster.cc
@@ -45,7 +45,6 @@
 }
 
 VideoSinkWants VideoBroadcaster::wants() const {
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   rtc::CritScope cs(&sinks_and_wants_lock_);
   return current_wants_;
 }
@@ -53,6 +52,15 @@
 void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
   rtc::CritScope cs(&sinks_and_wants_lock_);
   for (auto& sink_pair : sink_pairs()) {
+    if (sink_pair.wants.rotation_applied &&
+        frame.rotation() != webrtc::kVideoRotation_0) {
+      // Calls to OnFrame are not synchronized with changes to the sink wants.
+      // When rotation_applied is set to true, one or a few frames may get here
+      // with rotation still pending. Protect sinks that don't expect any
+      // pending rotation.
+      LOG(LS_VERBOSE) << "Discarding frame with unexpected rotation.";
+      continue;
+    }
     if (sink_pair.wants.black_frames) {
       sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame(
           GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
diff --git a/media/base/videobroadcaster.h b/media/base/videobroadcaster.h
index 5d4e2ae..1fcc9c3 100644
--- a/media/base/videobroadcaster.h
+++ b/media/base/videobroadcaster.h
@@ -45,6 +45,10 @@
   // aggregated by all VideoSinkWants from all sinks.
   VideoSinkWants wants() const;
 
+  // This method ensures that if a sink sets rotation_applied == true,
+  // it will never receive a frame with pending rotation. Our caller
+  // may pass in frames without precise synchronization with changes
+  // to the VideoSinkWants.
   void OnFrame(const cricket::VideoFrame& frame) override;
 
  protected:
diff --git a/media/media.gyp b/media/media.gyp
index 9a9fbbb..8c82779 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -26,6 +26,8 @@
         ],
       },
       'sources': [
+        'base/adaptedvideotracksource.cc',
+        'base/adaptedvideotracksource.h',
         'base/audiosource.h',
         'base/codec.cc',
         'base/codec.h',