Implement CVO for iOS capturer

Rotation is currently always applied by AVFoundation, by reconfiguring
the capture connection's video orientation. This CL sets the rotation
field on the frame instead. This avoids the flash currently visible in
the video when the device is rotated, and also avoids reconfiguring the
local encoder and remote decoder on every rotation.

BUG=b/30651939

Review-Url: https://codereview.webrtc.org/2271583003
Cr-Commit-Position: refs/heads/master@{#13916}
diff --git a/webrtc/sdk/BUILD.gn b/webrtc/sdk/BUILD.gn
index 4fb0073..0affad0 100644
--- a/webrtc/sdk/BUILD.gn
+++ b/webrtc/sdk/BUILD.gn
@@ -198,6 +198,13 @@
       ":rtc_sdk_common_objc",
       "../api:libjingle_peerconnection",
     ]
+
+    if (rtc_build_libyuv) {
+      deps += [ "$rtc_libyuv_dir" ]
+      public_deps = [
+        "$rtc_libyuv_dir",
+      ]
+    }
   }
 
   # TODO(tkchin): Add the rtc_sdk_framework_objc target.
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
index 37378e7..5ce0462 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
@@ -56,13 +56,16 @@
 
   // Converts the sample buffer into a cricket::CapturedFrame and signals the
   // frame for capture.
-  void CaptureSampleBuffer(CMSampleBufferRef sampleBuffer);
+  void CaptureSampleBuffer(CMSampleBufferRef sample_buffer,
+                           webrtc::VideoRotation rotation);
 
   // Handles messages from posts.
   void OnMessage(rtc::Message *msg) override;
 
  private:
-  void OnFrameMessage(CVImageBufferRef image_buffer, int64_t capture_time_ns);
+  void OnFrameMessage(CVImageBufferRef image_buffer,
+                      webrtc::VideoRotation rotation,
+                      int64_t capture_time_ns);
 
   RTCAVFoundationVideoCapturerInternal *_capturer;
   rtc::Thread *_startThread;  // Set in Start(), unset in Stop().
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
index 7025d88..625f83d 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -22,10 +22,13 @@
 #import "WebRTC/UIDevice+RTCDevice.h"
 #endif
 
+#include "libyuv/rotate.h"
+
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/common_video/include/corevideo_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
 
 struct AVCaptureSessionPresetResolution {
   NSString *sessionPreset;
@@ -98,7 +101,7 @@
   AVCaptureVideoDataOutput *_videoDataOutput;
   // The cricket::VideoCapturer that owns this class. Should never be NULL.
   webrtc::AVFoundationVideoCapturer *_capturer;
-  BOOL _orientationHasChanged;
+  webrtc::VideoRotation _rotation;
   BOOL _hasRetriedOnFatalError;
   BOOL _isRunning;
   BOOL _hasStarted;
@@ -232,7 +235,14 @@
   self.hasStarted = YES;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    _orientationHasChanged = NO;
+#if TARGET_OS_IPHONE
+    // Default to portrait orientation on iPhone. This will be reset in
+    // updateOrientation unless orientation is unknown/faceup/facedown.
+    _rotation = webrtc::kVideoRotation_90;
+#else
+    // No rotation on Mac.
+    _rotation = webrtc::kVideoRotation_0;
+#endif
     [self updateOrientation];
 #if TARGET_OS_IPHONE
     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
@@ -267,7 +277,6 @@
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-    _orientationHasChanged = YES;
     [self updateOrientation];
   }];
 }
@@ -282,7 +291,7 @@
   if (!self.hasStarted) {
     return;
   }
-  _capturer->CaptureSampleBuffer(sampleBuffer);
+  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
 }
 
 - (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -530,36 +539,26 @@
 
 // Called from capture session queue.
 - (void)updateOrientation {
-  AVCaptureConnection *connection =
-      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
-  if (!connection.supportsVideoOrientation) {
-    // TODO(tkchin): set rotation bit on frames.
-    return;
-  }
 #if TARGET_OS_IPHONE
-  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
-      orientation = AVCaptureVideoOrientationPortrait;
+      _rotation = webrtc::kVideoRotation_90;
       break;
     case UIDeviceOrientationPortraitUpsideDown:
-      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
+      _rotation = webrtc::kVideoRotation_270;
       break;
     case UIDeviceOrientationLandscapeLeft:
-      orientation = AVCaptureVideoOrientationLandscapeRight;
+      _rotation = webrtc::kVideoRotation_180;
       break;
     case UIDeviceOrientationLandscapeRight:
-      orientation = AVCaptureVideoOrientationLandscapeLeft;
+      _rotation = webrtc::kVideoRotation_0;
       break;
     case UIDeviceOrientationFaceUp:
     case UIDeviceOrientationFaceDown:
     case UIDeviceOrientationUnknown:
-      if (!_orientationHasChanged) {
-        connection.videoOrientation = orientation;
-      }
-      return;
+      // Ignore.
+      break;
   }
-  connection.videoOrientation = orientation;
 #endif
 }
 
@@ -598,9 +597,12 @@
 };
 
 struct AVFoundationFrame {
-  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
-    : image_buffer(buffer), capture_time(time) {}
+  AVFoundationFrame(CVImageBufferRef buffer,
+                    webrtc::VideoRotation rotation,
+                    int64_t time)
+      : image_buffer(buffer), rotation(rotation), capture_time(time) {}
   CVImageBufferRef image_buffer;
+  webrtc::VideoRotation rotation;
   int64_t capture_time;
 };
 
@@ -708,14 +710,14 @@
 }
 
 void AVFoundationVideoCapturer::CaptureSampleBuffer(
-    CMSampleBufferRef sampleBuffer) {
-  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
-      !CMSampleBufferIsValid(sampleBuffer) ||
-      !CMSampleBufferDataIsReady(sampleBuffer)) {
+    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
+  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
+      !CMSampleBufferIsValid(sample_buffer) ||
+      !CMSampleBufferDataIsReady(sample_buffer)) {
     return;
   }
 
-  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
   if (image_buffer == NULL) {
     return;
   }
@@ -723,7 +725,7 @@
   // Retain the buffer and post it to the webrtc thread. It will be released
   // after it has successfully been signaled.
   CVBufferRetain(image_buffer);
-  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
+  AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos());
   _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                      new rtc::TypedMessageData<AVFoundationFrame>(frame));
 }
@@ -734,7 +736,7 @@
       rtc::TypedMessageData<AVFoundationFrame>* data =
         static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
       const AVFoundationFrame& frame = data->data();
-      OnFrameMessage(frame.image_buffer, frame.capture_time);
+      OnFrameMessage(frame.image_buffer, frame.rotation, frame.capture_time);
       delete data;
       break;
     }
@@ -742,6 +744,7 @@
 }
 
 void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
+                                               webrtc::VideoRotation rotation,
                                                int64_t capture_time_ns) {
   RTC_DCHECK(_startThread->IsCurrent());
 
@@ -769,16 +772,33 @@
   }
 
   if (adapted_width != captured_width || crop_width != captured_width ||
-      adapted_height != captured_height || crop_height != captured_height) {
+      adapted_height != captured_height || crop_height != captured_height ||
+      (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
     // TODO(magjed): Avoid converting to I420.
     rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
         _buffer_pool.CreateBuffer(adapted_width, adapted_height));
     scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                     crop_y, crop_width, crop_height);
-    buffer = scaled_buffer;
+    if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
+      buffer = scaled_buffer;
+    } else {
+      // Applying rotation is only supported for legacy reasons and performance
+      // is not critical here.
+      buffer = (rotation == webrtc::kVideoRotation_180)
+          ? I420Buffer::Create(adapted_width, adapted_height)
+          : I420Buffer::Create(adapted_height, adapted_width);
+      libyuv::I420Rotate(scaled_buffer->DataY(), scaled_buffer->StrideY(),
+                         scaled_buffer->DataU(), scaled_buffer->StrideU(),
+                         scaled_buffer->DataV(), scaled_buffer->StrideV(),
+                         buffer->MutableDataY(), buffer->StrideY(),
+                         buffer->MutableDataU(), buffer->StrideU(),
+                         buffer->MutableDataV(), buffer->StrideV(),
+                         crop_width, crop_height,
+                         static_cast<libyuv::RotationMode>(rotation));
+    }
   }
 
-  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
+  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                     translated_camera_time_us, 0),
           captured_width, captured_height);
 
diff --git a/webrtc/sdk/sdk.gyp b/webrtc/sdk/sdk.gyp
index 4a5fa0e..46d324e 100644
--- a/webrtc/sdk/sdk.gyp
+++ b/webrtc/sdk/sdk.gyp
@@ -189,6 +189,9 @@
             'objc/Framework/Headers/WebRTC/RTCVideoTrack.h',
           ], # sources
           'conditions': [
+            ['build_libyuv==1', {
+              'dependencies': ['<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv'],
+            }],
             ['OS=="ios"', {
               'sources': [
                 'objc/Framework/Classes/RTCEAGLVideoView.m',