Update output pixel format when starting video capture on iOS.

Update the output pixel format to the client-supplied format when
starting camera capture.

Also add a new API method that returns the preferred output pixel
format, derived from the
AVCaptureVideoDataOutput.availableVideoCVPixelFormatTypes property,
and use it in AppRTCMobile.

Bug: webrtc:8505
Change-Id: Ia24eaf91d70d0703a34d38b06bb6eea28fb922b8
Reviewed-on: https://webrtc-review.googlesource.com/22680
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Zeke Chin <tkchin@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20697}
diff --git a/examples/objc/AppRTCMobile/ARDCaptureController.m b/examples/objc/AppRTCMobile/ARDCaptureController.m
index 25aba8c..6830e88 100644
--- a/examples/objc/AppRTCMobile/ARDCaptureController.m
+++ b/examples/objc/AppRTCMobile/ARDCaptureController.m
@@ -79,10 +79,13 @@
 
   for (AVCaptureDeviceFormat *format in formats) {
     CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
     int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
     if (diff < currentDiff) {
       selectedFormat = format;
       currentDiff = diff;
+    } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
     }
   }
 
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m b/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
index b1fc11c..17aa6cc 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
@@ -31,6 +31,8 @@
   AVCaptureVideoDataOutput *_videoDataOutput;
   AVCaptureSession *_captureSession;
   AVCaptureDevice *_currentDevice;
+  FourCharCode _preferredOutputPixelFormat;
+  FourCharCode _outputPixelFormat;
   BOOL _hasRetriedOnFatalError;
   BOOL _isRunning;
   // Will the session be running once all asynchronous operations have been completed?
@@ -105,6 +107,10 @@
   return device.formats;
 }
 
+- (FourCharCode)preferredOutputPixelFormat {
+  return _preferredOutputPixelFormat;
+}
+
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps {
@@ -129,6 +135,7 @@
                       [self reconfigureCaptureSessionInput];
                       [self updateOrientation];
                       [self updateDeviceCaptureFormat:format fps:fps];
+                      [self updateVideoDataOutputPixelFormat:format];
                       [_captureSession startRunning];
                       [_currentDevice unlockForConfiguration];
                       _isRunning = YES;
@@ -385,12 +392,27 @@
   NSNumber *pixelFormat = availablePixelFormats.firstObject;
   NSAssert(pixelFormat, @"Output device has no supported formats.");
 
+  _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
+  _outputPixelFormat = _preferredOutputPixelFormat;
   videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
   videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
   [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
   _videoDataOutput = videoDataOutput;
 }
 
+- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
+  FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+  if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
+    mediaSubType = _preferredOutputPixelFormat;
+  }
+
+  if (mediaSubType != _outputPixelFormat) {
+    _outputPixelFormat = mediaSubType;
+    _videoDataOutput.videoSettings =
+        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
+  }
+}
+
 #pragma mark - Private, called inside capture queue
 
 - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
diff --git a/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h b/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
index 943ddc8..8f0f377 100644
--- a/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
+++ b/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
@@ -29,7 +29,13 @@
 // Returns list of formats that are supported by this class for this device.
 + (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
 
+// Returns the most efficient supported output pixel format for this capturer.
+- (FourCharCode)preferredOutputPixelFormat;
+
 // Starts and stops the capture session asynchronously.
+// The device will capture video in the format given in the `format` parameter. If the pixel format
+// in `format` is supported by the WebRTC pipeline, the same pixel format will be used for the
+// output. Otherwise, the format returned by `preferredOutputPixelFormat` will be used.
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps;