Revert "Revert "Support more formats in RTCVideoFrame""

This reverts commit 0789dab2cbd1617e94d7300e375163d42345f3d4.

Reason for revert: Include objc_corevideoframebuffer target

Original change's description:
> Revert "Support more formats in RTCVideoFrame"
> 
> This reverts commit bd2220a9c496ef2e8567b68d4be9435a110bdc34.
> 
> Reason for revert: Broke external clients
> 
> Original change's description:
> > Support more formats in RTCVideoFrame
> > 
> > Implement Obj-C version of webrtc::VideoFrameBuffer and use that in
> > RTCVideoFrame.
> > 
> > Bug: webrtc:7785
> > Change-Id: I49f42bcf451dd6769b3a79a65fe7b400dce22677
> > Reviewed-on: https://chromium-review.googlesource.com/536773
> > Commit-Queue: Anders Carlsson <andersc@webrtc.org>
> > Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> > Cr-Commit-Position: refs/heads/master@{#18691}
> 
> TBR=magjed@webrtc.org,andersc@webrtc.org
> 
> Change-Id: Id765dd9543ed0613a6b2de108b268c3501025fcd
> No-Presubmit: true
> No-Tree-Checks: true
> No-Try: true
> Bug: webrtc:7785
> Reviewed-on: https://chromium-review.googlesource.com/542837
> Reviewed-by: Anders Carlsson <andersc@webrtc.org>
> Commit-Queue: Anders Carlsson <andersc@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#18697}

TBR=magjed@webrtc.org,andersc@webrtc.org

Change-Id: I1ef5313b4a6c56eb8c7fd02d95db62c4e3c00255
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:7785
Reviewed-on: https://chromium-review.googlesource.com/542838
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18716}
diff --git a/webrtc/sdk/BUILD.gn b/webrtc/sdk/BUILD.gn
index 9fc22aa..9526d60 100644
--- a/webrtc/sdk/BUILD.gn
+++ b/webrtc/sdk/BUILD.gn
@@ -106,8 +106,11 @@
       sources = [
         "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h",
         "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm",
+        "objc/Framework/Classes/Video/RTCCVPixelBuffer.mm",
         "objc/Framework/Classes/Video/RTCDefaultShader.h",
         "objc/Framework/Classes/Video/RTCDefaultShader.mm",
+        "objc/Framework/Classes/Video/RTCI420Buffer+Private.h",
+        "objc/Framework/Classes/Video/RTCI420Buffer.mm",
         "objc/Framework/Classes/Video/RTCI420TextureCache.h",
         "objc/Framework/Classes/Video/RTCI420TextureCache.mm",
         "objc/Framework/Classes/Video/RTCOpenGLDefines.h",
@@ -117,8 +120,11 @@
         "objc/Framework/Classes/Video/avfoundationformatmapper.mm",
         "objc/Framework/Classes/Video/avfoundationvideocapturer.h",
         "objc/Framework/Classes/Video/avfoundationvideocapturer.mm",
+        "objc/Framework/Classes/Video/objc_frame_buffer.h",
+        "objc/Framework/Classes/Video/objc_frame_buffer.mm",
         "objc/Framework/Classes/Video/objcvideotracksource.h",
         "objc/Framework/Classes/Video/objcvideotracksource.mm",
+        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
       ]
       libs = []
       if (is_ios) {
@@ -143,8 +149,6 @@
 
       deps = [
         ":objc_common",
-        ":objc_corevideoframebuffer",
-        ":objc_videotoolbox",
         "../api:libjingle_peerconnection_api",
         "../base:rtc_base",
         "../common_video",
@@ -271,7 +275,6 @@
         "objc/Framework/Classes/PeerConnection/RTCSessionDescription.mm",
         "objc/Framework/Classes/PeerConnection/RTCTracing.mm",
         "objc/Framework/Classes/PeerConnection/RTCVideoCapturer.m",
-        "objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h",
         "objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm",
         "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter+Private.h",
         "objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.h",
@@ -308,6 +311,7 @@
         "objc/Framework/Headers/WebRTC/RTCTracing.h",
         "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
         "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
+        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
         "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
         "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
         "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
@@ -338,8 +342,10 @@
         ":objc_common",
         ":objc_corevideoframebuffer",
         ":objc_video",
+        ":objc_videotoolbox",
         "../api:video_frame_api",
         "../base:rtc_base",
+        "../common_video",
         "../media:rtc_media_base",
         "../pc:libjingle_peerconnection",
       ]
@@ -461,6 +467,7 @@
           "objc/Framework/Headers/WebRTC/RTCTracing.h",
           "objc/Framework/Headers/WebRTC/RTCVideoCapturer.h",
           "objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
+          "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
           "objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
           "objc/Framework/Headers/WebRTC/RTCVideoSource.h",
           "objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
@@ -552,13 +559,14 @@
         "objc/Framework/Classes/VideoToolbox/nalu_rewriter.h",
         "objc/Framework/Classes/VideoToolbox/videocodecfactory.h",
         "objc/Framework/Classes/VideoToolbox/videocodecfactory.mm",
+        "objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h",
       ]
 
       configs += [ "..:common_objc" ]
 
       deps = [
         ":objc_common",
-        ":objc_corevideoframebuffer",
+        ":objc_video",
         "../base:rtc_base_approved",
         "../common_video",
         "../media:rtc_media",
diff --git a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
index 7ca1d4e..027db05 100644
--- a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
@@ -9,6 +9,7 @@
  */
 
 #import "RTCMTLI420Renderer.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #import <Metal/Metal.h>
 #import <MetalKit/MetalKit.h>
@@ -96,6 +97,8 @@
     return NO;
   }
 
+  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+
   // Luma (y) texture.
   if (!_descriptor || (_width != frame.width && _height != frame.height)) {
     _width = frame.width;
@@ -111,8 +114,8 @@
   // Chroma (u,v) textures
   [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
                mipmapLevel:0
-                 withBytes:frame.dataY
-               bytesPerRow:frame.strideY];
+                 withBytes:buffer.dataY
+               bytesPerRow:buffer.strideY];
 
   if (!_chromaDescriptor ||
       (_chromaWidth != frame.width / 2 && _chromaHeight != frame.height / 2)) {
@@ -130,12 +133,12 @@
 
   [_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
                mipmapLevel:0
-                 withBytes:frame.dataU
-               bytesPerRow:frame.strideU];
+                 withBytes:buffer.dataU
+               bytesPerRow:buffer.strideU];
   [_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
                mipmapLevel:0
-                 withBytes:frame.dataV
-               bytesPerRow:frame.strideV];
+                 withBytes:buffer.dataV
+               bytesPerRow:buffer.strideV];
 
   return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
 }
diff --git a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
index b5b9a0f..70ecf54 100644
--- a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
@@ -15,6 +15,7 @@
 
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #import "RTCMTLRenderer+Private.h"
 
@@ -85,7 +86,7 @@
 
 - (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
   [super setupTexturesForFrame:frame];
-  CVPixelBufferRef pixelBuffer = frame.nativeHandle;
+  CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> lumaTexture = nil;
   id<MTLTexture> chromaTexture = nil;
diff --git a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
index 96778c4..e36cb3a 100644
--- a/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
+++ b/webrtc/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
@@ -15,6 +15,7 @@
 
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #import "RTCMTLI420Renderer.h"
 #import "RTCMTLNV12Renderer.h"
@@ -108,7 +109,7 @@
   }
 
   id<RTCMTLRenderer> renderer = nil;
-  if (self.videoFrame.nativeHandle) {
+  if ([self.videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     if (!self.rendererNV12) {
       self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
       if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
index cbeddfe..a2290c2 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m
@@ -12,6 +12,7 @@
 
 #import "WebRTC/RTCCameraVideoCapturer.h"
 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #if TARGET_OS_IPHONE
 #import "WebRTC/UIDevice+RTCDevice.h"
@@ -191,11 +192,12 @@
     return;
   }
 
+  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
   int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
                         kNanosecondsPerSecond;
-  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
-                                                                rotation:_rotation
-                                                             timeStampNs:timeStampNs];
+  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
+                                                           rotation:_rotation
+                                                        timeStampNs:timeStampNs];
   [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
 }
 
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
index 933207d..178a958 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCFileVideoCapturer.m
@@ -11,6 +11,7 @@
 #import "RTCFileVideoCapturer.h"
 
 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 @implementation RTCFileVideoCapturer {
   AVAssetReader *_reader;
@@ -133,10 +134,11 @@
       return;
     }
 
+    RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
     NSTimeInterval timeStampSeconds = CACurrentMediaTime();
     int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
     RTCVideoFrame *videoFrame =
-        [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer rotation:0 timeStampNs:timeStampNs];
+        [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
     CFRelease(sampleBuffer);
 
     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h
deleted file mode 100644
index 3b36f5b..0000000
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame+Private.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "WebRTC/RTCVideoFrame.h"
-
-#include "webrtc/api/video/video_frame_buffer.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCVideoFrame ()
-
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer;
-
-- (instancetype)initWithVideoBuffer:
-                    (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-                           rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs
-    NS_DESIGNATED_INITIALIZER;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
index 20e8f92..ef93fef 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
@@ -8,22 +8,22 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#import "RTCVideoFrame+Private.h"
-
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h"
+#import "webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h"
 
 @implementation RTCVideoFrame {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
   RTCVideoRotation _rotation;
   int64_t _timeStampNs;
 }
 
+@synthesize buffer = _buffer;
+
 - (int)width {
-  return _videoBuffer->width();
+  return _buffer.width;
 }
 
 - (int)height {
-  return _videoBuffer->height();
+  return _buffer.height;
 }
 
 - (RTCVideoRotation)rotation {
@@ -31,27 +31,51 @@
 }
 
 - (const uint8_t *)dataY {
-  return _videoBuffer->GetI420()->DataY();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataY;
+  } else {
+    return nullptr;
+  }
 }
 
 - (const uint8_t *)dataU {
-  return _videoBuffer->GetI420()->DataU();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataU;
+  } else {
+    return nullptr;
+  }
 }
 
 - (const uint8_t *)dataV {
-  return _videoBuffer->GetI420()->DataV();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).dataV;
+  } else {
+    return nullptr;
+  }
 }
 
 - (int)strideY {
-  return _videoBuffer->GetI420()->StrideY();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideY;
+  } else {
+    return 0;
+  }
 }
 
 - (int)strideU {
-  return _videoBuffer->GetI420()->StrideU();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideU;
+  } else {
+    return 0;
+  }
 }
 
 - (int)strideV {
-  return _videoBuffer->GetI420()->StrideV();
+  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
+    return ((id<RTCI420Buffer>)_buffer).strideV;
+  } else {
+    return 0;
+  }
 }
 
 - (int64_t)timeStampNs {
@@ -59,26 +83,25 @@
 }
 
 - (CVPixelBufferRef)nativeHandle {
-  return (_videoBuffer->type() == webrtc::VideoFrameBuffer::Type::kNative) ?
-      static_cast<webrtc::CoreVideoFrameBuffer *>(_videoBuffer.get())->pixel_buffer() :
-      nil;
+  if ([_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+    return ((RTCCVPixelBuffer *)_buffer).pixelBuffer;
+  } else {
+    return nullptr;
+  }
 }
 
 - (RTCVideoFrame *)newI420VideoFrame {
-  return [[RTCVideoFrame alloc]
-      initWithVideoBuffer:_videoBuffer->ToI420()
-                 rotation:_rotation
-              timeStampNs:_timeStampNs];
+  return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
+                                      rotation:_rotation
+                                   timeStampNs:_timeStampNs];
 }
 
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                            rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
-      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
-  return [self initWithVideoBuffer:videoBuffer
-                          rotation:rotation
-                       timeStampNs:timeStampNs];
+  return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
+                     rotation:rotation
+                  timeStampNs:timeStampNs];
 }
 
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@@ -90,33 +113,26 @@
                               cropY:(int)cropY
                            rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
-      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(
-          pixelBuffer,
-          scaledWidth, scaledHeight,
-          cropWidth, cropHeight,
-          cropX, cropY));
-  return [self initWithVideoBuffer:videoBuffer
-                          rotation:rotation
-                       timeStampNs:timeStampNs];
+  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
+                                                                      adaptedWidth:scaledWidth
+                                                                     adaptedHeight:scaledHeight
+                                                                         cropWidth:cropWidth
+                                                                        cropHeight:cropHeight
+                                                                             cropX:cropX
+                                                                             cropY:cropY];
+  return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
 }
 
-#pragma mark - Private
-
-- (instancetype)initWithVideoBuffer:
-                    (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-                           rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs {
+- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
+                      rotation:(RTCVideoRotation)rotation
+                   timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
-    _videoBuffer = videoBuffer;
+    _buffer = buffer;
     _rotation = rotation;
     _timeStampNs = timeStampNs;
   }
-  return self;
-}
 
-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer {
-  return _videoBuffer;
+  return self;
 }
 
 @end
diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
index 1d5107b..736803b 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoRendererAdapter.mm
@@ -8,9 +8,11 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#import "RTCI420Buffer+Private.h"
 #import "RTCVideoRendererAdapter+Private.h"
-
-#import "RTCVideoFrame+Private.h"
+#import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
+#import "objc_frame_buffer.h"
 
 #include <memory>
 
@@ -25,12 +27,20 @@
   }
 
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
+    rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer = nativeVideoFrame.video_frame_buffer();
+    id<RTCVideoFrameBuffer> rtc_frame_buffer;
+    if (video_frame_buffer->type() == VideoFrameBuffer::Type::kNative) {
+      rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
+          static_cast<ObjCFrameBuffer*>(video_frame_buffer.get()));
+      rtc_frame_buffer = (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
+    } else {
+      rtc_frame_buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:video_frame_buffer->ToI420()];
+    }
     RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
-        initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
-                   rotation:static_cast<RTCVideoRotation>(
-                                nativeVideoFrame.rotation())
-                timeStampNs:nativeVideoFrame.timestamp_us() *
-                            rtc::kNumNanosecsPerMicrosec];
+        initWithBuffer:rtc_frame_buffer
+              rotation:static_cast<RTCVideoRotation>(nativeVideoFrame.rotation())
+           timeStampNs:nativeVideoFrame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
+
     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
                               : CGSizeMake(videoFrame.height, videoFrame.width);
diff --git a/webrtc/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
index 851c4b4..89b7dfc 100644
--- a/webrtc/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
+++ b/webrtc/sdk/objc/Framework/Classes/UI/RTCEAGLVideoView.m
@@ -17,6 +17,7 @@
 #import "RTCNV12TextureCache.h"
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
 // refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -220,7 +221,7 @@
   }
   [self ensureGLContext];
   glClear(GL_COLOR_BUFFER_BIT);
-  if (frame.nativeHandle) {
+  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     if (!_nv12TextureCache) {
       _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
     }
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm b/webrtc/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm
new file mode 100644
index 0000000..a45c212
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCCVPixelBuffer.mm
@@ -0,0 +1,188 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoFrameBuffer.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+
+@implementation RTCCVPixelBuffer {
+  int _width;
+  int _height;
+  int _bufferWidth;
+  int _bufferHeight;
+  int _cropWidth;
+  int _cropHeight;
+  int _cropX;
+  int _cropY;
+}
+
+@synthesize pixelBuffer = _pixelBuffer;
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
+  return [self initWithPixelBuffer:pixelBuffer
+                      adaptedWidth:CVPixelBufferGetWidth(pixelBuffer)
+                     adaptedHeight:CVPixelBufferGetHeight(pixelBuffer)
+                         cropWidth:CVPixelBufferGetWidth(pixelBuffer)
+                        cropHeight:CVPixelBufferGetHeight(pixelBuffer)
+                             cropX:0
+                             cropY:0];
+}
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+                       adaptedWidth:(int)adaptedWidth
+                      adaptedHeight:(int)adaptedHeight
+                          cropWidth:(int)cropWidth
+                         cropHeight:(int)cropHeight
+                              cropX:(int)cropX
+                              cropY:(int)cropY {
+  if (self = [super init]) {
+    _width = adaptedWidth;
+    _height = adaptedHeight;
+    _pixelBuffer = pixelBuffer;
+    _bufferWidth = CVPixelBufferGetWidth(_pixelBuffer);
+    _bufferHeight = CVPixelBufferGetHeight(_pixelBuffer);
+    _cropWidth = cropWidth;
+    _cropHeight = cropHeight;
+    // Can only crop at even pixels.
+    _cropX = cropX & ~1;
+    _cropY = cropY & ~1;
+    CVBufferRetain(_pixelBuffer);
+  }
+
+  return self;
+}
+
+- (void)dealloc {
+  CVBufferRelease(_pixelBuffer);
+}
+
+- (int)width {
+  return _width;
+}
+
+- (int)height {
+  return _height;
+}
+
+- (BOOL)requiresCropping {
+  return _cropWidth != _bufferWidth || _cropHeight != _bufferHeight;
+}
+
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height {
+  return _cropWidth != width || _cropHeight != height;
+}
+
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height {
+  int srcChromaWidth = (_cropWidth + 1) / 2;
+  int srcChromaHeight = (_cropHeight + 1) / 2;
+  int dstChromaWidth = (width + 1) / 2;
+  int dstChromaHeight = (height + 1) / 2;
+
+  return srcChromaWidth * srcChromaHeight * 2 + dstChromaWidth * dstChromaHeight * 2;
+}
+
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t*)tmpBuffer {
+  // Prepare output pointers.
+  RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(outputPixelBuffer),
+                kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
+  CVReturn cvRet = CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
+  if (cvRet != kCVReturnSuccess) {
+    LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+    return NO;
+  }
+  const int dstWidth = CVPixelBufferGetWidth(outputPixelBuffer);
+  const int dstHeight = CVPixelBufferGetHeight(outputPixelBuffer);
+  uint8_t* dstY =
+      reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0));
+  const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
+  uint8_t* dstUV =
+      reinterpret_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1));
+  const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
+
+  // Prepare source pointers.
+  const OSType srcPixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+  RTC_DCHECK(srcPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+             srcPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+  CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+  const uint8_t* srcY =
+      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+  const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+  const uint8_t* srcUV =
+      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+  const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+  // Crop just by modifying pointers.
+  srcY += srcYStride * _cropY + _cropX;
+  srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+  webrtc::NV12Scale(tmpBuffer,
+                    srcY,
+                    srcYStride,
+                    srcUV,
+                    srcUVStride,
+                    _cropWidth,
+                    _cropHeight,
+                    dstY,
+                    dstYStride,
+                    dstUV,
+                    dstUVStride,
+                    dstWidth,
+                    dstHeight);
+
+  CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+  CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
+
+  return YES;
+}
+
+- (id<RTCI420Buffer>)toI420 {
+  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
+  RTC_DCHECK(pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+             pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+
+  CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+  const uint8_t* srcY =
+      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 0));
+  const int srcYStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 0);
+  const uint8_t* srcUV =
+      static_cast<const uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(_pixelBuffer, 1));
+  const int srcUVStride = CVPixelBufferGetBytesPerRowOfPlane(_pixelBuffer, 1);
+
+  // Crop just by modifying pointers.
+  srcY += srcYStride * _cropY + _cropX;
+  srcUV += srcUVStride * (_cropY / 2) + _cropX;
+
+  // TODO(magjed): Use a frame buffer pool.
+  webrtc::NV12ToI420Scaler nv12ToI420Scaler;
+  RTCMutableI420Buffer* i420Buffer =
+      [[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
+  nv12ToI420Scaler.NV12ToI420Scale(srcY,
+                                   srcYStride,
+                                   srcUV,
+                                   srcUVStride,
+                                   _cropWidth,
+                                   _cropHeight,
+                                   i420Buffer.mutableDataY,
+                                   i420Buffer.strideY,
+                                   i420Buffer.mutableDataU,
+                                   i420Buffer.strideU,
+                                   i420Buffer.mutableDataV,
+                                   i420Buffer.strideV,
+                                   i420Buffer.width,
+                                   i420Buffer.height);
+
+  CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+  return i420Buffer;
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h
new file mode 100644
index 0000000..7883c41
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer+Private.h
@@ -0,0 +1,24 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoFrameBuffer.h"
+
+#include "webrtc/api/video/i420_buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCI420Buffer ()
+
+/** Initialize an RTCI420Buffer with its backing I420BufferInterface. */
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm
new file mode 100644
index 0000000..042fcc0
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420Buffer.mm
@@ -0,0 +1,108 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCVideoFrameBuffer.h"
+
+#include "webrtc/api/video/i420_buffer.h"
+
+@implementation RTCI420Buffer {
+ @protected
+  rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
+}
+
+- (instancetype)initWithWidth:(int)width height:(int)height {
+  if (self = [super init]) {
+    _i420Buffer = webrtc::I420Buffer::Create(width, height);
+  }
+
+  return self;
+}
+
+- (instancetype)initWithWidth:(int)width
+                       height:(int)height
+                      strideY:(int)strideY
+                      strideU:(int)strideU
+                      strideV:(int)strideV {
+  if (self = [super init]) {
+    _i420Buffer = webrtc::I420Buffer::Create(width, height, strideY, strideU, strideV);
+  }
+
+  return self;
+}
+
+- (instancetype)initWithFrameBuffer:(rtc::scoped_refptr<webrtc::I420BufferInterface>)i420Buffer {
+  if (self = [super init]) {
+    _i420Buffer = i420Buffer;
+  }
+
+  return self;
+}
+
+- (int)width {
+  return _i420Buffer->width();
+}
+
+- (int)height {
+  return _i420Buffer->height();
+}
+
+- (int)strideY {
+  return _i420Buffer->StrideY();
+}
+
+- (int)strideU {
+  return _i420Buffer->StrideU();
+}
+
+- (int)strideV {
+  return _i420Buffer->StrideV();
+}
+
+- (int)chromaWidth {
+  return _i420Buffer->ChromaWidth();
+}
+
+- (int)chromaHeight {
+  return _i420Buffer->ChromaHeight();
+}
+
+- (const uint8_t *)dataY {
+  return _i420Buffer->DataY();
+}
+
+- (const uint8_t *)dataU {
+  return _i420Buffer->DataU();
+}
+
+- (const uint8_t *)dataV {
+  return _i420Buffer->DataV();
+}
+
+- (id<RTCI420Buffer>)toI420 {
+  return self;
+}
+
+@end
+
+@implementation RTCMutableI420Buffer
+
+- (uint8_t *)mutableDataY {
+  return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
+}
+
+- (uint8_t *)mutableDataU {
+  return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataU();
+}
+
+- (uint8_t *)mutableDataV {
+  return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataV();
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
index eeae867..b603130 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCI420TextureCache.mm
@@ -9,6 +9,7 @@
  */
 
 #import "RTCI420TextureCache.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #if TARGET_OS_IPHONE
 #import <OpenGLES/ES3/gl.h>
@@ -123,31 +124,32 @@
 - (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
 
-  const int chromaWidth = (frame.width + 1) / 2;
-  const int chromaHeight = (frame.height + 1) / 2;
-  if (frame.strideY != frame.width ||
-      frame.strideU != chromaWidth ||
-      frame.strideV != chromaWidth) {
-    _planeBuffer.resize(frame.width * frame.height);
+  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+
+  const int chromaWidth = buffer.chromaWidth;
+  const int chromaHeight = buffer.chromaHeight;
+  if (buffer.strideY != frame.width || buffer.strideU != chromaWidth ||
+      buffer.strideV != chromaWidth) {
+    _planeBuffer.resize(buffer.width * buffer.height);
   }
 
-  [self uploadPlane:frame.dataY
+  [self uploadPlane:buffer.dataY
             texture:self.yTexture
-              width:frame.width
-             height:frame.height
-             stride:frame.strideY];
+              width:buffer.width
+             height:buffer.height
+             stride:buffer.strideY];
 
-  [self uploadPlane:frame.dataU
+  [self uploadPlane:buffer.dataU
             texture:self.uTexture
               width:chromaWidth
              height:chromaHeight
-             stride:frame.strideU];
+             stride:buffer.strideU];
 
-  [self uploadPlane:frame.dataV
+  [self uploadPlane:buffer.dataV
             texture:self.vTexture
               width:chromaWidth
              height:chromaHeight
-             stride:frame.strideV];
+             stride:buffer.strideV];
 }
 
 @end
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m b/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
index e259cee..20a6082 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
+++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCNV12TextureCache.m
@@ -11,6 +11,7 @@
 #import "RTCNV12TextureCache.h"
 
 #import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 @implementation RTCNV12TextureCache {
   CVOpenGLESTextureCacheRef _textureCache;
@@ -73,8 +74,10 @@
 }
 
 - (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
-  CVPixelBufferRef pixelBuffer = frame.nativeHandle;
-  NSParameterAssert(pixelBuffer);
+  NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
+           @"frame must be CVPixelBuffer backed");
+  RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+  CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
   return [self loadTexture:&_yTextureRef
                pixelBuffer:pixelBuffer
                 planeIndex:0
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm
index a060b51..c2e2056 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm
@@ -15,6 +15,7 @@
 #import "RTCAVFoundationVideoCapturerInternal.h"
 #import "RTCDispatcher+Private.h"
 #import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #include "avfoundationformatmapper.h"
 
@@ -23,7 +24,7 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 
 namespace webrtc {
 
@@ -150,12 +151,15 @@
     return;
   }
 
+  RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer
+                                                                      adaptedWidth:adapted_width
+                                                                     adaptedHeight:adapted_height
+                                                                         cropWidth:crop_width
+                                                                        cropHeight:crop_height
+                                                                             cropX:crop_x
+                                                                             cropY:crop_y];
   rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<CoreVideoFrameBuffer>(
-          image_buffer,
-          adapted_width, adapted_height,
-          crop_width, crop_height,
-          crop_x, crop_y);
+      new rtc::RefCountedObject<ObjCFrameBuffer>(rtcPixelBuffer);
 
   // Applying rotation is only supported for legacy reasons and performance is
   // not critical here.
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h b/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h
new file mode 100644
index 0000000..71099e4
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
+#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
+
+#import <CoreVideo/CoreVideo.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+@protocol RTCVideoFrameBuffer;
+
+namespace webrtc {
+
+class ObjCFrameBuffer : public VideoFrameBuffer {
+ public:
+  explicit ObjCFrameBuffer(id<RTCVideoFrameBuffer>);
+  ~ObjCFrameBuffer() override;
+
+  Type type() const override;
+
+  int width() const override;
+  int height() const override;
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+  id<RTCVideoFrameBuffer> wrapped_frame_buffer() const;
+
+ private:
+  id<RTCVideoFrameBuffer> frame_buffer_;
+  int width_;
+  int height_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJC_FRAME_BUFFER_H_
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.mm b/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.mm
new file mode 100644
index 0000000..74e9c13
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.mm
@@ -0,0 +1,78 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
+
+#import "WebRTC/RTCVideoFrameBuffer.h"
+
+namespace webrtc {
+
+namespace {
+
+/** Wrapper of an id<RTCI420Buffer> that conforms to webrtc::I420BufferInterface. */
+class ObjCI420FrameBuffer : public I420BufferInterface {
+ public:
+  explicit ObjCI420FrameBuffer(id<RTCI420Buffer> frame_buffer)
+      : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+  ~ObjCI420FrameBuffer() override{};
+
+  int width() const override { return width_; }
+
+  int height() const override { return height_; }
+
+  const uint8_t* DataY() const override { return frame_buffer_.dataY; }
+
+  const uint8_t* DataU() const override { return frame_buffer_.dataU; }
+
+  const uint8_t* DataV() const override { return frame_buffer_.dataV; }
+
+  int StrideY() const override { return frame_buffer_.strideY; }
+
+  int StrideU() const override { return frame_buffer_.strideU; }
+
+  int StrideV() const override { return frame_buffer_.strideV; }
+
+ private:
+  id<RTCI420Buffer> frame_buffer_;
+  int width_;
+  int height_;
+};
+
+}  // namespace
+
+ObjCFrameBuffer::ObjCFrameBuffer(id<RTCVideoFrameBuffer> frame_buffer)
+    : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
+
+ObjCFrameBuffer::~ObjCFrameBuffer() {}
+
+VideoFrameBuffer::Type ObjCFrameBuffer::type() const {
+  return Type::kNative;
+}
+
+int ObjCFrameBuffer::width() const {
+  return width_;
+}
+
+int ObjCFrameBuffer::height() const {
+  return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
+  rtc::scoped_refptr<I420BufferInterface> buffer =
+      new rtc::RefCountedObject<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
+
+  return buffer;
+}
+
+id<RTCVideoFrameBuffer> ObjCFrameBuffer::wrapped_frame_buffer() const {
+  return frame_buffer_;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm b/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm
index 387bb1b..4200cbc 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm
+++ b/webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.mm
@@ -10,10 +10,11 @@
 
 #include "webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
 
-#import "RTCVideoFrame+Private.h"
+#import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
 
 #include "webrtc/api/video/i420_buffer.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 
 namespace webrtc {
 
@@ -43,18 +44,24 @@
   rtc::scoped_refptr<VideoFrameBuffer> buffer;
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
-    buffer = frame.videoBuffer;
-  } else if (frame.nativeHandle) {
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+  } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
     // Adapted CVPixelBuffer frame.
-    buffer = new rtc::RefCountedObject<CoreVideoFrameBuffer>(
-        static_cast<CVPixelBufferRef>(frame.nativeHandle), adapted_width, adapted_height,
-        crop_width, crop_height, crop_x, crop_y);
+    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
+        initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
+               adaptedWidth:adapted_width
+              adaptedHeight:adapted_height
+                  cropWidth:crop_width
+                 cropHeight:crop_height
+                      cropX:crop_x
+                      cropY:crop_y]);
   } else {
     // Adapted I420 frame.
     // TODO(magjed): Optimize this I420 path.
     rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
-    i420_buffer->CropAndScaleFrom(
-        *frame.videoBuffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+    i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
     buffer = i420_buffer;
   }
 
diff --git a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/decoder.mm b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/decoder.mm
index 1698477..c039442 100644
--- a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/decoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/decoder.mm
@@ -18,9 +18,11 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/include/video_frame.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
 
+#import "WebRTC/RTCVideoFrameBuffer.h"
+
 #if defined(WEBRTC_IOS)
 #import "Common/RTCUIApplicationStatusObserver.h"
 #endif
@@ -64,8 +66,8 @@
     return;
   }
   // TODO(tkchin): Handle CVO properly.
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<CoreVideoFrameBuffer>(image_buffer);
+  rtc::scoped_refptr<VideoFrameBuffer> buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(
+      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer]);
   VideoFrame decoded_frame(buffer, decode_params->timestamp,
                            CMTimeGetSeconds(timestamp) * kMsPerSec,
                            kVideoRotation_0);
diff --git a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
index 1d37eba..2bd485f 100644
--- a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
@@ -19,11 +19,12 @@
 #import "Common/RTCUIApplicationStatusObserver.h"
 #import "WebRTC/UIDevice+RTCDevice.h"
 #endif
+#import "WebRTC/RTCVideoFrameBuffer.h"
 #include "libyuv/convert_from.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/common_video/h264/profile_level_id.h"
-#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
+#include "webrtc/sdk/objc/Framework/Classes/Video/objc_frame_buffer.h"
 #include "webrtc/sdk/objc/Framework/Classes/VideoToolbox/nalu_rewriter.h"
 #include "webrtc/system_wrappers/include/clock.h"
 
@@ -411,29 +412,49 @@
   }
 #endif
 
-  CVPixelBufferRef pixel_buffer;
+  CVPixelBufferRef pixel_buffer = nullptr;
   if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative) {
-    rtc::scoped_refptr<CoreVideoFrameBuffer> core_video_frame_buffer(
-        static_cast<CoreVideoFrameBuffer*>(frame.video_frame_buffer().get()));
-    if (!core_video_frame_buffer->RequiresCropping()) {
-      pixel_buffer = core_video_frame_buffer->pixel_buffer();
-      // This pixel buffer might have a higher resolution than what the
-      // compression session is configured to. The compression session can
-      // handle that and will output encoded frames in the configured
-      // resolution regardless of the input pixel buffer resolution.
-      CVBufferRetain(pixel_buffer);
-    } else {
-      // Cropping required, we need to crop and scale to a new pixel buffer.
-      pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
-      if (!pixel_buffer) {
-        return WEBRTC_VIDEO_CODEC_ERROR;
-      }
-      if (!core_video_frame_buffer->CropAndScaleTo(&nv12_scale_buffer_,
-                                                   pixel_buffer)) {
-        return WEBRTC_VIDEO_CODEC_ERROR;
+    // Native frame.
+    rtc::scoped_refptr<ObjCFrameBuffer> objc_frame_buffer(
+        static_cast<ObjCFrameBuffer*>(frame.video_frame_buffer().get()));
+    id<RTCVideoFrameBuffer> wrapped_frame_buffer =
+        (id<RTCVideoFrameBuffer>)objc_frame_buffer->wrapped_frame_buffer();
+
+    if ([wrapped_frame_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+      RTCCVPixelBuffer* rtc_pixel_buffer = (RTCCVPixelBuffer*)wrapped_frame_buffer;
+      if (![rtc_pixel_buffer requiresCropping]) {
+        // This pixel buffer might have a higher resolution than what the
+        // compression session is configured to. The compression session can
+        // handle that and will output encoded frames in the configured
+        // resolution regardless of the input pixel buffer resolution.
+        pixel_buffer = rtc_pixel_buffer.pixelBuffer;
+        CVBufferRetain(pixel_buffer);
+      } else {
+        // Cropping required, we need to crop and scale to a new pixel buffer.
+        pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
+        if (!pixel_buffer) {
+          return WEBRTC_VIDEO_CODEC_ERROR;
+        }
+        int dst_width = CVPixelBufferGetWidth(pixel_buffer);
+        int dst_height = CVPixelBufferGetHeight(pixel_buffer);
+        if ([rtc_pixel_buffer requiresScalingToWidth:dst_width height:dst_height]) {
+          int size =
+              [rtc_pixel_buffer bufferSizeForCroppingAndScalingToWidth:dst_width height:dst_height];
+          nv12_scale_buffer_.resize(size);
+        } else {
+          nv12_scale_buffer_.clear();
+        }
+        nv12_scale_buffer_.shrink_to_fit();
+        if (![rtc_pixel_buffer cropAndScaleTo:pixel_buffer
+                               withTempBuffer:nv12_scale_buffer_.data()]) {
+          return WEBRTC_VIDEO_CODEC_ERROR;
+        }
       }
     }
-  } else {
+  }
+
+  if (!pixel_buffer) {
+    // We did not have a native frame, or the ObjCFrameBuffer wrapped a non-CVPixelBuffer buffer.
     pixel_buffer = internal::CreatePixelBuffer(pixel_buffer_pool);
     if (!pixel_buffer) {
       return WEBRTC_VIDEO_CODEC_ERROR;
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
index ddf8a6b..78eee5f 100644
--- a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
@@ -22,6 +22,8 @@
   RTCVideoRotation_270 = 270,
 };
 
+@protocol RTCVideoFrameBuffer;
+
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_EXPORT
 @interface RTCVideoFrame : NSObject
@@ -36,27 +38,35 @@
  *  is null. It is always possible to get such a frame by calling
  *  newI420VideoFrame.
  */
-@property(nonatomic, readonly, nullable) const uint8_t *dataY;
-@property(nonatomic, readonly, nullable) const uint8_t *dataU;
-@property(nonatomic, readonly, nullable) const uint8_t *dataV;
-@property(nonatomic, readonly) int strideY;
-@property(nonatomic, readonly) int strideU;
-@property(nonatomic, readonly) int strideV;
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataY DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataU DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly, nullable)
+    const uint8_t *dataV DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideY DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideU DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
+@property(nonatomic, readonly) int strideV DEPRECATED_MSG_ATTRIBUTE("use [buffer toI420]");
 
 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStampNs;
 
 /** The native handle should be a pixel buffer on iOS. */
-@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+@property(nonatomic, readonly)
+    CVPixelBufferRef nativeHandle DEPRECATED_MSG_ATTRIBUTE("use buffer instead");
+
+@property(nonatomic, readonly) id<RTCVideoFrameBuffer> buffer;
 
 - (instancetype)init NS_UNAVAILABLE;
 - (instancetype)new NS_UNAVAILABLE;
 
 /** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp.
+ *  Deprecated - initialize with an RTCCVPixelBuffer instead.
  */
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                            rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs;
+                        timeStampNs:(int64_t)timeStampNs
+    DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
 
 /** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and
  *  scaling. Cropping will be applied first on the pixel buffer, followed by
@@ -70,7 +80,14 @@
                               cropX:(int)cropX
                               cropY:(int)cropY
                            rotation:(RTCVideoRotation)rotation
-                        timeStampNs:(int64_t)timeStampNs;
+                        timeStampNs:(int64_t)timeStampNs
+    DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead");
+
+/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
+ */
+- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)frameBuffer
+                      rotation:(RTCVideoRotation)rotation
+                   timeStampNs:(int64_t)timeStampNs;
 
 /** Return a frame that is guaranteed to be I420, i.e. it is possible to access
  *  the YUV data on it.
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h
new file mode 100644
index 0000000..59986b8
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrameBuffer.h
@@ -0,0 +1,99 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import <WebRTC/RTCMacros.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@protocol RTCI420Buffer;
+
+// RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer.
+RTC_EXPORT
+@protocol RTCVideoFrameBuffer <NSObject>
+
+@property(nonatomic, readonly) int width;
+@property(nonatomic, readonly) int height;
+
+- (id<RTCI420Buffer>)toI420;
+
+@end
+
+/** Protocol for RTCVideoFrameBuffers containing YUV planar data. */
+@protocol RTCYUVPlanarBuffer <RTCVideoFrameBuffer>
+
+@property(nonatomic, readonly) int chromaWidth;
+@property(nonatomic, readonly) int chromaHeight;
+@property(nonatomic, readonly) const uint8_t *dataY;
+@property(nonatomic, readonly) const uint8_t *dataU;
+@property(nonatomic, readonly) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;
+
+- (instancetype)initWithWidth:(int)width height:(int)height;
+- (instancetype)initWithWidth:(int)width
+                       height:(int)height
+                      strideY:(int)strideY
+                      strideU:(int)strideU
+                      strideV:(int)strideV;
+
+@end
+
+/** Extension of the YUV planar data buffer with mutable data access */
+@protocol RTCMutableYUVPlanarBuffer <RTCYUVPlanarBuffer>
+
+@property(nonatomic, readonly) uint8_t *mutableDataY;
+@property(nonatomic, readonly) uint8_t *mutableDataU;
+@property(nonatomic, readonly) uint8_t *mutableDataV;
+
+@end
+
+/** Protocol for RTCYUVPlanarBuffers containing I420 data */
+@protocol RTCI420Buffer <RTCYUVPlanarBuffer>
+@end
+
+/** Extension of the I420 buffer with mutable data access */
+@protocol RTCMutableI420Buffer <RTCI420Buffer, RTCMutableYUVPlanarBuffer>
+@end
+
+/** RTCVideoFrameBuffer containing a CVPixelBufferRef */
+@interface RTCCVPixelBuffer : NSObject <RTCVideoFrameBuffer>
+
+@property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
+
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer;
+- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
+                       adaptedWidth:(int)adaptedWidth
+                      adaptedHeight:(int)adaptedHeight
+                          cropWidth:(int)cropWidth
+                         cropHeight:(int)cropHeight
+                              cropX:(int)cropX
+                              cropY:(int)cropY;
+
+- (BOOL)requiresCropping;
+- (BOOL)requiresScalingToWidth:(int)width height:(int)height;
+- (int)bufferSizeForCroppingAndScalingToWidth:(int)width height:(int)height;
+/** The minimum size of the |tmpBuffer| must be the number of bytes returned from the
+ * bufferSizeForCroppingAndScalingToWidth:height: method.
+ */
+- (BOOL)cropAndScaleTo:(CVPixelBufferRef)outputPixelBuffer withTempBuffer:(uint8_t *)tmpBuffer;
+
+@end
+
+/** Concrete class implementing the RTCI420Buffer protocol of the same name. */
+@interface RTCI420Buffer : NSObject <RTCI420Buffer>
+@end
+
+/** Mutable version of RTCI420Buffer */
+@interface RTCMutableI420Buffer : RTCI420Buffer <RTCMutableI420Buffer>
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/UnitTests/RTCMTLVideoViewTests.mm b/webrtc/sdk/objc/Framework/UnitTests/RTCMTLVideoViewTests.mm
index 99004ea..3743326 100644
--- a/webrtc/sdk/objc/Framework/UnitTests/RTCMTLVideoViewTests.mm
+++ b/webrtc/sdk/objc/Framework/UnitTests/RTCMTLVideoViewTests.mm
@@ -15,6 +15,7 @@
 
 #include <Metal/RTCMTLNV12Renderer.h>
 #include <WebRTC/RTCMTLVideoView.h>
+#include <WebRTC/RTCVideoFrameBuffer.h>
 
 // Extension of RTCMTLVideoView for testing purposes.
 @interface RTCMTLVideoView (Testing)
@@ -59,12 +60,14 @@
   self.frameMock = nil;
 }
 
-- (id)frameMockWithNativeHandle:(BOOL)hasNativeHandle {
+- (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
   id frameMock = OCMClassMock([RTCVideoFrame class]);
-  if (hasNativeHandle) {
-    OCMStub([frameMock nativeHandle]).andReturn((CVPixelBufferRef)[OCMArg anyPointer]);
+  if (hasCVPixelBuffer) {
+    OCMStub([frameMock buffer])
+        .andReturn(
+            [[RTCCVPixelBuffer alloc] initWithPixelBuffer:(CVPixelBufferRef)[OCMArg anyPointer]]);
   } else {
-    OCMStub([frameMock nativeHandle]).andReturn((CVPixelBufferRef) nullptr);
+    OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]);
   }
   return frameMock;
 }
@@ -99,7 +102,7 @@
   RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
   self.frameMock = OCMClassMock([RTCVideoFrame class]);
 
-  [[self.frameMock reject] nativeHandle];
+  [[self.frameMock reject] buffer];
   [[self.classMock reject] createNV12Renderer];
   [[self.classMock reject] createI420Renderer];
 
@@ -116,7 +119,7 @@
   // given
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
   self.rendererI420Mock = [self rendererMockWithSuccessfulSetup:YES];
-  self.frameMock = [self frameMockWithNativeHandle:NO];
+  self.frameMock = [self frameMockWithCVPixelBuffer:NO];
 
   OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
   OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
@@ -137,7 +140,7 @@
   // given
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
   self.rendererNV12Mock = [self rendererMockWithSuccessfulSetup:YES];
-  self.frameMock = [self frameMockWithNativeHandle:YES];
+  self.frameMock = [self frameMockWithCVPixelBuffer:YES];
 
   OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);