Move MutableDataY{,U,V} methods to I420Buffer only.

The methods are deleted from the VideoFrameBuffer base class.
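
A minimal sketch of the resulting usage pattern (illustrative; |width| and
|height| are placeholders): pixel data is written through the I420Buffer
reference, and the buffer is then handed to a VideoFrame:

  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(width, height);
  memset(buffer->MutableDataY(), 0, buffer->height() * buffer->StrideY());
  webrtc::VideoFrame frame(buffer, 0 /* timestamp */,
                           0 /* render_time_ms */, webrtc::kVideoRotation_0);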

BUG=webrtc:5921

Review-Url: https://codereview.webrtc.org/2278883002
Cr-Commit-Position: refs/heads/master@{#14317}
diff --git a/webrtc/common_video/corevideo_frame_buffer.cc b/webrtc/common_video/corevideo_frame_buffer.cc
index a58ddc7..3245bf5 100644
--- a/webrtc/common_video/corevideo_frame_buffer.cc
+++ b/webrtc/common_video/corevideo_frame_buffer.cc
@@ -35,7 +35,7 @@
   size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
   size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
   // TODO(tkchin): Use a frame buffer pool.
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
       new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
   CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
   const uint8_t* src_y = static_cast<const uint8_t*>(
diff --git a/webrtc/common_video/i420_buffer_pool_unittest.cc b/webrtc/common_video/i420_buffer_pool_unittest.cc
index 3e795db..3307539 100644
--- a/webrtc/common_video/i420_buffer_pool_unittest.cc
+++ b/webrtc/common_video/i420_buffer_pool_unittest.cc
@@ -52,7 +52,7 @@
 }
 
 TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
-  rtc::scoped_refptr<VideoFrameBuffer> buffer;
+  rtc::scoped_refptr<I420Buffer> buffer;
   {
     I420BufferPool pool;
     buffer = pool.CreateBuffer(16, 16);
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index 406dbd3..f9d46ef 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -162,16 +162,14 @@
   EXPECT_EQ(kRotation, small_frame.rotation());
 
   // Frame of larger dimensions.
-  small_frame.CreateEmptyFrame(width, height,
-                               stride_y, stride_u, stride_v);
-  memset(small_frame.video_frame_buffer()->MutableDataY(), 1,
-         small_frame.allocated_size(kYPlane));
-  memset(small_frame.video_frame_buffer()->MutableDataU(), 2,
-         small_frame.allocated_size(kUPlane));
-  memset(small_frame.video_frame_buffer()->MutableDataV(), 3,
-         small_frame.allocated_size(kVPlane));
-  big_frame.CopyFrame(small_frame);
-  EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
+  rtc::scoped_refptr<I420Buffer> buffer =
+      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
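+  // Fill each plane with a distinct value so plane mix-ups are detectable.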
+  memset(buffer->MutableDataY(), 1, height * stride_y);
+  memset(buffer->MutableDataU(), 2, ((height + 1) / 2) * stride_u);
+  memset(buffer->MutableDataV(), 3, ((height + 1) / 2) * stride_v);
+  VideoFrame other_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
+  big_frame.CopyFrame(other_frame);
+  EXPECT_TRUE(test::FramesEqual(other_frame, big_frame));
 }
 
 TEST(TestVideoFrame, ShallowCopy) {
diff --git a/webrtc/common_video/include/video_frame_buffer.h b/webrtc/common_video/include/video_frame_buffer.h
index 6743970..ae7855f 100644
--- a/webrtc/common_video/include/video_frame_buffer.h
+++ b/webrtc/common_video/include/video_frame_buffer.h
@@ -45,12 +45,6 @@
   virtual const uint8_t* DataU() const = 0;
   virtual const uint8_t* DataV() const = 0;
 
-  // TODO(nisse): Move MutableData methods to the I420Buffer subclass.
-  // Non-const data access.
-  virtual uint8_t* MutableDataY();
-  virtual uint8_t* MutableDataU();
-  virtual uint8_t* MutableDataV();
-
   // Returns the number of bytes between successive rows for a given plane.
   virtual int StrideY() const = 0;
   virtual int StrideU() const = 0;
@@ -98,9 +92,9 @@
   const uint8_t* DataU() const override;
   const uint8_t* DataV() const override;
 
-  uint8_t* MutableDataY() override;
-  uint8_t* MutableDataU() override;
-  uint8_t* MutableDataV() override;
+  uint8_t* MutableDataY();
+  uint8_t* MutableDataU();
+  uint8_t* MutableDataV();
   int StrideY() const override;
   int StrideU() const override;
   int StrideV() const override;
diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
index 699a626..f8cd470 100644
--- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h
+++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
@@ -69,6 +69,7 @@
 //                    already open for writing.
 // Return value: 0 if OK, < 0 otherwise.
 int PrintVideoFrame(const VideoFrame& frame, FILE* file);
+int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file);
 
 // Extract buffer from VideoFrame or VideoFrameBuffer (consecutive
 // planes, no stride)
@@ -92,11 +93,13 @@
 //   - sample_size      : Required only for the parsing of MJPG (set to 0 else).
 //   - rotate           : Rotation mode of output image.
 // Output:
-//   - dst_frame        : Reference to a destination frame.
+//   - dst_buffer       : Reference to a destination frame buffer.
 // Return value: 0 if OK, < 0 otherwise.
 
-// TODO(nisse): Deprecated, see
-// https://bugs.chromium.org/p/webrtc/issues/detail?id=5921.
+// TODO(nisse): Delete this wrapper, and let users call libyuv directly. Most
+// calls pass |src_video_type| == kI420, and should use libyuv::I420Copy. The
+// only exception at the time of this writing is
+// VideoCaptureImpl::IncomingFrame, which still needs libyuv::ConvertToI420.
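+// A sketch of the suggested I420Copy call (illustrative; assumes |src| is a
+// const VideoFrameBuffer& and |dst| an I420Buffer* of the same size):
+//   libyuv::I420Copy(src.DataY(), src.StrideY(),
+//                    src.DataU(), src.StrideU(),
+//                    src.DataV(), src.StrideV(),
+//                    dst->MutableDataY(), dst->StrideY(),
+//                    dst->MutableDataU(), dst->StrideU(),
+//                    dst->MutableDataV(), dst->StrideV(),
+//                    src.width(), src.height());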
 int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
                   int crop_x,
@@ -105,7 +108,7 @@
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  VideoFrame* dst_frame);
+                  I420Buffer* dst_buffer);
 
 // Convert From I420
 // Input:
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index ab36559..c6ca212 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -95,21 +95,20 @@
 
   double psnr = 0.0;
 
-  VideoFrame res_i420_frame;
-  res_i420_frame.CreateEmptyFrame(width_, height_, width_,
-                                               (width_ + 1) / 2,
-                                               (width_ + 1) / 2);
+  rtc::scoped_refptr<I420Buffer> res_i420_buffer = I420Buffer::Create(
+      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
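+  // Chroma strides are half the luma width, rounded up.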
+
   printf("\nConvert #%d I420 <-> I420 \n", j);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
-                               out_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
+                             height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
 
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
   EXPECT_EQ(48.0, psnr);
   j++;
 
@@ -119,17 +118,18 @@
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-  res_i420_frame.CreateEmptyFrame(width_, height_, stride_y,
-                                  stride_uv, stride_uv);
+  res_i420_buffer =
+      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
+                             height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
 
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
 
   // Optimization speed-quality trade-off => 45 dB only (platform dependent).
   EXPECT_GT(ceil(psnr), 44);
@@ -137,44 +137,47 @@
 
   printf("\nConvert #%d I420 <-> UYVY\n", j);
   std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_,  kUYVY, 0, out_uyvy_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+                             height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
   EXPECT_EQ(48.0, psnr);
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
   j++;
 
   printf("\nConvert #%d I420 <-> YUY2\n", j);
   std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_,  kYUY2, 0, out_yuy2_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
+                             height_, 0,
+                             kVideoRotation_0, res_i420_buffer.get()));
 
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
 
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
   EXPECT_EQ(48.0, psnr);
+
   printf("\nConvert #%d I420 <-> RGB565\n", j);
   std::unique_ptr<uint8_t[]> out_rgb565_buffer(
       new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
-                               out_rgb565_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
-
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+                             height_, 0,
+                             kVideoRotation_0, res_i420_buffer.get()));
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
   j++;
 
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
   // TODO(leozwang) Investigate what the right PSNR should be for I420ToRGB565.
   // Another example is I420ToRGB24, where the PSNR is 44.
   // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@@ -183,18 +186,20 @@
   printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   std::unique_ptr<uint8_t[]> out_argb8888_buffer(
       new uint8_t[width_ * height_ * 4]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
-                               out_argb8888_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
+                             height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
 
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
 
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
-  // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  // TODO(leozwang) Investigate what the right PSNR should be for
+  // I420ToARGB8888.
   EXPECT_GT(ceil(psnr), 42);
 
   ASSERT_EQ(0, fclose(output_file));
@@ -209,49 +214,48 @@
 
   double psnr = 0.0;
 
-  VideoFrame res_i420_frame;
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-  res_i420_frame.CreateEmptyFrame(width_, height_,
-                                  stride_y, stride_uv, stride_uv);
+
+  rtc::scoped_refptr<I420Buffer> res_i420_buffer =
+      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
                                out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0, &res_i420_frame));
+                             height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
 
-  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
+  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
   EXPECT_EQ(48.0, psnr);
 }
 
 
 TEST_F(TestLibYuv, RotateTest) {
-  // Use ConvertToI420 for multiple roatations - see that nothing breaks, all
+  // Use ConvertToI420 for multiple rotations - see that nothing breaks, all
   // memory is properly allocated, and the end result equals the starting point.
-  VideoFrame rotated_res_i420_frame;
   int rotated_width = height_;
   int rotated_height = width_;
   int stride_y;
   int stride_uv;
   Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
-  rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
-                                          rotated_height,
-                                          stride_y,
-                                          stride_uv,
-                                          stride_uv);
+  rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
+      rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_90, &rotated_res_i420_frame));
+                             0, kVideoRotation_90,
+                             rotated_res_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_270, &rotated_res_i420_frame));
-  rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
-                                          width_, (width_ + 1) / 2,
-                                          (width_ + 1) / 2);
+                             0, kVideoRotation_270,
+                             rotated_res_i420_buffer.get()));
+  rotated_res_i420_buffer = I420Buffer::Create(
+      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_180, &rotated_res_i420_frame));
+                             0, kVideoRotation_180,
+                             rotated_res_i420_buffer.get()));
 }
 
 }  // namespace webrtc
diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc
index d05b644..6a7ba15 100644
--- a/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -103,33 +103,35 @@
 }
 
 // TODO(nisse): Belongs with the test code?
-int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
-  if (file == NULL)
-    return -1;
-  if (frame.IsZeroSize())
-    return -1;
-  int width = frame.video_frame_buffer()->width();
-  int height = frame.video_frame_buffer()->height();
+int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file) {
+  int width = frame.width();
+  int height = frame.height();
   int chroma_width = (width + 1) / 2;
   int chroma_height = (height + 1) / 2;
 
-  if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height,
-                 frame.video_frame_buffer()->StrideY(), file) < 0) {
+  if (PrintPlane(frame.DataY(), width, height,
+                 frame.StrideY(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.video_frame_buffer()->DataU(),
+  if (PrintPlane(frame.DataU(),
                  chroma_width, chroma_height,
-                 frame.video_frame_buffer()->StrideU(), file) < 0) {
+                 frame.StrideU(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.video_frame_buffer()->DataV(),
+  if (PrintPlane(frame.DataV(),
                  chroma_width, chroma_height,
-                 frame.video_frame_buffer()->StrideV(), file) < 0) {
+                 frame.StrideV(), file) < 0) {
     return -1;
   }
   return 0;
 }
 
+int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
+  if (frame.IsZeroSize())
+    return -1;
+  return PrintVideoFrame(*frame.video_frame_buffer(), file);
+}
+
 int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
                   size_t size,
                   uint8_t* buffer) {
@@ -249,23 +251,19 @@
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  VideoFrame* dst_frame) {
-  int dst_width = dst_frame->width();
-  int dst_height = dst_frame->height();
+                  I420Buffer* dst_buffer) {
+  int dst_width = dst_buffer->width();
+  int dst_height = dst_buffer->height();
   // LibYuv expects pre-rotation values for dst.
   // Stride values should correspond to the destination values.
   if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
-    dst_width = dst_frame->height();
-    dst_height = dst_frame->width();
+    std::swap(dst_width, dst_height);
   }
   return libyuv::ConvertToI420(
       src_frame, sample_size,
-      dst_frame->video_frame_buffer()->MutableDataY(),
-      dst_frame->video_frame_buffer()->StrideY(),
-      dst_frame->video_frame_buffer()->MutableDataU(),
-      dst_frame->video_frame_buffer()->StrideU(),
-      dst_frame->video_frame_buffer()->MutableDataV(),
-      dst_frame->video_frame_buffer()->StrideV(),
+      dst_buffer->MutableDataY(), dst_buffer->StrideY(),
+      dst_buffer->MutableDataU(), dst_buffer->StrideU(),
+      dst_buffer->MutableDataV(), dst_buffer->StrideV(),
       crop_x, crop_y,
       src_width, src_height,
       dst_width, dst_height,
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index e00ca27..4b29be9 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -87,10 +87,18 @@
   const int expected_size_y = height * stride_y;
   const int expected_size_u = half_height * stride_u;
   const int expected_size_v = half_height * stride_v;
-  CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
-  memcpy(video_frame_buffer_->MutableDataY(), buffer_y, expected_size_y);
-  memcpy(video_frame_buffer_->MutableDataU(), buffer_u, expected_size_u);
-  memcpy(video_frame_buffer_->MutableDataV(), buffer_v, expected_size_v);
+  // Allocate a new buffer.
+  rtc::scoped_refptr<I420Buffer> buffer =
+      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
+
+  memcpy(buffer->MutableDataY(), buffer_y, expected_size_y);
+  memcpy(buffer->MutableDataU(), buffer_u, expected_size_u);
+  memcpy(buffer->MutableDataV(), buffer_v, expected_size_v);
+
+  video_frame_buffer_ = buffer;
+  timestamp_rtp_ = 0;
+  ntp_time_ms_ = 0;
+  timestamp_us_ = 0;
   rotation_ = rotation;
 }
 
diff --git a/webrtc/common_video/video_frame_buffer.cc b/webrtc/common_video/video_frame_buffer.cc
index 2d7e7f4..dbb2743 100644
--- a/webrtc/common_video/video_frame_buffer.cc
+++ b/webrtc/common_video/video_frame_buffer.cc
@@ -31,19 +31,6 @@
 
 }  // namespace
 
-uint8_t* VideoFrameBuffer::MutableDataY() {
-  RTC_NOTREACHED();
-  return nullptr;
-}
-uint8_t* VideoFrameBuffer::MutableDataU() {
-  RTC_NOTREACHED();
-  return nullptr;
-}
-uint8_t* VideoFrameBuffer::MutableDataV() {
-  RTC_NOTREACHED();
-  return nullptr;
-}
-
 VideoFrameBuffer::~VideoFrameBuffer() {}
 
 I420Buffer::I420Buffer(int width, int height)
diff --git a/webrtc/media/base/videoframe_unittest.h b/webrtc/media/base/videoframe_unittest.h
index 9658f87..42936bb 100644
--- a/webrtc/media/base/videoframe_unittest.h
+++ b/webrtc/media/base/videoframe_unittest.h
@@ -453,7 +453,6 @@
   static bool IsEqual(const cricket::VideoFrame& frame,
                       int width,
                       int height,
-                      int64_t timestamp_us,
                       const uint8_t* y,
                       uint32_t ypitch,
                       const uint8_t* u,
@@ -462,7 +461,6 @@
                       uint32_t vpitch,
                       int max_error) {
     return IsSize(frame, width, height) &&
-           frame.timestamp_us() == timestamp_us &&
            IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
                         frame.video_frame_buffer()->StrideY(), y, ypitch,
                         static_cast<uint32_t>(width),
@@ -480,15 +478,25 @@
   static bool IsEqual(const cricket::VideoFrame& frame1,
                       const cricket::VideoFrame& frame2,
                       int max_error) {
-    return IsEqual(frame1,
+    return frame1.timestamp_us() == frame2.timestamp_us() &&
+           IsEqual(frame1,
                    frame2.width(), frame2.height(),
-                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY(),
                    frame2.video_frame_buffer()->StrideY(),
                    frame2.video_frame_buffer()->DataU(),
                    frame2.video_frame_buffer()->StrideU(),
                    frame2.video_frame_buffer()->DataV(),
-                   frame2.video_frame_buffer()->StrideV(),
+                   frame2.video_frame_buffer()->StrideV(), max_error);
+  }
+
+  static bool IsEqual(
+      const cricket::VideoFrame& frame1,
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+      int max_error) {
+    return IsEqual(frame1, buffer->width(), buffer->height(),
+                   buffer->DataY(), buffer->StrideY(),
+                   buffer->DataU(), buffer->StrideU(),
+                   buffer->DataV(), buffer->StrideV(),
                    max_error);
   }
 
@@ -497,10 +505,10 @@
                               int hcrop, int vcrop, int max_error) {
     return frame1.width() <= frame2.width() &&
            frame1.height() <= frame2.height() &&
+           frame1.timestamp_us() == frame2.timestamp_us() &&
            IsEqual(frame1,
                    frame2.width() - hcrop * 2,
                    frame2.height() - vcrop * 2,
-                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY()
                        + vcrop * frame2.video_frame_buffer()->StrideY()
                        + hcrop,
@@ -539,8 +547,8 @@
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* u = y + kWidth * kHeight;
     const uint8_t* v = u + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
-                        kWidth / 2, v, kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
+                        kWidth / 2, 0));
   }
 
   // Test constructing an image from a YV12 buffer.
@@ -554,8 +562,8 @@
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* v = y + kWidth * kHeight;
     const uint8_t* u = v + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
-                        kWidth / 2, v, kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
+                        kWidth / 2, 0));
   }
 
   // Test constructing an image from a I422 buffer.
@@ -772,7 +780,8 @@
 // Macro to help test different rotations
 #define TEST_MIRROR(FOURCC, BPP)                                               \
   void Construct##FOURCC##Mirror() {                                           \
-    T frame1, frame2, frame3;                                                  \
+    T frame1, frame2;                                                          \
+    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer;                         \
     std::unique_ptr<rtc::MemoryStream> ms(                                     \
         CreateYuvSample(kWidth, kHeight, BPP));                                \
     ASSERT_TRUE(ms.get() != NULL);                                             \
@@ -788,21 +797,18 @@
                             data_size, 0, webrtc::kVideoRotation_0));          \
     int width_rotate = frame1.width();                                         \
     int height_rotate = frame1.height();                                       \
-    frame3.InitToEmptyBuffer(width_rotate, height_rotate);                     \
+    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate);      \
     libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(),                   \
                        frame2.video_frame_buffer()->StrideY(),                 \
                        frame2.video_frame_buffer()->DataU(),                   \
                        frame2.video_frame_buffer()->StrideU(),                 \
                        frame2.video_frame_buffer()->DataV(),                   \
                        frame2.video_frame_buffer()->StrideV(),                 \
-                       frame3.video_frame_buffer()->MutableDataY(),            \
-                       frame3.video_frame_buffer()->StrideY(),                 \
-                       frame3.video_frame_buffer()->MutableDataU(),            \
-                       frame3.video_frame_buffer()->StrideU(),                 \
-                       frame3.video_frame_buffer()->MutableDataV(),            \
-                       frame3.video_frame_buffer()->StrideV(), kWidth,         \
-                       kHeight);                                               \
-    EXPECT_TRUE(IsEqual(frame1, frame3, 0));                                   \
+                       res_buffer->MutableDataY(), res_buffer->StrideY(),      \
+                       res_buffer->MutableDataU(), res_buffer->StrideU(),      \
+                       res_buffer->MutableDataV(), res_buffer->StrideV(),      \
+                       kWidth, kHeight);                                       \
+    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0));                               \
   }
 
   TEST_MIRROR(I420, 420)
@@ -810,7 +816,8 @@
 // Macro to help test different rotations
 #define TEST_ROTATE(FOURCC, BPP, ROTATE)                                       \
   void Construct##FOURCC##Rotate##ROTATE() {                                   \
-    T frame1, frame2, frame3;                                                  \
+    T frame1, frame2;                                                          \
+    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer;                         \
     std::unique_ptr<rtc::MemoryStream> ms(                                     \
         CreateYuvSample(kWidth, kHeight, BPP));                                \
     ASSERT_TRUE(ms.get() != NULL);                                             \
@@ -826,21 +833,18 @@
                             data_size, 0, webrtc::kVideoRotation_0));          \
     int width_rotate = frame1.width();                                         \
     int height_rotate = frame1.height();                                       \
-    frame3.InitToEmptyBuffer(width_rotate, height_rotate);                     \
+    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate);      \
     libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(),                   \
                        frame2.video_frame_buffer()->StrideY(),                 \
                        frame2.video_frame_buffer()->DataU(),                   \
                        frame2.video_frame_buffer()->StrideU(),                 \
                        frame2.video_frame_buffer()->DataV(),                   \
                        frame2.video_frame_buffer()->StrideV(),                 \
-                       frame3.video_frame_buffer()->MutableDataY(),            \
-                       frame3.video_frame_buffer()->StrideY(),                 \
-                       frame3.video_frame_buffer()->MutableDataU(),            \
-                       frame3.video_frame_buffer()->StrideU(),                 \
-                       frame3.video_frame_buffer()->MutableDataV(),            \
-                       frame3.video_frame_buffer()->StrideV(), kWidth,         \
-                       kHeight, libyuv::kRotate##ROTATE);                      \
-    EXPECT_TRUE(IsEqual(frame1, frame3, 0));                                   \
+                       res_buffer->MutableDataY(), res_buffer->StrideY(),      \
+                       res_buffer->MutableDataU(), res_buffer->StrideU(),      \
+                       res_buffer->MutableDataV(), res_buffer->StrideV(),      \
+                       kWidth, kHeight, libyuv::kRotate##ROTATE);              \
+    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0));                               \
   }
 
   // Test constructing an image with rotation.
@@ -944,7 +948,7 @@
     const uint8_t* y = pixel;
     const uint8_t* u = y + 1;
     const uint8_t* v = u + 1;
-    EXPECT_TRUE(IsEqual(frame, 1, 1, 0, y, 1, u, 1, v, 1, 0));
+    EXPECT_TRUE(IsEqual(frame, 1, 1, y, 1, u, 1, v, 1, 0));
   }
 
   // Test 5 pixel edge case image.
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index b9c6604..c81fd88 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -66,17 +66,13 @@
       cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
 }
 
-static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
-                             int width,
-                             int height) {
-  video_frame->CreateEmptyFrame(
-      width, height, width, (width + 1) / 2, (width + 1) / 2);
-  memset(video_frame->video_frame_buffer()->MutableDataY(), 16,
-         video_frame->allocated_size(webrtc::kYPlane));
-  memset(video_frame->video_frame_buffer()->MutableDataU(), 128,
-         video_frame->allocated_size(webrtc::kUPlane));
-  memset(video_frame->video_frame_buffer()->MutableDataV(), 128,
-         video_frame->allocated_size(webrtc::kVPlane));
+static rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
+    int width,
+    int height) {
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      webrtc::I420Buffer::Create(width, height);
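+  // SetToBlack() fills Y with 16 and U/V with 128, matching the memsets it
+  // replaces.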
+  buffer->SetToBlack();
+  return buffer;
 }
 
 void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,
@@ -2204,9 +2200,9 @@
   cricket::FakeVideoRenderer renderer;
   EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));
 
-  webrtc::VideoFrame video_frame;
-  CreateBlackFrame(&video_frame, 4, 4);
-  video_frame.set_timestamp(kInitialTimestamp);
+  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
+                                 kInitialTimestamp, 0,
+                                 webrtc::kVideoRotation_0);
   // Initial NTP time is not available on the first frame, but should still be
   // able to be estimated.
   stream->InjectFrame(video_frame);
diff --git a/webrtc/media/engine/webrtcvideoframe.cc b/webrtc/media/engine/webrtcvideoframe.cc
index dfe012b..2a9bbcc 100644
--- a/webrtc/media/engine/webrtcvideoframe.cc
+++ b/webrtc/media/engine/webrtcvideoframe.cc
@@ -121,7 +121,9 @@
     new_height = dw;
   }
 
-  InitToEmptyBuffer(new_width, new_height);
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      webrtc::I420Buffer::Create(new_width, new_height);
+  video_frame_buffer_ = buffer;
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
 
   int horiz_crop = ((w - dw) / 2) & ~1;
@@ -132,15 +134,10 @@
   int idh = (h < 0) ? -dh : dh;
   int r = libyuv::ConvertToI420(
       sample, sample_size,
-      video_frame_buffer_->MutableDataY(),
-      video_frame_buffer_->StrideY(),
-      video_frame_buffer_->MutableDataU(),
-      video_frame_buffer_->StrideU(),
-      video_frame_buffer_->MutableDataV(),
-      video_frame_buffer_->StrideV(),
-      horiz_crop, vert_crop,
-      w, h,
-      dw, idh,
+      buffer->MutableDataY(), buffer->StrideY(),
+      buffer->MutableDataU(), buffer->StrideU(),
+      buffer->MutableDataV(), buffer->StrideV(),
+      horiz_crop, vert_crop, w, h, dw, idh,
       static_cast<libyuv::RotationMode>(
           apply_rotation ? rotation : webrtc::kVideoRotation_0),
       format);
@@ -154,7 +151,7 @@
 }
 
 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
-  video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
+  video_frame_buffer_ = webrtc::I420Buffer::Create(w, h);
   rotation_ = webrtc::kVideoRotation_0;
 }
 
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 839ab80..10f3a2b 100644
--- a/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -420,15 +420,19 @@
     capability.maxFPS = kTestFramerate;
     capture_callback_.SetExpectedCapability(capability);
 
-    test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
-                                 ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
-    SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
-    memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
-           kTestWidth * kTestHeight);
-    memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
+    rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
+        kTestWidth, kTestHeight,
+        kTestWidth, (kTestWidth + 1) / 2, (kTestWidth + 1) / 2);
+
+    memset(buffer->MutableDataY(), 127, kTestWidth * kTestHeight);
+    memset(buffer->MutableDataU(), 127,
            ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
-    memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
+    memset(buffer->MutableDataV(), 127,
            ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
+    test_frame_.reset(
+        new webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
+
+    SleepMs(1);  // Wait 1ms so that two tests can't have the same timestamp.
 
     capture_module_->RegisterCaptureDataCallback(capture_callback_);
     capture_module_->RegisterCaptureCallback(capture_feedback_);
@@ -443,7 +447,7 @@
   webrtc::VideoCaptureExternal* capture_input_interface_;
   rtc::scoped_refptr<VideoCaptureModule> capture_module_;
   std::unique_ptr<webrtc::ProcessThread> process_module_;
-  webrtc::VideoFrame test_frame_;
+  std::unique_ptr<webrtc::VideoFrame> test_frame_;
   TestVideoCaptureCallback capture_callback_;
   TestVideoCaptureFeedBack capture_feedback_;
 };
@@ -451,13 +455,13 @@
 // Test input of external video frames.
 TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_.width(),
-                                         test_frame_.height());
+                                         test_frame_->width(),
+                                         test_frame_->height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(*test_frame_));
 }
 
 // Test frame rate and no picture alarm.
@@ -472,13 +476,14 @@
   uint64_t startTime = rtc::TimeNanos();
 
   while ((rtc::TimeNanos() - startTime) < testTime) {
-     size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                            test_frame_.width(),
-                                            test_frame_.height());
-     std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-     webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
-     EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
-       length, capture_callback_.capability(), 0));
+    size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+                                           test_frame_->width(),
+                                           test_frame_->height());
+    std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+    EXPECT_EQ(
+        0, capture_input_interface_->IncomingFrame(
+               test_buffer.get(), length, capture_callback_.capability(), 0));
     SleepMs(100);
   }
   EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
@@ -489,10 +494,10 @@
   startTime = rtc::TimeNanos();
   while ((rtc::TimeNanos() - startTime) < testTime) {
     size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                           test_frame_.width(),
-                                           test_frame_.height());
+                                           test_frame_->width(),
+                                           test_frame_->height());
     std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
     EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
     SleepMs(1000 / 30);
@@ -507,10 +512,10 @@
 TEST_F(VideoCaptureExternalTest, Rotation) {
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_.width(),
-                                         test_frame_.height());
+                                         test_frame_->width(),
+                                         test_frame_->height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
     length, capture_callback_.capability(), 0));
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_90));
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index e6b2d55..90ac267 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -275,14 +275,14 @@
         // Setting absolute height (in case it was negative).
         // In Windows, the image starts bottom left, instead of top left.
         // Setting a negative source height, inverts the image (within LibYuv).
-        _captureFrame.CreateEmptyFrame(target_width,
-                                       abs(target_height),
-                                       stride_y,
-                                       stride_uv, stride_uv);
+
+        // TODO(nisse): Use a pool?
+        rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
+            target_width, abs(target_height), stride_y, stride_uv, stride_uv);
         const int conversionResult = ConvertToI420(
             commonVideoType, videoFrame, 0, 0,  // No cropping
             width, height, videoFrameLength,
-            apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
+            apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
         if (conversionResult < 0)
         {
           LOG(LS_ERROR) << "Failed to convert capture frame from type "
@@ -290,15 +290,12 @@
             return -1;
         }
 
-        if (!apply_rotation) {
-          _captureFrame.set_rotation(_rotateFrame);
-        } else {
-          _captureFrame.set_rotation(kVideoRotation_0);
-        }
-        _captureFrame.set_ntp_time_ms(captureTime);
-        _captureFrame.set_render_time_ms(rtc::TimeMillis());
+        VideoFrame captureFrame(
+            buffer, 0, rtc::TimeMillis(),
+            !apply_rotation ? _rotateFrame : kVideoRotation_0);
+        captureFrame.set_ntp_time_ms(captureTime);
 
-        DeliverCapturedFrame(_captureFrame);
+        DeliverCapturedFrame(captureFrame);
     }
     else // Encoded format
     {
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index 7d785c3..e9fee7e 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -137,8 +137,6 @@
     VideoRotation _rotateFrame;  // Set if the frame should be rotated by the
                                  // capture module.
 
-    VideoFrame _captureFrame;
-
     // Indicate whether rotation should be applied before delivered externally.
     bool apply_rotation_;
 };
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index bbf1ee1..3cc08d2 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -121,52 +121,47 @@
     return ret;
   }
 
-  // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
-  // of a video frame and will be set up to reference |video_frame|'s buffers.
-
-  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
-  // Refactor to do not use a VideoFrame object at all.
+  // The video frame is stored in |frame_buffer|. |av_frame| is FFmpeg's version
+  // of a video frame and will be set up to reference |frame_buffer|'s data.
 
   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
-  VideoFrame* video_frame = new VideoFrame(
-      decoder->pool_.CreateBuffer(width, height),
-      0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
+  // TODO(nisse): Delete that feature from the video pool; instead, add
+  // an explicit call to InitializeData here.
+  rtc::scoped_refptr<I420Buffer> frame_buffer =
+      decoder->pool_.CreateBuffer(width, height);
 
+  int y_size = width * height;
+  int uv_size = ((width + 1) / 2) * ((height + 1) / 2);
   // DCHECK that we have a contiguous buffer as is required.
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
-                video_frame->video_frame_buffer()->DataY() +
-                video_frame->allocated_size(kYPlane));
-  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
-                video_frame->video_frame_buffer()->DataU() +
-                video_frame->allocated_size(kUPlane));
-  int total_size = video_frame->allocated_size(kYPlane) +
-                   video_frame->allocated_size(kUPlane) +
-                   video_frame->allocated_size(kVPlane);
+  RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
+  RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
+  int total_size = y_size + 2 * uv_size;
 
   av_frame->format = context->pix_fmt;
   av_frame->reordered_opaque = context->reordered_opaque;
 
   // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataY();
-  av_frame->linesize[kYPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideY();
-  av_frame->data[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataU();
-  av_frame->linesize[kUPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideU();
-  av_frame->data[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->MutableDataV();
-  av_frame->linesize[kVPlaneIndex] =
-      video_frame->video_frame_buffer()->StrideV();
+  av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY();
+  av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY();
+  av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU();
+  av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU();
+  av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV();
+  av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV();
   RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
 
-  av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
-                                      total_size,
-                                      AVFreeBuffer2,
-                                      static_cast<void*>(video_frame),
-                                      0);
+  // Create a VideoFrame object to keep a reference to the buffer.
+  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
+  // Refactor to not use a VideoFrame object at all.
+  av_frame->buf[0] = av_buffer_create(
+      av_frame->data[kYPlaneIndex],
+      total_size,
+      AVFreeBuffer2,
+      static_cast<void*>(new VideoFrame(frame_buffer,
+                                        0 /* timestamp */,
+                                        0 /* render_time_ms */,
+                                        kVideoRotation_0)),
+      0);
   RTC_CHECK(av_frame->buf[0]);
   return 0;
 }
diff --git a/webrtc/modules/video_coding/codecs/i420/i420.cc b/webrtc/modules/video_coding/codecs/i420/i420.cc
index 93204dd..d0c8d0c 100644
--- a/webrtc/modules/video_coding/codecs/i420/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/i420.cc
@@ -137,8 +137,7 @@
 }
 
 I420Decoder::I420Decoder()
-    : _decodedImage(),
-      _width(0),
+    : _width(0),
       _height(0),
       _inited(false),
       _decodeCompleteCallback(NULL) {}
@@ -199,17 +198,19 @@
   }
   // Set decoded image parameters.
   int half_width = (_width + 1) / 2;
-  _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
-                                 half_width);
-  // Converting from buffer to plane representation.
+  rtc::scoped_refptr<webrtc::I420Buffer> frame_buffer =
+      I420Buffer::Create(_width, _height, _width, half_width, half_width);
+
+  // Convert from the raw buffer to an I420Buffer.
   int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
-                          kVideoRotation_0, &_decodedImage);
+                          kVideoRotation_0, frame_buffer.get());
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
   }
-  _decodedImage.set_timestamp(inputImage._timeStamp);
 
-  _decodeCompleteCallback->Decoded(_decodedImage);
+  VideoFrame decoded_image(frame_buffer, inputImage._timeStamp, 0,
+                           webrtc::kVideoRotation_0);
+  _decodeCompleteCallback->Decoded(decoded_image);
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 766e517..1c8037a 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -298,11 +298,10 @@
         return ret;
       }
     } else {
-      VideoFrame dst_frame;
-      // Making sure that destination frame is of sufficient size.
       // Aligning stride values based on width.
-      dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
-                                 (dst_width + 1) / 2, (dst_width + 1) / 2);
+      rtc::scoped_refptr<I420Buffer> dst_buffer =
+          I420Buffer::Create(dst_width, dst_height, dst_width,
+                             (dst_width + 1) / 2, (dst_width + 1) / 2);
       libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
                         input_image.video_frame_buffer()->StrideY(),
                         input_image.video_frame_buffer()->DataU(),
@@ -310,18 +309,16 @@
                         input_image.video_frame_buffer()->DataV(),
                         input_image.video_frame_buffer()->StrideV(),
                         src_width, src_height,
-                        dst_frame.video_frame_buffer()->MutableDataY(),
-                        dst_frame.video_frame_buffer()->StrideY(),
-                        dst_frame.video_frame_buffer()->MutableDataU(),
-                        dst_frame.video_frame_buffer()->StrideU(),
-                        dst_frame.video_frame_buffer()->MutableDataV(),
-                        dst_frame.video_frame_buffer()->StrideV(),
+                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
+                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
+                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
                         dst_width, dst_height,
                         libyuv::kFilterBilinear);
-      dst_frame.set_timestamp(input_image.timestamp());
-      dst_frame.set_render_time_ms(input_image.render_time_ms());
+
       int ret = streaminfos_[stream_idx].encoder->Encode(
-          dst_frame, codec_specific_info, &stream_frame_types);
+          VideoFrame(dst_buffer, input_image.timestamp(),
+                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
+          codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index 47d2322d..d13dbb16 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -535,17 +535,11 @@
       .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
 
   // Send a fake frame and assert the return is software fallback.
-  VideoFrame input_frame;
   int half_width = (kDefaultWidth + 1) / 2;
-  input_frame.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
-                                half_width, half_width);
-  memset(input_frame.video_frame_buffer()->MutableDataY(), 0,
-         input_frame.allocated_size(kYPlane));
-  memset(input_frame.video_frame_buffer()->MutableDataU(), 0,
-         input_frame.allocated_size(kUPlane));
-  memset(input_frame.video_frame_buffer()->MutableDataV(), 0,
-         input_frame.allocated_size(kVPlane));
-
+  rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
+      kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
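+  // InitializeData() zero-fills the buffer, matching the memsets it replaces.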
+  input_buffer->InitializeData();
+  VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
             adapter_->Encode(input_frame, nullptr, &frame_types));
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index e2bd71e..22e8645 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -236,8 +236,8 @@
     }
   }
 
-  // Fills in an VideoFrameBuffer from |plane_colors|.
-  static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+  // Fills in an I420Buffer from |plane_colors|.
+  static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
                           int plane_colors[kNumOfPlanes]) {
     int width = buffer->width();
     int height = buffer->height();
@@ -317,14 +317,11 @@
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
     int half_width = (kDefaultWidth + 1) / 2;
-    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
-                                  half_width, half_width);
-    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
-           input_frame_.allocated_size(kYPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
-           input_frame_.allocated_size(kUPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
-           input_frame_.allocated_size(kVPlane));
+    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
+                                       kDefaultWidth, half_width, half_width);
+    input_buffer_->InitializeData();
+    input_frame_.reset(
+        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
   }
 
   virtual void TearDown() {
@@ -396,33 +393,33 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     frame_types[0] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[1] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[2] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingAllStreams() {
@@ -431,11 +428,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingTwoStreams() {
@@ -444,11 +441,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingTwoStreamsOneMaxedOut() {
@@ -458,11 +455,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingOneStream() {
@@ -471,11 +468,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingOneStreamTwoMaxedOut() {
@@ -486,11 +483,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestSendAllStreams() {
@@ -500,11 +497,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 3);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestDisablingStreams() {
@@ -513,47 +510,47 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 3);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // We should only get the first stream and padding for two.
     encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // We don't have enough bitrate for the thumbnail stream, but we should get
     // it anyway with current configuration.
     encoder_->SetRates(kTargetBitrates[0] - 1, 30);
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 2);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // We should get all three streams.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 3);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void SwitchingToOneStream(int width, int height) {
@@ -571,14 +568,12 @@
     }
     // Setting input image to new resolution.
     int half_width = (settings_.width + 1) / 2;
-    input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
-                                  settings_.width, half_width, half_width);
-    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
-           input_frame_.allocated_size(kYPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
-           input_frame_.allocated_size(kUPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
-           input_frame_.allocated_size(kVPlane));
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
+                                       settings_.width, half_width, half_width);
+    input_buffer_->InitializeData();
+
+    input_frame_.reset(
+        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
 
     // The for loop above did not set the bitrate of the highest layer.
     settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
@@ -603,7 +598,7 @@
         .Times(1)
         .WillRepeatedly(Return(
             EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 
     // Switch back.
     DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
@@ -614,15 +609,12 @@
     ExpectStreams(kVideoFrameKey, 1);
     // Resize |input_frame_| to the new resolution.
     half_width = (settings_.width + 1) / 2;
-    input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
-                                  settings_.width, half_width, half_width);
-    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
-           input_frame_.allocated_size(kYPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
-           input_frame_.allocated_size(kUPlane));
-    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
-           input_frame_.allocated_size(kVPlane));
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
+                                       settings_.width, half_width, half_width);
+    input_buffer_->InitializeData();
+    input_frame_.reset(
+        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
   }
 
   void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
@@ -637,7 +629,7 @@
 
     encoder_->SetRates(kMaxBitrates[2], 30);  // To get all three streams.
 
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     int picture_id = -1;
     int temporal_layer = -1;
     bool layer_sync = false;
@@ -647,22 +639,22 @@
     EXPECT_TRUE(layer_sync);
     int key_frame_picture_id = picture_id;
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(2, temporal_layer);
     EXPECT_TRUE(layer_sync);
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(1, temporal_layer);
     EXPECT_TRUE(layer_sync);
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(2, temporal_layer);
@@ -675,8 +667,8 @@
     // Must match last key frame to trigger.
     codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id;
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
 
@@ -686,8 +678,8 @@
     // Must match last key frame to trigger, test bad id.
     codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id + 17;
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
 
@@ -711,9 +703,9 @@
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    CreateImage(input_buffer_, plane_offset);
 
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     int picture_id = -1;
     int temporal_layer = -1;
     bool layer_sync = false;
@@ -727,27 +719,27 @@
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    CreateImage(input_buffer_, plane_offset);
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    CreateImage(input_buffer_, plane_offset);
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    CreateImage(input_buffer_, plane_offset);
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
 
     CodecSpecificInfo codec_specific;
     codec_specific.codecType = kVideoCodecVP8;
@@ -759,10 +751,10 @@
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    CreateImage(input_buffer_, plane_offset);
 
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
 
     EncodedImage encoded_frame;
     encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
@@ -784,47 +776,47 @@
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
@@ -853,47 +845,47 @@
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
@@ -911,24 +903,27 @@
     // 1. stride > width 2. stride_y != stride_uv/2
     int stride_y = kDefaultWidth + 20;
     int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
-    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
-                                  stride_uv, stride_uv);
+    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
+                                       stride_uv, stride_uv);
+    input_frame_.reset(
+        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
+
     // Set color.
     int plane_offset[kNumOfPlanes];
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    CreateImage(input_buffer_, plane_offset);
 
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
-    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
+    CreateImage(input_buffer_, plane_offset);
+    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
 
     EncodedImage encoded_frame;
     // Only encoding one frame - so will be a key frame.
@@ -968,7 +963,8 @@
   std::unique_ptr<VP8Decoder> decoder_;
   MockDecodedImageCallback decoder_callback_;
   VideoCodec settings_;
-  VideoFrame input_frame_;
+  rtc::scoped_refptr<I420Buffer> input_buffer_;
+  std::unique_ptr<VideoFrame> input_frame_;
 };
 
 }  // namespace testing
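
Note for reviewers: every test in this file now uses the same buffer-then-frame
construction. A minimal sketch of the pattern, using only APIs that appear in
the hunks above (local names are illustrative):

    // Allocate a writable I420Buffer, fill it, then wrap it in a VideoFrame,
    // which exposes pixel data only through the const DataY/U/V accessors.
    rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
        width, height, stride_y, stride_uv, stride_uv);
    buffer->InitializeData();  // Zero planes so DrMemory/Valgrind allow reads.
    std::unique_ptr<webrtc::VideoFrame> frame(new webrtc::VideoFrame(
        buffer, /*timestamp=*/0, /*render_time_ms=*/0, webrtc::kVideoRotation_0));
    EXPECT_EQ(0, encoder->Encode(*frame, NULL, &frame_types));
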
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 12dcb7c..4f3d99b 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -147,13 +147,15 @@
     EXPECT_EQ(stride_y, 176);
     EXPECT_EQ(stride_uv, 96);
 
-    input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
-                                  stride_y, stride_uv, stride_uv);
-    input_frame_.set_timestamp(kTestTimestamp);
+    rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
+        codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv);
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
-                               codec_inst_.width, codec_inst_.height, 0,
-                               kVideoRotation_0, &input_frame_));
+    EXPECT_EQ(
+        0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
+                         codec_inst_.height, 0, kVideoRotation_0,
+                         buffer.get()));
+    input_frame_.reset(
+        new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
   }
 
   void SetUpEncodeDecode() {
@@ -195,7 +197,7 @@
   std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
   std::unique_ptr<uint8_t[]> source_buffer_;
   FILE* source_file_;
-  VideoFrame input_frame_;
+  std::unique_ptr<VideoFrame> input_frame_;
   std::unique_ptr<VideoEncoder> encoder_;
   std::unique_ptr<VideoDecoder> decoder_;
   EncodedImage encoded_frame_;
@@ -237,7 +239,7 @@
 #endif
 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
   SetUpEncodeDecode();
-  encoder_->Encode(input_frame_, NULL, NULL);
+  encoder_->Encode(*input_frame_, NULL, NULL);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // First frame should be a key frame.
   encoded_frame_._frameType = kVideoFrameKey;
@@ -246,7 +248,7 @@
             decoder_->Decode(encoded_frame_, false, NULL));
   EXPECT_GT(WaitForDecodedFrame(), 0u);
   // Compute PSNR on all planes (faster than SSIM).
-  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
   EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
   EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
 }
@@ -258,7 +260,7 @@
 #endif
 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
   SetUpEncodeDecode();
-  encoder_->Encode(input_frame_, NULL, NULL);
+  encoder_->Encode(*input_frame_, NULL, NULL);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // Setting complete to false -> should return an error.
   encoded_frame_._completeFrame = false;
@@ -273,7 +275,7 @@
   encoded_frame_._frameType = kVideoFrameKey;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame_, false, NULL));
-  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
 }
 
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index ef5e8e3..73a7eb6 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -1306,18 +1306,18 @@
   last_frame_width_ = img->d_w;
   last_frame_height_ = img->d_h;
   // Allocate memory for decoded image.
-  VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
-                           timestamp, 0, kVideoRotation_0);
+  rtc::scoped_refptr<I420Buffer> buffer =
+      buffer_pool_.CreateBuffer(img->d_w, img->d_h);
+
   libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
                    img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
                    img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                   decoded_image.video_frame_buffer()->MutableDataY(),
-                   decoded_image.video_frame_buffer()->StrideY(),
-                   decoded_image.video_frame_buffer()->MutableDataU(),
-                   decoded_image.video_frame_buffer()->StrideU(),
-                   decoded_image.video_frame_buffer()->MutableDataV(),
-                   decoded_image.video_frame_buffer()->StrideV(),
+                   buffer->MutableDataY(), buffer->StrideY(),
+                   buffer->MutableDataU(), buffer->StrideU(),
+                   buffer->MutableDataV(), buffer->StrideV(),
                    img->d_w, img->d_h);
+
+  VideoFrame decoded_image(buffer, timestamp, 0, kVideoRotation_0);
   decoded_image.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
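
Note: the VideoFrame construction moves below libyuv::I420Copy() because
MutableDataY/U/V now exist only on I420Buffer, so the writable buffer must be
fully filled before it is handed to the read-only frame. A sketch of the same
ordering as a standalone helper (CopyToFrame() is hypothetical, not part of
this CL):

    // Fill the pooled buffer first, wrap it in a frame second.
    webrtc::VideoFrame CopyToFrame(webrtc::I420BufferPool* pool,
                                   const webrtc::VideoFrameBuffer& src,
                                   uint32_t timestamp) {
      rtc::scoped_refptr<webrtc::I420Buffer> buffer =
          pool->CreateBuffer(src.width(), src.height());
      libyuv::I420Copy(src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
                       src.DataV(), src.StrideV(),
                       buffer->MutableDataY(), buffer->StrideY(),
                       buffer->MutableDataU(), buffer->StrideU(),
                       buffer->MutableDataV(), buffer->StrideV(),
                       src.width(), src.height());
      return webrtc::VideoFrame(buffer, timestamp, 0, webrtc::kVideoRotation_0);
    }
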
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index d7927eb..dcc0619 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -148,7 +148,7 @@
     return -1;
   }
   EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
-  webrtc::VideoFrame input_frame;
+
   size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
   std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
 
@@ -163,14 +163,19 @@
   int64_t starttime = rtc::TimeMillis();
   int frame_cnt = 1;
   int frames_processed = 0;
-  input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
+  rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
+      webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
+
   while (!feof(input_file) &&
          (num_frames == -1 || frames_processed < num_frames)) {
     if (fread(frame_buffer.get(), 1, length, input_file) != length)
       continue;
     if (frame_cnt >= start_frame) {
      webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
-                            height, 0, webrtc::kVideoRotation_0, &input_frame);
+                            height, 0, webrtc::kVideoRotation_0,
+                            i420_buffer.get());
+      webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
+                                     webrtc::kVideoRotation_0);
       encoder->Encode(input_frame, NULL, NULL);
       decoder->Decode(encoder_callback.encoded_image(), false, NULL);
       ++frames_processed;
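
Note: the sequence coder now allocates one I420Buffer outside the read loop,
lets ConvertToI420() write into it via the I420Buffer* overload used elsewhere
in this CL, and wraps it in a fresh VideoFrame per encode. A minimal sketch of
the loop under those assumptions (ReadNextFrame() is a hypothetical stand-in
for the fread() logic above):

    rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
        webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
    while (ReadNextFrame(frame_buffer.get())) {  // Hypothetical read helper.
      webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
                            height, 0, webrtc::kVideoRotation_0,
                            i420_buffer.get());
      webrtc::VideoFrame input_frame(i420_buffer, 0, 0, webrtc::kVideoRotation_0);
      encoder->Encode(input_frame, NULL, NULL);
    }
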
diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc
index 7507e92..a968859 100644
--- a/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -141,8 +141,10 @@
   while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
          frame_length_) {
     // Using ConvertToI420 to add stride to the image.
+    rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
+        I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
     EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
-                               0, kVideoRotation_0, &video_frame_));
+                               0, kVideoRotation_0, input_buffer.get()));
 
     rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
     rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@@ -157,11 +159,9 @@
       p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
       p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
     }
-    denoiser_c.DenoiseFrame(video_frame_.video_frame_buffer(),
-                            p_denoised_c, p_denoised_prev_c,
+    denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c,
                             false);
-    denoiser_sse_neon.DenoiseFrame(video_frame_.video_frame_buffer(),
-                                   p_denoised_sse_neon,
+    denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon,
                                    p_denoised_prev_sse_neon, false);
     // Invert the flag.
     denoised_frame_toggle ^= 1;
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 9e61b51..8c1154a 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -15,6 +15,7 @@
 #include <memory>
 #include <string>
 
+#include "webrtc/base/keep_ref_until_done.h"
 #include "webrtc/base/timeutils.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/test/testsupport/fileutils.h"
@@ -33,24 +34,25 @@
                                      int target_height,
                                      VideoProcessing* vpm,
                                      const VideoFrame* out_frame);
-static void CropFrame(const uint8_t* source_data,
-                      int source_width,
-                      int source_height,
-                      int offset_x,
-                      int offset_y,
-                      int cropped_width,
-                      int cropped_height,
-                      VideoFrame* cropped_frame);
+rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
+    int source_width,
+    int source_height,
+    int offset_x,
+    int offset_y,
+    int cropped_width,
+    int cropped_height);
 // The |source_data| is cropped and scaled to |target_width| x |target_height|,
 // and then scaled back to the expected cropped size. |expected_psnr| is used to
 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
 // verified under the same conditions.
-static void TestSize(const VideoFrame& source_frame,
-                     const VideoFrame& cropped_source_frame,
-                     int target_width,
-                     int target_height,
-                     double expected_psnr,
-                     VideoProcessing* vpm);
+static void TestSize(
+    const VideoFrame& source_frame,
+    const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
+    int target_width,
+    int target_height,
+    double expected_psnr,
+    VideoProcessing* vpm);
 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
                                                    const VideoFrame& processed);
 
@@ -68,15 +70,6 @@
   vp_ = VideoProcessing::Create();
   ASSERT_TRUE(vp_ != NULL);
 
-  video_frame_.CreateEmptyFrame(width_, height_, width_,
-                                half_width_, half_width_);
-  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
-  memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
-         video_frame_.allocated_size(kYPlane));
-  memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
-         video_frame_.allocated_size(kUPlane));
-  memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
-         video_frame_.allocated_size(kVPlane));
   const std::string video_file =
       webrtc::test::ResourcePath("foreman_cif", "yuv");
   source_file_ = fopen(video_file.c_str(), "rb");
@@ -109,11 +102,18 @@
   VideoFrame* out_frame = NULL;
   // Set rescaling => output frame != NULL.
   vp_->SetInputFrameResampleMode(kFastRescaling);
-  PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
-                           out_frame);
+
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
+
+  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
+  buffer->InitializeData();
+  VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
+
+  PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
   // No rescaling=> output frame = NULL.
   vp_->SetInputFrameResampleMode(kNoRescaling);
-  EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
+  EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr);
 }
 
 #if defined(WEBRTC_IOS)
@@ -133,15 +133,15 @@
   vp_->EnableTemporalDecimation(false);
 
   // Reading test frame
-  std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
-  ASSERT_EQ(frame_length_,
-            fread(video_buffer.get(), 1, frame_length_, source_file_));
-  // Using ConvertToI420 to add stride to the image.
-  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_0, &video_frame_));
-  // Cropped source frame that will contain the expected visible region.
-  VideoFrame cropped_source_frame;
-  cropped_source_frame.CopyFrame(video_frame_);
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+      I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
+
+  ASSERT_EQ(static_cast<size_t>(size_y_),
+            fread(buffer->MutableDataY(), 1, size_y_, source_file_));
+  ASSERT_EQ(static_cast<size_t>(size_uv_),
+            fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
+  ASSERT_EQ(static_cast<size_t>(size_uv_),
+            fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
 
   for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
     // Initiate test timer.
@@ -149,48 +149,37 @@
 
     // Init the sourceFrame with a timestamp.
     int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
-    video_frame_.set_render_time_ms(time_start_ms);
-    video_frame_.set_timestamp(time_start_ms * 90);
+    VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
+                           webrtc::kVideoRotation_0);
 
     // Test scaling to different sizes: source is of |width|/|height| = 352/288.
     // Pure scaling:
-    TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
-    TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
+    TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
+    TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
     // No resampling:
-    TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
-    TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
+    TestSize(video_frame, buffer, width_, height_, -1, vp_);
+    TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
 
     // Scaling and cropping. The cropped source frame is the largest center
     // aligned region that can be used from the source while preserving aspect
     // ratio.
-    CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);
-
-    CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);
-
-    CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
-
-    CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
-
-    CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
+             100, 50, 24.0, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
+             400, 256, 31.3, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
+             480, 640, 32.15, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
+             960, 720, 32.2, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
+             1280, 720, 32.15, vp_);
 
     // Upsampling to odd size.
-    CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
+             501, 333, 32.05, vp_);
     // Downsample to odd size.
-    CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
-              &cropped_source_frame);
-    TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
+    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
+             281, 175, 29.3, vp_);
 
     // Stop timer.
     const int64_t runtime =
@@ -229,24 +218,32 @@
   EXPECT_EQ(target_height, (out_frame)->height());
 }
 
-void CropFrame(const uint8_t* source_data,
-               int source_width,
-               int source_height,
-               int offset_x,
-               int offset_y,
-               int cropped_width,
-               int cropped_height,
-               VideoFrame* cropped_frame) {
-  cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
-                                  (cropped_width + 1) / 2,
-                                  (cropped_width + 1) / 2);
-  EXPECT_EQ(0,
-            ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
-                          source_height, 0, kVideoRotation_0, cropped_frame));
+rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
+    int source_width,
+    int source_height,
+    int offset_x,
+    int offset_y,
+    int cropped_width,
+    int cropped_height) {
+  // Force even offsets, since the chroma planes are subsampled by two.
+  offset_x &= ~1;
+  offset_y &= ~1;
+
+  size_t y_start = offset_x + offset_y * source_buffer->StrideY();
+  size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
+  size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideV();
+
+  return rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<WrappedI420Buffer>(
+          cropped_width, cropped_height, source_buffer->DataY() + y_start,
+          source_buffer->StrideY(), source_buffer->DataU() + u_start,
+          source_buffer->StrideU(), source_buffer->DataV() + v_start,
+          source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
 }
 
 void TestSize(const VideoFrame& source_frame,
-              const VideoFrame& cropped_source_frame,
+              const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
               int target_width,
               int target_height,
               double expected_psnr,
@@ -263,12 +260,14 @@
   // Scale |resampled_source_frame| back to the source scale.
   VideoFrame resampled_source_frame;
   resampled_source_frame.CopyFrame(*out_frame);
-  PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
-                           cropped_source_frame.height(), vpm, out_frame);
+  PreprocessFrameAndVerify(resampled_source_frame,
+                           cropped_source_buffer->width(),
+                           cropped_source_buffer->height(), vpm, out_frame);
   WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
 
   // Compute PSNR against the cropped source frame and check expectation.
-  double psnr = I420PSNR(&cropped_source_frame, out_frame);
+  double psnr =
+      I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
   EXPECT_GT(psnr, expected_psnr);
   printf(
       "PSNR: %f. PSNR is between source of size %d %d, and a modified "
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.h b/webrtc/modules/video_processing/test/video_processing_unittest.h
index 3433c6c..6edd72e 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.h
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.h
@@ -33,7 +33,6 @@
   static void TearDownTestCase() { Trace::ReturnTrace(); }
   VideoProcessing* vp_;
   FILE* source_file_;
-  VideoFrame video_frame_;
   const int width_;
   const int half_width_;
   const int height_;
diff --git a/webrtc/video/video_encoder_unittest.cc b/webrtc/video/video_encoder_unittest.cc
index 84ac4fd..eb2b450 100644
--- a/webrtc/video/video_encoder_unittest.cc
+++ b/webrtc/video/video_encoder_unittest.cc
@@ -116,22 +116,18 @@
   CountingFakeEncoder fake_encoder_;
   VideoEncoderSoftwareFallbackWrapper fallback_wrapper_;
   VideoCodec codec_ = {};
-  VideoFrame frame_;
+  std::unique_ptr<VideoFrame> frame_;
 };
 
 void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
-  frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, (kWidth + 1) / 2,
-                          (kWidth + 1) / 2);
-  memset(frame_.video_frame_buffer()->MutableDataY(), 16,
-         frame_.allocated_size(webrtc::kYPlane));
-  memset(frame_.video_frame_buffer()->MutableDataU(), 128,
-         frame_.allocated_size(webrtc::kUPlane));
-  memset(frame_.video_frame_buffer()->MutableDataV(), 128,
-         frame_.allocated_size(webrtc::kVPlane));
-
+  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
+      kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2);
+  buffer->SetToBlack();
   std::vector<FrameType> types(1, kVideoFrameKey);
+
+  frame_.reset(new VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            fallback_wrapper_.Encode(frame_, nullptr, &types));
+            fallback_wrapper_.Encode(*frame_, nullptr, &types));
 }
 
 void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() {
@@ -225,9 +221,9 @@
 
   // Encoding a frame using the fallback should arrive at the new callback.
   std::vector<FrameType> types(1, kVideoFrameKey);
-  frame_.set_timestamp(frame_.timestamp() + 1000);
+  frame_->set_timestamp(frame_->timestamp() + 1000);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            fallback_wrapper_.Encode(frame_, nullptr, &types));
+            fallback_wrapper_.Encode(*frame_, nullptr, &types));
 
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
 }