Revert of Move MutableDataY{,U,V} methods to I420Buffer only. (patchset #14 id:260001 of https://codereview.webrtc.org/2278883002/ )

Reason for revert:
Broke a downstream application.

Original issue's description:
> Move MutableDataY{,U,V} methods to I420Buffer only.
>
> Deleted from the VideoFrameBuffer base class.
>
> BUG=webrtc:5921
>
> Committed: https://crrev.com/5539ef6c03c273f39fadae41ace47fdc11ac6d60
> Cr-Commit-Position: refs/heads/master@{#14317}

TBR=perkj@webrtc.org,magjed@webrtc.org,pthatcher@webrtc.org,honghaiz@webrtc.org,stefan@webrtc.org
# Skipping CQ checks because the original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5921

Review-Url: https://codereview.webrtc.org/2354223002
Cr-Commit-Position: refs/heads/master@{#14325}
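
For reference, the reverted CL had removed the mutable plane accessors from the
VideoFrameBuffer base class, leaving them only on the concrete I420Buffer; this
revert restores them as base-class virtuals with asserting defaults (see the
video_frame_buffer.h and video_frame_buffer.cc hunks below). A minimal
standalone sketch of the restored shape, with the constructor and plane storage
invented purely for illustration:

  #include <cassert>
  #include <cstdint>

  class VideoFrameBuffer {
   public:
    virtual ~VideoFrameBuffer() {}
    virtual const uint8_t* DataY() const = 0;
    // Restored by this revert: a virtual with a crashing default
    // (RTC_NOTREACHED() in the real tree), so read-only buffer types
    // simply never override it.
    virtual uint8_t* MutableDataY() { assert(false); return nullptr; }
  };

  class I420Buffer : public VideoFrameBuffer {
   public:
    explicit I420Buffer(int size) : data_(new uint8_t[size]) {}
    ~I420Buffer() override { delete[] data_; }
    const uint8_t* DataY() const override { return data_; }
    uint8_t* MutableDataY() override { return data_; }

   private:
    uint8_t* data_;
  };

With the accessors back on the base class, downstream code can keep writing
through a plain VideoFrameBuffer pointer, which is why the revert unbreaks
callers that never migrated to rtc::scoped_refptr<I420Buffer>.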
diff --git a/webrtc/common_video/corevideo_frame_buffer.cc b/webrtc/common_video/corevideo_frame_buffer.cc
index 3245bf5..a58ddc7 100644
--- a/webrtc/common_video/corevideo_frame_buffer.cc
+++ b/webrtc/common_video/corevideo_frame_buffer.cc
@@ -35,7 +35,7 @@
   size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
   size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
   // TODO(tkchin): Use a frame buffer pool.
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
   CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
   const uint8_t* src_y = static_cast<const uint8_t*>(
diff --git a/webrtc/common_video/i420_buffer_pool_unittest.cc b/webrtc/common_video/i420_buffer_pool_unittest.cc
index 3307539..3e795db 100644
--- a/webrtc/common_video/i420_buffer_pool_unittest.cc
+++ b/webrtc/common_video/i420_buffer_pool_unittest.cc
@@ -52,7 +52,7 @@
 }
 
 TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
-  rtc::scoped_refptr<I420Buffer> buffer;
+  rtc::scoped_refptr<VideoFrameBuffer> buffer;
   {
     I420BufferPool pool;
     buffer = pool.CreateBuffer(16, 16);
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index f9d46ef..406dbd3 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -162,14 +162,16 @@
   EXPECT_EQ(kRotation, small_frame.rotation());
 
   // Frame of larger dimensions.
-  rtc::scoped_refptr<I420Buffer> buffer =
-      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
-  memset(buffer->MutableDataY(), 1, width * height);
-  memset(buffer->MutableDataU(), 2, ((height + 1) / 2) * stride_u);
-  memset(buffer->MutableDataV(), 3, ((height + 1) / 2) * stride_u);
-  VideoFrame other_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
-  big_frame.CopyFrame(other_frame);
-  EXPECT_TRUE(test::FramesEqual(other_frame, big_frame));
+  small_frame.CreateEmptyFrame(width, height,
+                               stride_y, stride_u, stride_v);
+  memset(small_frame.video_frame_buffer()->MutableDataY(), 1,
+         small_frame.allocated_size(kYPlane));
+  memset(small_frame.video_frame_buffer()->MutableDataU(), 2,
+         small_frame.allocated_size(kUPlane));
+  memset(small_frame.video_frame_buffer()->MutableDataV(), 3,
+         small_frame.allocated_size(kVPlane));
+  big_frame.CopyFrame(small_frame);
+  EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
 }
 
 TEST(TestVideoFrame, ShallowCopy) {
diff --git a/webrtc/common_video/include/video_frame_buffer.h b/webrtc/common_video/include/video_frame_buffer.h
index ae7855f..6743970 100644
--- a/webrtc/common_video/include/video_frame_buffer.h
+++ b/webrtc/common_video/include/video_frame_buffer.h
@@ -45,6 +45,12 @@
   virtual const uint8_t* DataU() const = 0;
   virtual const uint8_t* DataV() const = 0;
 
+  // TODO(nisse): Move MutableData methods to the I420Buffer subclass.
+  // Non-const data access.
+  virtual uint8_t* MutableDataY();
+  virtual uint8_t* MutableDataU();
+  virtual uint8_t* MutableDataV();
+
   // Returns the number of bytes between successive rows for a given plane.
   virtual int StrideY() const = 0;
   virtual int StrideU() const = 0;
@@ -92,9 +98,9 @@
   const uint8_t* DataU() const override;
   const uint8_t* DataV() const override;
 
-  uint8_t* MutableDataY();
-  uint8_t* MutableDataU();
-  uint8_t* MutableDataV();
+  uint8_t* MutableDataY() override;
+  uint8_t* MutableDataU() override;
+  uint8_t* MutableDataV() override;
   int StrideY() const override;
   int StrideU() const override;
   int StrideV() const override;
diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
index f8cd470..699a626 100644
--- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h
+++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
@@ -69,7 +69,6 @@
 //                    already open for writing.
 // Return value: 0 if OK, < 0 otherwise.
 int PrintVideoFrame(const VideoFrame& frame, FILE* file);
-int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file);
 
 // Extract buffer from VideoFrame or VideoFrameBuffer (consecutive
 // planes, no stride)
@@ -93,13 +92,11 @@
 //   - sample_size      : Required only for the parsing of MJPG (set to 0 otherwise).
 //   - rotate           : Rotation mode of output image.
 // Output:
-//   - dst_buffer       : Reference to a destination frame buffer.
+//   - dst_frame        : Reference to a destination frame.
 // Return value: 0 if OK, < 0 otherwise.
 
-// TODO(nisse): Delete this wrapper, and let users call libyuv directly. Most
-// calls pass |src_video_type| == kI420, and should use libyuv::I420Copy. The
-// only exception at the time of this writing is
-// VideoCaptureImpl::IncomingFrame, which still needs libyuv::ConvertToI420.
+// TODO(nisse): Deprecated, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5921.
 int ConvertToI420(VideoType src_video_type,
                   const uint8_t* src_frame,
                   int crop_x,
@@ -108,7 +105,7 @@
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  I420Buffer* dst_buffer);
+                  VideoFrame* dst_frame);
 
 // Convert From I420
 // Input:
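
A hedged usage sketch of the restored wrapper above, which writes into a
VideoFrame instead of a caller-provided I420Buffer. It assumes the WebRTC tree
at this revision (the include paths are assumptions), and the helper name and
dimensions are illustrative:

  #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
  #include "webrtc/video_frame.h"

  // Convert a packed I420 capture buffer into |dst|, allocating the
  // destination planes through the frame itself.
  int ConvertCapture(const uint8_t* src, int width, int height,
                     webrtc::VideoFrame* dst) {
    dst->CreateEmptyFrame(width, height, width, (width + 1) / 2,
                          (width + 1) / 2);
    return webrtc::ConvertToI420(webrtc::kI420, src,
                                 0, 0,  // No cropping.
                                 width, height,
                                 0,     // sample_size: only used for MJPG.
                                 webrtc::kVideoRotation_0, dst);
  }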
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index c6ca212..ab36559 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -95,20 +95,21 @@
 
   double psnr = 0.0;
 
-  rtc::scoped_refptr<I420Buffer> res_i420_buffer = I420Buffer::Create(
-      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
-
+  VideoFrame res_i420_frame;
+  res_i420_frame.CreateEmptyFrame(width_, height_, width_,
+                                  (width_ + 1) / 2, (width_ + 1) / 2);
   printf("\nConvert #%d I420 <-> I420 \n", j);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
+                               out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
   j++;
 
@@ -118,18 +119,17 @@
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-  res_i420_buffer =
-      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
+  res_i420_frame.CreateEmptyFrame(width_, height_, stride_y,
+                                  stride_uv, stride_uv);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
 
   // Optimization: speed/quality trade-off => 45 dB only (platform dependent).
   EXPECT_GT(ceil(psnr), 44);
@@ -137,47 +137,44 @@
 
   printf("\nConvert #%d I420 <-> UYVY\n", j);
   std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
   j++;
 
   printf("\nConvert #%d I420 <-> YUY2\n", j);
   std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
 
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
-
   printf("\nConvert #%d I420 <-> RGB565\n", j);
   std::unique_ptr<uint8_t[]> out_rgb565_buffer(
       new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0,
-            ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
+                               out_rgb565_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
+
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
   j++;
 
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   // TODO(leozwang): Investigate what the right psnr should be for
   // I420ToRGB565; another example is I420ToRGB24, where the psnr is 44.
   // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@@ -186,20 +183,18 @@
   printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   std::unique_ptr<uint8_t[]> out_argb8888_buffer(
       new uint8_t[width_ * height_ * 4]);
-  EXPECT_EQ(0,
-            ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
+                               out_argb8888_buffer.get()));
 
   EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
 
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
-  // TODO(leozwang): Investigate what the right psnr should be for
-  // I420ToARGB8888.
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
+  // TODO(leozwang): Investigate what the right psnr should be for I420ToARGB8888.
   EXPECT_GT(ceil(psnr), 42);
 
   ASSERT_EQ(0, fclose(output_file));
@@ -214,48 +209,49 @@
 
   double psnr = 0.0;
 
+  VideoFrame res_i420_frame;
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-
-  rtc::scoped_refptr<I420Buffer> res_i420_buffer =
-      I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
+  res_i420_frame.CreateEmptyFrame(width_, height_,
+                                  stride_y, stride_uv, stride_uv);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
                                out_i420_buffer.get()));
   EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+                             height_, 0, kVideoRotation_0, &res_i420_frame));
 
-  if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
+  if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
     return;
   }
-  psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+  psnr = I420PSNR(&orig_frame_, &res_i420_frame);
   EXPECT_EQ(48.0, psnr);
 }
 
 
 TEST_F(TestLibYuv, RotateTest) {
-  // Use ConvertToI420 for multiple rotations - see that nothing breaks, all
+  // Use ConvertToI420 for multiple rotations - see that nothing breaks, all
   // memory is properly allocated and end result is equal to the starting point.
+  VideoFrame rotated_res_i420_frame;
   int rotated_width = height_;
   int rotated_height = width_;
   int stride_y;
   int stride_uv;
   Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
-  rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
-      rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
+  rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
+                                          rotated_height,
+                                          stride_y,
+                                          stride_uv,
+                                          stride_uv);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_90,
-                             rotated_res_i420_buffer.get()));
+                             0, kVideoRotation_90, &rotated_res_i420_frame));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_270,
-                             rotated_res_i420_buffer.get()));
-  rotated_res_i420_buffer = I420Buffer::Create(
-      width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
+                             0, kVideoRotation_270, &rotated_res_i420_frame));
+  rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
+                                          width_, (width_ + 1) / 2,
+                                          (width_ + 1) / 2);
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
-                             0, kVideoRotation_180,
-                             rotated_res_i420_buffer.get()));
+                             0, kVideoRotation_180, &rotated_res_i420_frame));
 }
 
 }  // namespace webrtc
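
The arithmetic these tests repeat, chroma strides of (width_ + 1) / 2 and a
frame length covering one full Y plane plus two quarter-size chroma planes,
follows directly from the I420 layout. A standalone worked example (the odd
dimensions are illustrative):

  #include <cstdio>

  int main() {
    int width = 405, height = 281;         // Odd on purpose.
    int chroma_width = (width + 1) / 2;    // 203: round up, never truncate.
    int chroma_height = (height + 1) / 2;  // 141
    int frame_length =
        width * height + 2 * chroma_width * chroma_height;  // 171051 bytes.
    printf("I420 %dx%d needs %d bytes\n", width, height, frame_length);
    return 0;
  }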
diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc
index 6a7ba15..d05b644 100644
--- a/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -103,35 +103,33 @@
 }
 
 // TODO(nisse): Belongs with the test code?
-int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file) {
-  int width = frame.width();
-  int height = frame.height();
+int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
+  if (file == NULL)
+    return -1;
+  if (frame.IsZeroSize())
+    return -1;
+  int width = frame.video_frame_buffer()->width();
+  int height = frame.video_frame_buffer()->height();
   int chroma_width = (width + 1) / 2;
   int chroma_height = (height + 1) / 2;
 
-  if (PrintPlane(frame.DataY(), width, height,
-                 frame.StrideY(), file) < 0) {
+  if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height,
+                 frame.video_frame_buffer()->StrideY(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.DataU(),
+  if (PrintPlane(frame.video_frame_buffer()->DataU(),
                  chroma_width, chroma_height,
-                 frame.StrideU(), file) < 0) {
+                 frame.video_frame_buffer()->StrideU(), file) < 0) {
     return -1;
   }
-  if (PrintPlane(frame.DataV(),
+  if (PrintPlane(frame.video_frame_buffer()->DataV(),
                  chroma_width, chroma_height,
-                 frame.StrideV(), file) < 0) {
+                 frame.video_frame_buffer()->StrideV(), file) < 0) {
     return -1;
   }
   return 0;
 }
 
-int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
-  if (frame.IsZeroSize())
-    return -1;
-  return PrintVideoFrame(*frame.video_frame_buffer(), file);
-}
-
 int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
                   size_t size,
                   uint8_t* buffer) {
@@ -251,19 +249,23 @@
                   int src_height,
                   size_t sample_size,
                   VideoRotation rotation,
-                  I420Buffer* dst_buffer) {
-  int dst_width = dst_buffer->width();
-  int dst_height = dst_buffer->height();
+                  VideoFrame* dst_frame) {
+  int dst_width = dst_frame->width();
+  int dst_height = dst_frame->height();
   // LibYuv expects pre-rotation values for dst.
   // Stride values should correspond to the destination values.
   if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
-    std::swap(dst_width, dst_height);
+    dst_width = dst_frame->height();
+    dst_height = dst_frame->width();
   }
   return libyuv::ConvertToI420(
       src_frame, sample_size,
-      dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-      dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-      dst_buffer->MutableDataV(), dst_buffer->StrideV(),
+      dst_frame->video_frame_buffer()->MutableDataY(),
+      dst_frame->video_frame_buffer()->StrideY(),
+      dst_frame->video_frame_buffer()->MutableDataU(),
+      dst_frame->video_frame_buffer()->StrideU(),
+      dst_frame->video_frame_buffer()->MutableDataV(),
+      dst_frame->video_frame_buffer()->StrideV(),
       crop_x, crop_y,
       src_width, src_height,
       dst_width, dst_height,
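
The dimension swap above is what the "LibYuv expects pre-rotation values for
dst" comment means: a 90- or 270-degree rotation into a W x H destination must
be described to libyuv as H x W. The restored code spells this out with two
assignments rather than std::swap; the effect is identical. A standalone sketch
(the enum is redeclared here only for the example, mirroring the values in
webrtc/common_video/rotation.h):

  #include <utility>

  enum VideoRotation {
    kVideoRotation_0 = 0,
    kVideoRotation_90 = 90,
    kVideoRotation_180 = 180,
    kVideoRotation_270 = 270,
  };

  // Returns the (width, height) to hand libyuv for a destination frame that
  // will be |dst_width| x |dst_height| after rotation is applied.
  std::pair<int, int> PreRotationDst(int dst_width, int dst_height,
                                     VideoRotation rotation) {
    if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270)
      std::swap(dst_width, dst_height);
    return std::make_pair(dst_width, dst_height);
  }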
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index 4b29be9..e00ca27 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -87,18 +87,10 @@
   const int expected_size_y = height * stride_y;
   const int expected_size_u = half_height * stride_u;
   const int expected_size_v = half_height * stride_v;
-  // Allocate a new buffer.
-  rtc::scoped_refptr<I420Buffer> buffer_ =
-      I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
-
-  memcpy(buffer_->MutableDataY(), buffer_y, expected_size_y);
-  memcpy(buffer_->MutableDataU(), buffer_u, expected_size_u);
-  memcpy(buffer_->MutableDataV(), buffer_v, expected_size_v);
-
-  video_frame_buffer_ = buffer_;
-  timestamp_rtp_ = 0;
-  ntp_time_ms_ = 0;
-  timestamp_us_ = 0;
+  CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
+  memcpy(video_frame_buffer_->MutableDataY(), buffer_y, expected_size_y);
+  memcpy(video_frame_buffer_->MutableDataU(), buffer_u, expected_size_u);
+  memcpy(video_frame_buffer_->MutableDataV(), buffer_v, expected_size_v);
   rotation_ = rotation;
 }
 
diff --git a/webrtc/common_video/video_frame_buffer.cc b/webrtc/common_video/video_frame_buffer.cc
index dbb2743..2d7e7f4 100644
--- a/webrtc/common_video/video_frame_buffer.cc
+++ b/webrtc/common_video/video_frame_buffer.cc
@@ -31,6 +31,19 @@
 
 }  // namespace
 
+uint8_t* VideoFrameBuffer::MutableDataY() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+uint8_t* VideoFrameBuffer::MutableDataU() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+uint8_t* VideoFrameBuffer::MutableDataV() {
+  RTC_NOTREACHED();
+  return nullptr;
+}
+
 VideoFrameBuffer::~VideoFrameBuffer() {}
 
 I420Buffer::I420Buffer(int width, int height)
diff --git a/webrtc/media/base/videoframe_unittest.h b/webrtc/media/base/videoframe_unittest.h
index 42936bb..9658f87 100644
--- a/webrtc/media/base/videoframe_unittest.h
+++ b/webrtc/media/base/videoframe_unittest.h
@@ -453,6 +453,7 @@
   static bool IsEqual(const cricket::VideoFrame& frame,
                       int width,
                       int height,
+                      int64_t timestamp_us,
                       const uint8_t* y,
                       uint32_t ypitch,
                       const uint8_t* u,
@@ -461,6 +462,7 @@
                       uint32_t vpitch,
                       int max_error) {
     return IsSize(frame, width, height) &&
+           frame.timestamp_us() == timestamp_us &&
            IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
                         frame.video_frame_buffer()->StrideY(), y, ypitch,
                         static_cast<uint32_t>(width),
@@ -478,25 +480,15 @@
   static bool IsEqual(const cricket::VideoFrame& frame1,
                       const cricket::VideoFrame& frame2,
                       int max_error) {
-    return frame1.timestamp_us() == frame2.timestamp_us() &&
-           IsEqual(frame1,
+    return IsEqual(frame1,
                    frame2.width(), frame2.height(),
+                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY(),
                    frame2.video_frame_buffer()->StrideY(),
                    frame2.video_frame_buffer()->DataU(),
                    frame2.video_frame_buffer()->StrideU(),
                    frame2.video_frame_buffer()->DataV(),
-                   frame2.video_frame_buffer()->StrideV(), max_error);
-  }
-
-  static bool IsEqual(
-      const cricket::VideoFrame& frame1,
-      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-      int max_error) {
-    return IsEqual(frame1, buffer->width(), buffer->height(),
-                   buffer->DataY(), buffer->StrideY(),
-                   buffer->DataU(), buffer->StrideU(),
-                   buffer->DataV(), buffer->StrideV(),
+                   frame2.video_frame_buffer()->StrideV(),
                    max_error);
   }
 
@@ -505,10 +497,10 @@
                               int hcrop, int vcrop, int max_error) {
     return frame1.width() <= frame2.width() &&
            frame1.height() <= frame2.height() &&
-           frame1.timestamp_us() == frame2.timestamp_us() &&
            IsEqual(frame1,
                    frame2.width() - hcrop * 2,
                    frame2.height() - vcrop * 2,
+                   frame2.timestamp_us(),
                    frame2.video_frame_buffer()->DataY()
                        + vcrop * frame2.video_frame_buffer()->StrideY()
                        + hcrop,
@@ -547,8 +539,8 @@
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* u = y + kWidth * kHeight;
     const uint8_t* v = u + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
-                        kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
+                        kWidth / 2, v, kWidth / 2, 0));
   }
 
   // Test constructing an image from a YV12 buffer.
@@ -562,8 +554,8 @@
     const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
     const uint8_t* v = y + kWidth * kHeight;
     const uint8_t* u = v + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
-                        kWidth / 2, 0));
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
+                        kWidth / 2, v, kWidth / 2, 0));
   }
 
   // Test constructing an image from a I422 buffer.
@@ -780,8 +772,7 @@
 // Macro to help test different rotations
 #define TEST_MIRROR(FOURCC, BPP)                                               \
   void Construct##FOURCC##Mirror() {                                           \
-    T frame1, frame2;                                                          \
-    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer;                         \
+    T frame1, frame2, frame3;                                                  \
     std::unique_ptr<rtc::MemoryStream> ms(                                     \
         CreateYuvSample(kWidth, kHeight, BPP));                                \
     ASSERT_TRUE(ms.get() != NULL);                                             \
@@ -797,18 +788,21 @@
                             data_size, 0, webrtc::kVideoRotation_0));          \
     int width_rotate = frame1.width();                                         \
     int height_rotate = frame1.height();                                       \
-    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate);      \
+    frame3.InitToEmptyBuffer(width_rotate, height_rotate);                     \
     libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(),                   \
                        frame2.video_frame_buffer()->StrideY(),                 \
                        frame2.video_frame_buffer()->DataU(),                   \
                        frame2.video_frame_buffer()->StrideU(),                 \
                        frame2.video_frame_buffer()->DataV(),                   \
                        frame2.video_frame_buffer()->StrideV(),                 \
-                       res_buffer->MutableDataY(), res_buffer->StrideY(),      \
-                       res_buffer->MutableDataU(), res_buffer->StrideU(),      \
-                       res_buffer->MutableDataV(), res_buffer->StrideV(),      \
-                       kWidth, kHeight);                                       \
-    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0));                               \
+                       frame3.video_frame_buffer()->MutableDataY(),            \
+                       frame3.video_frame_buffer()->StrideY(),                 \
+                       frame3.video_frame_buffer()->MutableDataU(),            \
+                       frame3.video_frame_buffer()->StrideU(),                 \
+                       frame3.video_frame_buffer()->MutableDataV(),            \
+                       frame3.video_frame_buffer()->StrideV(), kWidth,         \
+                       kHeight);                                               \
+    EXPECT_TRUE(IsEqual(frame1, frame3, 0));                                   \
   }
 
   TEST_MIRROR(I420, 420)
@@ -816,8 +810,7 @@
 // Macro to help test different rotations
 #define TEST_ROTATE(FOURCC, BPP, ROTATE)                                       \
   void Construct##FOURCC##Rotate##ROTATE() {                                   \
-    T frame1, frame2;                                                          \
-    rtc::scoped_refptr<webrtc::I420Buffer> res_buffer;                         \
+    T frame1, frame2, frame3;                                                  \
     std::unique_ptr<rtc::MemoryStream> ms(                                     \
         CreateYuvSample(kWidth, kHeight, BPP));                                \
     ASSERT_TRUE(ms.get() != NULL);                                             \
@@ -833,18 +826,21 @@
                             data_size, 0, webrtc::kVideoRotation_0));          \
     int width_rotate = frame1.width();                                         \
     int height_rotate = frame1.height();                                       \
-    res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate);      \
+    frame3.InitToEmptyBuffer(width_rotate, height_rotate);                     \
     libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(),                   \
                        frame2.video_frame_buffer()->StrideY(),                 \
                        frame2.video_frame_buffer()->DataU(),                   \
                        frame2.video_frame_buffer()->StrideU(),                 \
                        frame2.video_frame_buffer()->DataV(),                   \
                        frame2.video_frame_buffer()->StrideV(),                 \
-                       res_buffer->MutableDataY(), res_buffer->StrideY(),      \
-                       res_buffer->MutableDataU(), res_buffer->StrideU(),      \
-                       res_buffer->MutableDataV(), res_buffer->StrideV(),      \
-                       kWidth, kHeight, libyuv::kRotate##ROTATE);              \
-    EXPECT_TRUE(IsEqual(frame1, res_buffer, 0));                               \
+                       frame3.video_frame_buffer()->MutableDataY(),            \
+                       frame3.video_frame_buffer()->StrideY(),                 \
+                       frame3.video_frame_buffer()->MutableDataU(),            \
+                       frame3.video_frame_buffer()->StrideU(),                 \
+                       frame3.video_frame_buffer()->MutableDataV(),            \
+                       frame3.video_frame_buffer()->StrideV(), kWidth,         \
+                       kHeight, libyuv::kRotate##ROTATE);                      \
+    EXPECT_TRUE(IsEqual(frame1, frame3, 0));                                   \
   }
 
   // Test constructing an image with rotation.
@@ -948,7 +944,7 @@
     const uint8_t* y = pixel;
     const uint8_t* u = y + 1;
     const uint8_t* v = u + 1;
-    EXPECT_TRUE(IsEqual(frame, 1, 1, y, 1, u, 1, v, 1, 0));
+    EXPECT_TRUE(IsEqual(frame, 1, 1, 0, y, 1, u, 1, v, 1, 0));
   }
 
   // Test 5 pixel edge case image.
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index c81fd88..b9c6604 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -66,13 +66,17 @@
       cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
 }
 
-static rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
-    int width,
-    int height) {
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      webrtc::I420Buffer::Create(width, height);
-  buffer->SetToBlack();
-  return buffer;
+static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
+                             int width,
+                             int height) {
+  video_frame->CreateEmptyFrame(
+      width, height, width, (width + 1) / 2, (width + 1) / 2);
+  memset(video_frame->video_frame_buffer()->MutableDataY(), 16,
+         video_frame->allocated_size(webrtc::kYPlane));
+  memset(video_frame->video_frame_buffer()->MutableDataU(), 128,
+         video_frame->allocated_size(webrtc::kUPlane));
+  memset(video_frame->video_frame_buffer()->MutableDataV(), 128,
+         video_frame->allocated_size(webrtc::kVPlane));
 }
 
 void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,
@@ -2200,9 +2204,9 @@
   cricket::FakeVideoRenderer renderer;
   EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));
 
-  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
-                                 kInitialTimestamp, 0,
-                                 webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame;
+  CreateBlackFrame(&video_frame, 4, 4);
+  video_frame.set_timestamp(kInitialTimestamp);
   // Initial NTP time is not available on the first frame, but it can still
   // be estimated.
   stream->InjectFrame(video_frame);
diff --git a/webrtc/media/engine/webrtcvideoframe.cc b/webrtc/media/engine/webrtcvideoframe.cc
index 7b5a680..f35a45a 100644
--- a/webrtc/media/engine/webrtcvideoframe.cc
+++ b/webrtc/media/engine/webrtcvideoframe.cc
@@ -129,9 +129,7 @@
     new_height = dw;
   }
 
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      webrtc::I420Buffer::Create(new_width, new_height);
-  video_frame_buffer_ = buffer;
+  InitToEmptyBuffer(new_width, new_height);
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
 
   int horiz_crop = ((w - dw) / 2) & ~1;
@@ -142,10 +140,15 @@
   int idh = (h < 0) ? -dh : dh;
   int r = libyuv::ConvertToI420(
       sample, sample_size,
-      buffer->MutableDataY(), buffer->StrideY(),
-      buffer->MutableDataU(), buffer->StrideU(),
-      buffer->MutableDataV(), buffer->StrideV(),
-      horiz_crop, vert_crop, w, h, dw, idh,
+      video_frame_buffer_->MutableDataY(),
+      video_frame_buffer_->StrideY(),
+      video_frame_buffer_->MutableDataU(),
+      video_frame_buffer_->StrideU(),
+      video_frame_buffer_->MutableDataV(),
+      video_frame_buffer_->StrideV(),
+      horiz_crop, vert_crop,
+      w, h,
+      dw, idh,
       static_cast<libyuv::RotationMode>(
           apply_rotation ? rotation : webrtc::kVideoRotation_0),
       format);
@@ -159,7 +162,7 @@
 }
 
 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
-  video_frame_buffer_ = webrtc::I420Buffer::Create(w, h);
+  video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
   rotation_ = webrtc::kVideoRotation_0;
 }
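
One detail worth noting in the hunk above: horiz_crop = ((w - dw) / 2) & ~1
centers the crop and then clears the low bit so the offset stays even; an odd
pixel offset would split 2x2 chroma blocks and misalign the half-resolution U
and V planes. A standalone worked example:

  #include <cassert>

  // Centering offset for cropping |src| down to |dst|, rounded down to even.
  int CenteredEvenCrop(int src, int dst) {
    return ((src - dst) / 2) & ~1;
  }

  int main() {
    assert(CenteredEvenCrop(1280, 1274) == 2);  // 3 rounded down to 2.
    assert(CenteredEvenCrop(640, 630) == 4);    // 5 rounded down to 4.
    assert(CenteredEvenCrop(640, 632) == 4);    // Already even: unchanged.
    return 0;
  }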
 
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 10f3a2b..839ab80 100644
--- a/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -420,19 +420,15 @@
     capability.maxFPS = kTestFramerate;
     capture_callback_.SetExpectedCapability(capability);
 
-    rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
-        kTestWidth, kTestHeight,
-        kTestWidth, ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
-
-    memset(buffer->MutableDataY(), 127, kTestWidth * kTestHeight);
-    memset(buffer->MutableDataU(), 127,
+    test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
+                                 ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
+    SleepMs(1);  // Wait 1ms so that two tests can't have the same timestamp.
+    memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
+           kTestWidth * kTestHeight);
+    memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
            ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
-    memset(buffer->MutableDataV(), 127,
+    memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
            ((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
-    test_frame_.reset(
-        new webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
-
-    SleepMs(1);  // Wait 1ms so that two tests can't have the same timestamp.
 
     capture_module_->RegisterCaptureDataCallback(capture_callback_);
     capture_module_->RegisterCaptureCallback(capture_feedback_);
@@ -447,7 +443,7 @@
   webrtc::VideoCaptureExternal* capture_input_interface_;
   rtc::scoped_refptr<VideoCaptureModule> capture_module_;
   std::unique_ptr<webrtc::ProcessThread> process_module_;
-  std::unique_ptr<webrtc::VideoFrame> test_frame_;
+  webrtc::VideoFrame test_frame_;
   TestVideoCaptureCallback capture_callback_;
   TestVideoCaptureFeedBack capture_feedback_;
 };
@@ -455,13 +451,13 @@
 // Test input of external video frames.
 TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+                                         test_frame_.width(),
+                                         test_frame_.height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(*test_frame_));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
 }
 
 // Test frame rate and no picture alarm.
@@ -476,14 +472,13 @@
   uint64_t startTime = rtc::TimeNanos();
 
   while ((rtc::TimeNanos() - startTime) < testTime) {
-    size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                           test_frame_->width(),
-                                           test_frame_->height());
-    std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
-    EXPECT_EQ(
-        0, capture_input_interface_->IncomingFrame(
-               test_buffer.get(), length, capture_callback_.capability(), 0));
+    size_t length = webrtc::CalcBufferSize(webrtc::kI420,
+                                           test_frame_.width(),
+                                           test_frame_.height());
+    std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
+    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
+      length, capture_callback_.capability(), 0));
     SleepMs(100);
   }
   EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
@@ -494,10 +489,10 @@
   startTime = rtc::TimeNanos();
   while ((rtc::TimeNanos() - startTime) < testTime) {
     size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                           test_frame_->width(),
-                                           test_frame_->height());
+                                           test_frame_.width(),
+                                           test_frame_.height());
     std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-    webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+    webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
     EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
       length, capture_callback_.capability(), 0));
     SleepMs(1000 / 30);
@@ -512,10 +507,10 @@
 TEST_F(VideoCaptureExternalTest, Rotation) {
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
   size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+                                         test_frame_.width(),
+                                         test_frame_.height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
-  webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
+  webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
     length, capture_callback_.capability(), 0));
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_90));
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 90ac267..e6b2d55 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -275,14 +275,14 @@
         // Setting absolute height (in case it was negative).
         // On Windows, the image starts at the bottom left, not the top left.
         // Setting a negative source height inverts the image (within LibYuv).
-
-        // TODO(nisse): Use a pool?
-        rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
-            target_width, abs(target_height), stride_y, stride_uv, stride_uv);
+        _captureFrame.CreateEmptyFrame(target_width,
+                                       abs(target_height),
+                                       stride_y,
+                                       stride_uv, stride_uv);
         const int conversionResult = ConvertToI420(
             commonVideoType, videoFrame, 0, 0,  // No cropping
             width, height, videoFrameLength,
-            apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
+            apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
         if (conversionResult < 0)
         {
           LOG(LS_ERROR) << "Failed to convert capture frame from type "
@@ -290,12 +290,15 @@
             return -1;
         }
 
-        VideoFrame captureFrame(
-            buffer, 0, rtc::TimeMillis(),
-            !apply_rotation ? _rotateFrame : kVideoRotation_0);
-        captureFrame.set_ntp_time_ms(captureTime);
+        if (!apply_rotation) {
+          _captureFrame.set_rotation(_rotateFrame);
+        } else {
+          _captureFrame.set_rotation(kVideoRotation_0);
+        }
+        _captureFrame.set_ntp_time_ms(captureTime);
+        _captureFrame.set_render_time_ms(rtc::TimeMillis());
 
-        DeliverCapturedFrame(captureFrame);
+        DeliverCapturedFrame(_captureFrame);
     }
     else // Encoded format
     {
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index e9fee7e..7d785c3 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -137,6 +137,8 @@
     VideoRotation _rotateFrame;  // Set if the frame should be rotated by the
                                  // capture module.
 
+    VideoFrame _captureFrame;
+
     // Indicate whether rotation should be applied before delivered externally.
     bool apply_rotation_;
 };
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index 3cc08d2..bbf1ee1 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -121,47 +121,52 @@
     return ret;
   }
 
-  // The video frame is stored in |frame_buffer|. |av_frame| is FFmpeg's version
-  // of a video frame and will be set up to reference |frame_buffer|'s data.
+  // The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
+  // of a video frame and will be set up to reference |video_frame|'s buffers.
+
+  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
+  // Refactor to not use a VideoFrame object at all.
 
   // FFmpeg expects the initial allocation to be zero-initialized according to
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
-  // TODO(nisse): Delete that feature from the video pool, instead add
-  // an explicit call to InitializeData here.
-  rtc::scoped_refptr<I420Buffer> frame_buffer =
-      decoder->pool_.CreateBuffer(width, height);
+  VideoFrame* video_frame = new VideoFrame(
+      decoder->pool_.CreateBuffer(width, height),
+      0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
 
-  int y_size = width * height;
-  int uv_size = ((width + 1) / 2) * ((height + 1) / 2);
   // DCHECK that we have a contiguous buffer as is required.
-  RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
-  RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
-  int total_size = y_size + 2 * uv_size;
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
+                video_frame->video_frame_buffer()->DataY() +
+                video_frame->allocated_size(kYPlane));
+  RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
+                video_frame->video_frame_buffer()->DataU() +
+                video_frame->allocated_size(kUPlane));
+  int total_size = video_frame->allocated_size(kYPlane) +
+                   video_frame->allocated_size(kUPlane) +
+                   video_frame->allocated_size(kVPlane);
 
   av_frame->format = context->pix_fmt;
   av_frame->reordered_opaque = context->reordered_opaque;
 
   // Set |av_frame| members as required by FFmpeg.
-  av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY();
-  av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY();
-  av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU();
-  av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU();
-  av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV();
-  av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV();
+  av_frame->data[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataY();
+  av_frame->linesize[kYPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideY();
+  av_frame->data[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataU();
+  av_frame->linesize[kUPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideU();
+  av_frame->data[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->MutableDataV();
+  av_frame->linesize[kVPlaneIndex] =
+      video_frame->video_frame_buffer()->StrideV();
   RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
 
-  // Create a VideoFrame object to keep a reference to the buffer.
-  // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
-  // Refactor to not use a VideoFrame object at all.
-  av_frame->buf[0] = av_buffer_create(
-      av_frame->data[kYPlaneIndex],
-      total_size,
-      AVFreeBuffer2,
-      static_cast<void*>(new VideoFrame(frame_buffer,
-                                        0 /* timestamp */,
-                                        0 /* render_time_ms */,
-                                        kVideoRotation_0)),
-      0);
+  av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
+                                      total_size,
+                                      AVFreeBuffer2,
+                                      static_cast<void*>(video_frame),
+                                      0);
   RTC_CHECK(av_frame->buf[0]);
   return 0;
 }
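
The av_buffer_create() call above is what keeps the pooled buffer alive while
FFmpeg references the frame: the heap-allocated VideoFrame travels as the
opaque pointer, and the free callback (AVFreeBuffer2) deletes it once the last
AVBufferRef is released. A minimal sketch of the same idiom with the FFmpeg
types stripped out; the callback signature matches av_buffer_create's
void (*free)(void* opaque, uint8_t* data):

  #include <cstdint>

  // Stands in for the VideoFrame that holds a reference to the pooled buffer.
  struct OpaqueOwner {
    explicit OpaqueOwner(int size) : data(new uint8_t[size]) {}
    ~OpaqueOwner() { delete[] data; }  // Releases the buffer storage.
    uint8_t* data;
  };

  // Same shape as the free callback handed to av_buffer_create().
  static void FreeOpaqueOwner(void* opaque, uint8_t* /*data*/) {
    delete static_cast<OpaqueOwner*>(opaque);
  }

  int main() {
    OpaqueOwner* owner = new OpaqueOwner(16);
    // FFmpeg stores (owner->data, FreeOpaqueOwner, owner) in an AVBufferRef;
    // dropping the last reference invokes the callback, simulated here:
    FreeOpaqueOwner(owner, owner->data);
    return 0;
  }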
diff --git a/webrtc/modules/video_coding/codecs/i420/i420.cc b/webrtc/modules/video_coding/codecs/i420/i420.cc
index d0c8d0c..93204dd 100644
--- a/webrtc/modules/video_coding/codecs/i420/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/i420.cc
@@ -137,7 +137,8 @@
 }
 
 I420Decoder::I420Decoder()
-    : _width(0),
+    : _decodedImage(),
+      _width(0),
       _height(0),
       _inited(false),
       _decodeCompleteCallback(NULL) {}
@@ -198,19 +199,17 @@
   }
   // Set decoded image parameters.
   int half_width = (_width + 1) / 2;
-  rtc::scoped_refptr<webrtc::I420Buffer> frame_buffer =
-      I420Buffer::Create(_width, _height, _width, half_width, half_width);
-
-  // Converting from raw buffer to I420Buffer.
+  _decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
+                                 half_width);
+  // Converting from buffer to plane representation.
   int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
-                          kVideoRotation_0, frame_buffer.get());
+                          kVideoRotation_0, &_decodedImage);
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
   }
+  _decodedImage.set_timestamp(inputImage._timeStamp);
 
-  VideoFrame decoded_image(frame_buffer, inputImage._timeStamp, 0,
-                           webrtc::kVideoRotation_0);
-  _decodeCompleteCallback->Decoded(decoded_image);
+  _decodeCompleteCallback->Decoded(_decodedImage);
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 1c8037a..766e517 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -298,10 +298,11 @@
         return ret;
       }
     } else {
+      VideoFrame dst_frame;
+      // Make sure the destination frame is of sufficient size.
       // Aligning stride values based on width.
-      rtc::scoped_refptr<I420Buffer> dst_buffer =
-          I420Buffer::Create(dst_width, dst_height, dst_width,
-                             (dst_width + 1) / 2, (dst_width + 1) / 2);
+      dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
+                                 (dst_width + 1) / 2, (dst_width + 1) / 2);
       libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
                         input_image.video_frame_buffer()->StrideY(),
                         input_image.video_frame_buffer()->DataU(),
@@ -309,16 +310,18 @@
                         input_image.video_frame_buffer()->DataV(),
                         input_image.video_frame_buffer()->StrideV(),
                         src_width, src_height,
-                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
+                        dst_frame.video_frame_buffer()->MutableDataY(),
+                        dst_frame.video_frame_buffer()->StrideY(),
+                        dst_frame.video_frame_buffer()->MutableDataU(),
+                        dst_frame.video_frame_buffer()->StrideU(),
+                        dst_frame.video_frame_buffer()->MutableDataV(),
+                        dst_frame.video_frame_buffer()->StrideV(),
                         dst_width, dst_height,
                         libyuv::kFilterBilinear);
-
+      dst_frame.set_timestamp(input_image.timestamp());
+      dst_frame.set_render_time_ms(input_image.render_time_ms());
       int ret = streaminfos_[stream_idx].encoder->Encode(
-          VideoFrame(dst_buffer, input_image.timestamp(),
-                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
-          codec_specific_info, &stream_frame_types);
+          dst_frame, codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index d13dbb16..47d2322d 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -535,11 +535,17 @@
       .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
 
   // Send a fake frame and assert the return is software fallback.
+  VideoFrame input_frame;
   int half_width = (kDefaultWidth + 1) / 2;
-  rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
-      kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
-  input_buffer->InitializeData();
-  VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
+  input_frame.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+                               half_width, half_width);
+  memset(input_frame.video_frame_buffer()->MutableDataY(), 0,
+         input_frame.allocated_size(kYPlane));
+  memset(input_frame.video_frame_buffer()->MutableDataU(), 0,
+         input_frame.allocated_size(kUPlane));
+  memset(input_frame.video_frame_buffer()->MutableDataV(), 0,
+         input_frame.allocated_size(kVPlane));
+
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
             adapter_->Encode(input_frame, nullptr, &frame_types));
diff --git a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index 22e8645..e2bd71e 100644
--- a/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -236,8 +236,8 @@
     }
   }
 
-  // Fills in an I420Buffer from |plane_colors|.
-  static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
+  // Fills in a VideoFrameBuffer from |plane_colors|.
+  static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                           int plane_colors[kNumOfPlanes]) {
     int width = buffer->width();
     int height = buffer->height();
@@ -317,11 +317,14 @@
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
     int half_width = (kDefaultWidth + 1) / 2;
-    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
-                                       kDefaultWidth, half_width, half_width);
-    input_buffer_->InitializeData();
-    input_frame_.reset(
-        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
+    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
+                                  half_width, half_width);
+    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
+           input_frame_.allocated_size(kYPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
+           input_frame_.allocated_size(kUPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
+           input_frame_.allocated_size(kVPlane));
   }
 
   virtual void TearDown() {
@@ -393,33 +396,33 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     frame_types[0] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[1] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     frame_types[2] = kVideoFrameKey;
     ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
     ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingAllStreams() {
@@ -428,11 +431,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingTwoStreams() {
@@ -441,11 +444,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingTwoStreamsOneMaxedOut() {
@@ -455,11 +458,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 1);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingOneStream() {
@@ -468,11 +471,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestPaddingOneStreamTwoMaxedOut() {
@@ -483,11 +486,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 2);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestSendAllStreams() {
@@ -497,11 +500,11 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 3);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestDisablingStreams() {
@@ -510,47 +513,47 @@
     std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                        kVideoFrameDelta);
     ExpectStreams(kVideoFrameKey, 3);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     ExpectStreams(kVideoFrameDelta, 3);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     ExpectStreams(kVideoFrameDelta, 2);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should only get the first stream and padding for two.
     encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We don't have enough bitrate for the thumbnail stream, but we should get
     // it anyway with current configuration.
     encoder_->SetRates(kTargetBitrates[0] - 1, 30);
     ExpectStreams(kVideoFrameDelta, 1);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should only get two streams and padding for one.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 2);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // We should get all three streams.
     encoder_->SetRates(
         kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
     // We get a key frame because a new stream is being enabled.
     ExpectStreams(kVideoFrameKey, 3);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void SwitchingToOneStream(int width, int height) {
@@ -568,12 +571,14 @@
     }
     // Setting input image to new resolution.
     int half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
-    input_buffer_->InitializeData();
-
-    input_frame_.reset(
-        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
+    input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
+                                  settings_.width, half_width, half_width);
+    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
+           input_frame_.allocated_size(kYPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
+           input_frame_.allocated_size(kUPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
+           input_frame_.allocated_size(kVPlane));
 
     // The for loop above did not set the bitrate of the highest layer.
     settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
@@ -598,7 +603,7 @@
         .Times(1)
         .WillRepeatedly(Return(
             EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
 
     // Switch back.
     DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
@@ -609,12 +614,15 @@
     ExpectStreams(kVideoFrameKey, 1);
     // Resize |input_frame_| to the new resolution.
     half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
-    input_buffer_->InitializeData();
-    input_frame_.reset(
-        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
+    input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
+                                  settings_.width, half_width, half_width);
+    memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
+           input_frame_.allocated_size(kYPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
+           input_frame_.allocated_size(kUPlane));
+    memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
+           input_frame_.allocated_size(kVPlane));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
   }
 
   void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
@@ -629,7 +637,7 @@
 
     encoder_->SetRates(kMaxBitrates[2], 30);  // To get all three streams.
 
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     int picture_id = -1;
     int temporal_layer = -1;
     bool layer_sync = false;
@@ -639,22 +647,22 @@
     EXPECT_TRUE(layer_sync);
     int key_frame_picture_id = picture_id;
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(2, temporal_layer);
     EXPECT_TRUE(layer_sync);
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(1, temporal_layer);
     EXPECT_TRUE(layer_sync);
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
     EXPECT_EQ(2, temporal_layer);
@@ -667,8 +675,8 @@
     // Must match last key frame to trigger.
     codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id;
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
 
@@ -678,8 +686,8 @@
     // Must match last key frame to trigger, test bad id.
     codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id + 17;
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
     encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                              &layer_sync, 0);
 
@@ -703,9 +711,9 @@
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_buffer_, plane_offset);
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
 
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     int picture_id = -1;
     int temporal_layer = -1;
     bool layer_sync = false;
@@ -719,27 +727,27 @@
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_buffer_, plane_offset);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_buffer_, plane_offset);
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_buffer_, plane_offset);
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
 
     CodecSpecificInfo codec_specific;
     codec_specific.codecType = kVideoCodecVP8;
@@ -751,10 +759,10 @@
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_buffer_, plane_offset);
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
 
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
 
     EncodedImage encoded_frame;
     encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
@@ -776,47 +784,47 @@
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
@@ -845,47 +853,47 @@
     bool expected_layer_sync[3] = {false, false, false};
 
     // First frame: #0.
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #1.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #2.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #3.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #4.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
         &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
 
     // Next frame: #5.
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
     SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
     SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
     VerifyTemporalIdxAndSyncForAllSpatialLayers(
@@ -903,27 +911,24 @@
     // 1. stride > width 2. stride_y != stride_uv/2
     int stride_y = kDefaultWidth + 20;
     int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
-    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
-                                       stride_uv, stride_uv);
-    input_frame_.reset(
-        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
-
+    input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
+                                  stride_uv, stride_uv);
     // Set color.
     int plane_offset[kNumOfPlanes];
     plane_offset[kYPlane] = kColorY;
     plane_offset[kUPlane] = kColorU;
     plane_offset[kVPlane] = kColorV;
-    CreateImage(input_buffer_, plane_offset);
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
 
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
 
     // Change color.
     plane_offset[kYPlane] += 1;
     plane_offset[kUPlane] += 1;
     plane_offset[kVPlane] += 1;
-    CreateImage(input_buffer_, plane_offset);
-    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
-    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
+    CreateImage(input_frame_.video_frame_buffer(), plane_offset);
+    input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
+    EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
 
     EncodedImage encoded_frame;
     // Only encoding one frame - so will be a key frame.
@@ -963,8 +968,7 @@
   std::unique_ptr<VP8Decoder> decoder_;
   MockDecodedImageCallback decoder_callback_;
   VideoCodec settings_;
-  rtc::scoped_refptr<I420Buffer> input_buffer_;
-  std::unique_ptr<VideoFrame> input_frame_;
+  VideoFrame input_frame_;
 };
 
 }  // namespace testing
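
A minimal sketch of the two allocation patterns this file toggles between, against the 2016-era webrtc::VideoFrame API (kWidth, kHeight and the surrounding scaffolding are illustrative, not part of the patch):

  // Pattern the revert restores: the frame owns its planes via
  // CreateEmptyFrame(), and each plane is zeroed explicitly so memory
  // checkers treat the buffer as initialized.
  webrtc::VideoFrame frame;
  frame.CreateEmptyFrame(kWidth, kHeight, kWidth,
                         (kWidth + 1) / 2, (kWidth + 1) / 2);
  memset(frame.video_frame_buffer()->MutableDataY(), 0,
         frame.allocated_size(webrtc::kYPlane));
  memset(frame.video_frame_buffer()->MutableDataU(), 0,
         frame.allocated_size(webrtc::kUPlane));
  memset(frame.video_frame_buffer()->MutableDataV(), 0,
         frame.allocated_size(webrtc::kVPlane));

  // Pattern the revert removes: a standalone I420Buffer, zeroed in one
  // call and then wrapped by the frame.
  rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
      kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2);
  buffer->InitializeData();
  webrtc::VideoFrame wrapped(buffer, /*timestamp=*/0, /*render_time_ms=*/0,
                             webrtc::kVideoRotation_0);

Writing through video_frame_buffer()->MutableDataY() in the first pattern only compiles because the revert keeps the MutableData accessors on the buffer type that video_frame_buffer() returns.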
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 4f3d99b..12dcb7c 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -147,15 +147,13 @@
     EXPECT_EQ(stride_y, 176);
     EXPECT_EQ(stride_uv, 96);
 
-    rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
-        codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv);
+    input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
+                                  stride_y, stride_uv, stride_uv);
+    input_frame_.set_timestamp(kTestTimestamp);
     // Using ConvertToI420 to add stride to the image.
-    EXPECT_EQ(
-        0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
-                         codec_inst_.height, 0, kVideoRotation_0,
-                         buffer.get()));
-    input_frame_.reset(
-        new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
+    EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
+                               codec_inst_.width, codec_inst_.height, 0,
+                               kVideoRotation_0, &input_frame_));
   }
 
   void SetUpEncodeDecode() {
@@ -197,7 +195,7 @@
   std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
   std::unique_ptr<uint8_t[]> source_buffer_;
   FILE* source_file_;
-  std::unique_ptr<VideoFrame> input_frame_;
+  VideoFrame input_frame_;
   std::unique_ptr<VideoEncoder> encoder_;
   std::unique_ptr<VideoDecoder> decoder_;
   EncodedImage encoded_frame_;
@@ -239,7 +237,7 @@
 #endif
 TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
   SetUpEncodeDecode();
-  encoder_->Encode(*input_frame_, NULL, NULL);
+  encoder_->Encode(input_frame_, NULL, NULL);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // First frame should be a key frame.
   encoded_frame_._frameType = kVideoFrameKey;
@@ -248,7 +246,7 @@
             decoder_->Decode(encoded_frame_, false, NULL));
   EXPECT_GT(WaitForDecodedFrame(), 0u);
   // Compute PSNR on all planes (faster than SSIM).
-  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
   EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
   EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
 }
@@ -260,7 +258,7 @@
 #endif
 TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
   SetUpEncodeDecode();
-  encoder_->Encode(*input_frame_, NULL, NULL);
+  encoder_->Encode(input_frame_, NULL, NULL);
   EXPECT_GT(WaitForEncodedFrame(), 0u);
   // Setting complete to false -> should return an error.
   encoded_frame_._completeFrame = false;
@@ -275,7 +273,7 @@
   encoded_frame_._frameType = kVideoFrameKey;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame_, false, NULL));
-  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
+  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
 }
 
 }  // namespace webrtc
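
For context on the PSNR gates above: I420PSNR here is computed over all three planes (the test comment notes it is faster than SSIM), and the 36 dB floor is a loose sanity bound for an encode/decode round trip rather than a tight quality target. A sketch of the underlying formula, with MSE assumed to be the mean squared error over the compared samples:

  #include <cmath>

  // PSNR in dB for 8-bit samples: 10 * log10(peak^2 / MSE), peak = 255.
  double PsnrFromMse(double mse) {
    return 10.0 * std::log10(255.0 * 255.0 / mse);
  }

At 36 dB the implied MSE is about 16.3, i.e. an RMS error of roughly 4 out of 255 per sample.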
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 73a7eb6..ef5e8e3 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -1306,18 +1306,18 @@
   last_frame_width_ = img->d_w;
   last_frame_height_ = img->d_h;
   // Allocate memory for decoded image.
-  rtc::scoped_refptr<I420Buffer> buffer =
-      buffer_pool_.CreateBuffer(img->d_w, img->d_h);
-
+  VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
+                           timestamp, 0, kVideoRotation_0);
   libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
                    img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
                    img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                   buffer->MutableDataY(), buffer->StrideY(),
-                   buffer->MutableDataU(), buffer->StrideU(),
-                   buffer->MutableDataV(), buffer->StrideV(),
+                   decoded_image.video_frame_buffer()->MutableDataY(),
+                   decoded_image.video_frame_buffer()->StrideY(),
+                   decoded_image.video_frame_buffer()->MutableDataU(),
+                   decoded_image.video_frame_buffer()->StrideU(),
+                   decoded_image.video_frame_buffer()->MutableDataV(),
+                   decoded_image.video_frame_buffer()->StrideV(),
                    img->d_w, img->d_h);
-
-  VideoFrame decoded_image(buffer, timestamp, 0, kVideoRotation_0);
   decoded_image.set_ntp_time_ms(ntp_time_ms);
   int ret = decode_complete_callback_->Decoded(decoded_image);
   if (ret != 0)
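
The hunk above reorders the decode path: the VideoFrame is constructed from the pooled buffer first, and libyuv then writes straight into its planes. A condensed sketch of the restored flow, where the src_* pointers and strides stand in for the vpx_image_t fields (img->planes[...], img->stride[...]):

  webrtc::VideoFrame decoded_image(
      buffer_pool_.CreateBuffer(width, height), timestamp,
      /*render_time_ms=*/0, webrtc::kVideoRotation_0);
  // libyuv writes directly into the frame's own planes.
  libyuv::I420Copy(src_y, src_stride_y,
                   src_u, src_stride_u,
                   src_v, src_stride_v,
                   decoded_image.video_frame_buffer()->MutableDataY(),
                   decoded_image.video_frame_buffer()->StrideY(),
                   decoded_image.video_frame_buffer()->MutableDataU(),
                   decoded_image.video_frame_buffer()->StrideU(),
                   decoded_image.video_frame_buffer()->MutableDataV(),
                   decoded_image.video_frame_buffer()->StrideV(),
                   width, height);
  decoded_image.set_ntp_time_ms(ntp_time_ms);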
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index dcc0619..d7927eb 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -148,7 +148,7 @@
     return -1;
   }
   EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
-
+  webrtc::VideoFrame input_frame;
   size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
   std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
 
@@ -163,18 +163,14 @@
   int64_t starttime = rtc::TimeMillis();
   int frame_cnt = 1;
   int frames_processed = 0;
-  rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
-      webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
-
+  input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
   while (!feof(input_file) &&
          (num_frames == -1 || frames_processed < num_frames)) {
     if (fread(frame_buffer.get(), 1, length, input_file) != length)
       continue;
     if (frame_cnt >= start_frame) {
       webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
-                            height, 0, webrtc::kVideoRotation_0, &i420_buffer);
-      webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
-                                     webrtc::kVideoRotation_0);
+                            height, 0, webrtc::kVideoRotation_0, &input_frame);
       encoder->Encode(input_frame, NULL, NULL);
       decoder->Decode(encoder_callback.encoded_image(), false, NULL);
       ++frames_processed;
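
In the sequence coder the frame allocation is hoisted out of the read loop, and ConvertToI420 now takes the VideoFrame itself as destination. A trimmed sketch of the loop, assuming input_file, frame_buffer, length, width, height, and half_width are set up as above:

  webrtc::VideoFrame input_frame;
  input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
  while (fread(frame_buffer.get(), 1, length, input_file) == length) {
    // Reuses input_frame's allocation on every iteration.
    webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0,
                          width, height, 0, webrtc::kVideoRotation_0,
                          &input_frame);
    encoder->Encode(input_frame, NULL, NULL);
  }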
diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc
index a968859..7507e92 100644
--- a/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -141,10 +141,8 @@
   while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
          frame_length_) {
     // Using ConvertToI420 to add stride to the image.
-    rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
-        I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
     EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
-                               0, kVideoRotation_0, input_buffer.get()));
+                               0, kVideoRotation_0, &video_frame_));
 
     rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
     rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@@ -159,9 +157,11 @@
       p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
       p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
     }
-    denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c,
+    denoiser_c.DenoiseFrame(video_frame_.video_frame_buffer(),
+                            p_denoised_c, p_denoised_prev_c,
                             false);
-    denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon,
+    denoiser_sse_neon.DenoiseFrame(video_frame_.video_frame_buffer(),
+                                   p_denoised_sse_neon,
                                    p_denoised_prev_sse_neon, false);
     // Invert the flag.
     denoised_frame_toggle ^= 1;
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 8c1154a..9e61b51 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -15,7 +15,6 @@
 #include <memory>
 #include <string>
 
-#include "webrtc/base/keep_ref_until_done.h"
 #include "webrtc/base/timeutils.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/test/testsupport/fileutils.h"
@@ -34,25 +33,24 @@
                                      int target_height,
                                      VideoProcessing* vpm,
                                      const VideoFrame* out_frame);
-rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
-    const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
-    int source_width,
-    int source_height,
-    int offset_x,
-    int offset_y,
-    int cropped_width,
-    int cropped_height);
+static void CropFrame(const uint8_t* source_data,
+                      int source_width,
+                      int source_height,
+                      int offset_x,
+                      int offset_y,
+                      int cropped_width,
+                      int cropped_height,
+                      VideoFrame* cropped_frame);
 // The |source_data| is cropped and scaled to |target_width| x |target_height|,
 // and then scaled back to the expected cropped size. |expected_psnr| is used to
 // verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
 // verified under the same conditions.
-static void TestSize(
-    const VideoFrame& source_frame,
-    const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
-    int target_width,
-    int target_height,
-    double expected_psnr,
-    VideoProcessing* vpm);
+static void TestSize(const VideoFrame& source_frame,
+                     const VideoFrame& cropped_source_frame,
+                     int target_width,
+                     int target_height,
+                     double expected_psnr,
+                     VideoProcessing* vpm);
 static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
                                                    const VideoFrame& processed);
 
@@ -70,6 +68,15 @@
   vp_ = VideoProcessing::Create();
   ASSERT_TRUE(vp_ != NULL);
 
+  video_frame_.CreateEmptyFrame(width_, height_, width_,
+                                half_width_, half_width_);
+  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
+  memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
+         video_frame_.allocated_size(kYPlane));
+  memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
+         video_frame_.allocated_size(kUPlane));
+  memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
+         video_frame_.allocated_size(kVPlane));
   const std::string video_file =
       webrtc::test::ResourcePath("foreman_cif", "yuv");
   source_file_ = fopen(video_file.c_str(), "rb");
@@ -102,18 +109,11 @@
   VideoFrame* out_frame = NULL;
   // Set rescaling => output frame != NULL.
   vp_->SetInputFrameResampleMode(kFastRescaling);
-
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
-
-  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
-  buffer->InitializeData();
-  VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
-
-  PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
+  PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
+                           out_frame);
   // No rescaling=> output frame = NULL.
   vp_->SetInputFrameResampleMode(kNoRescaling);
-  EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr);
+  EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
 }
 
 #if defined(WEBRTC_IOS)
@@ -133,15 +133,15 @@
   vp_->EnableTemporalDecimation(false);
 
   // Reading test frame
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
-      I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
-
-  ASSERT_EQ(static_cast<size_t>(size_y_),
-            fread(buffer->MutableDataY(), 1, size_y_, source_file_));
-  ASSERT_EQ(static_cast<size_t>(size_uv_),
-            fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
-  ASSERT_EQ(static_cast<size_t>(size_uv_),
-            fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
+  std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
+  ASSERT_EQ(frame_length_,
+            fread(video_buffer.get(), 1, frame_length_, source_file_));
+  // Using ConvertToI420 to add stride to the image.
+  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
+                             0, kVideoRotation_0, &video_frame_));
+  // Cropped source frame that will contain the expected visible region.
+  VideoFrame cropped_source_frame;
+  cropped_source_frame.CopyFrame(video_frame_);
 
   for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
     // Initiate test timer.
@@ -149,37 +149,48 @@
 
     // Init the sourceFrame with a timestamp.
     int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
-    VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
-                           webrtc::kVideoRotation_0);
+    video_frame_.set_render_time_ms(time_start_ms);
+    video_frame_.set_timestamp(time_start_ms * 90);
 
     // Test scaling to different sizes: source is of |width|/|height| = 352/288.
     // Pure scaling:
-    TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
-    TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
+    TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
+    TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
     // No resampling:
-    TestSize(video_frame, buffer, width_, height_, -1, vp_);
-    TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
+    TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
+    TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
 
     // Scaling and cropping. The cropped source frame is the largest center
     // aligned region that can be used from the source while preserving aspect
     // ratio.
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
-             100, 50, 24.0, vp_);
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
-             400, 256, 31.3, vp_);
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
-             480, 640, 32.15, vp_);
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
-             960, 720, 32.2, vp_);
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
-             1280, 720, 32.15, vp_);
+    CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);
+
+    CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);
+
+    CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);
+
+    CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);
+
+    CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
 
     // Upsampling to odd size.
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
-             501, 333, 32.05, vp_);
+    CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
     // Downsample to odd size.
-    TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
-             281, 175, 29.3, vp_);
+    CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
+              &cropped_source_frame);
+    TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
 
     // Stop timer.
     const int64_t runtime =
@@ -218,32 +229,24 @@
   EXPECT_EQ(target_height, (out_frame)->height());
 }
 
-rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
-    const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
-    int source_width,
-    int source_height,
-    int offset_x,
-    int offset_y,
-    int cropped_width,
-    int cropped_height) {
-  // Force even.
-  offset_x &= 1;
-  offset_y &= 1;
-
-  size_t y_start = offset_x + offset_y * source_buffer->StrideY();
-  size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
-  size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
-
-  return rtc::scoped_refptr<VideoFrameBuffer>(
-      new rtc::RefCountedObject<WrappedI420Buffer>(
-          cropped_width, cropped_height, source_buffer->DataY() + y_start,
-          source_buffer->StrideY(), source_buffer->DataU() + u_start,
-          source_buffer->StrideU(), source_buffer->DataV() + v_start,
-          source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
+void CropFrame(const uint8_t* source_data,
+               int source_width,
+               int source_height,
+               int offset_x,
+               int offset_y,
+               int cropped_width,
+               int cropped_height,
+               VideoFrame* cropped_frame) {
+  cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
+                                  (cropped_width + 1) / 2,
+                                  (cropped_width + 1) / 2);
+  EXPECT_EQ(0,
+            ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
+                          source_height, 0, kVideoRotation_0, cropped_frame));
 }
 
 void TestSize(const VideoFrame& source_frame,
-              const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
+              const VideoFrame& cropped_source_frame,
               int target_width,
               int target_height,
               double expected_psnr,
@@ -260,14 +263,12 @@
   // Scale |resampled_source_frame| back to the source scale.
   VideoFrame resampled_source_frame;
   resampled_source_frame.CopyFrame(*out_frame);
-  PreprocessFrameAndVerify(resampled_source_frame,
-                           cropped_source_buffer->width(),
-                           cropped_source_buffer->height(), vpm, out_frame);
+  PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
+                           cropped_source_frame.height(), vpm, out_frame);
   WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
 
   // Compute PSNR against the cropped source frame and check expectation.
-  double psnr =
-      I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
+  double psnr = I420PSNR(&cropped_source_frame, out_frame);
   EXPECT_GT(psnr, expected_psnr);
   printf(
       "PSNR: %f. PSNR is between source of size %d %d, and a modified "
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.h b/webrtc/modules/video_processing/test/video_processing_unittest.h
index 6edd72e..3433c6c 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.h
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.h
@@ -33,6 +33,7 @@
   static void TearDownTestCase() { Trace::ReturnTrace(); }
   VideoProcessing* vp_;
   FILE* source_file_;
+  VideoFrame video_frame_;
   const int width_;
   const int half_width_;
   const int height_;
diff --git a/webrtc/video/video_encoder_unittest.cc b/webrtc/video/video_encoder_unittest.cc
index eb2b450..84ac4fd 100644
--- a/webrtc/video/video_encoder_unittest.cc
+++ b/webrtc/video/video_encoder_unittest.cc
@@ -116,18 +116,22 @@
   CountingFakeEncoder fake_encoder_;
   VideoEncoderSoftwareFallbackWrapper fallback_wrapper_;
   VideoCodec codec_ = {};
-  std::unique_ptr<VideoFrame> frame_;
+  VideoFrame frame_;
 };
 
 void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
-  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
-      kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2);
-  buffer->SetToBlack();
-  std::vector<FrameType> types(1, kVideoFrameKey);
+  frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, (kWidth + 1) / 2,
+                          (kWidth + 1) / 2);
+  memset(frame_.video_frame_buffer()->MutableDataY(), 16,
+         frame_.allocated_size(webrtc::kYPlane));
+  memset(frame_.video_frame_buffer()->MutableDataU(), 128,
+         frame_.allocated_size(webrtc::kUPlane));
+  memset(frame_.video_frame_buffer()->MutableDataV(), 128,
+         frame_.allocated_size(webrtc::kVPlane));
 
-  frame_.reset(new VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
+  std::vector<FrameType> types(1, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            fallback_wrapper_.Encode(*frame_, nullptr, &types));
+            fallback_wrapper_.Encode(frame_, nullptr, &types));
 }
 
 void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() {
@@ -221,9 +225,9 @@
 
   // Encoding a frame using the fallback should arrive at the new callback.
   std::vector<FrameType> types(1, kVideoFrameKey);
-  frame_->set_timestamp(frame_->timestamp() + 1000);
+  frame_.set_timestamp(frame_.timestamp() + 1000);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            fallback_wrapper_.Encode(*frame_, nullptr, &types));
+            fallback_wrapper_.Encode(frame_, nullptr, &types));
 
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
 }
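
A final note on the video_encoder_unittest.cc hunk: the memset triple replaces I420Buffer::SetToBlack() with an explicit limited-range black fill, where 16 is the nominal black level for luma and 128 the neutral chroma value in 8-bit limited-range YUV. As a sketch, the same fill factored into a hypothetical helper (FillBlack is not part of the patch):

  #include <string.h>

  // Fill an already-allocated frame with limited-range black (Y=16, U=V=128).
  void FillBlack(webrtc::VideoFrame* frame) {
    memset(frame->video_frame_buffer()->MutableDataY(), 16,
           frame->allocated_size(webrtc::kYPlane));
    memset(frame->video_frame_buffer()->MutableDataU(), 128,
           frame->allocated_size(webrtc::kUPlane));
    memset(frame->video_frame_buffer()->MutableDataV(), 128,
           frame->allocated_size(webrtc::kVPlane));
  }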