Revert of Cleanup of webrtc::VideoFrame. (patchset #6 id:100001 of https://codereview.webrtc.org/1679323002/ )

Reason for revert:
Breaks downstream compilation. For the reland, please make non-breaking API changes, or coordinate with the sheriff to fix the downstream code quickly.

Original issue's description:
> Cleanup of webrtc::VideoFrame.
>
> Delete EqualsFrame method, used only by tests. Delete one of the
> CreateFrame methods. Drop return value for CreateEmptyFrame, CreateFrame
> and CopyFrame.
>
> BUG=webrtc:5426
>
> Committed: https://crrev.com/208019637bfed975f8f13b16d40b90e200763cd6
> Cr-Commit-Position: refs/heads/master@{#11783}

TBR=pbos@webrtc.org,perkj@webrtc.org,pthatcher@webrtc.org,mflodman@webrtc.org,marpan@webrtc.org,nisse@webrtc.org
# Skipping CQ checks because the original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5426

Review URL: https://codereview.webrtc.org/1743613002

Cr-Commit-Position: refs/heads/master@{#11789}
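
For context, this revert restores the error-code flavor of the VideoFrame API that the original CL above removed: CreateEmptyFrame, CreateFrame and CopyFrame return int again, and the test-only EqualsFrame helper comes back. The following is a minimal caller-side sketch of that restored contract, not part of the patch; it assumes a WebRTC checkout at this revision to compile and link against.

  #include "webrtc/video_frame.h"

  // Sketch only: exercises the int-returning API restored by this revert.
  int main() {
    webrtc::VideoFrame frame;
    // CreateEmptyFrame returns 0 on success, -1 on error after this revert;
    // the reverted CL had changed it (and CreateFrame/CopyFrame) to void.
    if (frame.CreateEmptyFrame(640, 480, 640, 320, 320) != 0)
      return 1;

    webrtc::VideoFrame copy;
    if (copy.CopyFrame(frame) != 0)
      return 1;

    // EqualsFrame, deleted by the reverted CL and restored here, compares
    // dimensions, strides, timestamps and pixel data of the two frames.
    return copy.EqualsFrame(frame) ? 0 : 1;
  }

Downstream code written against this contract (checking the return values or calling EqualsFrame) is presumably what failed to compile after the original CL, hence the revert; the diff below puts the old signatures back.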
diff --git a/webrtc/common_video/common_video_unittests.gyp b/webrtc/common_video/common_video_unittests.gyp
index 545bfa4..20203f1 100644
--- a/webrtc/common_video/common_video_unittests.gyp
+++ b/webrtc/common_video/common_video_unittests.gyp
@@ -17,7 +17,7 @@
          '<(DEPTH)/testing/gtest.gyp:gtest',
          '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
          '<(webrtc_root)/test/test.gyp:test_support_main',
-         '<(webrtc_root)/test/test.gyp:video_test_common',
+         '<(webrtc_root)/test/test.gyp:fake_video_frames',
       ],
       'sources': [
         'i420_buffer_pool_unittest.cc',
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index bc58e5b..1ec451c 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -15,11 +15,15 @@
 #include "webrtc/base/bind.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/test/fake_texture_frame.h"
-#include "webrtc/test/frame_utils.h"
 #include "webrtc/video_frame.h"
 
 namespace webrtc {
 
+bool EqualPlane(const uint8_t* data1,
+                const uint8_t* data2,
+                int stride,
+                int width,
+                int height);
 int ExpectedSize(int plane_stride, int image_height, PlaneType type);
 
 TEST(TestVideoFrame, InitialValues) {
@@ -37,7 +41,7 @@
 TEST(TestVideoFrame, WidthHeightValues) {
   VideoFrame frame;
   const int valid_value = 10;
-  frame.CreateEmptyFrame(10, 10, 10, 14, 90);
+  EXPECT_EQ(0, frame.CreateEmptyFrame(10, 10, 10, 14, 90));
   EXPECT_EQ(valid_value, frame.width());
   EXPECT_EQ(valid_value, frame.height());
   frame.set_timestamp(123u);
@@ -50,7 +54,7 @@
 
 TEST(TestVideoFrame, SizeAllocation) {
   VideoFrame frame;
-  frame. CreateEmptyFrame(10, 10, 12, 14, 220);
+  EXPECT_EQ(0, frame. CreateEmptyFrame(10, 10, 12, 14, 220));
   int height = frame.height();
   int stride_y = frame.stride(kYPlane);
   int stride_u = frame.stride(kUPlane);
@@ -75,8 +79,8 @@
   int height = 15;
   // Copy frame.
   VideoFrame small_frame;
-  small_frame.CreateEmptyFrame(width, height,
-                               stride_y, stride_u, stride_v);
+  EXPECT_EQ(0, small_frame.CreateEmptyFrame(width, height,
+                                            stride_y, stride_u, stride_v));
   small_frame.set_timestamp(timestamp);
   small_frame.set_ntp_time_ms(ntp_time_ms);
   small_frame.set_render_time_ms(render_time_ms);
@@ -91,22 +95,23 @@
   memset(buffer_u, 8, kSizeU);
   memset(buffer_v, 4, kSizeV);
   VideoFrame big_frame;
-  big_frame.CreateFrame(buffer_y, buffer_u, buffer_v,
-                        width + 5, height + 5, stride_y + 5,
-                        stride_u, stride_v, kRotation);
+  EXPECT_EQ(0,
+            big_frame.CreateFrame(buffer_y, buffer_u, buffer_v,
+                                  width + 5, height + 5, stride_y + 5,
+                                  stride_u, stride_v, kRotation));
   // Frame of smaller dimensions.
-  small_frame.CopyFrame(big_frame);
-  EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
+  EXPECT_EQ(0, small_frame.CopyFrame(big_frame));
+  EXPECT_TRUE(small_frame.EqualsFrame(big_frame));
   EXPECT_EQ(kRotation, small_frame.rotation());
 
   // Frame of larger dimensions.
-  small_frame.CreateEmptyFrame(width, height,
-                               stride_y, stride_u, stride_v);
+  EXPECT_EQ(0, small_frame.CreateEmptyFrame(width, height,
+                                            stride_y, stride_u, stride_v));
   memset(small_frame.buffer(kYPlane), 1, small_frame.allocated_size(kYPlane));
   memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane));
   memset(small_frame.buffer(kVPlane), 3, small_frame.allocated_size(kVPlane));
-  big_frame.CopyFrame(small_frame);
-  EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
+  EXPECT_EQ(0, big_frame.CopyFrame(small_frame));
+  EXPECT_TRUE(small_frame.EqualsFrame(big_frame));
 }
 
 TEST(TestVideoFrame, ShallowCopy) {
@@ -130,8 +135,8 @@
   memset(buffer_u, 8, kSizeU);
   memset(buffer_v, 4, kSizeV);
   VideoFrame frame1;
-  frame1.CreateFrame(buffer_y, buffer_u, buffer_v, width, height,
-                     stride_y, stride_u, stride_v, kRotation);
+  EXPECT_EQ(0, frame1.CreateFrame(buffer_y, buffer_u, buffer_v, width, height,
+                                  stride_y, stride_u, stride_v, kRotation));
   frame1.set_timestamp(timestamp);
   frame1.set_ntp_time_ms(ntp_time_ms);
   frame1.set_render_time_ms(render_time_ms);
@@ -167,7 +172,7 @@
 
 TEST(TestVideoFrame, Reset) {
   VideoFrame frame;
-  frame.CreateEmptyFrame(5, 5, 5, 5, 5);
+  ASSERT_EQ(frame.CreateEmptyFrame(5, 5, 5, 5, 5), 0);
   frame.set_ntp_time_ms(1);
   frame.set_timestamp(2);
   frame.set_render_time_ms(3);
@@ -188,8 +193,8 @@
   int stride_uv = 10;
   const int kSizeY = 225;
   const int kSizeUv = 80;
-  frame2.CreateEmptyFrame(width, height,
-                          stride_y, stride_uv, stride_uv);
+  EXPECT_EQ(0, frame2.CreateEmptyFrame(width, height,
+                                       stride_y, stride_uv, stride_uv));
   uint8_t buffer_y[kSizeY];
   uint8_t buffer_u[kSizeUv];
   uint8_t buffer_v[kSizeUv];
@@ -197,15 +202,11 @@
   memset(buffer_u, 8, kSizeUv);
   memset(buffer_v, 4, kSizeUv);
   frame2.CreateFrame(buffer_y, buffer_u, buffer_v,
-                     width, height, stride_y, stride_uv, stride_uv,
-                     kVideoRotation_0);
+                     width, height, stride_y, stride_uv, stride_uv);
   // Expect exactly the same pixel data.
-  EXPECT_TRUE(
-      test::EqualPlane(buffer_y, frame2.buffer(kYPlane), stride_y, 15, 15));
-  EXPECT_TRUE(
-      test::EqualPlane(buffer_u, frame2.buffer(kUPlane), stride_uv, 8, 8));
-  EXPECT_TRUE(
-      test::EqualPlane(buffer_v, frame2.buffer(kVPlane), stride_uv, 8, 8));
+  EXPECT_TRUE(EqualPlane(buffer_y, frame2.buffer(kYPlane), stride_y, 15, 15));
+  EXPECT_TRUE(EqualPlane(buffer_u, frame2.buffer(kUPlane), stride_uv, 8, 8));
+  EXPECT_TRUE(EqualPlane(buffer_v, frame2.buffer(kVPlane), stride_uv, 8, 8));
 
   // Compare size.
   EXPECT_LE(kSizeY, frame2.allocated_size(kYPlane));
diff --git a/webrtc/common_video/include/incoming_video_stream.h b/webrtc/common_video/include/incoming_video_stream.h
index 1aa42e1..ecc4d5e 100644
--- a/webrtc/common_video/include/incoming_video_stream.h
+++ b/webrtc/common_video/include/incoming_video_stream.h
@@ -56,10 +56,10 @@
   uint32_t StreamId() const;
   uint32_t IncomingRate() const;
 
-  void SetStartImage(const VideoFrame& video_frame);
+  int32_t SetStartImage(const VideoFrame& video_frame);
 
-  void SetTimeoutImage(const VideoFrame& video_frame,
-                       const uint32_t timeout);
+  int32_t SetTimeoutImage(const VideoFrame& video_frame,
+                          const uint32_t timeout);
 
   int32_t SetExpectedRenderDelay(int32_t delay_ms);
 
diff --git a/webrtc/common_video/incoming_video_stream.cc b/webrtc/common_video/incoming_video_stream.cc
index b0f105c..5082eb7 100644
--- a/webrtc/common_video/incoming_video_stream.cc
+++ b/webrtc/common_video/incoming_video_stream.cc
@@ -90,16 +90,16 @@
   return 0;
 }
 
-void IncomingVideoStream::SetStartImage(const VideoFrame& video_frame) {
+int32_t IncomingVideoStream::SetStartImage(const VideoFrame& video_frame) {
   rtc::CritScope csS(&thread_critsect_);
-  start_image_.CopyFrame(video_frame);
+  return start_image_.CopyFrame(video_frame);
 }
 
-void IncomingVideoStream::SetTimeoutImage(const VideoFrame& video_frame,
-                                          const uint32_t timeout) {
+int32_t IncomingVideoStream::SetTimeoutImage(const VideoFrame& video_frame,
+                                             const uint32_t timeout) {
   rtc::CritScope csS(&thread_critsect_);
   timeout_time_ = timeout;
-  timeout_image_.CopyFrame(video_frame);
+  return timeout_image_.CopyFrame(video_frame);
 }
 
 void IncomingVideoStream::SetRenderCallback(
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index 9fad44f..826fc0e 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -111,14 +111,13 @@
 
   EXPECT_EQ(frame_length_,
             fread(orig_buffer_.get(), 1, frame_length_, source_file_));
-  orig_frame_.CreateFrame(orig_buffer_.get(),
-                          orig_buffer_.get() + size_y_,
-                          orig_buffer_.get() +
-                          size_y_ + size_uv_,
-                          width_, height_,
-                          width_, (width_ + 1) / 2,
-                          (width_ + 1) / 2,
-                          kVideoRotation_0);
+  EXPECT_EQ(0, orig_frame_.CreateFrame(orig_buffer_.get(),
+                                       orig_buffer_.get() + size_y_,
+                                       orig_buffer_.get() +
+                                       size_y_ + size_uv_,
+                                       width_, height_,
+                                       width_, (width_ + 1) / 2,
+                                       (width_ + 1) / 2));
 }
 
 void TestLibYuv::TearDown() {
@@ -143,9 +142,9 @@
   double psnr = 0.0;
 
   VideoFrame res_i420_frame;
-  res_i420_frame.CreateEmptyFrame(width_, height_, width_,
+  EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_, width_,
                                                (width_ + 1) / 2,
-                                               (width_ + 1) / 2);
+                                               (width_ + 1) / 2));
   printf("\nConvert #%d I420 <-> I420 \n", j);
   rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
@@ -203,8 +202,7 @@
                          outYV120Buffer.get() + size_y_,
                          outYV120Buffer.get() + size_y_ + size_uv_,
                          width_, height_,
-                         width_, (width_ + 1) / 2, (width_ + 1) / 2,
-                         kVideoRotation_0);
+                         width_, (width_ + 1) / 2, (width_ + 1) / 2);
   EXPECT_EQ(0, ConvertFromYV12(yv12_frame, kI420, 0, res_i420_buffer.get()));
   if (fwrite(res_i420_buffer.get(), 1, frame_length_, output_file) !=
       frame_length_) {
@@ -283,8 +281,8 @@
   int stride_y = 0;
   int stride_uv = 0;
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
-  res_i420_frame.CreateEmptyFrame(width_, height_,
-                                  stride_y, stride_uv, stride_uv);
+  EXPECT_EQ(0, res_i420_frame.CreateEmptyFrame(width_, height_,
+                                               stride_y, stride_uv, stride_uv));
   rtc::scoped_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
   EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
                                out_i420_buffer.get()));
@@ -308,18 +306,18 @@
   int stride_y;
   int stride_uv;
   Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
-  rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
-                                          rotated_height,
-                                          stride_y,
-                                          stride_uv,
-                                          stride_uv);
+  EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
+                                                       rotated_height,
+                                                       stride_y,
+                                                       stride_uv,
+                                                       stride_uv));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
                              0, kVideoRotation_90, &rotated_res_i420_frame));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
                              0, kVideoRotation_270, &rotated_res_i420_frame));
-  rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
-                                          width_, (width_ + 1) / 2,
-                                          (width_ + 1) / 2);
+  EXPECT_EQ(0, rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
+                                                       width_, (width_ + 1) / 2,
+                                                       (width_ + 1) / 2));
   EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
                              0, kVideoRotation_180, &rotated_res_i420_frame));
 }
diff --git a/webrtc/common_video/libyuv/scaler_unittest.cc b/webrtc/common_video/libyuv/scaler_unittest.cc
index d70cadf..6d02638 100644
--- a/webrtc/common_video/libyuv/scaler_unittest.cc
+++ b/webrtc/common_video/libyuv/scaler_unittest.cc
@@ -104,8 +104,7 @@
                           orig_buffer.get() + size_y_,
                           orig_buffer.get() + size_y_ + size_uv_,
                           width_, height_,
-                          width_, half_width_, half_width_,
-                          kVideoRotation_0);
+                          width_, half_width_, half_width_);
   EXPECT_EQ(0, test_scaler_.Scale(test_frame_, &test_frame2));
   EXPECT_GT(width_ * height_, test_frame2.allocated_size(kYPlane));
   EXPECT_GT(size_uv_, test_frame2.allocated_size(kUPlane));
@@ -373,8 +372,7 @@
                             frame_buffer.get() + size_y + size_uv,
                             src_width, src_height,
                             src_width, (src_width + 1) / 2,
-                            (src_width + 1) / 2,
-                            kVideoRotation_0);
+                            (src_width + 1) / 2);
 
     start_clock = TickTime::MillisecondTimestamp();
     EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame));
diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc
index a30f658..86de823 100644
--- a/webrtc/common_video/video_frame.cc
+++ b/webrtc/common_video/video_frame.cc
@@ -23,6 +23,20 @@
 // to optimized bitstream readers. See avcodec_decode_video2.
 const size_t EncodedImage::kBufferPaddingBytesH264 = 8;
 
+bool EqualPlane(const uint8_t* data1,
+                const uint8_t* data2,
+                int stride,
+                int width,
+                int height) {
+  for (int y = 0; y < height; ++y) {
+    if (memcmp(data1, data2, width) != 0)
+      return false;
+    data1 += stride;
+    data2 += stride;
+  }
+  return true;
+}
+
 int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
   if (type == kYPlane)
     return plane_stride * image_height;
@@ -46,11 +60,11 @@
       rotation_(rotation) {
 }
 
-void VideoFrame::CreateEmptyFrame(int width,
-                                  int height,
-                                  int stride_y,
-                                  int stride_u,
-                                  int stride_v) {
+int VideoFrame::CreateEmptyFrame(int width,
+                                 int height,
+                                 int stride_y,
+                                 int stride_u,
+                                 int stride_v) {
   const int half_width = (width + 1) / 2;
   RTC_DCHECK_GT(width, 0);
   RTC_DCHECK_GT(height, 0);
@@ -70,23 +84,36 @@
       width == video_frame_buffer_->width() &&
       height == video_frame_buffer_->height() && stride_y == stride(kYPlane) &&
       stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) {
-    return;
+    return 0;
   }
 
   // Need to allocate new buffer.
   video_frame_buffer_ = new rtc::RefCountedObject<I420Buffer>(
       width, height, stride_y, stride_u, stride_v);
+  return 0;
 }
 
-void VideoFrame::CreateFrame(const uint8_t* buffer_y,
-                             const uint8_t* buffer_u,
-                             const uint8_t* buffer_v,
-                             int width,
-                             int height,
-                             int stride_y,
-                             int stride_u,
-                             int stride_v,
-                             VideoRotation rotation) {
+int VideoFrame::CreateFrame(const uint8_t* buffer_y,
+                            const uint8_t* buffer_u,
+                            const uint8_t* buffer_v,
+                            int width,
+                            int height,
+                            int stride_y,
+                            int stride_u,
+                            int stride_v) {
+  return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y,
+                     stride_u, stride_v, kVideoRotation_0);
+}
+
+int VideoFrame::CreateFrame(const uint8_t* buffer_y,
+                            const uint8_t* buffer_u,
+                            const uint8_t* buffer_v,
+                            int width,
+                            int height,
+                            int stride_y,
+                            int stride_u,
+                            int stride_v,
+                            VideoRotation rotation) {
   const int half_height = (height + 1) / 2;
   const int expected_size_y = height * stride_y;
   const int expected_size_u = half_height * stride_u;
@@ -96,23 +123,24 @@
   memcpy(buffer(kUPlane), buffer_u, expected_size_u);
   memcpy(buffer(kVPlane), buffer_v, expected_size_v);
   rotation_ = rotation;
+  return 0;
 }
 
-void VideoFrame::CreateFrame(const uint8_t* buffer,
-                             int width,
-                             int height,
-                             VideoRotation rotation) {
+int VideoFrame::CreateFrame(const uint8_t* buffer,
+                            int width,
+                            int height,
+                            VideoRotation rotation) {
   const int stride_y = width;
   const int stride_uv = (width + 1) / 2;
 
   const uint8_t* buffer_y = buffer;
   const uint8_t* buffer_u = buffer_y + stride_y * height;
   const uint8_t* buffer_v = buffer_u + stride_uv * ((height + 1) / 2);
-  CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y,
-              stride_uv, stride_uv, rotation);
+  return CreateFrame(buffer_y, buffer_u, buffer_v, width, height, stride_y,
+                     stride_uv, stride_uv, rotation);
 }
 
-void VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
+int VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
   if (videoFrame.IsZeroSize()) {
     video_frame_buffer_ = nullptr;
   } else if (videoFrame.native_handle()) {
@@ -121,14 +149,14 @@
     CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane),
                 videoFrame.buffer(kVPlane), videoFrame.width(),
                 videoFrame.height(), videoFrame.stride(kYPlane),
-                videoFrame.stride(kUPlane), videoFrame.stride(kVPlane),
-                kVideoRotation_0);
+                videoFrame.stride(kUPlane), videoFrame.stride(kVPlane));
   }
 
   timestamp_ = videoFrame.timestamp_;
   ntp_time_ms_ = videoFrame.ntp_time_ms_;
   render_time_ms_ = videoFrame.render_time_ms_;
   rotation_ = videoFrame.rotation_;
+  return 0;
 }
 
 void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) {
@@ -198,6 +226,26 @@
   return frame;
 }
 
+bool VideoFrame::EqualsFrame(const VideoFrame& frame) const {
+  if (width() != frame.width() || height() != frame.height() ||
+      stride(kYPlane) != frame.stride(kYPlane) ||
+      stride(kUPlane) != frame.stride(kUPlane) ||
+      stride(kVPlane) != frame.stride(kVPlane) ||
+      timestamp() != frame.timestamp() ||
+      ntp_time_ms() != frame.ntp_time_ms() ||
+      render_time_ms() != frame.render_time_ms()) {
+    return false;
+  }
+  const int half_width = (width() + 1) / 2;
+  const int half_height = (height() + 1) / 2;
+  return EqualPlane(buffer(kYPlane), frame.buffer(kYPlane),
+                    stride(kYPlane), width(), height()) &&
+         EqualPlane(buffer(kUPlane), frame.buffer(kUPlane),
+                    stride(kUPlane), half_width, half_height) &&
+         EqualPlane(buffer(kVPlane), frame.buffer(kVPlane),
+                    stride(kVPlane), half_width, half_height);
+}
+
 size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
   switch (codec_type) {
     case kVideoCodecVP8:
diff --git a/webrtc/media/engine/fakewebrtcvideocapturemodule.h b/webrtc/media/engine/fakewebrtcvideocapturemodule.h
index 397ce9c..43f72b9 100644
--- a/webrtc/media/engine/fakewebrtcvideocapturemodule.h
+++ b/webrtc/media/engine/fakewebrtcvideocapturemodule.h
@@ -90,14 +90,17 @@
     return 0;
   }
 
-  void SendFrame(int w, int h) {
-    if (!running_) return;
+  bool SendFrame(int w, int h) {
+    if (!running_) return false;
     webrtc::VideoFrame sample;
     // Setting stride based on width.
-    sample.CreateEmptyFrame(w, h, w, (w + 1) / 2, (w + 1) / 2);
+    if (sample.CreateEmptyFrame(w, h, w, (w + 1) / 2, (w + 1) / 2) < 0) {
+      return false;
+    }
     if (callback_) {
       callback_->OnIncomingCapturedFrame(id_, sample);
     }
+    return true;
   }
 
   const webrtc::VideoCaptureCapability& cap() const {
diff --git a/webrtc/media/engine/webrtcvideocapturer_unittest.cc b/webrtc/media/engine/webrtcvideocapturer_unittest.cc
index 89698cf..23d7306 100644
--- a/webrtc/media/engine/webrtcvideocapturer_unittest.cc
+++ b/webrtc/media/engine/webrtcvideocapturer_unittest.cc
@@ -90,7 +90,7 @@
   ASSERT_TRUE(capturer_->GetCaptureFormat() != NULL);
   EXPECT_EQ(format, *capturer_->GetCaptureFormat());
   EXPECT_EQ_WAIT(cricket::CS_RUNNING, listener_.last_capture_state(), 1000);
-  factory_->modules[0]->SendFrame(640, 480);
+  EXPECT_TRUE(factory_->modules[0]->SendFrame(640, 480));
   EXPECT_TRUE_WAIT(listener_.frame_count() > 0, 5000);
   EXPECT_EQ(capturer_->GetCaptureFormat()->fourcc, listener_.frame_fourcc());
   EXPECT_EQ(640, listener_.frame_width());
@@ -117,7 +117,7 @@
   ASSERT_TRUE(capturer_->GetCaptureFormat() != NULL);
   EXPECT_EQ(format, *capturer_->GetCaptureFormat());
   EXPECT_EQ_WAIT(cricket::CS_RUNNING, listener_.last_capture_state(), 1000);
-  factory_->modules[0]->SendFrame(640, 480);
+  EXPECT_TRUE(factory_->modules[0]->SendFrame(640, 480));
   EXPECT_TRUE_WAIT(listener_.frame_count() > 0, 5000);
   EXPECT_EQ(capturer_->GetCaptureFormat()->fourcc, listener_.frame_fourcc());
   EXPECT_EQ(640, listener_.frame_width());
diff --git a/webrtc/modules/modules.gyp b/webrtc/modules/modules.gyp
index 465c5ba..68e4d11 100644
--- a/webrtc/modules/modules.gyp
+++ b/webrtc/modules/modules.gyp
@@ -160,7 +160,7 @@
             '<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
             '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
             '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
-            '<(webrtc_root)/test/test.gyp:video_test_common',
+            '<(webrtc_root)/test/test.gyp:fake_video_frames',
             '<(webrtc_root)/test/test.gyp:rtp_test_utils',
             '<(webrtc_root)/test/test.gyp:test_support_main',
             '<(webrtc_root)/test/webrtc_test_common.gyp:webrtc_test_common',
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 81f6311..3013a7d 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -278,10 +278,16 @@
         // Setting absolute height (in case it was negative).
         // In Windows, the image starts bottom left, instead of top left.
         // Setting a negative source height, inverts the image (within LibYuv).
-        _captureFrame.CreateEmptyFrame(target_width,
-                                       abs(target_height),
-                                       stride_y,
-                                       stride_uv, stride_uv);
+        int ret = _captureFrame.CreateEmptyFrame(target_width,
+                                                 abs(target_height),
+                                                 stride_y,
+                                                 stride_uv, stride_uv);
+        if (ret < 0)
+        {
+            LOG(LS_ERROR) << "Failed to create empty frame, this should only "
+                             "happen due to bad parameters.";
+            return -1;
+        }
         const int conversionResult = ConvertToI420(
             commonVideoType, videoFrame, 0, 0,  // No cropping
             width, height, videoFrameLength,
diff --git a/webrtc/modules/video_processing/test/denoiser_test.cc b/webrtc/modules/video_processing/test/denoiser_test.cc
index 3d24119..551a776 100644
--- a/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -14,7 +14,6 @@
 #include "webrtc/modules/video_processing/include/video_processing.h"
 #include "webrtc/modules/video_processing/test/video_processing_unittest.h"
 #include "webrtc/modules/video_processing/video_denoiser.h"
-#include "webrtc/test/frame_utils.h"
 
 namespace webrtc {
 
@@ -149,7 +148,7 @@
     denoiser_sse_neon.DenoiseFrame(video_frame_, &denoised_frame_sse_neon);
 
     // Denoising results should be the same for C and SSE/NEON denoiser.
-    ASSERT_TRUE(test::FramesEqual(denoised_frame_c, denoised_frame_sse_neon));
+    ASSERT_EQ(true, denoised_frame_c.EqualsFrame(denoised_frame_sse_neon));
   }
   ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
 }
diff --git a/webrtc/modules/video_processing/test/video_processing_unittest.cc b/webrtc/modules/video_processing/test/video_processing_unittest.cc
index b9a22d8..2fd8fb6 100644
--- a/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -69,8 +69,8 @@
   vp_ = VideoProcessing::Create();
   ASSERT_TRUE(vp_ != NULL);
 
-  video_frame_.CreateEmptyFrame(width_, height_, width_,
-                                half_width_, half_width_);
+  ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
+                                             half_width_, half_width_));
   // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
   memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
   memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
@@ -142,7 +142,7 @@
                              0, kVideoRotation_0, &video_frame_));
   vp_->GetFrameStats(video_frame_, &stats);
   EXPECT_GT(stats.num_pixels, 0u);
-  video_frame2.CopyFrame(video_frame_);
+  ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
   ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
 
   // Retrieve frame stats again in case Deflickering() has zeroed them.
diff --git a/webrtc/modules/video_processing/video_denoiser.cc b/webrtc/modules/video_processing/video_denoiser.cc
index 3951381..4902a89 100644
--- a/webrtc/modules/video_processing/video_denoiser.cc
+++ b/webrtc/modules/video_processing/video_denoiser.cc
@@ -70,7 +70,7 @@
     height_ = frame.height();
     denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane),
                                 frame.buffer(kVPlane), width_, height_,
-                                stride_y, stride_u, stride_v, kVideoRotation_0);
+                                stride_y, stride_u, stride_v);
     // Setting time parameters to the output frame.
     denoised_frame->set_timestamp(frame.timestamp());
     denoised_frame->set_render_time_ms(frame.render_time_ms());
diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc
index 75403f8..2f145a7 100644
--- a/webrtc/modules/video_render/video_render_impl.cc
+++ b/webrtc/modules/video_render/video_render_impl.cc
@@ -568,8 +568,7 @@
         return -1;
     }
     assert (item->second != NULL);
-    item->second->SetStartImage(videoFrame);
-    return 0;
+    return item->second->SetStartImage(videoFrame);
 
 }
 
@@ -595,8 +594,7 @@
         return -1;
     }
     assert(item->second != NULL);
-    item->second->SetTimeoutImage(videoFrame, timeout);
-    return 0;
+    return item->second->SetTimeoutImage(videoFrame, timeout);
 }
 
 }  // namespace webrtc
diff --git a/webrtc/modules/video_render/video_render_internal_impl.cc b/webrtc/modules/video_render/video_render_internal_impl.cc
index 2090fce..a9ae0b0 100644
--- a/webrtc/modules/video_render/video_render_internal_impl.cc
+++ b/webrtc/modules/video_render/video_render_internal_impl.cc
@@ -791,8 +791,7 @@
         return -1;
     }
     assert (item->second != NULL);
-    item->second->SetStartImage(videoFrame);
-    return 0;
+    return item->second->SetStartImage(videoFrame);
 
 }
 
@@ -818,8 +817,7 @@
         return -1;
     }
     assert(item->second != NULL);
-    item->second->SetTimeoutImage(videoFrame, timeout);
-    return 0;
+    return item->second->SetTimeoutImage(videoFrame, timeout);
 }
 
 }  // namespace webrtc
diff --git a/webrtc/test/frame_generator.cc b/webrtc/test/frame_generator.cc
index 3287aba..589dde4 100644
--- a/webrtc/test/frame_generator.cc
+++ b/webrtc/test/frame_generator.cc
@@ -217,8 +217,7 @@
         kTargetWidth, kTargetHeight,
         current_source_frame_->stride(PlaneType::kYPlane),
         current_source_frame_->stride(PlaneType::kUPlane),
-        current_source_frame_->stride(PlaneType::kVPlane),
-        kVideoRotation_0);
+        current_source_frame_->stride(PlaneType::kVPlane));
   }
 
   Clock* const clock_;
diff --git a/webrtc/test/frame_utils.cc b/webrtc/test/frame_utils.cc
deleted file mode 100644
index 13f358a..0000000
--- a/webrtc/test/frame_utils.cc
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/test/frame_utils.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-namespace test {
-
-bool EqualPlane(const uint8_t* data1,
-                const uint8_t* data2,
-                int stride,
-                int width,
-                int height) {
-  for (int y = 0; y < height; ++y) {
-    if (memcmp(data1, data2, width) != 0)
-      return false;
-    data1 += stride;
-    data2 += stride;
-  }
-  return true;
-}
-bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) {
-  if (f1.width() != f2.width() || f1.height() != f2.height() ||
-      f1.stride(webrtc::kYPlane) != f2.stride(webrtc::kYPlane) ||
-      f1.stride(webrtc::kUPlane) != f2.stride(webrtc::kUPlane) ||
-      f1.stride(webrtc::kVPlane) != f2.stride(webrtc::kVPlane) ||
-      f1.timestamp() != f2.timestamp() ||
-      f1.ntp_time_ms() != f2.ntp_time_ms() ||
-      f1.render_time_ms() != f2.render_time_ms()) {
-    return false;
-  }
-  const int half_width = (f1.width() + 1) / 2;
-  const int half_height = (f1.height() + 1) / 2;
-  return EqualPlane(f1.buffer(webrtc::kYPlane), f2.buffer(webrtc::kYPlane),
-                    f1.stride(webrtc::kYPlane), f1.width(), f1.height()) &&
-         EqualPlane(f1.buffer(webrtc::kUPlane), f2.buffer(webrtc::kUPlane),
-                    f1.stride(webrtc::kUPlane), half_width, half_height) &&
-         EqualPlane(f1.buffer(webrtc::kVPlane), f2.buffer(webrtc::kVPlane),
-                    f1.stride(webrtc::kVPlane), half_width, half_height);
-}
-
-}  // namespace test
-}  // namespace webrtc
diff --git a/webrtc/test/frame_utils.h b/webrtc/test/frame_utils.h
deleted file mode 100644
index 42e2cba..0000000
--- a/webrtc/test/frame_utils.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_TEST_FRAME_UTILS_H_
-#define WEBRTC_TEST_FRAME_UTILS_H_
-
-#include "webrtc/base/basictypes.h"
-
-namespace webrtc {
-class VideoFrame;
-namespace test {
-
-bool EqualPlane(const uint8_t* data1,
-                const uint8_t* data2,
-                int stride,
-                int width,
-                int height);
-
-bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2);
-
-}  // namespace test
-}  // namespace webrtc
-
-#endif  // WEBRTC_TEST_FRAME_UTILS_H_
diff --git a/webrtc/test/test.gyp b/webrtc/test/test.gyp
index 32c5dc8..cb5650d 100644
--- a/webrtc/test/test.gyp
+++ b/webrtc/test/test.gyp
@@ -62,15 +62,13 @@
       ],  # conditions.
     },
     {
-      'target_name': 'video_test_common',
+      'target_name': 'fake_video_frames',
       'type': 'static_library',
       'sources': [
         'fake_texture_frame.cc',
         'fake_texture_frame.h',
         'frame_generator.cc',
         'frame_generator.h',
-        'frame_utils.cc',
-        'frame_utils.h',
       ],
       'dependencies': [
         '<(webrtc_root)/common_video/common_video.gyp:common_video',
diff --git a/webrtc/test/webrtc_test_common.gyp b/webrtc/test/webrtc_test_common.gyp
index 46f5f65..318f5bb 100644
--- a/webrtc/test/webrtc_test_common.gyp
+++ b/webrtc/test/webrtc_test_common.gyp
@@ -69,7 +69,7 @@
         '<(webrtc_root)/common.gyp:webrtc_common',
         '<(webrtc_root)/modules/modules.gyp:media_file',
         '<(webrtc_root)/modules/modules.gyp:video_render',
-        '<(webrtc_root)/test/test.gyp:video_test_common',
+        '<(webrtc_root)/test/test.gyp:fake_video_frames',
         '<(webrtc_root)/test/test.gyp:test_support',
         '<(webrtc_root)/test/test.gyp:rtp_test_utils',
         '<(webrtc_root)/webrtc.gyp:webrtc',
@@ -134,7 +134,7 @@
       'dependencies': [
         '<(DEPTH)/testing/gtest.gyp:gtest',
         '<(webrtc_root)/modules/modules.gyp:media_file',
-        '<(webrtc_root)/test/test.gyp:video_test_common',
+        '<(webrtc_root)/test/test.gyp:fake_video_frames',
         '<(webrtc_root)/test/test.gyp:test_support',
       ],
       'direct_dependent_settings': {
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
index 86b701b..ff1194b 100644
--- a/webrtc/video/video_capture_input_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -290,7 +290,7 @@
   uint8_t buffer[kSizeY];
   memset(buffer, data, kSizeY);
   frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2,
-                     width / 2, kVideoRotation_0);
+                     width / 2);
   frame->set_render_time_ms(data);
   return frame;
 }
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index c54bb43..0126773 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -1298,7 +1298,7 @@
   memset(buffer.get(), data, kSizeY);
   VideoFrame frame;
   frame.CreateFrame(buffer.get(), buffer.get(), buffer.get(), width, height,
-                    width, width / 2, width / 2, kVideoRotation_0);
+                    width, width / 2, width / 2);
   frame.set_timestamp(data);
   frame.set_render_time_ms(data);
   return frame;
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 28a6b87..39ba8ee 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -27,39 +27,56 @@
              int64_t render_time_ms,
              VideoRotation rotation);
 
+  // TODO(pbos): Make all create/copy functions void, they should not be able to
+  // fail (which should be RTC_DCHECK/CHECKed instead).
+
   // CreateEmptyFrame: Sets frame dimensions and allocates buffers based
   // on set dimensions - height and plane stride.
   // If required size is bigger than the allocated one, new buffers of adequate
   // size will be allocated.
-  void CreateEmptyFrame(int width,
-                        int height,
-                        int stride_y,
-                        int stride_u,
-                        int stride_v);
+  // Return value: 0 on success, -1 on error.
+  int CreateEmptyFrame(int width,
+                       int height,
+                       int stride_y,
+                       int stride_u,
+                       int stride_v);
 
   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
-  void CreateFrame(const uint8_t* buffer_y,
-                   const uint8_t* buffer_u,
-                   const uint8_t* buffer_v,
-                   int width,
-                   int height,
-                   int stride_y,
-                   int stride_u,
-                   int stride_v,
-                   VideoRotation rotation);
+  // Return value: 0 on success, -1 on error.
+  int CreateFrame(const uint8_t* buffer_y,
+                  const uint8_t* buffer_u,
+                  const uint8_t* buffer_v,
+                  int width,
+                  int height,
+                  int stride_y,
+                  int stride_u,
+                  int stride_v);
+
+  // TODO(guoweis): remove the previous CreateFrame when chromium has this code.
+  int CreateFrame(const uint8_t* buffer_y,
+                  const uint8_t* buffer_u,
+                  const uint8_t* buffer_v,
+                  int width,
+                  int height,
+                  int stride_y,
+                  int stride_u,
+                  int stride_v,
+                  VideoRotation rotation);
 
   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
   // |buffer| must be a packed I420 buffer.
-  void CreateFrame(const uint8_t* buffer,
+  // Return value: 0 on success, -1 on error.
+  int CreateFrame(const uint8_t* buffer,
                   int width,
                   int height,
                   VideoRotation rotation);
 
   // Deep copy frame: If required size is bigger than allocated one, new
   // buffers of adequate size will be allocated.
-  void CopyFrame(const VideoFrame& videoFrame);
+  // Return value: 0 on success, -1 on error.
+  int CopyFrame(const VideoFrame& videoFrame);
 
   // Creates a shallow copy of |videoFrame|, i.e, the this object will retain a
   // reference to the video buffer also retained by |videoFrame|.
@@ -141,6 +158,8 @@
   // called on a non-native-handle frame.
   VideoFrame ConvertNativeToI420Frame() const;
 
+  bool EqualsFrame(const VideoFrame& frame) const;
+
  private:
   // An opaque reference counted handle that stores the pixel data.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;