Delete RawVideoType enum, use the VideoType enum instead.

BUG=webrtc:7385

Review-Url: https://codereview.webrtc.org/2765243002
Cr-Commit-Position: refs/heads/master@{#17930}
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 06733be..424afd9 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -416,23 +416,25 @@
 // Video specific types
 // ==================================================================
 
-// Raw video types
-enum RawVideoType {
-  kVideoI420 = 0,
-  kVideoYV12 = 1,
-  kVideoYUY2 = 2,
-  kVideoUYVY = 3,
-  kVideoIYUV = 4,
-  kVideoARGB = 5,
-  kVideoRGB24 = 6,
-  kVideoRGB565 = 7,
-  kVideoARGB4444 = 8,
-  kVideoARGB1555 = 9,
-  kVideoMJPEG = 10,
-  kVideoNV12 = 11,
-  kVideoNV21 = 12,
-  kVideoBGRA = 13,
-  kVideoUnknown = 99
+// TODO(nisse): Delete, and switch to fourcc values everywhere?
+// Supported video types.
+enum class VideoType {
+  kUnknown,
+  kI420,
+  kIYUV,
+  kRGB24,
+  kABGR,
+  kARGB,
+  kARGB4444,
+  kRGB565,
+  kARGB1555,
+  kYUY2,
+  kYV12,
+  kUYVY,
+  kMJPEG,
+  kNV21,
+  kNV12,
+  kBGRA,
 };
 
 // Video codec
diff --git a/webrtc/common_video/libyuv/include/webrtc_libyuv.h b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
index 6d7ed1f..74fb67c 100644
--- a/webrtc/common_video/libyuv/include/webrtc_libyuv.h
+++ b/webrtc/common_video/libyuv/include/webrtc_libyuv.h
@@ -19,39 +19,19 @@
 #include <vector>
 
 #include "webrtc/api/video/video_frame.h"
-#include "webrtc/common_types.h"  // RawVideoTypes.
+#include "webrtc/common_types.h"  // VideoTypes.
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
 
 class I420Buffer;
 
-// Supported video types.
-enum VideoType {
-  kUnknown,
-  kI420,
-  kIYUV,
-  kRGB24,
-  kABGR,
-  kARGB,
-  kARGB4444,
-  kRGB565,
-  kARGB1555,
-  kYUY2,
-  kYV12,
-  kUYVY,
-  kMJPG,
-  kNV21,
-  kNV12,
-  kBGRA,
-};
-
 // This is the max PSNR value our algorithms can return.
 const double kPerfectPSNR = 48.0f;
 
-// Conversion between the RawVideoType and the LibYuv videoType.
-// TODO(wu): Consolidate types into one type throughout WebRtc.
-VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
+// TODO(nisse): Some downstream apps call CalcBufferSize with
+// ::webrtc::kI420 as the first argument. Delete after they are updated.
+const VideoType kI420 = VideoType::kI420;
 
 // Calculate the required buffer size.
 // Input:
diff --git a/webrtc/common_video/libyuv/libyuv_unittest.cc b/webrtc/common_video/libyuv/libyuv_unittest.cc
index 8f421f3..c60085c 100644
--- a/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -53,7 +53,7 @@
       height_(288),
       size_y_(width_ * height_),
       size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
-      frame_length_(CalcBufferSize(kI420, 352, 288)) {}
+      frame_length_(CalcBufferSize(VideoType::kI420, 352, 288)) {}
 
 void TestLibYuv::SetUp() {
   const std::string input_file_name = webrtc::test::ResourcePath("foreman_cif",
@@ -94,10 +94,11 @@
 
   printf("\nConvert #%d I420 <-> I420 \n", j);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
-  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0, out_i420_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0,
+                               out_i420_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertToI420(VideoType::kI420, out_i420_buffer.get(), 0, 0, width_,
+                          height_, 0, kVideoRotation_0, res_i420_buffer.get()));
 
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
@@ -114,11 +115,12 @@
   Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
   res_i420_buffer =
       I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
-  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kRGB24, 0,
+                               res_rgb_buffer2.get()));
 
-  EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+  EXPECT_EQ(
+      0, ConvertToI420(VideoType::kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
+                       height_, 0, kVideoRotation_0, res_i420_buffer.get()));
 
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
@@ -131,10 +133,11 @@
 
   printf("\nConvert #%d I420 <-> UYVY\n", j);
   std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kUYVY, 0,
+                               out_uyvy_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertToI420(VideoType::kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
+                          height_, 0, kVideoRotation_0, res_i420_buffer.get()));
   psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
   EXPECT_EQ(48.0, psnr);
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
@@ -144,11 +147,12 @@
 
   printf("\nConvert #%d I420 <-> YUY2\n", j);
   std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kYUY2, 0,
+                               out_yuy2_buffer.get()));
 
-  EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertToI420(VideoType::kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
+                          height_, 0, kVideoRotation_0, res_i420_buffer.get()));
 
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
@@ -160,12 +164,12 @@
   printf("\nConvert #%d I420 <-> RGB565\n", j);
   std::unique_ptr<uint8_t[]> out_rgb565_buffer(
       new uint8_t[width_ * height_ * 2]);
-  EXPECT_EQ(0,
-            ConvertFromI420(*orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kRGB565, 0,
+                               out_rgb565_buffer.get()));
 
-  EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
-                             height_, 0,
-                             kVideoRotation_0, res_i420_buffer.get()));
+  EXPECT_EQ(0, ConvertToI420(VideoType::kRGB565, out_rgb565_buffer.get(), 0, 0,
+                             width_, height_, 0, kVideoRotation_0,
+                             res_i420_buffer.get()));
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
   }
@@ -180,11 +184,11 @@
   printf("\nConvert #%d I420 <-> ARGB8888\n", j);
   std::unique_ptr<uint8_t[]> out_argb8888_buffer(
       new uint8_t[width_ * height_ * 4]);
-  EXPECT_EQ(0,
-            ConvertFromI420(*orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kARGB, 0,
+                               out_argb8888_buffer.get()));
 
-  EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
+  EXPECT_EQ(0, ConvertToI420(VideoType::kARGB, out_argb8888_buffer.get(), 0, 0,
+                             width_, height_, 0, kVideoRotation_0,
                              res_i420_buffer.get()));
 
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
@@ -215,11 +219,11 @@
   rtc::scoped_refptr<I420Buffer> res_i420_buffer =
       I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
   std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
-  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0,
+  EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0,
                                out_i420_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
-                             height_, 0, kVideoRotation_0,
-                             res_i420_buffer.get()));
+  EXPECT_EQ(0,
+            ConvertToI420(VideoType::kI420, out_i420_buffer.get(), 0, 0, width_,
+                          height_, 0, kVideoRotation_0, res_i420_buffer.get()));
 
   if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
     return;
@@ -242,17 +246,17 @@
   Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
   rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
       rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
-                             0, kVideoRotation_90,
-                             rotated_res_i420_buffer.get()));
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
-                             0, kVideoRotation_270,
-                             rotated_res_i420_buffer.get()));
+  EXPECT_EQ(
+      0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
+                       kVideoRotation_90, rotated_res_i420_buffer.get()));
+  EXPECT_EQ(
+      0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
+                       kVideoRotation_270, rotated_res_i420_buffer.get()));
   rotated_res_i420_buffer = I420Buffer::Create(
       width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
-  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
-                             0, kVideoRotation_180,
-                             rotated_res_i420_buffer.get()));
+  EXPECT_EQ(
+      0, ConvertToI420(VideoType::kI420, orig_buffer, 0, 0, width_, height_, 0,
+                       kVideoRotation_180, rotated_res_i420_buffer.get()));
 }
 
 static uint8_t Average(int a, int b, int c, int d) {
diff --git a/webrtc/common_video/libyuv/webrtc_libyuv.cc b/webrtc/common_video/libyuv/webrtc_libyuv.cc
index 78cb68a..7366409 100644
--- a/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -21,69 +21,33 @@
 
 namespace webrtc {
 
-VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
-  switch (type) {
-    case kVideoI420:
-      return kI420;
-    case kVideoIYUV:
-      return kIYUV;
-    case kVideoRGB24:
-      return kRGB24;
-    case kVideoARGB:
-      return kARGB;
-    case kVideoARGB4444:
-      return kARGB4444;
-    case kVideoRGB565:
-      return kRGB565;
-    case kVideoARGB1555:
-      return kARGB1555;
-    case kVideoYUY2:
-      return kYUY2;
-    case kVideoYV12:
-      return kYV12;
-    case kVideoUYVY:
-      return kUYVY;
-    case kVideoNV21:
-      return kNV21;
-    case kVideoNV12:
-      return kNV12;
-    case kVideoBGRA:
-      return kBGRA;
-    case kVideoMJPEG:
-      return kMJPG;
-    default:
-      RTC_NOTREACHED();
-  }
-  return kUnknown;
-}
-
 size_t CalcBufferSize(VideoType type, int width, int height) {
   RTC_DCHECK_GE(width, 0);
   RTC_DCHECK_GE(height, 0);
   size_t buffer_size = 0;
   switch (type) {
-    case kI420:
-    case kNV12:
-    case kNV21:
-    case kIYUV:
-    case kYV12: {
+    case VideoType::kI420:
+    case VideoType::kNV12:
+    case VideoType::kNV21:
+    case VideoType::kIYUV:
+    case VideoType::kYV12: {
       int half_width = (width + 1) >> 1;
       int half_height = (height + 1) >> 1;
       buffer_size = width * height + half_width * half_height * 2;
       break;
     }
-    case kARGB4444:
-    case kRGB565:
-    case kARGB1555:
-    case kYUY2:
-    case kUYVY:
+    case VideoType::kARGB4444:
+    case VideoType::kRGB565:
+    case VideoType::kARGB1555:
+    case VideoType::kYUY2:
+    case VideoType::kUYVY:
       buffer_size = width * height * 2;
       break;
-    case kRGB24:
+    case VideoType::kRGB24:
       buffer_size = width * height * 3;
       break;
-    case kBGRA:
-    case kARGB:
+    case VideoType::kBGRA:
+    case VideoType::kARGB:
       buffer_size = width * height * 4;
       break;
     default:
@@ -141,7 +105,7 @@
     return -1;
   int width = input_frame->width();
   int height = input_frame->height();
-  size_t length = CalcBufferSize(kI420, width, height);
+  size_t length = CalcBufferSize(VideoType::kI420, width, height);
   if (size < length) {
      return -1;
   }
@@ -207,36 +171,36 @@
 
 int ConvertVideoType(VideoType video_type) {
   switch (video_type) {
-    case kUnknown:
+    case VideoType::kUnknown:
       return libyuv::FOURCC_ANY;
-    case  kI420:
+    case VideoType::kI420:
       return libyuv::FOURCC_I420;
-    case kIYUV:  // same as KYV12
-    case kYV12:
+    case VideoType::kIYUV:  // same as VideoType::kYV12
+    case VideoType::kYV12:
       return libyuv::FOURCC_YV12;
-    case kRGB24:
+    case VideoType::kRGB24:
       return libyuv::FOURCC_24BG;
-    case kABGR:
+    case VideoType::kABGR:
       return libyuv::FOURCC_ABGR;
-    case kRGB565:
+    case VideoType::kRGB565:
       return libyuv::FOURCC_RGBP;
-    case kYUY2:
+    case VideoType::kYUY2:
       return libyuv::FOURCC_YUY2;
-    case kUYVY:
+    case VideoType::kUYVY:
       return libyuv::FOURCC_UYVY;
-    case kMJPG:
+    case VideoType::kMJPEG:
       return libyuv::FOURCC_MJPG;
-    case kNV21:
+    case VideoType::kNV21:
       return libyuv::FOURCC_NV21;
-    case kNV12:
+    case VideoType::kNV12:
       return libyuv::FOURCC_NV12;
-    case kARGB:
+    case VideoType::kARGB:
       return libyuv::FOURCC_ARGB;
-    case kBGRA:
+    case VideoType::kBGRA:
       return libyuv::FOURCC_BGRA;
-    case kARGB4444:
+    case VideoType::kARGB4444:
       return libyuv::FOURCC_R444;
-    case kARGB1555:
+    case VideoType::kARGB1555:
       return libyuv::FOURCC_RGBO;
   }
   RTC_NOTREACHED();
diff --git a/webrtc/media/engine/webrtcvideocapturer.cc b/webrtc/media/engine/webrtcvideocapturer.cc
index 11458d1..9e5c921 100644
--- a/webrtc/media/engine/webrtcvideocapturer.cc
+++ b/webrtc/media/engine/webrtcvideocapturer.cc
@@ -25,44 +25,31 @@
 
 namespace cricket {
 
+namespace {
 struct kVideoFourCCEntry {
   uint32_t fourcc;
-  webrtc::RawVideoType webrtc_type;
+  webrtc::VideoType webrtc_type;
 };
 
 // This indicates our format preferences and defines a mapping between
 // webrtc::RawVideoType (from video_capture_defines.h) to our FOURCCs.
-static kVideoFourCCEntry kSupportedFourCCs[] = {
-  { FOURCC_I420, webrtc::kVideoI420 },   // 12 bpp, no conversion.
-  { FOURCC_YV12, webrtc::kVideoYV12 },   // 12 bpp, no conversion.
-  { FOURCC_YUY2, webrtc::kVideoYUY2 },   // 16 bpp, fast conversion.
-  { FOURCC_UYVY, webrtc::kVideoUYVY },   // 16 bpp, fast conversion.
-  { FOURCC_NV12, webrtc::kVideoNV12 },   // 12 bpp, fast conversion.
-  { FOURCC_NV21, webrtc::kVideoNV21 },   // 12 bpp, fast conversion.
-  { FOURCC_MJPG, webrtc::kVideoMJPEG },  // compressed, slow conversion.
-  { FOURCC_ARGB, webrtc::kVideoARGB },   // 32 bpp, slow conversion.
-  { FOURCC_24BG, webrtc::kVideoRGB24 },  // 24 bpp, slow conversion.
+kVideoFourCCEntry kSupportedFourCCs[] = {
+    {FOURCC_I420, webrtc::VideoType::kI420},   // 12 bpp, no conversion.
+    {FOURCC_YV12, webrtc::VideoType::kYV12},   // 12 bpp, no conversion.
+    {FOURCC_YUY2, webrtc::VideoType::kYUY2},   // 16 bpp, fast conversion.
+    {FOURCC_UYVY, webrtc::VideoType::kUYVY},   // 16 bpp, fast conversion.
+    {FOURCC_NV12, webrtc::VideoType::kNV12},   // 12 bpp, fast conversion.
+    {FOURCC_NV21, webrtc::VideoType::kNV21},   // 12 bpp, fast conversion.
+    {FOURCC_MJPG, webrtc::VideoType::kMJPEG},  // compressed, slow conversion.
+    {FOURCC_ARGB, webrtc::VideoType::kARGB},   // 32 bpp, slow conversion.
+    {FOURCC_24BG, webrtc::VideoType::kRGB24},  // 24 bpp, slow conversion.
 };
 
-class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
- public:
-  virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
-      const char* device) {
-    return webrtc::VideoCaptureFactory::Create(device);
-  }
-  virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
-    return webrtc::VideoCaptureFactory::CreateDeviceInfo();
-  }
-  virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
-    delete info;
-  }
-};
-
-static bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
-                               VideoFormat* format) {
+bool CapabilityToFormat(const webrtc::VideoCaptureCapability& cap,
+                        VideoFormat* format) {
   uint32_t fourcc = 0;
   for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
-    if (kSupportedFourCCs[i].webrtc_type == cap.rawType) {
+    if (kSupportedFourCCs[i].webrtc_type == cap.videoType) {
       fourcc = kSupportedFourCCs[i].fourcc;
       break;
     }
@@ -78,27 +65,43 @@
   return true;
 }
 
-static bool FormatToCapability(const VideoFormat& format,
-                               webrtc::VideoCaptureCapability* cap) {
-  webrtc::RawVideoType webrtc_type = webrtc::kVideoUnknown;
+bool FormatToCapability(const VideoFormat& format,
+                        webrtc::VideoCaptureCapability* cap) {
+  webrtc::VideoType webrtc_type = webrtc::VideoType::kUnknown;
   for (size_t i = 0; i < arraysize(kSupportedFourCCs); ++i) {
     if (kSupportedFourCCs[i].fourcc == format.fourcc) {
       webrtc_type = kSupportedFourCCs[i].webrtc_type;
       break;
     }
   }
-  if (webrtc_type == webrtc::kVideoUnknown) {
+  if (webrtc_type == webrtc::VideoType::kUnknown) {
     return false;
   }
 
   cap->width = format.width;
   cap->height = format.height;
   cap->maxFPS = VideoFormat::IntervalToFps(format.interval);
-  cap->rawType = webrtc_type;
+  cap->videoType = webrtc_type;
   cap->interlaced = false;
   return true;
 }
 
+}  // namespace
+
+class WebRtcVcmFactory : public WebRtcVcmFactoryInterface {
+ public:
+  virtual rtc::scoped_refptr<webrtc::VideoCaptureModule> Create(
+      const char* device) {
+    return webrtc::VideoCaptureFactory::Create(device);
+  }
+  virtual webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo() {
+    return webrtc::VideoCaptureFactory::CreateDeviceInfo();
+  }
+  virtual void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info) {
+    delete info;
+  }
+};
+
 ///////////////////////////////////////////////////////////////////////////
 // Implementation of class WebRtcVideoCapturer
 ///////////////////////////////////////////////////////////////////////////
@@ -165,7 +168,7 @@
         supported.push_back(format);
       } else {
         LOG(LS_WARNING) << "Ignoring unsupported WebRTC capture format "
-                        << cap.rawType;
+                        << static_cast<int>(cap.videoType);
       }
     }
   }
diff --git a/webrtc/media/engine/webrtcvideocapturer_unittest.cc b/webrtc/media/engine/webrtcvideocapturer_unittest.cc
index 32dab9b..f660e43 100644
--- a/webrtc/media/engine/webrtcvideocapturer_unittest.cc
+++ b/webrtc/media/engine/webrtcvideocapturer_unittest.cc
@@ -42,7 +42,7 @@
     vga.width = 640;
     vga.height = 480;
     vga.maxFPS = 30;
-    vga.rawType = webrtc::kVideoI420;
+    vga.videoType = webrtc::VideoType::kI420;
     factory_->device_info.AddCapability(kTestDeviceId, vga);
   }
 
diff --git a/webrtc/modules/video_capture/device_info_impl.cc b/webrtc/modules/video_capture/device_info_impl.cc
index eade3ab..35434c0 100644
--- a/webrtc/modules/video_capture/device_info_impl.cc
+++ b/webrtc/modules/video_capture/device_info_impl.cc
@@ -153,7 +153,7 @@
     int32_t bestWidth = 0;
     int32_t bestHeight = 0;
     int32_t bestFrameRate = 0;
-    RawVideoType bestRawType = kVideoUnknown;
+    VideoType bestVideoType = VideoType::kUnknown;
 
     const int32_t numberOfCapabilies =
         static_cast<int32_t>(_captureCapabilities.size());
@@ -193,15 +193,15 @@
                             if ((currentbestDiffFrameRate == diffFrameRate) // Same frame rate as previous  or frame rate allready good enough
                                 || (currentbestDiffFrameRate >= 0))
                             {
-                                if (bestRawType != requested.rawType
-                                    && requested.rawType != kVideoUnknown
-                                    && (capability.rawType == requested.rawType
-                                        || capability.rawType == kVideoI420
-                                        || capability.rawType == kVideoYUY2
-                                        || capability.rawType == kVideoYV12))
-                                {
-                                    bestRawType = capability.rawType;
-                                    bestformatIndex = tmp;
+                              if (bestVideoType != requested.videoType &&
+                                  requested.videoType != VideoType::kUnknown &&
+                                  (capability.videoType ==
+                                       requested.videoType ||
+                                   capability.videoType == VideoType::kI420 ||
+                                   capability.videoType == VideoType::kYUY2 ||
+                                   capability.videoType == VideoType::kYV12)) {
+                                bestVideoType = capability.videoType;
+                                bestformatIndex = tmp;
                                 }
                                 // If width height and frame rate is full filled we can use the camera for encoding if it is supported.
                                 if (capability.height == requested.height
@@ -216,7 +216,7 @@
                                 bestWidth = capability.width;
                                 bestHeight = capability.height;
                                 bestFrameRate = capability.maxFPS;
-                                bestRawType = capability.rawType;
+                                bestVideoType = capability.videoType;
                                 bestformatIndex = tmp;
                             }
                         }
@@ -226,7 +226,7 @@
                         bestWidth = capability.width;
                         bestHeight = capability.height;
                         bestFrameRate = capability.maxFPS;
-                        bestRawType = capability.rawType;
+                        bestVideoType = capability.videoType;
                         bestformatIndex = tmp;
                     }
                 }// else width no good
@@ -236,7 +236,7 @@
                 bestWidth = capability.width;
                 bestHeight = capability.height;
                 bestFrameRate = capability.maxFPS;
-                bestRawType = capability.rawType;
+                bestVideoType = capability.videoType;
                 bestformatIndex = tmp;
             }
         }// else height not good
@@ -244,7 +244,7 @@
 
     LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" << bestHeight
                     << "@" << bestFrameRate
-                    << "fps, color format: " << bestRawType;
+                    << "fps, color format: " << static_cast<int>(bestVideoType);
 
     // Copy the capability
     if (bestformatIndex < 0)
diff --git a/webrtc/modules/video_capture/linux/device_info_linux.cc b/webrtc/modules/video_capture/linux/device_info_linux.cc
index 437d04a..bcbabe4 100644
--- a/webrtc/modules/video_capture/linux/device_info_linux.cc
+++ b/webrtc/modules/video_capture/linux/device_info_linux.cc
@@ -287,26 +287,26 @@
                     cap.height = video_fmt.fmt.pix.height;
                     if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
                     {
-                        cap.rawType = kVideoYUY2;
+                      cap.videoType = VideoType::kYUY2;
                     }
                     else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420)
                     {
-                        cap.rawType = kVideoI420;
+                      cap.videoType = VideoType::kI420;
                     }
                     else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
                     {
-                        cap.rawType = kVideoMJPEG;
+                      cap.videoType = VideoType::kMJPEG;
                     }
                     else if (videoFormats[fmts] == V4L2_PIX_FMT_UYVY)
                     {
-                        cap.rawType = kVideoUYVY;
+                      cap.videoType = VideoType::kUYVY;
                     }
 
                     // get fps of current camera mode
                     // V4l2 does not have a stable method of knowing so we just guess.
-                    if(cap.width >= 800 && cap.rawType != kVideoMJPEG)
-                    {
-                        cap.maxFPS = 15;
+                    if (cap.width >= 800 &&
+                        cap.videoType != VideoType::kMJPEG) {
+                      cap.maxFPS = 15;
                     }
                     else
                     {
@@ -318,7 +318,7 @@
                     WEBRTC_TRACE(
                         webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
                         "Camera capability, width:%d height:%d type:%d fps:%d",
-                        cap.width, cap.height, cap.rawType, cap.maxFPS);
+                        cap.width, cap.height, cap.videoType, cap.maxFPS);
                 }
             }
         }
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.cc b/webrtc/modules/video_capture/linux/video_capture_linux.cc
index c7c9f3e..894f038 100644
--- a/webrtc/modules/video_capture/linux/video_capture_linux.cc
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -48,10 +48,8 @@
       _currentHeight(-1),
       _currentFrameRate(-1),
       _captureStarted(false),
-      _captureVideoType(kVideoI420),
-      _pool(NULL)
-{
-}
+      _captureVideoType(VideoType::kI420),
+      _pool(NULL) {}
 
 int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
 {
@@ -114,11 +112,10 @@
 {
     if (_captureStarted)
     {
-        if (capability.width == _currentWidth &&
-            capability.height == _currentHeight &&
-            _captureVideoType == capability.rawType)
-        {
-            return 0;
+      if (capability.width == _currentWidth &&
+          capability.height == _currentHeight &&
+          _captureVideoType == capability.videoType) {
+        return 0;
         }
         else
         {
@@ -201,14 +198,14 @@
     video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
 
     if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
-        _captureVideoType = kVideoYUY2;
+      _captureVideoType = VideoType::kYUY2;
     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
-        _captureVideoType = kVideoI420;
+      _captureVideoType = VideoType::kI420;
     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
-        _captureVideoType = kVideoUYVY;
+      _captureVideoType = VideoType::kUYVY;
     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
              video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
-        _captureVideoType = kVideoMJPEG;
+      _captureVideoType = VideoType::kMJPEG;
 
     //set format and frame size now
     if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
@@ -252,7 +249,7 @@
     // If driver doesn't support framerate control, need to hardcode.
     // Hardcoding the value based on the frame size.
     if (!driver_framerate_support) {
-      if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
+      if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) {
         _currentFrameRate = 15;
       } else {
         _currentFrameRate = 30;
@@ -447,7 +444,7 @@
         VideoCaptureCapability frameInfo;
         frameInfo.width = _currentWidth;
         frameInfo.height = _currentHeight;
-        frameInfo.rawType = _captureVideoType;
+        frameInfo.videoType = _captureVideoType;
 
         // convert to to I420 if needed
         IncomingFrame((unsigned char*) _pool[buf.index].start,
@@ -468,7 +465,7 @@
     settings.width = _currentWidth;
     settings.height = _currentHeight;
     settings.maxFPS = _currentFrameRate;
-    settings.rawType=_captureVideoType;
+    settings.videoType = _captureVideoType;
 
     return 0;
 }
diff --git a/webrtc/modules/video_capture/linux/video_capture_linux.h b/webrtc/modules/video_capture/linux/video_capture_linux.h
index fdfcdb5..6a37048 100644
--- a/webrtc/modules/video_capture/linux/video_capture_linux.h
+++ b/webrtc/modules/video_capture/linux/video_capture_linux.h
@@ -53,7 +53,7 @@
     int32_t _currentHeight;
     int32_t _currentFrameRate;
     bool _captureStarted;
-    RawVideoType _captureVideoType;
+    VideoType _captureVideoType;
     struct Buffer
     {
         void *start;
diff --git a/webrtc/modules/video_capture/objc/device_info_objc.mm b/webrtc/modules/video_capture/objc/device_info_objc.mm
index 30a07b9..1db03a4 100644
--- a/webrtc/modules/video_capture/objc/device_info_objc.mm
+++ b/webrtc/modules/video_capture/objc/device_info_objc.mm
@@ -60,19 +60,19 @@
     capability.width = 352;
     capability.height = 288;
     capability.maxFPS = 30;
-    capability.rawType = webrtc::kVideoNV12;
+    capability.videoType = webrtc::VideoType::kNV12;
     capability.interlaced = false;
   } else if ([preset isEqualToString:AVCaptureSessionPreset640x480]) {
     capability.width = 640;
     capability.height = 480;
     capability.maxFPS = 30;
-    capability.rawType = webrtc::kVideoNV12;
+    capability.videoType = webrtc::VideoType::kNV12;
     capability.interlaced = false;
   } else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
     capability.width = 1280;
     capability.height = 720;
     capability.maxFPS = 30;
-    capability.rawType = webrtc::kVideoNV12;
+    capability.videoType = webrtc::VideoType::kNV12;
     capability.interlaced = false;
   }
 
diff --git a/webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm b/webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm
index 1820562..eab9f99 100644
--- a/webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm
+++ b/webrtc/modules/video_capture/objc/rtc_video_capture_objc.mm
@@ -359,7 +359,7 @@
   tempCaptureCapability.width = CVPixelBufferGetWidth(videoFrame);
   tempCaptureCapability.height = CVPixelBufferGetHeight(videoFrame);
   tempCaptureCapability.maxFPS = _capability.maxFPS;
-  tempCaptureCapability.rawType = kVideoNV12;
+  tempCaptureCapability.videoType = VideoType::kNV12;
 
   _owner->IncomingFrame(baseAddress, frameSize, tempCaptureCapability, 0);
 
diff --git a/webrtc/modules/video_capture/objc/video_capture.mm b/webrtc/modules/video_capture/objc/video_capture.mm
index 4c4908d..2322b0b 100644
--- a/webrtc/modules/video_capture/objc/video_capture.mm
+++ b/webrtc/modules/video_capture/objc/video_capture.mm
@@ -100,6 +100,6 @@
 
 int32_t VideoCaptureIos::CaptureSettings(VideoCaptureCapability& settings) {
   settings = capability_;
-  settings.rawType = kVideoNV12;
+  settings.videoType = VideoType::kNV12;
   return 0;
 }
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
index a4a7a72..abf5085 100644
--- a/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -212,7 +212,7 @@
     capability.width = kTestWidth;
     capability.height = kTestHeight;
     capability.maxFPS = kTestFramerate;
-    capability.rawType = webrtc::kVideoUnknown;
+    capability.videoType = webrtc::VideoType::kUnknown;
 #endif
     capture_observer.SetExpectedCapability(capability);
     ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
@@ -319,7 +319,7 @@
   capability1.width = kTestWidth;
   capability1.height = kTestHeight;
   capability1.maxFPS = kTestFramerate;
-  capability1.rawType = webrtc::kVideoUnknown;
+  capability1.videoType = webrtc::VideoType::kUnknown;
 #endif
   capture_observer1.SetExpectedCapability(capability1);
 
@@ -336,7 +336,7 @@
   capability2.width = kTestWidth;
   capability2.height = kTestHeight;
   capability2.maxFPS = kTestFramerate;
-  capability2.rawType = webrtc::kVideoUnknown;
+  capability2.videoType = webrtc::VideoType::kUnknown;
 #endif
   capture_observer2.SetExpectedCapability(capability2);
 
@@ -358,7 +358,7 @@
     VideoCaptureCapability capability;
     capability.width = kTestWidth;
     capability.height = kTestHeight;
-    capability.rawType = webrtc::kVideoYV12;
+    capability.videoType = webrtc::VideoType::kYV12;
     capability.maxFPS = kTestFramerate;
     capture_callback_.SetExpectedCapability(capability);
 
@@ -390,9 +390,8 @@
 
 // Test input of external video frames.
 TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
-  size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+  size_t length = webrtc::CalcBufferSize(
+      webrtc::VideoType::kI420, test_frame_->width(), test_frame_->height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
   webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
@@ -402,9 +401,8 @@
 
 TEST_F(VideoCaptureExternalTest, Rotation) {
   EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
-  size_t length = webrtc::CalcBufferSize(webrtc::kI420,
-                                         test_frame_->width(),
-                                         test_frame_->height());
+  size_t length = webrtc::CalcBufferSize(
+      webrtc::VideoType::kI420, test_frame_->width(), test_frame_->height());
   std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
   webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
   EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
diff --git a/webrtc/modules/video_capture/video_capture_defines.h b/webrtc/modules/video_capture/video_capture_defines.h
index 90693ad..ed81362 100644
--- a/webrtc/modules/video_capture/video_capture_defines.h
+++ b/webrtc/modules/video_capture/video_capture_defines.h
@@ -31,7 +31,7 @@
     int32_t width;
     int32_t height;
     int32_t maxFPS;
-    RawVideoType rawType;
+    VideoType videoType;
     bool interlaced;
 
     VideoCaptureCapability()
@@ -39,7 +39,7 @@
         width = 0;
         height = 0;
         maxFPS = 0;
-        rawType = kVideoUnknown;
+        videoType = VideoType::kUnknown;
         interlaced = false;
     }
     ;
@@ -51,8 +51,8 @@
             return true;
         if (maxFPS != other.maxFPS)
             return true;
-        if (rawType != other.rawType)
-            return true;
+        if (videoType != other.videoType)
+          return true;
         if (interlaced != other.interlaced)
             return true;
         return false;
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 0afa9aa..5ee1066 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -89,7 +89,7 @@
     _requestedCapability.width = kDefaultWidth;
     _requestedCapability.height = kDefaultHeight;
     _requestedCapability.maxFPS = 30;
-    _requestedCapability.rawType = kVideoI420;
+    _requestedCapability.videoType = VideoType::kI420;
     memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
 }
 
@@ -134,11 +134,8 @@
     TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);
 
     // Not encoded, convert to I420.
-    const VideoType commonVideoType =
-        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
-
-    if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width, abs(height)) !=
+    if (frameInfo.videoType != VideoType::kMJPEG &&
+        CalcBufferSize(frameInfo.videoType, width, abs(height)) !=
             videoFrameLength) {
       LOG(LS_ERROR) << "Wrong incoming frame length.";
       return -1;
@@ -169,12 +166,12 @@
     rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
         target_width, abs(target_height), stride_y, stride_uv, stride_uv);
     const int conversionResult = ConvertToI420(
-        commonVideoType, videoFrame, 0, 0,  // No cropping
+        frameInfo.videoType, videoFrame, 0, 0,  // No cropping
         width, height, videoFrameLength,
         apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
     if (conversionResult < 0) {
       LOG(LS_ERROR) << "Failed to convert capture frame from type "
-                    << frameInfo.rawType << "to I420.";
+                    << static_cast<int>(frameInfo.videoType) << " to I420.";
       return -1;
     }
 
diff --git a/webrtc/modules/video_capture/windows/device_info_ds.cc b/webrtc/modules/video_capture/windows/device_info_ds.cc
index fb2094c..745e4c9 100644
--- a/webrtc/modules/video_capture/windows/device_info_ds.cc
+++ b/webrtc/modules/video_capture/windows/device_info_ds.cc
@@ -589,44 +589,45 @@
             // can't switch MEDIATYPE :~(
             if (pmt->subtype == MEDIASUBTYPE_I420)
             {
-                capability.rawType = kVideoI420;
+              capability.videoType = VideoType::kI420;
             }
             else if (pmt->subtype == MEDIASUBTYPE_IYUV)
             {
-                capability.rawType = kVideoIYUV;
+              capability.videoType = VideoType::kIYUV;
             }
             else if (pmt->subtype == MEDIASUBTYPE_RGB24)
             {
-                capability.rawType = kVideoRGB24;
+              capability.videoType = VideoType::kRGB24;
             }
             else if (pmt->subtype == MEDIASUBTYPE_YUY2)
             {
-                capability.rawType = kVideoYUY2;
+              capability.videoType = VideoType::kYUY2;
             }
             else if (pmt->subtype == MEDIASUBTYPE_RGB565)
             {
-                capability.rawType = kVideoRGB565;
+              capability.videoType = VideoType::kRGB565;
             }
             else if (pmt->subtype == MEDIASUBTYPE_MJPG)
             {
-                capability.rawType = kVideoMJPEG;
+              capability.videoType = VideoType::kMJPEG;
             }
             else if (pmt->subtype == MEDIASUBTYPE_dvsl
                     || pmt->subtype == MEDIASUBTYPE_dvsd
                     || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
             {
-                capability.rawType = kVideoYUY2;// MS DV filter seems to create this type
+              capability.videoType =
+                  VideoType::kYUY2;  // MS DV filter seems to create this type
             }
             else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
             {
-                capability.rawType = kVideoUYVY;
+              capability.videoType = VideoType::kUYVY;
             }
             else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
             {
                 WEBRTC_TRACE(webrtc::kTraceWarning,
                              webrtc::kTraceVideoCapture, 0,
                              "Device support HDYC.");
-                capability.rawType = kVideoUYVY;
+                capability.videoType = VideoType::kUYVY;
             }
             else
             {
@@ -641,10 +642,11 @@
 
             _captureCapabilities.push_back(capability);
             _captureCapabilitiesWindows.push_back(capability);
-            WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
+            WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0,
                          "Camera capability, width:%d height:%d type:%d fps:%d",
                          capability.width, capability.height,
-                         capability.rawType, capability.maxFPS);
+                         static_cast<int>(capability.videoType),
+                         capability.maxFPS);
         }
         DeleteMediaType(pmt);
         pmt = NULL;
diff --git a/webrtc/modules/video_capture/windows/sink_filter_ds.cc b/webrtc/modules/video_capture/windows/sink_filter_ds.cc
index 7e99556..1fca73f 100644
--- a/webrtc/modules/video_capture/windows/sink_filter_ds.cc
+++ b/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -205,39 +205,39 @@
         if(*SubType == MEDIASUBTYPE_MJPG
             && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
         {
-            _resultingCapability.rawType = kVideoMJPEG;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kMJPEG;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_I420
             && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
         {
-            _resultingCapability.rawType = kVideoI420;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kI420;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_YUY2
             && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
         {
-            _resultingCapability.rawType = kVideoYUY2;
-            ::Sleep(60); // workaround for bad driver
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kYUY2;
+          ::Sleep(60);  // workaround for bad driver
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_UYVY
             && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
         {
-            _resultingCapability.rawType = kVideoUYVY;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kUYVY;
+          return S_OK;  // This format is acceptable.
         }
 
         if(*SubType == MEDIASUBTYPE_HDYC)
         {
-            _resultingCapability.rawType = kVideoUYVY;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kUYVY;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_RGB24
             && pvi->bmiHeader.biCompression == BI_RGB)
         {
-            _resultingCapability.rawType = kVideoRGB24;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kRGB24;
+          return S_OK;  // This format is acceptable.
         }
     }
     if(*formatType == FORMAT_VideoInfo2)
@@ -272,38 +272,38 @@
         if(*SubType == MEDIASUBTYPE_MJPG
             && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
         {
-            _resultingCapability.rawType = kVideoMJPEG;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kMJPEG;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_I420
             && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
         {
-            _resultingCapability.rawType = kVideoI420;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kI420;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_YUY2
             && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
         {
-            _resultingCapability.rawType = kVideoYUY2;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kYUY2;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_UYVY
             && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
         {
-            _resultingCapability.rawType = kVideoUYVY;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kUYVY;
+          return S_OK;  // This format is acceptable.
         }
 
         if(*SubType == MEDIASUBTYPE_HDYC)
         {
-            _resultingCapability.rawType = kVideoUYVY;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kUYVY;
+          return S_OK;  // This format is acceptable.
         }
         if(*SubType == MEDIASUBTYPE_RGB24
             && pvi->bmiHeader.biCompression == BI_RGB)
         {
-            _resultingCapability.rawType = kVideoRGB24;
-            return S_OK; // This format is acceptable.
+          _resultingCapability.videoType = VideoType::kRGB24;
+          return S_OK;  // This format is acceptable.
         }
     }
     return E_INVALIDARG;
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 315d347..1a10ddf 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -107,8 +107,8 @@
     // should be more than enough to hold any encoded data of future frames of
     // the same size (avoiding possible future reallocation due to variations in
     // required size).
-    encoded_image->_size =
-        CalcBufferSize(kI420, frame_buffer.width(), frame_buffer.height());
+    encoded_image->_size = CalcBufferSize(
+        VideoType::kI420, frame_buffer.width(), frame_buffer.height());
     if (encoded_image->_size < required_size) {
       // Encoded data > unencoded data. Allocate required bytes.
       LOG(LS_WARNING) << "Encoding produced more bytes than the original image "
@@ -254,8 +254,8 @@
                                &video_format);
 
   // Initialize encoded image. Default buffer size: size of unencoded data.
-  encoded_image_._size =
-      CalcBufferSize(kI420, codec_settings->width, codec_settings->height);
+  encoded_image_._size = CalcBufferSize(VideoType::kI420, codec_settings->width,
+                                        codec_settings->height);
   encoded_image_._buffer = new uint8_t[encoded_image_._size];
   encoded_image_buffer_.reset(encoded_image_._buffer);
   encoded_image_._completeFrame = true;
diff --git a/webrtc/modules/video_coding/codecs/i420/i420.cc b/webrtc/modules/video_coding/codecs/i420/i420.cc
index d05ba7f..31127ed 100644
--- a/webrtc/modules/video_coding/codecs/i420/i420.cc
+++ b/webrtc/modules/video_coding/codecs/i420/i420.cc
@@ -57,9 +57,9 @@
     _encodedImage._buffer = NULL;
     _encodedImage._size = 0;
   }
-  const size_t newSize =
-      CalcBufferSize(kI420, codecSettings->width, codecSettings->height) +
-      kI420HeaderSize;
+  const size_t newSize = CalcBufferSize(VideoType::kI420, codecSettings->width,
+                                        codecSettings->height) +
+                         kI420HeaderSize;
   uint8_t* newBuffer = new uint8_t[newSize];
   if (newBuffer == NULL) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
@@ -96,9 +96,9 @@
     return WEBRTC_VIDEO_CODEC_ERR_SIZE;
   }
 
-  size_t req_length =
-      CalcBufferSize(kI420, inputImage.width(), inputImage.height()) +
-      kI420HeaderSize;
+  size_t req_length = CalcBufferSize(VideoType::kI420, inputImage.width(),
+                                     inputImage.height()) +
+                      kI420HeaderSize;
   if (_encodedImage._size > req_length) {
     // Reallocate buffer.
     delete[] _encodedImage._buffer;
@@ -193,7 +193,8 @@
   _height = height;
 
   // Verify that the available length is sufficient:
-  size_t req_length = CalcBufferSize(kI420, _width, _height) + kI420HeaderSize;
+  size_t req_length =
+      CalcBufferSize(VideoType::kI420, _width, _height) + kI420HeaderSize;
 
   if (req_length > inputImage._length) {
     return WEBRTC_VIDEO_CODEC_ERROR;
@@ -204,7 +205,7 @@
       I420Buffer::Create(_width, _height, _width, half_width, half_width);
 
   // Converting from raw buffer I420Buffer.
-  int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
+  int ret = ConvertToI420(VideoType::kI420, buffer, 0, 0, _width, _height, 0,
                           kVideoRotation_0, frame_buffer.get());
   if (ret < 0) {
     return WEBRTC_VIDEO_CODEC_MEMORY;
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index 0bb04a9..3a918f6 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -251,7 +251,8 @@
   }
 
   if (source_frame_writer_) {
-    size_t length = CalcBufferSize(kI420, buffer->width(), buffer->height());
+    size_t length =
+        CalcBufferSize(VideoType::kI420, buffer->width(), buffer->height());
     rtc::Buffer extracted_buffer(length);
     int extracted_length =
         ExtractBuffer(buffer, length, extracted_buffer.data());
@@ -467,14 +468,15 @@
       scaled_buffer->ScaleFrom(*image.video_frame_buffer());
     }
 
-    size_t length =
-        CalcBufferSize(kI420, scaled_buffer->width(), scaled_buffer->height());
+    size_t length = CalcBufferSize(VideoType::kI420, scaled_buffer->width(),
+                                   scaled_buffer->height());
     extracted_buffer.SetSize(length);
     extracted_length =
         ExtractBuffer(scaled_buffer, length, extracted_buffer.data());
   } else {
     // No resize.
-    size_t length = CalcBufferSize(kI420, image.width(), image.height());
+    size_t length =
+        CalcBufferSize(VideoType::kI420, image.width(), image.height());
     extracted_buffer.SetSize(length);
     if (image.video_frame_buffer()->native_handle()) {
       extracted_length =
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.h b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.h
index a377618..937f512 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.h
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.h
@@ -248,7 +248,7 @@
         test::OutputPath(), "videoprocessor_integrationtest");
 
     config_.frame_length_in_bytes =
-        CalcBufferSize(kI420, process.width, process.height);
+        CalcBufferSize(VideoType::kI420, process.width, process.height);
     config_.verbose = process.verbose_logging;
     config_.use_single_core = process.use_single_core;
     // Key frame interval and packet loss are set for each test.
diff --git a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 9a0f3db..c4c600d 100644
--- a/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -198,7 +198,7 @@
     int64_t startTime = rtc::TimeMillis();
     while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
       if (decode_complete_callback_->DecodeComplete()) {
-        return CalcBufferSize(kI420, decoded_frame_->width(),
+        return CalcBufferSize(VideoType::kI420, decoded_frame_->width(),
                               decoded_frame_->height());
       }
     }
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 66db72c..a3495ee 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -368,7 +368,7 @@
       delete[] encoded_images_[i]._buffer;
     }
     encoded_images_[i]._size =
-        CalcBufferSize(kI420, codec_.width, codec_.height);
+        CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
     encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
     encoded_images_[i]._completeFrame = true;
   }
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 4b0f99e..07474e1 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -284,7 +284,8 @@
   if (encoded_image_._buffer != NULL) {
     delete[] encoded_image_._buffer;
   }
-  encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
+  encoded_image_._size =
+      CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
   encoded_image_._buffer = new uint8_t[encoded_image_._size];
   encoded_image_._completeFrame = true;
   // Creating a wrapper to the image - setting image data to NULL. Actual
diff --git a/webrtc/test/frame_generator.cc b/webrtc/test/frame_generator.cc
index e293642..b918dbd 100644
--- a/webrtc/test/frame_generator.cc
+++ b/webrtc/test/frame_generator.cc
@@ -129,7 +129,7 @@
         files_(files),
         width_(width),
         height_(height),
-        frame_size_(CalcBufferSize(kI420,
+        frame_size_(CalcBufferSize(VideoType::kI420,
                                    static_cast<int>(width_),
                                    static_cast<int>(height_))),
         frame_buffer_(new uint8_t[frame_size_]),
diff --git a/webrtc/test/gl/gl_renderer.cc b/webrtc/test/gl/gl_renderer.cc
index 964a688..5f52923 100644
--- a/webrtc/test/gl/gl_renderer.cc
+++ b/webrtc/test/gl/gl_renderer.cc
@@ -78,7 +78,7 @@
     ResizeVideo(frame.width(), frame.height());
   }
 
-  webrtc::ConvertFromI420(frame, kBGRA, 0, buffer_);
+  webrtc::ConvertFromI420(frame, VideoType::kBGRA, 0, buffer_);
 
   glEnable(GL_TEXTURE_2D);
   glBindTexture(GL_TEXTURE_2D, texture_);
diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc
index d66cf23..69c2b6a 100644
--- a/webrtc/test/vcm_capturer.cc
+++ b/webrtc/test/vcm_capturer.cc
@@ -40,7 +40,7 @@
   capability_.width = static_cast<int32_t>(width);
   capability_.height = static_cast<int32_t>(height);
   capability_.maxFPS = static_cast<int32_t>(target_fps);
-  capability_.rawType = kVideoI420;
+  capability_.videoType = VideoType::kI420;
 
   if (vcm_->StartCapture(capability_) != 0) {
     Destroy();
diff --git a/webrtc/test/win/d3d_renderer.cc b/webrtc/test/win/d3d_renderer.cc
index 7c344ba..01b0325 100644
--- a/webrtc/test/win/d3d_renderer.cc
+++ b/webrtc/test/win/d3d_renderer.cc
@@ -203,7 +203,8 @@
   if (texture_->LockRect(0, &lock_rect, NULL, 0) != D3D_OK)
     return;
 
-  ConvertFromI420(frame, kARGB, 0, static_cast<uint8_t*>(lock_rect.pBits));
+  ConvertFromI420(frame, VideoType::kARGB, 0,
+                  static_cast<uint8_t*>(lock_rect.pBits));
   texture_->UnlockRect(0);
 
   d3d_device_->BeginScene();
diff --git a/webrtc/tools/frame_editing/frame_editing_lib.cc b/webrtc/tools/frame_editing/frame_editing_lib.cc
index f68dc1f..175a676 100644
--- a/webrtc/tools/frame_editing/frame_editing_lib.cc
+++ b/webrtc/tools/frame_editing/frame_editing_lib.cc
@@ -35,7 +35,7 @@
   }
 
   // Frame size of I420.
-  size_t frame_length = CalcBufferSize(kI420, width, height);
+  size_t frame_length = CalcBufferSize(VideoType::kI420, width, height);
 
   std::unique_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);
 
diff --git a/webrtc/tools/frame_editing/frame_editing_unittest.cc b/webrtc/tools/frame_editing/frame_editing_unittest.cc
index d61ec00..2e435452 100644
--- a/webrtc/tools/frame_editing/frame_editing_unittest.cc
+++ b/webrtc/tools/frame_editing/frame_editing_unittest.cc
@@ -24,7 +24,7 @@
 
 const int kWidth = 352;
 const int kHeight = 288;
-const size_t kFrameSize = CalcBufferSize(kI420, kWidth, kHeight);
+const size_t kFrameSize = CalcBufferSize(VideoType::kI420, kWidth, kHeight);
 
 class FrameEditingTest : public ::testing::Test {
  protected: