Prepare for splitting FrameType into AudioFrameType and VideoFrameType

This CL deprecates the FrameType enum and adds the aliases AudioFrameType
and VideoFrameType.

After downstream usage is updated, the enums will be separated
and moved out of common_types.h.
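
For reference, a minimal self-contained sketch of the transitional aliases
and of how downstream code can start spelling the type by domain. It mirrors
the common_types.h change in this CL; the standalone enum and main() below
are illustration only, not part of the change:

  #include <vector>

  namespace webrtc {

  // Old enum, kept under a new name so the old name can be deprecated later.
  enum FrameTypeDeprecated {
    kEmptyFrame = 0,
    kAudioFrameSpeech = 1,
    kAudioFrameCN = 2,
    kVideoFrameKey = 3,
    kVideoFrameDelta = 4,
  };

  // Transitional aliases: all three names denote the same type for now, so
  // code still using FrameType keeps compiling while callers migrate.
  typedef FrameTypeDeprecated FrameType;
  using AudioFrameType = FrameTypeDeprecated;
  using VideoFrameType = FrameTypeDeprecated;

  }  // namespace webrtc

  int main() {
    // Video paths (e.g. VideoEncoder::Encode) take VideoFrameType...
    std::vector<webrtc::VideoFrameType> frame_types(3, webrtc::kVideoFrameKey);
    // ...and audio packetization callbacks take AudioFrameType.
    webrtc::AudioFrameType audio_type = webrtc::kAudioFrameSpeech;
    // The legacy spelling still works during the transition.
    webrtc::FrameType legacy = audio_type;
    return frame_types.empty() ? legacy : 0;
  }

Once the enums are actually split, the aliases stop being interchangeable
and any remaining FrameType uses will fail to compile, which is why
downstream usage needs to move to the new names first.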

Bug: webrtc:6883
Change-Id: I2aaf660169da45f22574b4cbb16aea8522cc07a6
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/123184
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27011}
diff --git a/BUILD.gn b/BUILD.gn
index f5b3459..ae2f716 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -452,6 +452,7 @@
     "api/video:video_bitrate_allocation",
     "api/video:video_frame",
     "rtc_base:checks",
+    "rtc_base:deprecation",
     "//third_party/abseil-cpp/absl/strings",
   ]
 }
diff --git a/api/fec_controller.h b/api/fec_controller.h
index 59e86cc..6cc46dd 100644
--- a/api/fec_controller.h
+++ b/api/fec_controller.h
@@ -74,8 +74,9 @@
                                   int64_t round_trip_time_ms) = 0;
 
   // Informs of encoded output.
-  virtual void UpdateWithEncodedData(size_t encoded_image_length,
-                                     FrameType encoded_image_frametype) = 0;
+  virtual void UpdateWithEncodedData(
+      size_t encoded_image_length,
+      VideoFrameType encoded_image_frametype) = 0;
 
   // Returns whether this FEC Controller needs Loss Vector Mask as input.
   virtual bool UseLossVectorMask() = 0;
diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h
index 62f17ba..15e3914 100644
--- a/api/test/mock_video_encoder.h
+++ b/api/test/mock_video_encoder.h
@@ -40,7 +40,7 @@
   MOCK_METHOD3(Encode,
                int32_t(const VideoFrame& inputImage,
                        const CodecSpecificInfo* codecSpecificInfo,
-                       const std::vector<FrameType>* frame_types));
+                       const std::vector<VideoFrameType>* frame_types));
   MOCK_METHOD1(RegisterEncodeCompleteCallback,
                int32_t(EncodedImageCallback* callback));
   MOCK_METHOD0(Release, int32_t());
diff --git a/api/test/videocodec_test_stats.h b/api/test/videocodec_test_stats.h
index 5de015f..c9eada3 100644
--- a/api/test/videocodec_test_stats.h
+++ b/api/test/videocodec_test_stats.h
@@ -43,7 +43,7 @@
     size_t encode_time_us = 0;
     size_t target_bitrate_kbps = 0;
     size_t length_bytes = 0;
-    webrtc::FrameType frame_type = kVideoFrameDelta;
+    webrtc::VideoFrameType frame_type = kVideoFrameDelta;
 
     // Layering.
     size_t spatial_idx = 0;
diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h
index 1d3bd46..804e06b 100644
--- a/api/video/encoded_image.h
+++ b/api/video/encoded_image.h
@@ -115,7 +115,7 @@
   // NTP time of the capture time in local timebase in milliseconds.
   int64_t ntp_time_ms_ = 0;
   int64_t capture_time_ms_ = 0;
-  FrameType _frameType = kVideoFrameDelta;
+  VideoFrameType _frameType = kVideoFrameDelta;
   VideoRotation rotation_ = kVideoRotation_0;
   VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
   bool _completeFrame = false;
diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
index 9adcd77..ec861dd 100644
--- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
+++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
@@ -95,7 +95,7 @@
     }
     int32_t Encode(const VideoFrame& frame,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       ++encode_count_;
       if (encode_complete_callback_ &&
           encode_return_code_ == WEBRTC_VIDEO_CODEC_OK) {
@@ -181,7 +181,7 @@
   rtc::scoped_refptr<I420Buffer> buffer =
       I420Buffer::Create(codec_.width, codec_.height);
   I420Buffer::SetBlack(buffer);
-  std::vector<FrameType> types(1, kVideoFrameKey);
+  std::vector<VideoFrameType> types(1, kVideoFrameKey);
 
   frame_ =
       absl::make_unique<VideoFrame>(VideoFrame::Builder()
@@ -293,7 +293,7 @@
   EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_);
 
   // Encoding a frame using the fallback should arrive at the new callback.
-  std::vector<FrameType> types(1, kVideoFrameKey);
+  std::vector<VideoFrameType> types(1, kVideoFrameKey);
   frame_->set_timestamp(frame_->timestamp() + 1000);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types));
 
diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc
index c28c181..a8b6f42 100644
--- a/api/video_codecs/video_encoder.cc
+++ b/api/video_codecs/video_encoder.cc
@@ -104,13 +104,13 @@
 // Implementations of the interface must implement one or the other of these two
 // methods.
 int32_t VideoEncoder::Encode(const VideoFrame& frame,
-                             const std::vector<FrameType>* frame_types) {
+                             const std::vector<VideoFrameType>* frame_types) {
   return Encode(frame, nullptr, frame_types);
 }
 
 int32_t VideoEncoder::Encode(const VideoFrame& frame,
                              const CodecSpecificInfo* codec_specific_info,
-                             const std::vector<FrameType>* frame_types) {
+                             const std::vector<VideoFrameType>* frame_types) {
   return Encode(frame, frame_types);
 }
 
diff --git a/api/video_codecs/video_encoder.h b/api/video_codecs/video_encoder.h
index 59dc55f..dc68928 100644
--- a/api/video_codecs/video_encoder.h
+++ b/api/video_codecs/video_encoder.h
@@ -242,12 +242,12 @@
   //                                  WEBRTC_VIDEO_CODEC_MEMORY
   //                                  WEBRTC_VIDEO_CODEC_ERROR
   virtual int32_t Encode(const VideoFrame& frame,
-                         const std::vector<FrameType>* frame_types);
+                         const std::vector<VideoFrameType>* frame_types);
   // TODO(bugs.webrtc.org/10379): Deprecated. Delete, and make above method pure
   // virtual, as soon as downstream applications are updated.
   virtual int32_t Encode(const VideoFrame& frame,
                          const CodecSpecificInfo* codec_specific_info,
-                         const std::vector<FrameType>* frame_types);
+                         const std::vector<VideoFrameType>* frame_types);
 
   // Inform the encoder about the new target bit rate.
   //
diff --git a/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/api/video_codecs/video_encoder_software_fallback_wrapper.cc
index c52262f..4360980 100644
--- a/api/video_codecs/video_encoder_software_fallback_wrapper.cc
+++ b/api/video_codecs/video_encoder_software_fallback_wrapper.cc
@@ -88,7 +88,7 @@
 
   int32_t Release() override;
   int32_t Encode(const VideoFrame& frame,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
   int32_t SetRateAllocation(const VideoBitrateAllocation& bitrate_allocation,
                             uint32_t framerate) override;
   EncoderInfo GetEncoderInfo() const override;
@@ -252,7 +252,7 @@
 
 int32_t VideoEncoderSoftwareFallbackWrapper::Encode(
     const VideoFrame& frame,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   if (use_fallback_encoder_)
     return fallback_encoder_->Encode(frame, frame_types);
   int32_t ret = encoder_->Encode(frame, frame_types);
diff --git a/audio/channel_send.cc b/audio/channel_send.cc
index 196911a..5951b6b 100644
--- a/audio/channel_send.cc
+++ b/audio/channel_send.cc
@@ -55,7 +55,7 @@
 constexpr int64_t kMinRetransmissionWindowMs = 30;
 
 MediaTransportEncodedAudioFrame::FrameType
-MediaTransportFrameTypeForWebrtcFrameType(webrtc::FrameType frame_type) {
+MediaTransportFrameTypeForWebrtcFrameType(webrtc::AudioFrameType frame_type) {
   switch (frame_type) {
     case kAudioFrameSpeech:
       return MediaTransportEncodedAudioFrame::FrameType::kSpeech;
@@ -184,7 +184,7 @@
   class ProcessAndEncodeAudioTask;
 
   // From AudioPacketizationCallback in the ACM
-  int32_t SendData(FrameType frameType,
+  int32_t SendData(AudioFrameType frameType,
                    uint8_t payloadType,
                    uint32_t timeStamp,
                    const uint8_t* payloadData,
@@ -196,13 +196,13 @@
 
   int SetSendRtpHeaderExtension(bool enable, RTPExtensionType type, int id);
 
-  int32_t SendRtpAudio(FrameType frameType,
+  int32_t SendRtpAudio(AudioFrameType frameType,
                        uint8_t payloadType,
                        uint32_t timeStamp,
                        rtc::ArrayView<const uint8_t> payload,
                        const RTPFragmentationHeader* fragmentation);
 
-  int32_t SendMediaTransportAudio(FrameType frameType,
+  int32_t SendMediaTransportAudio(AudioFrameType frameType,
                                   uint8_t payloadType,
                                   uint32_t timeStamp,
                                   rtc::ArrayView<const uint8_t> payload,
@@ -492,7 +492,7 @@
   ChannelSend* const channel_;
 };
 
-int32_t ChannelSend::SendData(FrameType frameType,
+int32_t ChannelSend::SendData(AudioFrameType frameType,
                               uint8_t payloadType,
                               uint32_t timeStamp,
                               const uint8_t* payloadData,
@@ -516,7 +516,7 @@
   }
 }
 
-int32_t ChannelSend::SendRtpAudio(FrameType frameType,
+int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType,
                                   uint8_t payloadType,
                                   uint32_t timeStamp,
                                   rtc::ArrayView<const uint8_t> payload,
@@ -589,7 +589,7 @@
 }
 
 int32_t ChannelSend::SendMediaTransportAudio(
-    FrameType frameType,
+    AudioFrameType frameType,
     uint8_t payloadType,
     uint32_t timeStamp,
     rtc::ArrayView<const uint8_t> payload,
diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc
index 149bd72..d96d268 100644
--- a/call/rtp_payload_params_unittest.cc
+++ b/call/rtp_payload_params_unittest.cc
@@ -347,7 +347,7 @@
 
   void ConvertAndCheck(int temporal_index,
                        int64_t shared_frame_id,
-                       FrameType frame_type,
+                       VideoFrameType frame_type,
                        LayerSync layer_sync,
                        const std::set<int64_t>& expected_deps,
                        uint16_t width = 0,
diff --git a/common_types.h b/common_types.h
index 2b877d6..2dc1783 100644
--- a/common_types.h
+++ b/common_types.h
@@ -25,7 +25,9 @@
 
 namespace webrtc {
 
-enum FrameType {
+// TODO(bugs.webrtc.org/6883): This type should be split into separate types for
+// audio and video, and then moved out of this file.
+enum FrameTypeDeprecated {
   kEmptyFrame = 0,
   kAudioFrameSpeech = 1,
   kAudioFrameCN = 2,
@@ -33,6 +35,12 @@
   kVideoFrameDelta = 4,
 };
 
+// Can't use RTC_DEPRECATED until Chromium is updated.
+typedef FrameTypeDeprecated FrameType;
+
+using AudioFrameType = FrameTypeDeprecated;
+using VideoFrameType = FrameTypeDeprecated;
+
 // Statistics for RTCP packet types.
 struct RtcpPacketTypeCounter {
   RtcpPacketTypeCounter()
diff --git a/media/engine/encoder_simulcast_proxy.cc b/media/engine/encoder_simulcast_proxy.cc
index dd35bdb..e87e103 100644
--- a/media/engine/encoder_simulcast_proxy.cc
+++ b/media/engine/encoder_simulcast_proxy.cc
@@ -43,8 +43,9 @@
   return ret;
 }
 
-int EncoderSimulcastProxy::Encode(const VideoFrame& input_image,
-                                  const std::vector<FrameType>* frame_types) {
+int EncoderSimulcastProxy::Encode(
+    const VideoFrame& input_image,
+    const std::vector<VideoFrameType>* frame_types) {
   return encoder_->Encode(input_image, frame_types);
 }
 
diff --git a/media/engine/encoder_simulcast_proxy.h b/media/engine/encoder_simulcast_proxy.h
index ce408ac..2574fa9 100644
--- a/media/engine/encoder_simulcast_proxy.h
+++ b/media/engine/encoder_simulcast_proxy.h
@@ -46,7 +46,7 @@
                  int number_of_cores,
                  size_t max_payload_size) override;
   int Encode(const VideoFrame& input_image,
-             const std::vector<FrameType>* frame_types) override;
+             const std::vector<VideoFrameType>* frame_types) override;
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
   int SetRateAllocation(const VideoBitrateAllocation& bitrate,
                         uint32_t new_framerate) override;
diff --git a/media/engine/encoder_simulcast_proxy_unittest.cc b/media/engine/encoder_simulcast_proxy_unittest.cc
index 15fdaaf9..62e215d 100644
--- a/media/engine/encoder_simulcast_proxy_unittest.cc
+++ b/media/engine/encoder_simulcast_proxy_unittest.cc
@@ -48,7 +48,7 @@
       Encode,
       int32_t(const VideoFrame& inputImage,
               const CodecSpecificInfo* codecSpecificInfo,
-              const std::vector<FrameType>* frame_types) /* override */);
+              const std::vector<VideoFrameType>* frame_types) /* override */);
 
   MOCK_CONST_METHOD0(GetEncoderInfo, VideoEncoder::EncoderInfo(void));
 };
diff --git a/media/engine/fake_webrtc_video_engine.cc b/media/engine/fake_webrtc_video_engine.cc
index 32ce1fa..f275fd3 100644
--- a/media/engine/fake_webrtc_video_engine.cc
+++ b/media/engine/fake_webrtc_video_engine.cc
@@ -151,7 +151,7 @@
 int32_t FakeWebRtcVideoEncoder::Encode(
     const webrtc::VideoFrame& inputImage,
     const webrtc::CodecSpecificInfo* codecSpecificInfo,
-    const std::vector<webrtc::FrameType>* frame_types) {
+    const std::vector<webrtc::VideoFrameType>* frame_types) {
   rtc::CritScope lock(&crit_);
   ++num_frames_encoded_;
   init_encode_event_.Set();
diff --git a/media/engine/fake_webrtc_video_engine.h b/media/engine/fake_webrtc_video_engine.h
index 6d06923..08c7bb9 100644
--- a/media/engine/fake_webrtc_video_engine.h
+++ b/media/engine/fake_webrtc_video_engine.h
@@ -88,9 +88,10 @@
   int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
                      int32_t numberOfCores,
                      size_t maxPayloadSize) override;
-  int32_t Encode(const webrtc::VideoFrame& inputImage,
-                 const webrtc::CodecSpecificInfo* codecSpecificInfo,
-                 const std::vector<webrtc::FrameType>* frame_types) override;
+  int32_t Encode(
+      const webrtc::VideoFrame& inputImage,
+      const webrtc::CodecSpecificInfo* codecSpecificInfo,
+      const std::vector<webrtc::VideoFrameType>* frame_types) override;
   int32_t RegisterEncodeCompleteCallback(
       webrtc::EncodedImageCallback* callback) override;
   int32_t Release() override;
diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc
index fc18aa4..1595cfe 100644
--- a/media/engine/simulcast_encoder_adapter.cc
+++ b/media/engine/simulcast_encoder_adapter.cc
@@ -338,7 +338,7 @@
 
 int SimulcastEncoderAdapter::Encode(
     const VideoFrame& input_image,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
 
   if (!Initialized()) {
@@ -375,7 +375,7 @@
       continue;
     }
 
-    std::vector<FrameType> stream_frame_types;
+    std::vector<VideoFrameType> stream_frame_types;
     if (send_key_frame) {
       stream_frame_types.push_back(kVideoFrameKey);
       streaminfos_[stream_idx].key_frame_request = false;
diff --git a/media/engine/simulcast_encoder_adapter.h b/media/engine/simulcast_encoder_adapter.h
index a62e879..039ab62 100644
--- a/media/engine/simulcast_encoder_adapter.h
+++ b/media/engine/simulcast_encoder_adapter.h
@@ -45,7 +45,7 @@
                  int number_of_cores,
                  size_t max_payload_size) override;
   int Encode(const VideoFrame& input_image,
-             const std::vector<FrameType>* frame_types) override;
+             const std::vector<VideoFrameType>* frame_types) override;
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
   int SetRateAllocation(const VideoBitrateAllocation& bitrate,
                         uint32_t new_framerate) override;
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index 2d575ec..147fe8b 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -198,7 +198,7 @@
       Encode,
       int32_t(const VideoFrame& inputImage,
               const CodecSpecificInfo* codecSpecificInfo,
-              const std::vector<FrameType>* frame_types) /* override */);
+              const std::vector<VideoFrameType>* frame_types) /* override */);
 
   int32_t RegisterEncodeCompleteCallback(
       EncodedImageCallback* callback) /* override */ {
@@ -556,7 +556,7 @@
                                .set_timestamp_ms(1000)
                                .set_rotation(kVideoRotation_180)
                                .build();
-  std::vector<FrameType> frame_types;
+  std::vector<VideoFrameType> frame_types;
 
   // Encode with three streams.
   EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
@@ -890,7 +890,7 @@
   // frame and can't otherwise be modified/resized.
   for (MockVideoEncoder* encoder : helper_->factory()->encoders())
     EXPECT_CALL(*encoder, Encode(::testing::Ref(input_frame), _, _)).Times(1);
-  std::vector<FrameType> frame_types(3, kVideoFrameKey);
+  std::vector<VideoFrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types));
 }
 
@@ -916,7 +916,7 @@
                                .set_timestamp_us(0)
                                .set_rotation(kVideoRotation_0)
                                .build();
-  std::vector<FrameType> frame_types(3, kVideoFrameKey);
+  std::vector<VideoFrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
             adapter_->Encode(input_frame, &frame_types));
 }
@@ -1031,7 +1031,7 @@
   EXPECT_CALL(*original_encoders[1], Encode(_, _, _)).Times(0);
   EXPECT_CALL(*original_encoders[2], Encode(_, _, _)).Times(0);
 
-  std::vector<FrameType> frame_types;
+  std::vector<VideoFrameType> frame_types;
   frame_types.resize(3, kVideoFrameKey);
   EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types));
 }
diff --git a/modules/audio_coding/acm2/acm_receiver_unittest.cc b/modules/audio_coding/acm2/acm_receiver_unittest.cc
index e5a7684..7667b71 100644
--- a/modules/audio_coding/acm2/acm_receiver_unittest.cc
+++ b/modules/audio_coding/acm2/acm_receiver_unittest.cc
@@ -103,7 +103,7 @@
     return num_10ms_frames;
   }
 
-  int SendData(FrameType frame_type,
+  int SendData(AudioFrameType frame_type,
                uint8_t payload_type,
                uint32_t timestamp,
                const uint8_t* payload_data,
@@ -139,7 +139,7 @@
   uint32_t timestamp_;
   bool packet_sent_;  // Set when SendData is called reset when inserting audio.
   uint32_t last_packet_send_timestamp_;
-  FrameType last_frame_type_;
+  AudioFrameType last_frame_type_;
 };
 
 #if defined(WEBRTC_ANDROID)
diff --git a/modules/audio_coding/acm2/acm_send_test.cc b/modules/audio_coding/acm2/acm_send_test.cc
index b6110b6..4c34e41 100644
--- a/modules/audio_coding/acm2/acm_send_test.cc
+++ b/modules/audio_coding/acm2/acm_send_test.cc
@@ -123,7 +123,7 @@
 
 // This method receives the callback from ACM when a new packet is produced.
 int32_t AcmSendTestOldApi::SendData(
-    FrameType frame_type,
+    AudioFrameType frame_type,
     uint8_t payload_type,
     uint32_t timestamp,
     const uint8_t* payload_data,
diff --git a/modules/audio_coding/acm2/acm_send_test.h b/modules/audio_coding/acm2/acm_send_test.h
index 24d230b..744d015 100644
--- a/modules/audio_coding/acm2/acm_send_test.h
+++ b/modules/audio_coding/acm2/acm_send_test.h
@@ -50,7 +50,7 @@
   std::unique_ptr<Packet> NextPacket() override;
 
   // Inherited from AudioPacketizationCallback.
-  int32_t SendData(FrameType frame_type,
+  int32_t SendData(AudioFrameType frame_type,
                    uint8_t payload_type,
                    uint32_t timestamp,
                    const uint8_t* payload_data,
@@ -75,7 +75,7 @@
   bool codec_registered_;
   int test_duration_ms_;
   // The following member variables are set whenever SendData() is called.
-  FrameType frame_type_;
+  AudioFrameType frame_type_;
   int payload_type_;
   uint32_t timestamp_;
   uint16_t sequence_number_;
diff --git a/modules/audio_coding/acm2/audio_coding_module.cc b/modules/audio_coding/acm2/audio_coding_module.cc
index 1547b37..a4b64b1 100644
--- a/modules/audio_coding/acm2/audio_coding_module.cc
+++ b/modules/audio_coding/acm2/audio_coding_module.cc
@@ -393,7 +393,7 @@
 
   RTPFragmentationHeader my_fragmentation;
   ConvertEncodedInfoToFragmentationHeader(encoded_info, &my_fragmentation);
-  FrameType frame_type;
+  AudioFrameType frame_type;
   if (encode_buffer_.size() == 0 && encoded_info.send_even_if_empty) {
     frame_type = kEmptyFrame;
     encoded_info.payload_type = previous_pltype;
diff --git a/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
index 4ee9add..797b9b1 100644
--- a/modules/audio_coding/acm2/audio_coding_module_unittest.cc
+++ b/modules/audio_coding/acm2/audio_coding_module_unittest.cc
@@ -104,7 +104,7 @@
         last_payload_type_(-1),
         last_timestamp_(0) {}
 
-  int32_t SendData(FrameType frame_type,
+  int32_t SendData(AudioFrameType frame_type,
                    uint8_t payload_type,
                    uint32_t timestamp,
                    const uint8_t* payload_data,
@@ -129,7 +129,7 @@
     return rtc::checked_cast<int>(last_payload_vec_.size());
   }
 
-  FrameType last_frame_type() const {
+  AudioFrameType last_frame_type() const {
     rtc::CritScope lock(&crit_sect_);
     return last_frame_type_;
   }
@@ -151,7 +151,7 @@
 
  private:
   int num_calls_ RTC_GUARDED_BY(crit_sect_);
-  FrameType last_frame_type_ RTC_GUARDED_BY(crit_sect_);
+  AudioFrameType last_frame_type_ RTC_GUARDED_BY(crit_sect_);
   int last_payload_type_ RTC_GUARDED_BY(crit_sect_);
   uint32_t last_timestamp_ RTC_GUARDED_BY(crit_sect_);
   std::vector<uint8_t> last_payload_vec_ RTC_GUARDED_BY(crit_sect_);
@@ -430,7 +430,7 @@
     // that is contain comfort noise.
     const struct {
       int ix;
-      FrameType type;
+      AudioFrameType type;
     } expectation[] = {
         {2, kAudioFrameCN},  {5, kEmptyFrame},    {8, kEmptyFrame},
         {11, kAudioFrameCN}, {14, kEmptyFrame},   {17, kEmptyFrame},
diff --git a/modules/audio_coding/include/audio_coding_module.h b/modules/audio_coding/include/audio_coding_module.h
index 7e5bf1b..0621473 100644
--- a/modules/audio_coding/include/audio_coding_module.h
+++ b/modules/audio_coding/include/audio_coding_module.h
@@ -40,7 +40,7 @@
  public:
   virtual ~AudioPacketizationCallback() {}
 
-  virtual int32_t SendData(FrameType frame_type,
+  virtual int32_t SendData(AudioFrameType frame_type,
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
@@ -53,7 +53,7 @@
  public:
   virtual ~ACMVADCallback() {}
 
-  virtual int32_t InFrameType(FrameType frame_type) = 0;
+  virtual int32_t InFrameType(AudioFrameType frame_type) = 0;
 };
 
 class AudioCodingModule {
diff --git a/modules/audio_coding/neteq/tools/rtp_encode.cc b/modules/audio_coding/neteq/tools/rtp_encode.cc
index 14c6e58..443dfd8 100644
--- a/modules/audio_coding/neteq/tools/rtp_encode.cc
+++ b/modules/audio_coding/neteq/tools/rtp_encode.cc
@@ -107,7 +107,7 @@
         ssrc_(ssrc),
         timestamp_rate_hz_(timestamp_rate_hz) {}
 
-  int32_t SendData(FrameType frame_type,
+  int32_t SendData(AudioFrameType frame_type,
                    uint8_t payload_type,
                    uint32_t timestamp,
                    const uint8_t* payload_data,
diff --git a/modules/audio_coding/test/Channel.cc b/modules/audio_coding/test/Channel.cc
index adfc0d5..d54faa7 100644
--- a/modules/audio_coding/test/Channel.cc
+++ b/modules/audio_coding/test/Channel.cc
@@ -18,7 +18,7 @@
 
 namespace webrtc {
 
-int32_t Channel::SendData(FrameType frameType,
+int32_t Channel::SendData(AudioFrameType frameType,
                           uint8_t payloadType,
                           uint32_t timeStamp,
                           const uint8_t* payloadData,
diff --git a/modules/audio_coding/test/Channel.h b/modules/audio_coding/test/Channel.h
index 4d7f0b7..6a55b06 100644
--- a/modules/audio_coding/test/Channel.h
+++ b/modules/audio_coding/test/Channel.h
@@ -47,7 +47,7 @@
   Channel(int16_t chID = -1);
   ~Channel() override;
 
-  int32_t SendData(FrameType frameType,
+  int32_t SendData(AudioFrameType frameType,
                    uint8_t payloadType,
                    uint32_t timeStamp,
                    const uint8_t* payloadData,
diff --git a/modules/audio_coding/test/EncodeDecodeTest.cc b/modules/audio_coding/test/EncodeDecodeTest.cc
index 28ee8aa..c961fe5 100644
--- a/modules/audio_coding/test/EncodeDecodeTest.cc
+++ b/modules/audio_coding/test/EncodeDecodeTest.cc
@@ -33,8 +33,10 @@
 }
 
 int32_t TestPacketization::SendData(
-    const FrameType /* frameType */, const uint8_t payloadType,
-    const uint32_t timeStamp, const uint8_t* payloadData,
+    const AudioFrameType /* frameType */,
+    const uint8_t payloadType,
+    const uint32_t timeStamp,
+    const uint8_t* payloadData,
     const size_t payloadSize,
     const RTPFragmentationHeader* /* fragmentation */) {
   _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
diff --git a/modules/audio_coding/test/EncodeDecodeTest.h b/modules/audio_coding/test/EncodeDecodeTest.h
index cdfc706..6dc7bc9 100644
--- a/modules/audio_coding/test/EncodeDecodeTest.h
+++ b/modules/audio_coding/test/EncodeDecodeTest.h
@@ -28,7 +28,7 @@
  public:
   TestPacketization(RTPStream *rtpStream, uint16_t frequency);
   ~TestPacketization();
-  int32_t SendData(const FrameType frameType,
+  int32_t SendData(const AudioFrameType frameType,
                    const uint8_t payloadType,
                    const uint32_t timeStamp,
                    const uint8_t* payloadData,
diff --git a/modules/audio_coding/test/TestAllCodecs.cc b/modules/audio_coding/test/TestAllCodecs.cc
index 81b83c0..52518ac 100644
--- a/modules/audio_coding/test/TestAllCodecs.cc
+++ b/modules/audio_coding/test/TestAllCodecs.cc
@@ -60,7 +60,7 @@
   return;
 }
 
-int32_t TestPack::SendData(FrameType frame_type,
+int32_t TestPack::SendData(AudioFrameType frame_type,
                            uint8_t payload_type,
                            uint32_t timestamp,
                            const uint8_t* payload_data,
diff --git a/modules/audio_coding/test/TestAllCodecs.h b/modules/audio_coding/test/TestAllCodecs.h
index 3125efe..d8a7711 100644
--- a/modules/audio_coding/test/TestAllCodecs.h
+++ b/modules/audio_coding/test/TestAllCodecs.h
@@ -25,7 +25,7 @@
 
   void RegisterReceiverACM(AudioCodingModule* acm);
 
-  int32_t SendData(FrameType frame_type,
+  int32_t SendData(AudioFrameType frame_type,
                    uint8_t payload_type,
                    uint32_t timestamp,
                    const uint8_t* payload_data,
diff --git a/modules/audio_coding/test/TestStereo.cc b/modules/audio_coding/test/TestStereo.cc
index 2c71f46..2fa56de 100644
--- a/modules/audio_coding/test/TestStereo.cc
+++ b/modules/audio_coding/test/TestStereo.cc
@@ -40,7 +40,7 @@
   return;
 }
 
-int32_t TestPackStereo::SendData(const FrameType frame_type,
+int32_t TestPackStereo::SendData(const AudioFrameType frame_type,
                                  const uint8_t payload_type,
                                  const uint32_t timestamp,
                                  const uint8_t* payload_data,
diff --git a/modules/audio_coding/test/TestStereo.h b/modules/audio_coding/test/TestStereo.h
index da10bf1..9a44a10 100644
--- a/modules/audio_coding/test/TestStereo.h
+++ b/modules/audio_coding/test/TestStereo.h
@@ -31,7 +31,7 @@
 
   void RegisterReceiverACM(AudioCodingModule* acm);
 
-  int32_t SendData(const FrameType frame_type,
+  int32_t SendData(const AudioFrameType frame_type,
                    const uint8_t payload_type,
                    const uint32_t timestamp,
                    const uint8_t* payload_data,
diff --git a/modules/audio_coding/test/TestVADDTX.cc b/modules/audio_coding/test/TestVADDTX.cc
index 7c04b22..b22e97e 100644
--- a/modules/audio_coding/test/TestVADDTX.cc
+++ b/modules/audio_coding/test/TestVADDTX.cc
@@ -33,7 +33,7 @@
   ResetStatistics();
 }
 
-int32_t ActivityMonitor::InFrameType(FrameType frame_type) {
+int32_t ActivityMonitor::InFrameType(AudioFrameType frame_type) {
   counter_[frame_type]++;
   return 0;
 }
diff --git a/modules/audio_coding/test/TestVADDTX.h b/modules/audio_coding/test/TestVADDTX.h
index f2358e7..36d5f95 100644
--- a/modules/audio_coding/test/TestVADDTX.h
+++ b/modules/audio_coding/test/TestVADDTX.h
@@ -25,7 +25,7 @@
 class ActivityMonitor : public ACMVADCallback {
  public:
   ActivityMonitor();
-  int32_t InFrameType(FrameType frame_type);
+  int32_t InFrameType(AudioFrameType frame_type);
   void PrintStatistics();
   void ResetStatistics();
   void GetStatistics(uint32_t* stats);
diff --git a/modules/include/module_common_types.h b/modules/include/module_common_types.h
index 26122b1..ff4fb72 100644
--- a/modules/include/module_common_types.h
+++ b/modules/include/module_common_types.h
@@ -30,7 +30,8 @@
   RTPVideoHeader video;
 
   RTPHeader header;
-  FrameType frameType;
+  // Used for video only.
+  VideoFrameType frameType;
   // NTP time of the capture time in local timebase in milliseconds.
   int64_t ntp_time_ms;
 };
diff --git a/modules/rtp_rtcp/source/rtp_format.cc b/modules/rtp_rtcp/source/rtp_format.cc
index 0010d90..7375a63 100644
--- a/modules/rtp_rtcp/source/rtp_format.cc
+++ b/modules/rtp_rtcp/source/rtp_format.cc
@@ -29,7 +29,7 @@
     PayloadSizeLimits limits,
     // Codec-specific details.
     const RTPVideoHeader& rtp_video_header,
-    FrameType frame_type,
+    VideoFrameType frame_type,
     const RTPFragmentationHeader* fragmentation) {
   switch (type) {
     case kVideoCodecH264: {
diff --git a/modules/rtp_rtcp/source/rtp_format.h b/modules/rtp_rtcp/source/rtp_format.h
index 71c7dc5..c32283b 100644
--- a/modules/rtp_rtcp/source/rtp_format.h
+++ b/modules/rtp_rtcp/source/rtp_format.h
@@ -39,7 +39,7 @@
       PayloadSizeLimits limits,
       // Codec-specific details.
       const RTPVideoHeader& rtp_video_header,
-      FrameType frame_type,
+      VideoFrameType frame_type,
       const RTPFragmentationHeader* fragmentation);
 
   virtual ~RtpPacketizer() = default;
@@ -71,7 +71,7 @@
 
     const uint8_t* payload;
     size_t payload_length;
-    FrameType frame_type;
+    VideoFrameType frame_type;
   };
 
   static RtpDepacketizer* Create(VideoCodecType type);
diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic.cc b/modules/rtp_rtcp/source/rtp_format_video_generic.cc
index 92aada4..7af8121 100644
--- a/modules/rtp_rtcp/source/rtp_format_video_generic.cc
+++ b/modules/rtp_rtcp/source/rtp_format_video_generic.cc
@@ -26,7 +26,7 @@
     rtc::ArrayView<const uint8_t> payload,
     PayloadSizeLimits limits,
     const RTPVideoHeader& rtp_video_header,
-    FrameType frame_type)
+    VideoFrameType frame_type)
     : remaining_payload_(payload) {
   BuildHeader(rtp_video_header, frame_type);
 
@@ -72,7 +72,7 @@
 }
 
 void RtpPacketizerGeneric::BuildHeader(const RTPVideoHeader& rtp_video_header,
-                                       FrameType frame_type) {
+                                       VideoFrameType frame_type) {
   header_size_ = kGenericHeaderLength;
   header_[0] = RtpFormatVideoGeneric::kFirstPacketBit;
   if (frame_type == kVideoFrameKey) {
diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic.h b/modules/rtp_rtcp/source/rtp_format_video_generic.h
index 982e35a..069f20d 100644
--- a/modules/rtp_rtcp/source/rtp_format_video_generic.h
+++ b/modules/rtp_rtcp/source/rtp_format_video_generic.h
@@ -38,7 +38,7 @@
   RtpPacketizerGeneric(rtc::ArrayView<const uint8_t> payload,
                        PayloadSizeLimits limits,
                        const RTPVideoHeader& rtp_video_header,
-                       FrameType frametype);
+                       VideoFrameType frametype);
 
   ~RtpPacketizerGeneric() override;
 
@@ -52,7 +52,7 @@
  private:
   // Fills header_ and header_size_ members.
   void BuildHeader(const RTPVideoHeader& rtp_video_header,
-                   FrameType frame_type);
+                   VideoFrameType frame_type);
 
   uint8_t header_[3];
   size_t header_size_;
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc
index c049530..2f00603 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -30,7 +30,7 @@
 
 namespace {
 
-const char* FrameTypeToString(FrameType frame_type) {
+const char* FrameTypeToString(AudioFrameType frame_type) {
   switch (frame_type) {
     case kEmptyFrame:
       return "empty";
@@ -88,7 +88,7 @@
   return 0;
 }
 
-bool RTPSenderAudio::MarkerBit(FrameType frame_type, int8_t payload_type) {
+bool RTPSenderAudio::MarkerBit(AudioFrameType frame_type, int8_t payload_type) {
   rtc::CritScope cs(&send_audio_critsect_);
   // for audio true for first packet in a speech burst
   bool marker_bit = false;
@@ -131,7 +131,7 @@
   return marker_bit;
 }
 
-bool RTPSenderAudio::SendAudio(FrameType frame_type,
+bool RTPSenderAudio::SendAudio(AudioFrameType frame_type,
                                int8_t payload_type,
                                uint32_t rtp_timestamp,
                                const uint8_t* payload_data,
diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h b/modules/rtp_rtcp/source/rtp_sender_audio.h
index fa58943..362dd49 100644
--- a/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -39,7 +39,7 @@
                                size_t channels,
                                uint32_t rate);
 
-  bool SendAudio(FrameType frame_type,
+  bool SendAudio(AudioFrameType frame_type,
                  int8_t payload_type,
                  uint32_t capture_timestamp,
                  const uint8_t* payload_data,
@@ -60,7 +60,7 @@
       uint16_t duration,
       bool marker_bit);  // set on first packet in talk burst
 
-  bool MarkerBit(FrameType frame_type, int8_t payload_type);
+  bool MarkerBit(AudioFrameType frame_type, int8_t payload_type);
 
  private:
   bool LogAndSendToNetwork(std::unique_ptr<RtpPacketToSend> packet,
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc
index 8b835bd..456b478 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -54,7 +54,7 @@
 
 void AddRtpHeaderExtensions(const RTPVideoHeader& video_header,
                             const absl::optional<PlayoutDelay>& playout_delay,
-                            FrameType frame_type,
+                            VideoFrameType frame_type,
                             bool set_video_rotation,
                             bool set_color_space,
                             bool set_frame_marking,
@@ -167,7 +167,7 @@
   return true;
 }
 
-const char* FrameTypeToString(FrameType frame_type) {
+const char* FrameTypeToString(VideoFrameType frame_type) {
   switch (frame_type) {
     case kEmptyFrame:
       return "empty";
@@ -421,7 +421,7 @@
   return absl::nullopt;
 }
 
-bool RTPSenderVideo::SendVideo(FrameType frame_type,
+bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
                                int8_t payload_type,
                                uint32_t rtp_timestamp,
                                int64_t capture_time_ms,
diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h
index 9772b86..afdca1e 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -59,7 +59,7 @@
                  const WebRtcKeyValueConfig& field_trials);
   virtual ~RTPSenderVideo();
 
-  bool SendVideo(FrameType frame_type,
+  bool SendVideo(VideoFrameType frame_type,
                  int8_t payload_type,
                  uint32_t capture_timestamp,
                  int64_t capture_time_ms,
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 09a5d02..1800b9e 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -64,7 +64,7 @@
   return 1;
 }
 
-FrameType ConvertToVideoFrameType(EVideoFrameType type) {
+VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {
   switch (type) {
     case videoFrameTypeIDR:
       return kVideoFrameKey;
@@ -381,9 +381,10 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
-                                const CodecSpecificInfo* codec_specific_info,
-                                const std::vector<FrameType>* frame_types) {
+int32_t H264EncoderImpl::Encode(
+    const VideoFrame& input_frame,
+    const CodecSpecificInfo* codec_specific_info,
+    const std::vector<VideoFrameType>* frame_types) {
   if (encoders_.empty()) {
     ReportError();
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h
index 75a8758..36a7f02 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.h
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h
@@ -68,7 +68,7 @@
   // passed to the encode complete callback.
   int32_t Encode(const VideoFrame& frame,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
 
   EncoderInfo GetEncoderInfo() const override;
 
diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
index 0dd1930..de010c9 100644
--- a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
+++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h
@@ -43,7 +43,7 @@
                  int number_of_cores,
                  size_t max_payload_size) override;
   int Encode(const VideoFrame& input_image,
-             const std::vector<FrameType>* frame_types) override;
+             const std::vector<VideoFrameType>* frame_types) override;
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
   int SetRateAllocation(const VideoBitrateAllocation& bitrate,
                         uint32_t new_framerate) override;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
index dcba67e..e3eceac 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
@@ -115,11 +115,13 @@
       ByteReader<uint32_t>::ReadBigEndian(buffer + offset);
   offset += sizeof(uint32_t);
 
+  // TODO(nisse): This makes the wire format depend on the numeric values of the
+  // VideoCodecType and VideoFrameType enum constants.
   frame_header.codec_type = static_cast<VideoCodecType>(
       ByteReader<uint8_t>::ReadBigEndian(buffer + offset));
   offset += sizeof(uint8_t);
 
-  frame_header.frame_type = static_cast<FrameType>(
+  frame_header.frame_type = static_cast<VideoFrameType>(
       ByteReader<uint8_t>::ReadBigEndian(buffer + offset));
   offset += sizeof(uint8_t);
 
@@ -181,8 +183,8 @@
     // key frame so as to decode the whole image without previous frame data.
     // Thus only when all components are key frames, we can mark the combined
     // frame as key frame.
-    if (frame_header.frame_type == FrameType::kVideoFrameDelta) {
-      combined_image._frameType = FrameType::kVideoFrameDelta;
+    if (frame_header.frame_type == VideoFrameType::kVideoFrameDelta) {
+      combined_image._frameType = VideoFrameType::kVideoFrameDelta;
     }
 
     frame_headers.push_back(frame_header);
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
index 9d9be26..d3505e4 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h
@@ -67,7 +67,7 @@
   VideoCodecType codec_type;
 
   // Indicated the underlying frame is a key frame or delta frame.
-  FrameType frame_type;
+  VideoFrameType frame_type;
 };
 const int kMultiplexImageComponentHeaderSize =
     sizeof(uint32_t) + sizeof(uint8_t) + sizeof(uint32_t) + sizeof(uint32_t) +
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index 4b27b18..6e3c5e2 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -138,12 +138,12 @@
 
 int MultiplexEncoderAdapter::Encode(
     const VideoFrame& input_image,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   if (!encoded_complete_callback_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
 
-  std::vector<FrameType> adjusted_frame_types;
+  std::vector<VideoFrameType> adjusted_frame_types;
   if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) {
     adjusted_frame_types.push_back(kVideoFrameKey);
   } else {
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index ac63738..7458006 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -285,9 +285,9 @@
   }
 
   // Encode.
-  const std::vector<FrameType> frame_types =
-      (frame_number == 0) ? std::vector<FrameType>{kVideoFrameKey}
-                          : std::vector<FrameType>{kVideoFrameDelta};
+  const std::vector<VideoFrameType> frame_types =
+      (frame_number == 0) ? std::vector<VideoFrameType>{kVideoFrameKey}
+                          : std::vector<VideoFrameType>{kVideoFrameDelta};
   const int encode_return_code = encoder_->Encode(input_frame, &frame_types);
   for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {
     FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i);
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 1bf42ee..94b079f 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -737,7 +737,7 @@
 
 int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
                              const CodecSpecificInfo* codec_specific_info,
-                             const std::vector<FrameType>* frame_types) {
+                             const std::vector<VideoFrameType>* frame_types) {
   RTC_DCHECK_EQ(frame.width(), codec_.width);
   RTC_DCHECK_EQ(frame.height(), codec_.height);
 
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
index 5a2205b..2710559 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h
@@ -47,7 +47,7 @@
 
   int Encode(const VideoFrame& input_image,
              const CodecSpecificInfo* codec_specific_info,
-             const std::vector<FrameType>* frame_types) override;
+             const std::vector<VideoFrameType>* frame_types) override;
 
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
 
diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index ec687df..771471f 100644
--- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -74,11 +74,11 @@
                              EncodedImage* encoded_frame,
                              CodecSpecificInfo* codec_specific_info,
                              bool keyframe = false) {
-    std::vector<FrameType> frame_types;
+    std::vector<VideoFrameType> frame_types;
     if (keyframe) {
-      frame_types.emplace_back(FrameType::kVideoFrameKey);
+      frame_types.emplace_back(VideoFrameType::kVideoFrameKey);
     } else {
-      frame_types.emplace_back(FrameType::kVideoFrameDelta);
+      frame_types.emplace_back(VideoFrameType::kVideoFrameDelta);
     }
     EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
               encoder_->Encode(input_frame, &frame_types));
@@ -484,7 +484,7 @@
       .Times(2)
       .WillRepeatedly(Return(vpx_codec_err_t::VPX_CODEC_OK));
 
-  auto delta_frame = std::vector<FrameType>{kVideoFrameDelta};
+  auto delta_frame = std::vector<VideoFrameType>{kVideoFrameDelta};
   encoder.Encode(*NextInputFrame(), nullptr, &delta_frame);
 }
 
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index a342b39..177c839 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -714,7 +714,7 @@
 
 int VP9EncoderImpl::Encode(const VideoFrame& input_image,
                            const CodecSpecificInfo* codec_specific_info,
-                           const std::vector<FrameType>* frame_types) {
+                           const std::vector<VideoFrameType>* frame_types) {
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/vp9_impl.h
index 1e9979f..62aeeb5 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -45,7 +45,7 @@
 
   int Encode(const VideoFrame& input_image,
              const CodecSpecificInfo* codec_specific_info,
-             const std::vector<FrameType>* frame_types) override;
+             const std::vector<VideoFrameType>* frame_types) override;
 
   int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
 
diff --git a/modules/video_coding/encoded_frame.h b/modules/video_coding/encoded_frame.h
index eeaea15..25c2f9b 100644
--- a/modules/video_coding/encoded_frame.h
+++ b/modules/video_coding/encoded_frame.h
@@ -67,7 +67,7 @@
   /**
    *   Get frame type
    */
-  webrtc::FrameType FrameType() const { return _frameType; }
+  webrtc::VideoFrameType FrameType() const { return _frameType; }
   /**
    *   Get frame rotation
    */
diff --git a/modules/video_coding/fec_controller_default.cc b/modules/video_coding/fec_controller_default.cc
index 3bceecc..4502f2c 100644
--- a/modules/video_coding/fec_controller_default.cc
+++ b/modules/video_coding/fec_controller_default.cc
@@ -177,7 +177,7 @@
 }
 void FecControllerDefault::UpdateWithEncodedData(
     const size_t encoded_image_length,
-    const FrameType encoded_image_frametype) {
+    const VideoFrameType encoded_image_frametype) {
   const size_t encoded_length = encoded_image_length;
   CritScope lock(&crit_sect_);
   if (encoded_length > 0) {
diff --git a/modules/video_coding/fec_controller_default.h b/modules/video_coding/fec_controller_default.h
index 1db39a4..f4bbf22 100644
--- a/modules/video_coding/fec_controller_default.h
+++ b/modules/video_coding/fec_controller_default.h
@@ -44,8 +44,9 @@
                           uint8_t fraction_lost,
                           std::vector<bool> loss_mask_vector,
                           int64_t round_trip_time_ms) override;
-  void UpdateWithEncodedData(const size_t encoded_image_length,
-                             const FrameType encoded_image_frametype) override;
+  void UpdateWithEncodedData(
+      const size_t encoded_image_length,
+      const VideoFrameType encoded_image_frametype) override;
   bool UseLossVectorMask() override;
   float GetProtectionOverheadRateThreshold();
 
diff --git a/modules/video_coding/frame_buffer.cc b/modules/video_coding/frame_buffer.cc
index c4ba820..a4c92ad 100644
--- a/modules/video_coding/frame_buffer.cc
+++ b/modules/video_coding/frame_buffer.cc
@@ -29,7 +29,7 @@
 
 VCMFrameBuffer::~VCMFrameBuffer() {}
 
-webrtc::FrameType VCMFrameBuffer::FrameType() const {
+webrtc::VideoFrameType VCMFrameBuffer::FrameType() const {
   return _sessionInfo.FrameType();
 }
 
diff --git a/modules/video_coding/frame_buffer.h b/modules/video_coding/frame_buffer.h
index 18f40fc..4b5ef7f 100644
--- a/modules/video_coding/frame_buffer.h
+++ b/modules/video_coding/frame_buffer.h
@@ -70,7 +70,7 @@
 
   int64_t LatestPacketTimeMs() const;
 
-  webrtc::FrameType FrameType() const;
+  webrtc::VideoFrameType FrameType() const;
 
  private:
   void SetState(VCMFrameBufferStateEnum state);  // Set state of frame
diff --git a/modules/video_coding/frame_object.cc b/modules/video_coding/frame_object.cc
index 268adfc..884204e 100644
--- a/modules/video_coding/frame_object.cc
+++ b/modules/video_coding/frame_object.cc
@@ -121,7 +121,7 @@
   return times_nacked_;
 }
 
-FrameType RtpFrameObject::frame_type() const {
+VideoFrameType RtpFrameObject::frame_type() const {
   return frame_type_;
 }
 
diff --git a/modules/video_coding/frame_object.h b/modules/video_coding/frame_object.h
index 8b9ad92..c39a896 100644
--- a/modules/video_coding/frame_object.h
+++ b/modules/video_coding/frame_object.h
@@ -36,7 +36,7 @@
   uint16_t first_seq_num() const;
   uint16_t last_seq_num() const;
   int times_nacked() const;
-  enum FrameType frame_type() const;
+  VideoFrameType frame_type() const;
   VideoCodecType codec_type() const;
   int64_t ReceivedTime() const override;
   int64_t RenderTime() const override;
@@ -49,7 +49,7 @@
   void AllocateBitstreamBuffer(size_t frame_size);
 
   rtc::scoped_refptr<PacketBuffer> packet_buffer_;
-  enum FrameType frame_type_;
+  VideoFrameType frame_type_;
   VideoCodecType codec_type_;
   uint16_t first_seq_num_;
   uint16_t last_seq_num_;
diff --git a/modules/video_coding/jitter_buffer_unittest.cc b/modules/video_coding/jitter_buffer_unittest.cc
index 2651565..2863efb 100644
--- a/modules/video_coding/jitter_buffer_unittest.cc
+++ b/modules/video_coding/jitter_buffer_unittest.cc
@@ -362,7 +362,7 @@
     return jitter_buffer_->InsertPacket(packet, &retransmitted);
   }
 
-  VCMFrameBufferEnum InsertFrame(FrameType frame_type) {
+  VCMFrameBufferEnum InsertFrame(VideoFrameType frame_type) {
     stream_generator_->GenerateFrame(
         frame_type, (frame_type != kEmptyFrame) ? 1 : 0,
         (frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
@@ -371,7 +371,7 @@
     return ret;
   }
 
-  VCMFrameBufferEnum InsertFrames(int num_frames, FrameType frame_type) {
+  VCMFrameBufferEnum InsertFrames(int num_frames, VideoFrameType frame_type) {
     VCMFrameBufferEnum ret_for_all = kNoError;
     for (int i = 0; i < num_frames; ++i) {
       VCMFrameBufferEnum ret = InsertFrame(frame_type);
diff --git a/modules/video_coding/packet.cc b/modules/video_coding/packet.cc
index b50e975..1113a6d 100644
--- a/modules/video_coding/packet.cc
+++ b/modules/video_coding/packet.cc
@@ -46,7 +46,7 @@
                      size_t size,
                      const RTPHeader& rtp_header,
                      const RTPVideoHeader& videoHeader,
-                     FrameType frame_type,
+                     VideoFrameType frame_type,
                      int64_t ntp_time_ms)
     : payloadType(rtp_header.payloadType),
       timestamp(rtp_header.timestamp),
diff --git a/modules/video_coding/packet.h b/modules/video_coding/packet.h
index 944aed5..835bfdf 100644
--- a/modules/video_coding/packet.h
+++ b/modules/video_coding/packet.h
@@ -32,7 +32,7 @@
             size_t size,
             const RTPHeader& rtp_header,
             const RTPVideoHeader& video_header,
-            FrameType frame_type,
+            VideoFrameType frame_type,
             int64_t ntp_time_ms);
 
   ~VCMPacket();
@@ -58,7 +58,7 @@
   bool markerBit;
   int timesNacked;
 
-  FrameType frameType;
+  VideoFrameType frameType;
 
   VCMNaluCompleteness completeNALU;  // Default is kNaluIncomplete.
   bool insertStartCode;  // True if a start code should be inserted before this
diff --git a/modules/video_coding/receiver_unittest.cc b/modules/video_coding/receiver_unittest.cc
index ca50dfa..29bb209 100644
--- a/modules/video_coding/receiver_unittest.cc
+++ b/modules/video_coding/receiver_unittest.cc
@@ -56,7 +56,7 @@
     return receiver_.InsertPacket(packet);
   }
 
-  int32_t InsertFrame(FrameType frame_type, bool complete) {
+  int32_t InsertFrame(VideoFrameType frame_type, bool complete) {
     int num_of_packets = complete ? 1 : 2;
     stream_generator_->GenerateFrame(
         frame_type, (frame_type != kEmptyFrame) ? num_of_packets : 0,
@@ -322,7 +322,7 @@
 
   void GenerateAndInsertFrame(int64_t render_timestamp_ms) {
     VCMPacket packet;
-    stream_generator_->GenerateFrame(FrameType::kVideoFrameKey,
+    stream_generator_->GenerateFrame(VideoFrameType::kVideoFrameKey,
                                      1,  // media packets
                                      0,  // empty packets
                                      render_timestamp_ms);
diff --git a/modules/video_coding/session_info.h b/modules/video_coding/session_info.h
index d66101c..47eccce 100644
--- a/modules/video_coding/session_info.h
+++ b/modules/video_coding/session_info.h
@@ -54,7 +54,7 @@
   int NumPackets() const;
   bool HaveFirstPacket() const;
   bool HaveLastPacket() const;
-  webrtc::FrameType FrameType() const { return frame_type_; }
+  webrtc::VideoFrameType FrameType() const { return frame_type_; }
   int LowSequenceNumber() const;
 
   // Returns highest sequence number, media or empty.
@@ -103,7 +103,7 @@
   void UpdateCompleteSession();
 
   bool complete_;
-  webrtc::FrameType frame_type_;
+  webrtc::VideoFrameType frame_type_;
   // Packets in this frame.
   PacketList packets_;
   int empty_seq_num_low_;
diff --git a/modules/video_coding/test/stream_generator.cc b/modules/video_coding/test/stream_generator.cc
index e23aa87..022edb6 100644
--- a/modules/video_coding/test/stream_generator.cc
+++ b/modules/video_coding/test/stream_generator.cc
@@ -29,7 +29,7 @@
   memset(packet_buffer_, 0, sizeof(packet_buffer_));
 }
 
-void StreamGenerator::GenerateFrame(FrameType type,
+void StreamGenerator::GenerateFrame(VideoFrameType type,
                                     int num_media_packets,
                                     int num_empty_packets,
                                     int64_t time_ms) {
@@ -54,7 +54,7 @@
                                           unsigned int size,
                                           bool first_packet,
                                           bool marker_bit,
-                                          FrameType type) {
+                                          VideoFrameType type) {
   EXPECT_LT(size, kMaxPacketSize);
   VCMPacket packet;
   packet.seqNum = sequence_number;
diff --git a/modules/video_coding/test/stream_generator.h b/modules/video_coding/test/stream_generator.h
index 150fa79..548654e 100644
--- a/modules/video_coding/test/stream_generator.h
+++ b/modules/video_coding/test/stream_generator.h
@@ -34,7 +34,7 @@
   // |time_ms| denotes the timestamp you want to put on the frame, and the unit
   // is millisecond. GenerateFrame will translate |time_ms| into a 90kHz
   // timestamp and put it on the frame.
-  void GenerateFrame(FrameType type,
+  void GenerateFrame(VideoFrameType type,
                      int num_media_packets,
                      int num_empty_packets,
                      int64_t time_ms);
@@ -56,7 +56,7 @@
                            unsigned int size,
                            bool first_packet,
                            bool marker_bit,
-                           FrameType type);
+                           VideoFrameType type);
 
   std::list<VCMPacket>::iterator GetPacketIterator(int index);
 
diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
index 2d69654..edef45d 100644
--- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc
+++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
@@ -294,8 +294,8 @@
 
 void SimulcastTestFixtureImpl::RunActiveStreamsTest(
     const std::vector<bool> active_streams) {
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   UpdateActiveStreams(active_streams);
   // Set sufficient bitrate for all streams so we can test active without
   // bitrate being an issue.
@@ -326,7 +326,7 @@
 }
 
 void SimulcastTestFixtureImpl::ExpectStreams(
-    FrameType frame_type,
+    VideoFrameType frame_type,
     const std::vector<bool> expected_streams_active) {
   ASSERT_EQ(static_cast<int>(expected_streams_active.size()),
             kNumberOfSimulcastStreams);
@@ -367,7 +367,7 @@
   }
 }
 
-void SimulcastTestFixtureImpl::ExpectStreams(FrameType frame_type,
+void SimulcastTestFixtureImpl::ExpectStreams(VideoFrameType frame_type,
                                              int expected_video_streams) {
   ASSERT_GE(expected_video_streams, 0);
   ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
@@ -396,8 +396,8 @@
 // a key frame was only requested for some of them.
 void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() {
   SetRates(kMaxBitrates[2], 30);  // To get all three streams.
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -431,8 +431,8 @@
 void SimulcastTestFixtureImpl::TestPaddingAllStreams() {
   // We should always encode the base layer.
   SetRates(kMinBitrates[0] - 1, 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 1);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -444,8 +444,8 @@
 void SimulcastTestFixtureImpl::TestPaddingTwoStreams() {
   // We have just enough to get only the first stream and padding for two.
   SetRates(kMinBitrates[0], 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 1);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -458,8 +458,8 @@
   // We are just below the limit for sending the second stream, so we should
   // get the first stream maxed out (at |maxBitrate|), and padding for two.
   SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 1);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -471,8 +471,8 @@
 void SimulcastTestFixtureImpl::TestPaddingOneStream() {
   // We have just enough to send two streams, so padding for one stream.
   SetRates(kTargetBitrates[0] + kMinBitrates[1], 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 2);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -485,8 +485,8 @@
   // We are just below the limit for sending the third stream, so the first
   // stream should max out at |targetBitrate| and the second at |maxBitrate|.
   SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 2);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -498,8 +498,8 @@
 void SimulcastTestFixtureImpl::TestSendAllStreams() {
   // We have just enough to send all streams.
   SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 3);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -511,8 +511,8 @@
 void SimulcastTestFixtureImpl::TestDisablingStreams() {
   // We should get three media streams.
   SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   ExpectStreams(kVideoFrameKey, 3);
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
 
@@ -617,8 +617,8 @@
 
   // Encode one frame and verify.
   SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
-  std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
-                                     kVideoFrameDelta);
+  std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
+                                          kVideoFrameDelta);
   EXPECT_CALL(
       encoder_callback_,
       OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
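
Note: every test above follows the same shape: build a per-stream vector of requested frame types, hand it to Encode(), and let ExpectStreams() assert which simulcast streams actually produce output. Requesting a key frame for a single stream is just one element of that vector, e.g. (sketch in the fixture's own vocabulary -- kNumberOfSimulcastStreams, input_frame_ and encoder_ as above):

    std::vector<VideoFrameType> frame_types(kNumberOfSimulcastStreams,
                                            kVideoFrameDelta);
    frame_types[0] = kVideoFrameKey;  // key frame requested on stream 0 only
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types));
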
diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.h b/modules/video_coding/utility/simulcast_test_fixture_impl.h
index 8881e06..06437fc 100644
--- a/modules/video_coding/utility/simulcast_test_fixture_impl.h
+++ b/modules/video_coding/utility/simulcast_test_fixture_impl.h
@@ -67,9 +67,9 @@
   void SetRates(uint32_t bitrate_kbps, uint32_t fps);
   void RunActiveStreamsTest(const std::vector<bool> active_streams);
   void UpdateActiveStreams(const std::vector<bool> active_streams);
-  void ExpectStreams(FrameType frame_type,
+  void ExpectStreams(VideoFrameType frame_type,
                      const std::vector<bool> expected_streams_active);
-  void ExpectStreams(FrameType frame_type, int expected_video_streams);
+  void ExpectStreams(VideoFrameType frame_type, int expected_video_streams);
   void VerifyTemporalIdxAndSyncForAllSpatialLayers(
       TestEncodedImageCallback* encoder_callback,
       const int* expected_temporal_idx,
diff --git a/sdk/android/src/jni/android_media_encoder.cc b/sdk/android/src/jni/android_media_encoder.cc
index 36681a5..1640264 100644
--- a/sdk/android/src/jni/android_media_encoder.cc
+++ b/sdk/android/src/jni/android_media_encoder.cc
@@ -100,7 +100,7 @@
                      size_t /* max_payload_size */) override;
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* /* codec_specific_info */,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
   int32_t RegisterEncodeCompleteCallback(
       EncodedImageCallback* callback) override;
   int32_t Release() override;
@@ -595,7 +595,7 @@
 int32_t MediaCodecVideoEncoder::Encode(
     const VideoFrame& frame,
     const CodecSpecificInfo* /* codec_specific_info */,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   if (sw_fallback_required_)
     return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
diff --git a/sdk/android/src/jni/encoded_image.cc b/sdk/android/src/jni/encoded_image.cc
index c801ce5f..2e8b266 100644
--- a/sdk/android/src/jni/encoded_image.cc
+++ b/sdk/android/src/jni/encoded_image.cc
@@ -20,7 +20,7 @@
 namespace jni {
 
 ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
-                                                  FrameType frame_type) {
+                                                  VideoFrameType frame_type) {
   return Java_FrameType_fromNativeIndex(env, frame_type);
 }
 
@@ -43,7 +43,7 @@
 
 ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
     JNIEnv* env,
-    const std::vector<FrameType>& frame_types) {
+    const std::vector<VideoFrameType>& frame_types) {
   return NativeToJavaObjectArray(
       env, frame_types, org_webrtc_EncodedImage_00024FrameType_clazz(env),
       &NativeToJavaFrameType);
diff --git a/sdk/android/src/jni/encoded_image.h b/sdk/android/src/jni/encoded_image.h
index 148ba03..118994d 100644
--- a/sdk/android/src/jni/encoded_image.h
+++ b/sdk/android/src/jni/encoded_image.h
@@ -25,12 +25,12 @@
 namespace jni {
 
 ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
-                                                  FrameType frame_type);
+                                                  VideoFrameType frame_type);
 ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(JNIEnv* jni,
                                                      const EncodedImage& image);
 ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
     JNIEnv* env,
-    const std::vector<FrameType>& frame_types);
+    const std::vector<VideoFrameType>& frame_types);
 
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/android/src/jni/video_encoder_wrapper.cc b/sdk/android/src/jni/video_encoder_wrapper.cc
index 8dde3d1..76579e3 100644
--- a/sdk/android/src/jni/video_encoder_wrapper.cc
+++ b/sdk/android/src/jni/video_encoder_wrapper.cc
@@ -120,7 +120,7 @@
 int32_t VideoEncoderWrapper::Encode(
     const VideoFrame& frame,
     const CodecSpecificInfo* /* codec_specific_info */,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   if (!initialized_) {
     // Most likely initializing the codec failed.
     return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
@@ -275,7 +275,7 @@
       frame._encodedHeight = encoded_height;
       frame.SetTimestamp(frame_extra_info.timestamp_rtp);
       frame.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
-      frame._frameType = (FrameType)frame_type;
+      frame._frameType = (VideoFrameType)frame_type;
       frame.rotation_ = (VideoRotation)rotation;
       frame._completeFrame = complete_frame;
       if (qp == -1) {
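
Note: the conversion above keeps the original C-style casts. An equivalent, more explicit spelling would be the following sketch; it assumes frame_type and rotation are integral values here, which is what the existing casts imply:

    frame._frameType = static_cast<VideoFrameType>(frame_type);
    frame.rotation_ = static_cast<VideoRotation>(rotation);
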
diff --git a/sdk/android/src/jni/video_encoder_wrapper.h b/sdk/android/src/jni/video_encoder_wrapper.h
index ef4f840..de2d67b 100644
--- a/sdk/android/src/jni/video_encoder_wrapper.h
+++ b/sdk/android/src/jni/video_encoder_wrapper.h
@@ -43,7 +43,7 @@
 
   int32_t Encode(const VideoFrame& frame,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
 
   int32_t SetRateAllocation(const VideoBitrateAllocation& allocation,
                             uint32_t framerate) override;
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index 669fea2..dda8aac 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -52,7 +52,7 @@
   encodedImage.timing_.flags = self.flags;
   encodedImage.timing_.encode_start_ms = self.encodeStartMs;
   encodedImage.timing_.encode_finish_ms = self.encodeFinishMs;
-  encodedImage._frameType = webrtc::FrameType(self.frameType);
+  encodedImage._frameType = webrtc::VideoFrameType(self.frameType);
   encodedImage.rotation_ = webrtc::VideoRotation(self.rotation);
   encodedImage._completeFrame = self.completeFrame;
   encodedImage.qp_ = self.qp ? self.qp.intValue : -1;
diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm
index bfa76ad..5c90978 100644
--- a/sdk/objc/native/src/objc_video_encoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -75,7 +75,7 @@
 
   int32_t Encode(const VideoFrame &frame,
                  const CodecSpecificInfo *codec_specific_info,
-                 const std::vector<FrameType> *frame_types) override {
+                 const std::vector<VideoFrameType> *frame_types) override {
     NSMutableArray<NSNumber *> *rtcFrameTypes = [NSMutableArray array];
     for (size_t i = 0; i < frame_types->size(); ++i) {
       [rtcFrameTypes addObject:@(RTCFrameType(frame_types->at(i)))];
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 9e862b0..cef7495 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -84,7 +84,7 @@
                                  .set_rotation(webrtc::kVideoRotation_0)
                                  .set_timestamp_us(0)
                                  .build();
-  std::vector<webrtc::FrameType> frame_types;
+  std::vector<webrtc::VideoFrameType> frame_types;
 
   EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK);
 }
@@ -102,7 +102,7 @@
                                  .set_rotation(webrtc::kVideoRotation_0)
                                  .set_timestamp_us(0)
                                  .build();
-  std::vector<webrtc::FrameType> frame_types;
+  std::vector<webrtc::VideoFrameType> frame_types;
 
   EXPECT_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_ERROR);
 }
diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc
index bc94011..40b9907 100644
--- a/test/configurable_frame_size_encoder.cc
+++ b/test/configurable_frame_size_encoder.cc
@@ -46,7 +46,7 @@
 int32_t ConfigurableFrameSizeEncoder::Encode(
     const VideoFrame& inputImage,
     const CodecSpecificInfo* codecSpecificInfo,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   EncodedImage encodedImage(buffer_.get(), current_frame_size_,
                             max_frame_size_);
   encodedImage._completeFrame = true;
diff --git a/test/configurable_frame_size_encoder.h b/test/configurable_frame_size_encoder.h
index 390b2b1..6b8ca96 100644
--- a/test/configurable_frame_size_encoder.h
+++ b/test/configurable_frame_size_encoder.h
@@ -39,7 +39,7 @@
 
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
 
   int32_t RegisterEncodeCompleteCallback(
       EncodedImageCallback* callback) override;
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index 67fc839..5e7131c 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -82,7 +82,7 @@
 
 int32_t FakeEncoder::Encode(const VideoFrame& input_image,
                             const CodecSpecificInfo* /*codec_specific_info*/,
-                            const std::vector<FrameType>* frame_types) {
+                            const std::vector<VideoFrameType>* frame_types) {
   unsigned char max_framerate;
   unsigned char num_simulcast_streams;
   SimulcastStream simulcast_streams[kMaxSimulcastStreams];
@@ -161,7 +161,7 @@
 }
 
 FakeEncoder::FrameInfo FakeEncoder::NextFrame(
-    const std::vector<FrameType>* frame_types,
+    const std::vector<VideoFrameType>* frame_types,
     bool keyframe,
     uint8_t num_simulcast_streams,
     const VideoBitrateAllocation& target_bitrate,
@@ -171,7 +171,7 @@
   frame_info.keyframe = keyframe;
 
   if (frame_types) {
-    for (FrameType frame_type : *frame_types) {
+    for (VideoFrameType frame_type : *frame_types) {
       if (frame_type == kVideoFrameKey) {
         frame_info.keyframe = true;
         break;
@@ -356,7 +356,7 @@
 
 int32_t DelayedEncoder::Encode(const VideoFrame& input_image,
                                const CodecSpecificInfo* codec_specific_info,
-                               const std::vector<FrameType>* frame_types) {
+                               const std::vector<VideoFrameType>* frame_types) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
 
   SleepMs(delay_ms_);
@@ -390,7 +390,7 @@
   EncodeTask(MultithreadedFakeH264Encoder* encoder,
              const VideoFrame& input_image,
              const CodecSpecificInfo* codec_specific_info,
-             const std::vector<FrameType>* frame_types)
+             const std::vector<VideoFrameType>* frame_types)
       : encoder_(encoder),
         input_image_(input_image),
         codec_specific_info_(),
@@ -409,13 +409,13 @@
   MultithreadedFakeH264Encoder* const encoder_;
   VideoFrame input_image_;
   CodecSpecificInfo codec_specific_info_;
-  std::vector<FrameType> frame_types_;
+  std::vector<VideoFrameType> frame_types_;
 };
 
 int32_t MultithreadedFakeH264Encoder::Encode(
     const VideoFrame& input_image,
     const CodecSpecificInfo* codec_specific_info,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
 
   std::unique_ptr<rtc::TaskQueue>& queue =
@@ -434,7 +434,7 @@
 int32_t MultithreadedFakeH264Encoder::EncodeCallback(
     const VideoFrame& input_image,
     const CodecSpecificInfo* codec_specific_info,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   return FakeH264Encoder::Encode(input_image, codec_specific_info, frame_types);
 }
 
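
Note: NextFrame() above decides "key frame or not" by scanning the requested frame types and stopping at the first key-frame entry. A compact equivalent of that scan, shown purely as a sketch (the fake encoder keeps its explicit loop):

    #include <algorithm>

    bool keyframe_requested =
        frame_types != nullptr &&
        std::any_of(frame_types->begin(), frame_types->end(),
                    [](VideoFrameType type) { return type == kVideoFrameKey; });
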
diff --git a/test/fake_encoder.h b/test/fake_encoder.h
index ffd672a..3a40083 100644
--- a/test/fake_encoder.h
+++ b/test/fake_encoder.h
@@ -46,7 +46,7 @@
                      size_t max_payload_size) override;
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
   int32_t RegisterEncodeCompleteCallback(
       EncodedImageCallback* callback) override;
   int32_t Release() override;
@@ -72,7 +72,7 @@
     std::vector<SpatialLayer> layers;
   };
 
-  FrameInfo NextFrame(const std::vector<FrameType>* frame_types,
+  FrameInfo NextFrame(const std::vector<VideoFrameType>* frame_types,
                       bool keyframe,
                       uint8_t num_simulcast_streams,
                       const VideoBitrateAllocation& target_bitrate,
@@ -126,7 +126,7 @@
   void SetDelay(int delay_ms);
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
 
  private:
   int delay_ms_ RTC_GUARDED_BY(sequence_checker_);
@@ -148,11 +148,11 @@
 
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
 
   int32_t EncodeCallback(const VideoFrame& input_image,
                          const CodecSpecificInfo* codec_specific_info,
-                         const std::vector<FrameType>* frame_types);
+                         const std::vector<VideoFrameType>* frame_types);
 
   int32_t Release() override;
 
diff --git a/test/fake_vp8_encoder.cc b/test/fake_vp8_encoder.cc
index bf7ec68..6ad4094 100644
--- a/test/fake_vp8_encoder.cc
+++ b/test/fake_vp8_encoder.cc
@@ -92,7 +92,7 @@
 
 void FakeVP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
                                            size_t size_bytes,
-                                           FrameType frame_type,
+                                           VideoFrameType frame_type,
                                            int stream_idx,
                                            uint32_t timestamp) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
diff --git a/test/fake_vp8_encoder.h b/test/fake_vp8_encoder.h
index 9607baf..1906e74 100644
--- a/test/fake_vp8_encoder.h
+++ b/test/fake_vp8_encoder.h
@@ -48,7 +48,7 @@
   void SetupTemporalLayers(const VideoCodec& codec);
   void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
                              size_t size_bytes,
-                             FrameType frame_type,
+                             VideoFrameType frame_type,
                              int stream_idx,
                              uint32_t timestamp);
 
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
index e255777..e2f16cc 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
@@ -87,7 +87,7 @@
 
 int32_t QualityAnalyzingVideoEncoder::Encode(
     const VideoFrame& frame,
-    const std::vector<FrameType>* frame_types) {
+    const std::vector<VideoFrameType>* frame_types) {
   {
     rtc::CritScope crit(&lock_);
     // Store id to be able to retrieve it in analyzing callback.
@@ -245,7 +245,7 @@
       // are equal or less than required one are interesting, so all above
       // have to be discarded. For other frames only required spatial index
       // is interesting, so all others have to be discarded.
-      if (encoded_image._frameType == FrameType::kVideoFrameKey) {
+      if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) {
         return *encoded_image.SpatialIndex() > *required_spatial_index;
       } else {
         return *encoded_image.SpatialIndex() != *required_spatial_index;
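
Note: the comparison above spells the enumerator as VideoFrameType::kVideoFrameKey. While VideoFrameType still names an unscoped enum, that qualified form is legal (C++11 allows qualifying unscoped enumerators with the enum name) and refers to exactly the same constant as plain kVideoFrameKey, so both spellings are interchangeable here:

    static_assert(VideoFrameType::kVideoFrameKey == kVideoFrameKey,
                  "same enumerator, qualified vs. unqualified spelling");
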
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
index a9a5873..693817c 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
@@ -68,7 +68,7 @@
       EncodedImageCallback* callback) override;
   int32_t Release() override;
   int32_t Encode(const VideoFrame& frame,
-                 const std::vector<FrameType>* frame_types) override;
+                 const std::vector<VideoFrameType>* frame_types) override;
   int32_t SetRates(uint32_t bitrate, uint32_t framerate) override;
   int32_t SetRateAllocation(const VideoBitrateAllocation& allocation,
                             uint32_t framerate) override;
diff --git a/test/video_encoder_proxy_factory.h b/test/video_encoder_proxy_factory.h
index 7f5f6fc..55a01a1 100644
--- a/test/video_encoder_proxy_factory.h
+++ b/test/video_encoder_proxy_factory.h
@@ -62,7 +62,7 @@
 
    private:
     int32_t Encode(const VideoFrame& input_image,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       return encoder_->Encode(input_image, frame_types);
     }
     int32_t InitEncode(const VideoCodec* config,
diff --git a/video/end_to_end_tests/network_state_tests.cc b/video/end_to_end_tests/network_state_tests.cc
index 8350e73..5ba38d9 100644
--- a/video/end_to_end_tests/network_state_tests.cc
+++ b/video/end_to_end_tests/network_state_tests.cc
@@ -269,7 +269,7 @@
 
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       {
         rtc::CritScope lock(&test_crit_);
         if (sender_state_ == kNetworkDown) {
@@ -365,7 +365,7 @@
     }
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       ADD_FAILURE() << "Unexpected frame encode.";
       return test::FakeEncoder::Encode(input_image, codec_specific_info,
                                        frame_types);
@@ -390,7 +390,7 @@
     }
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       encoded_frame_ = true;
       return test::FakeEncoder::Encode(input_image, codec_specific_info,
                                        frame_types);
diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc
index 3188786..9d9b6ea 100644
--- a/video/picture_id_tests.cc
+++ b/video/picture_id_tests.cc
@@ -72,7 +72,7 @@
     int16_t picture_id;
     int16_t tl0_pic_idx;
     uint8_t temporal_idx;
-    FrameType frame_type;
+    VideoFrameType frame_type;
   };
 
   bool ParsePayload(const uint8_t* packet,
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index bdb1035..04dc449 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -142,7 +142,7 @@
   }
   int32_t Release() override { return encoder_->Release(); }
   int32_t Encode(const VideoFrame& frame,
-                 const std::vector<FrameType>* frame_types) {
+                 const std::vector<VideoFrameType>* frame_types) {
     if (analyzer_) {
       analyzer_->PreEncodeOnFrame(frame);
     }
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index a5f5c1a..fe562d5 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -1984,7 +1984,7 @@
 
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       ADD_FAILURE()
           << "Unexpected Encode call since the send stream is not started";
       return 0;
@@ -2318,7 +2318,7 @@
 
     int32_t Encode(const VideoFrame& inputImage,
                    const CodecSpecificInfo* codecSpecificInfo,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       EXPECT_TRUE(IsReadyForEncode());
 
       observation_complete_.Set();
@@ -2537,7 +2537,7 @@
 
   int32_t Encode(const VideoFrame& input_image,
                  const CodecSpecificInfo* codec_specific_info,
-                 const std::vector<FrameType>* frame_types) override {
+                 const std::vector<VideoFrameType>* frame_types) override {
     // Silently skip the encode, FakeEncoder::Encode doesn't produce VP8.
     return 0;
   }
@@ -3003,7 +3003,7 @@
    private:
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codecSpecificInfo,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       CodecSpecificInfo specifics;
       specifics.codecType = kVideoCodecGeneric;
 
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 75f8b9b..9572512 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -1731,7 +1731,8 @@
   // Run post encode tasks, such as overuse detection and frame rate/drop
   // stats for internal encoders.
   const size_t frame_size = encoded_image.size();
-  const bool keyframe = encoded_image._frameType == FrameType::kVideoFrameKey;
+  const bool keyframe =
+      encoded_image._frameType == VideoFrameType::kVideoFrameKey;
 
   if (frame_size > 0) {
     frame_dropper_.Fill(frame_size, !keyframe);
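
Note: the post-encode bookkeeping above only needs a single boolean derived from the encoded image's frame type; the negation in the Fill() call suggests its second parameter is a "delta frame" flag (an assumption, since the FrameDropper declaration is not part of this CL). A minimal restatement of that dataflow:

    const bool keyframe =
        encoded_image._frameType == VideoFrameType::kVideoFrameKey;
    if (encoded_image.size() > 0) {
      // Assumed semantics: frame size in bytes, plus a "was this a delta frame" flag.
      frame_dropper_.Fill(encoded_image.size(), /*delta_frame=*/!keyframe);
    }
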
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index fd0835191..663a020 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -316,7 +316,7 @@
 
   // TODO(sprang): Change to actually support keyframe per simulcast stream, or
   // turn this into a simple bool |pending_keyframe_request_|.
-  std::vector<FrameType> next_frame_types_ RTC_GUARDED_BY(&encoder_queue_);
+  std::vector<VideoFrameType> next_frame_types_ RTC_GUARDED_BY(&encoder_queue_);
 
   FrameEncodeTimer frame_encoder_timer_;
 
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index ad4200b..8b4084a 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -606,14 +606,14 @@
       return last_update_rect_;
     }
 
-    const std::vector<FrameType>& LastFrameTypes() const {
+    const std::vector<VideoFrameType>& LastFrameTypes() const {
       rtc::CritScope lock(&local_crit_sect_);
       return last_frame_types_;
     }
 
     void InjectFrame(const VideoFrame& input_image, bool keyframe) {
-      const std::vector<FrameType> frame_type = {keyframe ? kVideoFrameKey
-                                                          : kVideoFrameDelta};
+      const std::vector<VideoFrameType> frame_type = {
+          keyframe ? kVideoFrameKey : kVideoFrameDelta};
       {
         rtc::CritScope lock(&local_crit_sect_);
         last_frame_types_ = frame_type;
@@ -640,7 +640,7 @@
    private:
     int32_t Encode(const VideoFrame& input_image,
                    const CodecSpecificInfo* codec_specific_info,
-                   const std::vector<FrameType>* frame_types) override {
+                   const std::vector<VideoFrameType>* frame_types) override {
       bool block_encode;
       {
         rtc::CritScope lock(&local_crit_sect_);
@@ -747,7 +747,7 @@
     absl::optional<VideoBitrateAllocation> last_bitrate_allocation_;
     VideoFrame::UpdateRect last_update_rect_
         RTC_GUARDED_BY(local_crit_sect_) = {0, 0, 0, 0};
-    std::vector<FrameType> last_frame_types_;
+    std::vector<VideoFrameType> last_frame_types_;
     bool expect_null_frame_ = false;
     EncodedImageCallback* encoded_image_callback_
         RTC_GUARDED_BY(local_crit_sect_) = nullptr;
@@ -3605,20 +3605,20 @@
   video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr));
   WaitForEncodedFrame(1);
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameKey}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameKey}));
 
   // Insert delta frame.
   video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr));
   WaitForEncodedFrame(2);
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameDelta}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameDelta}));
 
   // Request next frame be a key-frame.
   video_stream_encoder_->SendKeyFrame();
   video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr));
   WaitForEncodedFrame(3);
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameKey}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameKey}));
 
   video_stream_encoder_->Stop();
 }
@@ -3669,23 +3669,23 @@
   fake_encoder_.InjectFrame(CreateFrame(1, nullptr), true);
   EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameKey}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameKey}));
 
-  const std::vector<FrameType> kDeltaFrame = {kVideoFrameDelta};
+  const std::vector<VideoFrameType> kDeltaFrame = {kVideoFrameDelta};
   // Need to set the timestamp manually for the injected frame.
   VideoFrame frame = CreateFrame(101, nullptr);
   frame.set_timestamp(101);
   fake_encoder_.InjectFrame(frame, false);
   EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameDelta}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameDelta}));
 
   // Request a key frame. This forces a dummy frame down into the encoder.
   fake_encoder_.ExpectNullFrame();
   video_stream_encoder_->SendKeyFrame();
   EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
   EXPECT_THAT(fake_encoder_.LastFrameTypes(),
-              testing::ElementsAre(FrameType{kVideoFrameKey}));
+              testing::ElementsAre(VideoFrameType{kVideoFrameKey}));
 
   video_stream_encoder_->Stop();
 }