Revert of Add content type information to encoded images and corresponding RTP extension header (patchset #31 id:600001 of https://codereview.webrtc.org/2772033002/)

Reason for revert:
Breaks dependent projects.

Original issue's description:
> Add content type information to Encoded Images and add corresponding RTP extension header.
> Use it to separate the UMA e2e delay metric for screenshare from the one for video.
> The content type extension is set based on encoder settings and is processed by decoders.
>
> Also,
> Fix full-stack tests to calculate RTT correctly, so the new metric can be tested.
>
> BUG=webrtc:7420
>
> Review-Url: https://codereview.webrtc.org/2772033002
> Cr-Commit-Position: refs/heads/master@{#17640}
> Committed: https://chromium.googlesource.com/external/webrtc/+/64e739aeae5629cbbebf2a19e1d3e6b452bb6d0b

TBR=tommi@webrtc.org,sprang@webrtc.org,stefan@webrtc.org,nisse@webrtc.org,mflodman@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:7420

Review-Url: https://codereview.webrtc.org/2816463002
Cr-Commit-Position: refs/heads/master@{#17644}
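
For context on what is being reverted: the original CL carried the content type in a one-byte RTP header extension with URI "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type". The following is a minimal sketch reconstructed from the removed code in the diff below; it is not part of this revert, and the free-function names (WriteContentType, ParseContentType) are illustrative stand-ins for the removed VideoContentTypeExtension class methods.

    #include <cstddef>
    #include <cstdint>

    namespace webrtc {

    // One-byte value carried by the (removed) video-content-type extension.
    enum class VideoContentType : uint8_t {
      UNSPECIFIED = 0,
      SCREENSHARE = 1,
      TOTAL_CONTENT_TYPES  // Must be the last value in the enum.
    };

    // Serialize the content type into the single extension byte.
    inline void WriteContentType(uint8_t* data, VideoContentType type) {
      data[0] = static_cast<uint8_t>(type);
    }

    // Parse the single extension byte, rejecting out-of-range values.
    inline bool ParseContentType(const uint8_t* data, size_t size,
                                 VideoContentType* out) {
      if (size == 1 &&
          data[0] < static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
        *out = static_cast<VideoContentType>(data[0]);
        return true;
      }
      return false;
    }

    }  // namespace webrtc

The sender set this extension only on key frames when the encoder was configured for screenshare; the receiver used it to split the end-to-end delay UMA histograms between screenshare and realtime video, as seen in the receive_statistics_proxy changes below.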
diff --git a/webrtc/api/video/video_content_type.h b/webrtc/api/video/video_content_type.h
deleted file mode 100644
index 5c468c0..0000000
--- a/webrtc/api/video/video_content_type.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
-#define WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
-
-#include <stdint.h>
-
-namespace webrtc {
-
-enum class VideoContentType : uint8_t {
-  UNSPECIFIED = 0,
-  SCREENSHARE = 1,
-  TOTAL_CONTENT_TYPES  // Must be the last value in the enum.
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_API_VIDEO_VIDEO_CONTENT_TYPE_H_
diff --git a/webrtc/common_types.cc b/webrtc/common_types.cc
index 17bb265..f5b487f 100644
--- a/webrtc/common_types.cc
+++ b/webrtc/common_types.cc
@@ -31,9 +31,7 @@
       voiceActivity(false),
       audioLevel(0),
       hasVideoRotation(false),
-      videoRotation(kVideoRotation_0),
-      hasVideoContentType(false),
-      videoContentType(VideoContentType::UNSPECIFIED) {}
+      videoRotation(kVideoRotation_0) {}
 
 RTPHeader::RTPHeader()
     : markerBit(false),
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index 7504201..e1a4c77 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -18,7 +18,6 @@
 #include <string>
 #include <vector>
 
-#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/optional.h"
@@ -717,11 +716,6 @@
   bool hasVideoRotation;
   VideoRotation videoRotation;
 
-  // TODO(ilnik): Refactor this and one above to be rtc::Optional() and remove
-  // a corresponding bool flag.
-  bool hasVideoContentType;
-  VideoContentType videoContentType;
-
   PlayoutDelay playout_delay = {-1, -1};
 };
 
diff --git a/webrtc/config.cc b/webrtc/config.cc
index ab2f394..e0c490d 100644
--- a/webrtc/config.cc
+++ b/webrtc/config.cc
@@ -64,10 +64,6 @@
     "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";
 const int RtpExtension::kTransportSequenceNumberDefaultId = 5;
 
-const char* RtpExtension::kVideoContentTypeUri =
-    "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
-const int RtpExtension::kVideoContentTypeDefaultId = 6;
-
 // This extension allows applications to adaptively limit the playout delay
 // on frames as per the current needs. For example, a gaming application
 // has very different needs on end-to-end delay compared to a video-conference
@@ -89,8 +85,7 @@
          uri == webrtc::RtpExtension::kAbsSendTimeUri ||
          uri == webrtc::RtpExtension::kVideoRotationUri ||
          uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
-         uri == webrtc::RtpExtension::kPlayoutDelayUri ||
-         uri == webrtc::RtpExtension::kVideoContentTypeUri;
+         uri == webrtc::RtpExtension::kPlayoutDelayUri;
 }
 
 VideoStream::VideoStream()
diff --git a/webrtc/config.h b/webrtc/config.h
index f0039b3..f8c9e8b 100644
--- a/webrtc/config.h
+++ b/webrtc/config.h
@@ -88,10 +88,6 @@
   static const char* kVideoRotationUri;
   static const int kVideoRotationDefaultId;
 
-  // Header extension for video content type. E.g. default or screenshare.
-  static const char* kVideoContentTypeUri;
-  static const int kVideoContentTypeDefaultId;
-
   // Header extension for transport sequence number, see url for details:
   // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions
   static const char* kTransportSequenceNumberUri;
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index 37e227f..f422ffc 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -3852,7 +3852,7 @@
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
-                                  webrtc::kVideoRotation_0);
+                                 webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame2);
   EXPECT_EQ(2, renderer.num_rendered_frames());
 
@@ -3869,7 +3869,7 @@
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
   webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
-                                  webrtc::kVideoRotation_0);
+                                 webrtc::kVideoRotation_0);
   recv_stream->InjectFrame(video_frame3);
   EXPECT_EQ(3, renderer.num_rendered_frames());
 #endif
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
index ffa0798..a16c939 100644
--- a/webrtc/modules/include/module_common_types.h
+++ b/webrtc/modules/include/module_common_types.h
@@ -58,8 +58,6 @@
 
   PlayoutDelay playout_delay;
 
-  VideoContentType content_type;
-
   union {
     bool is_first_packet_in_frame;
     RTC_DEPRECATED bool isFirstPacket;  // first packet in frame
@@ -89,7 +87,7 @@
         fragmentationOffset(NULL),
         fragmentationLength(NULL),
         fragmentationTimeDiff(NULL),
-        fragmentationPlType(NULL) {}
+        fragmentationPlType(NULL) {};
 
   ~RTPFragmentationHeader() {
     delete[] fragmentationOffset;
diff --git a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 56aa9bd..ddfec4d 100644
--- a/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -76,8 +76,7 @@
   kRtpExtensionVideoRotation,
   kRtpExtensionTransportSequenceNumber,
   kRtpExtensionPlayoutDelay,
-  kRtpExtensionVideoContentType,
-  kRtpExtensionNumberOfExtensions  // Must be the last entity in the enum.
+  kRtpExtensionNumberOfExtensions,
 };
 
 enum RTCPAppSubTypes { kAppSubtypeBwe = 0x00 };
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
index 1d39259..bbbb143 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -39,7 +39,6 @@
     CreateExtensionInfo<VideoOrientation>(),
     CreateExtensionInfo<TransportSequenceNumber>(),
     CreateExtensionInfo<PlayoutDelayLimits>(),
-    CreateExtensionInfo<VideoContentTypeExtension>(),
 };
 
 // Because of kRtpExtensionNone, NumberOfExtension is 1 bigger than the actual
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index 8141f02..1b311e6 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -215,33 +215,4 @@
   return true;
 }
 
-// Video Content Type.
-//
-// E.g. default video or screenshare.
-//
-//    0                   1
-//    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-//   |  ID   | len=0 | Content type  |
-//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-constexpr RTPExtensionType VideoContentTypeExtension::kId;
-constexpr uint8_t VideoContentTypeExtension::kValueSizeBytes;
-constexpr const char* VideoContentTypeExtension::kUri;
-
-bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
-                                      VideoContentType* content_type) {
-  if (data.size() == 1 &&
-      data[0] < static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
-    *content_type = static_cast<VideoContentType>(data[0]);
-    return true;
-  }
-  return false;
-}
-
-bool VideoContentTypeExtension::Write(uint8_t* data,
-                                      VideoContentType content_type) {
-  data[0] = static_cast<uint8_t>(content_type);
-  return true;
-}
-
 }  // namespace webrtc
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index 0d30848..543688c 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -12,7 +12,6 @@
 
 #include <stdint.h>
 
-#include "webrtc/api/video/video_content_type.h"
 #include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/array_view.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -99,17 +98,5 @@
   static bool Write(uint8_t* data, const PlayoutDelay& playout_delay);
 };
 
-class VideoContentTypeExtension {
- public:
-  static constexpr RTPExtensionType kId = kRtpExtensionVideoContentType;
-  static constexpr uint8_t kValueSizeBytes = 1;
-  static constexpr const char* kUri =
-      "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type";
-
-  static bool Parse(rtc::ArrayView<const uint8_t> data,
-                    VideoContentType* content_type);
-  static bool Write(uint8_t* data, VideoContentType content_type);
-};
-
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
index 2e87528..7a7c45d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -169,9 +169,6 @@
       &header->extension.voiceActivity, &header->extension.audioLevel);
   header->extension.hasVideoRotation =
       GetExtension<VideoOrientation>(&header->extension.videoRotation);
-  header->extension.hasVideoContentType =
-      GetExtension<VideoContentTypeExtension>(
-          &header->extension.videoContentType);
 }
 
 size_t Packet::headers_size() const {
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index debe836..d6c5e5c 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -90,7 +90,6 @@
   rtp_header->frameType = parsed_payload.frame_type;
   rtp_header->type = parsed_payload.type;
   rtp_header->type.Video.rotation = kVideoRotation_0;
-  rtp_header->type.Video.content_type = VideoContentType::UNSPECIFIED;
 
   // Retrieve the video rotation information.
   if (rtp_header->header.extension.hasVideoRotation) {
@@ -98,11 +97,6 @@
         rtp_header->header.extension.videoRotation;
   }
 
-  if (rtp_header->header.extension.hasVideoContentType) {
-    rtp_header->type.Video.content_type =
-        rtp_header->header.extension.videoContentType;
-  }
-
   rtp_header->type.Video.playout_delay =
       rtp_header->header.extension.playout_delay;
 
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index d6c54d0..f77e59c1 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -40,8 +40,6 @@
     return kRtpExtensionTransportSequenceNumber;
   if (extension == RtpExtension::kPlayoutDelayUri)
     return kRtpExtensionPlayoutDelay;
-  if (extension == RtpExtension::kVideoContentTypeUri)
-    return kRtpExtensionVideoContentType;
   RTC_NOTREACHED() << "Looking up unsupported RTP extension.";
   return kRtpExtensionNone;
 }
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 75e2dc1..66ee51f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -203,7 +203,6 @@
     rtp_video_header.width = codec_.width;
     rtp_video_header.height = codec_.height;
     rtp_video_header.rotation = kVideoRotation_0;
-    rtp_video_header.content_type = VideoContentType::UNSPECIFIED;
     rtp_video_header.playout_delay = {-1, -1};
     rtp_video_header.is_first_packet_in_frame = true;
     rtp_video_header.simulcastIdx = 0;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index b89aefe..849ed78 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -324,12 +324,6 @@
           current_rotation != kVideoRotation_0)
         rtp_header->SetExtension<VideoOrientation>(current_rotation);
       last_rotation_ = current_rotation;
-      // Report content type only for key frames.
-      if (frame_type == kVideoFrameKey &&
-          video_header->content_type != VideoContentType::UNSPECIFIED) {
-        rtp_header->SetExtension<VideoContentTypeExtension>(
-            video_header->content_type);
-      }
     }
 
     // FEC settings.
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index 1c12c89..def431f 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -254,10 +254,6 @@
   header->extension.playout_delay.min_ms = -1;
   header->extension.playout_delay.max_ms = -1;
 
-  // May not be present in packet.
-  header->extension.hasVideoContentType = false;
-  header->extension.videoContentType = VideoContentType::UNSPECIFIED;
-
   if (X) {
     /* RTP header extension, RFC 3550.
      0                   1                   2                   3
@@ -450,25 +446,6 @@
               max_playout_delay * PlayoutDelayLimits::kGranularityMs;
           break;
         }
-        case kRtpExtensionVideoContentType: {
-          if (len != 0) {
-            LOG(LS_WARNING) << "Incorrect video content type len: " << len;
-            return;
-          }
-          //    0                   1
-          //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
-          //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-          //   |  ID   | len=0 | Content type  |
-          //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
-          if (ptr[0] <
-              static_cast<uint8_t>(VideoContentType::TOTAL_CONTENT_TYPES)) {
-            header->extension.hasVideoContentType = true;
-            header->extension.videoContentType =
-                static_cast<VideoContentType>(ptr[0]);
-          }
-          break;
-        }
         case kRtpExtensionNone:
         case kRtpExtensionNumberOfExtensions: {
           RTC_NOTREACHED() << "Invalid extension type: " << type;
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 0afa9aa..a0908f0 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -32,8 +32,9 @@
   return implementation;
 }
 
-const char* VideoCaptureImpl::CurrentDeviceName() const {
-  return _deviceUniqueId;
+const char* VideoCaptureImpl::CurrentDeviceName() const
+{
+    return _deviceUniqueId;
 }
 
 // static
@@ -135,13 +136,14 @@
 
     // Not encoded, convert to I420.
     const VideoType commonVideoType =
-        RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+              RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
 
     if (frameInfo.rawType != kVideoMJPEG &&
-        CalcBufferSize(commonVideoType, width, abs(height)) !=
-            videoFrameLength) {
-      LOG(LS_ERROR) << "Wrong incoming frame length.";
-      return -1;
+        CalcBufferSize(commonVideoType, width,
+                       abs(height)) != videoFrameLength)
+    {
+        LOG(LS_ERROR) << "Wrong incoming frame length.";
+        return -1;
     }
 
     int stride_y = width;
@@ -172,14 +174,16 @@
         commonVideoType, videoFrame, 0, 0,  // No cropping
         width, height, videoFrameLength,
         apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
-    if (conversionResult < 0) {
+    if (conversionResult < 0)
+    {
       LOG(LS_ERROR) << "Failed to convert capture frame from type "
                     << frameInfo.rawType << "to I420.";
-      return -1;
+        return -1;
     }
 
-    VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
-                            !apply_rotation ? _rotateFrame : kVideoRotation_0);
+    VideoFrame captureFrame(
+        buffer, 0, rtc::TimeMillis(),
+        !apply_rotation ? _rotateFrame : kVideoRotation_0);
     captureFrame.set_ntp_time_ms(captureTime);
 
     DeliverCapturedFrame(captureFrame);
@@ -201,40 +205,52 @@
   return true;
 }
 
-void VideoCaptureImpl::UpdateFrameCount() {
-  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
-    // first no shift
-  } else {
-    // shift
-    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
-      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
+void VideoCaptureImpl::UpdateFrameCount()
+{
+  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
+    {
+        // first no shift
     }
-  }
-  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
+    else
+    {
+        // shift
+        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
+        {
+            _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
+        }
+    }
+    _incomingFrameTimesNanos[0] = rtc::TimeNanos();
 }
 
-uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
-  int32_t num = 0;
-  int32_t nrOfFrames = 0;
-  for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
-    if (_incomingFrameTimesNanos[num] <= 0 ||
-        (now_ns - _incomingFrameTimesNanos[num]) /
-                rtc::kNumNanosecsPerMillisec >
-            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
-      break;
-    } else {
-      nrOfFrames++;
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
+{
+    int32_t num = 0;
+    int32_t nrOfFrames = 0;
+    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimesNanos[num] <= 0 ||
+            (now_ns - _incomingFrameTimesNanos[num]) /
+            rtc::kNumNanosecsPerMillisec >
+                kFrameRateHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        }
+        else
+        {
+            nrOfFrames++;
+        }
     }
-  }
-  if (num > 1) {
-    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
-                   rtc::kNumNanosecsPerMillisec;
-    if (diff > 0) {
-      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+    if (num > 1)
+    {
+        int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+                       rtc::kNumNanosecsPerMillisec;
+        if (diff > 0)
+        {
+            return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
+        }
     }
-  }
 
-  return nrOfFrames;
+    return nrOfFrames;
 }
 }  // namespace videocapturemodule
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 315d347..84bfafb 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -367,9 +367,6 @@
   encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
   encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
   encoded_image_.rotation_ = input_frame.rotation();
-  encoded_image_.content_type_ = (mode_ == kScreensharing)
-                                     ? VideoContentType::SCREENSHARE
-                                     : VideoContentType::UNSPECIFIED;
   encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
 
   // Split encoded image up into fragments. This also updates |encoded_image_|.
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 66db72c..41fd7ff 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -878,9 +878,6 @@
     encoded_images_[encoder_idx].capture_time_ms_ =
         input_image.render_time_ms();
     encoded_images_[encoder_idx].rotation_ = input_image.rotation();
-    encoded_images_[encoder_idx].content_type_ =
-        (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
-                                        : VideoContentType::UNSPECIFIED;
 
     int qp = -1;
     vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 4b0f99e..4d7df86 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -706,9 +706,6 @@
     encoded_image_._timeStamp = input_image_->timestamp();
     encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
     encoded_image_.rotation_ = input_image_->rotation();
-    encoded_image_.content_type_ = (codec_.mode == kScreensharing)
-                                       ? VideoContentType::SCREENSHARE
-                                       : VideoContentType::UNSPECIFIED;
     encoded_image_._encodedHeight = raw_->d_h;
     encoded_image_._encodedWidth = raw_->d_w;
     int qp = -1;
diff --git a/webrtc/modules/video_coding/encoded_frame.cc b/webrtc/modules/video_coding/encoded_frame.cc
index 1807fa5..fb12c5b 100644
--- a/webrtc/modules/video_coding/encoded_frame.cc
+++ b/webrtc/modules/video_coding/encoded_frame.cc
@@ -87,7 +87,6 @@
   _codecSpecificInfo.codecType = kVideoCodecUnknown;
   _codec = kVideoCodecUnknown;
   rotation_ = kVideoRotation_0;
-  content_type_ = VideoContentType::UNSPECIFIED;
   _rotation_set = false;
 }
 
diff --git a/webrtc/modules/video_coding/encoded_frame.h b/webrtc/modules/video_coding/encoded_frame.h
index 96f9d00..840cd20 100644
--- a/webrtc/modules/video_coding/encoded_frame.h
+++ b/webrtc/modules/video_coding/encoded_frame.h
@@ -77,12 +77,8 @@
   */
   VideoRotation rotation() const { return rotation_; }
   /**
-   *  Get video content type
-   */
-  VideoContentType contentType() const { return content_type_; }
-  /**
-   *   True if this frame is complete, false otherwise
-   */
+  *   True if this frame is complete, false otherwise
+  */
   bool Complete() const { return _completeFrame; }
   /**
   *   True if there's a frame missing before this frame
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
index 5ea12dc..1439a17 100644
--- a/webrtc/modules/video_coding/frame_buffer.cc
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -163,7 +163,6 @@
     RTC_DCHECK(!_rotation_set);
     rotation_ = packet.video_header.rotation;
     _rotation_set = true;
-    content_type_ = packet.video_header.content_type;
   }
 
   if (packet.is_first_packet_in_frame) {
diff --git a/webrtc/modules/video_coding/frame_object.cc b/webrtc/modules/video_coding/frame_object.cc
index 9e5ce09..70b0a02 100644
--- a/webrtc/modules/video_coding/frame_object.cc
+++ b/webrtc/modules/video_coding/frame_object.cc
@@ -79,7 +79,6 @@
   // (HEVC)).
   rotation_ = last_packet->video_header.rotation;
   _rotation_set = true;
-  content_type_ = last_packet->video_header.content_type;
 }
 
 RtpFrameObject::~RtpFrameObject() {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index f5d9cfe..2121ab6 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -87,7 +87,7 @@
   decodedImage.set_timestamp_us(
       frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec);
   decodedImage.set_rotation(frameInfo->rotation);
-  _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type);
+  _receiveCallback->FrameToRender(decodedImage, qp);
 }
 
 int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
@@ -131,8 +131,7 @@
       _decoder(decoder),
       _codecType(kVideoCodecUnknown),
       _isExternal(isExternal),
-      _keyFrameDecoded(false),
-      _last_keyframe_content_type(VideoContentType::UNSPECIFIED) {}
+      _keyFrameDecoded(false) {}
 
 VCMGenericDecoder::~VCMGenericDecoder() {}
 
@@ -150,15 +149,6 @@
     _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
     _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
     _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation();
-    // Set correctly only for key frames. Thus, use latest key frame
-    // content type. If the corresponding key frame was lost, decode will fail
-    // and content type will be ignored.
-    if (frame.FrameType() == kVideoFrameKey) {
-      _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType();
-      _last_keyframe_content_type = frame.contentType();
-    } else {
-      _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type;
-    }
     _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
 
     _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
diff --git a/webrtc/modules/video_coding/generic_decoder.h b/webrtc/modules/video_coding/generic_decoder.h
index 71b8d81..891ec89 100644
--- a/webrtc/modules/video_coding/generic_decoder.h
+++ b/webrtc/modules/video_coding/generic_decoder.h
@@ -30,7 +30,6 @@
   int64_t decodeStartTimeMs;
   void* userData;
   VideoRotation rotation;
-  VideoContentType content_type;
 };
 
 class VCMDecodedFrameCallback : public DecodedImageCallback {
@@ -110,7 +109,6 @@
   VideoCodecType _codecType;
   bool _isExternal;
   bool _keyFrameDecoded;
-  VideoContentType _last_keyframe_content_type;
 };
 
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
index 21b154f..8a53c1d 100644
--- a/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
+++ b/webrtc/modules/video_coding/include/mock/mock_vcm_callbacks.h
@@ -33,8 +33,7 @@
   MockVCMReceiveCallback() {}
   virtual ~MockVCMReceiveCallback() {}
 
-  MOCK_METHOD3(FrameToRender,
-               int32_t(VideoFrame&, rtc::Optional<uint8_t>, VideoContentType));
+  MOCK_METHOD2(FrameToRender, int32_t(VideoFrame&, rtc::Optional<uint8_t>));
   MOCK_METHOD1(ReceivedDecodedReferenceFrame, int32_t(const uint64_t));
   MOCK_METHOD1(OnIncomingPayloadType, void(int));
   MOCK_METHOD1(OnDecoderImplementationName, void(const char*));
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 3ae9981..4ed80a6 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -62,8 +62,7 @@
 class VCMReceiveCallback {
  public:
   virtual int32_t FrameToRender(VideoFrame& videoFrame,  // NOLINT
-                                rtc::Optional<uint8_t> qp,
-                                VideoContentType content_type) = 0;
+                                rtc::Optional<uint8_t> qp) = 0;
   virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
     return -1;
   }
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index 6425015..44acccf 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -1042,10 +1042,6 @@
       image->_timeStamp = output_timestamp_;
       image->capture_time_ms_ = output_render_time_ms_;
       image->rotation_ = output_rotation_;
-      image->content_type_ =
-          (codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
-              ? webrtc::VideoContentType::SCREENSHARE
-              : webrtc::VideoContentType::UNSPECIFIED;
       image->_frameType =
           (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
       image->_completeFrame = true;
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
index 09aa7db..5de9a24 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.h
@@ -85,7 +85,6 @@
   uint32_t encoder_bitrate_bps_;
   int32_t width_;
   int32_t height_;
-  VideoCodecMode mode_;
   const CFStringRef profile_;
 
   H264BitstreamParser h264_bitstream_parser_;
diff --git a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
index bc46b35..e50b225 100644
--- a/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/h264_video_toolbox_encoder.mm
@@ -364,7 +364,6 @@
 
   width_ = codec_settings->width;
   height_ = codec_settings->height;
-  mode_ = codec_settings->mode;
   // We can only set average bitrate on the HW encoder.
   target_bitrate_bps_ = codec_settings->startBitrate;
   bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
@@ -723,9 +722,6 @@
   frame._timeStamp = timestamp;
   frame.rotation_ = rotation;
 
-  frame.content_type_ =
-      (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
-
   h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
   h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);
 
diff --git a/webrtc/test/call_test.cc b/webrtc/test/call_test.cc
index 6ec3fda..5c0b42c 100644
--- a/webrtc/test/call_test.cc
+++ b/webrtc/test/call_test.cc
@@ -208,8 +208,6 @@
     video_send_config_.rtp.extensions.push_back(
         RtpExtension(RtpExtension::kTransportSequenceNumberUri,
                      kTransportSequenceNumberExtensionId));
-    video_send_config_.rtp.extensions.push_back(RtpExtension(
-        RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId));
     FillEncoderConfiguration(num_video_streams, &video_encoder_config_);
 
     for (size_t i = 0; i < num_video_streams; ++i)
diff --git a/webrtc/test/constants.cc b/webrtc/test/constants.cc
index a789cc0..43f9adc 100644
--- a/webrtc/test/constants.cc
+++ b/webrtc/test/constants.cc
@@ -17,7 +17,5 @@
 const int kAbsSendTimeExtensionId = 7;
 const int kTransportSequenceNumberExtensionId = 8;
 const int kVideoRotationExtensionId = 9;
-const int kVideoContentTypeExtensionId = 10;
-
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/constants.h b/webrtc/test/constants.h
index d0f73d0..1b5b0cb 100644
--- a/webrtc/test/constants.h
+++ b/webrtc/test/constants.h
@@ -15,6 +15,5 @@
 extern const int kAbsSendTimeExtensionId;
 extern const int kTransportSequenceNumberExtensionId;
 extern const int kVideoRotationExtensionId;
-extern const int kVideoContentTypeExtensionId;
 }  // namespace test
 }  // namespace webrtc
diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc
index fce12c6..1db93eb 100644
--- a/webrtc/test/fake_encoder.cc
+++ b/webrtc/test/fake_encoder.cc
@@ -61,7 +61,6 @@
   int max_target_bitrate_kbps;
   int64_t last_encode_time_ms;
   size_t num_encoded_bytes;
-  VideoCodecMode mode;
   {
     rtc::CritScope cs(&crit_sect_);
     max_framerate = config_.maxFramerate;
@@ -74,7 +73,6 @@
     max_target_bitrate_kbps = max_target_bitrate_kbps_;
     last_encode_time_ms = last_encode_time_ms_;
     num_encoded_bytes = sizeof(encoded_buffer_);
-    mode = config_.mode;
   }
 
   int64_t time_now_ms = clock_->TimeInMilliseconds();
@@ -144,9 +142,6 @@
     encoded._encodedWidth = simulcast_streams[i].width;
     encoded._encodedHeight = simulcast_streams[i].height;
     encoded.rotation_ = input_image.rotation();
-    encoded.content_type_ = (mode == kScreensharing)
-                                ? VideoContentType::SCREENSHARE
-                                : VideoContentType::UNSPECIFIED;
     specifics.codec_name = ImplementationName();
     RTC_DCHECK(callback);
     if (callback->OnEncodedImage(encoded, &specifics, nullptr).error !=
diff --git a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
index 7cf65cf..613f125 100644
--- a/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
+++ b/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -85,10 +85,6 @@
         PlayoutDelay playout;
         packet.GetExtension<PlayoutDelayLimits>(&playout);
         break;
-      case kRtpExtensionVideoContentType:
-        VideoContentType content_type;
-        packet.GetExtension<VideoContentTypeExtension>(&content_type);
-        break;
     }
   }
 }
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index f171c5b..f31a68e 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -2652,8 +2652,7 @@
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs"));
 
-  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs"));
-  EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs"));
+  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
   EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
 
   EXPECT_EQ(1, metrics::NumSamples(video_prefix + "EncodeTimeInMs"));
@@ -2693,118 +2692,6 @@
             metrics::NumSamples("WebRTC.Video.ReceivedFecPacketsInPercent"));
 }
 
-TEST_F(EndToEndTest, ContentTypeSwitches) {
-  class StatsObserver : public test::BaseTest,
-                        public rtc::VideoSinkInterface<VideoFrame> {
-   public:
-    StatsObserver() : BaseTest(kLongTimeoutMs), num_frames_received_(0) {}
-
-    bool ShouldCreateReceivers() const override { return true; }
-
-    void OnFrame(const VideoFrame& video_frame) override {
-      // The RTT is needed to estimate |ntp_time_ms| which is used by
-      // end-to-end delay stats. Therefore, start counting received frames once
-      // |ntp_time_ms| is valid.
-      if (video_frame.ntp_time_ms() > 0 &&
-          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() >=
-              video_frame.ntp_time_ms()) {
-        rtc::CritScope lock(&crit_);
-        ++num_frames_received_;
-      }
-    }
-
-    Action OnSendRtp(const uint8_t* packet, size_t length) override {
-      if (MinNumberOfFramesReceived())
-        observation_complete_.Set();
-      return SEND_PACKET;
-    }
-
-    bool MinNumberOfFramesReceived() const {
-      const int kMinRequiredHistogramSamples = 200;
-      rtc::CritScope lock(&crit_);
-      return num_frames_received_ > kMinRequiredHistogramSamples;
-    }
-
-    // May be called several times.
-    void PerformTest() override {
-      EXPECT_TRUE(Wait()) << "Timed out waiting for enough packets.";
-      // Reset frame counter so next PerformTest() call will do something.
-      {
-        rtc::CritScope lock(&crit_);
-        num_frames_received_ = 0;
-      }
-    }
-
-    rtc::CriticalSection crit_;
-    int num_frames_received_ GUARDED_BY(&crit_);
-  } test;
-
-  metrics::Reset();
-
-  Call::Config send_config(test.GetSenderCallConfig());
-  CreateSenderCall(send_config);
-  Call::Config recv_config(test.GetReceiverCallConfig());
-  CreateReceiverCall(recv_config);
-  receive_transport_.reset(test.CreateReceiveTransport());
-  send_transport_.reset(test.CreateSendTransport(sender_call_.get()));
-  send_transport_->SetReceiver(receiver_call_->Receiver());
-  receive_transport_->SetReceiver(sender_call_->Receiver());
-  receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
-  CreateSendConfig(1, 0, 0, send_transport_.get());
-  CreateMatchingReceiveConfigs(receive_transport_.get());
-
-  // Modify send and receive configs.
-  video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
-  video_receive_configs_[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
-  video_receive_configs_[0].renderer = &test;
-  // RTT needed for RemoteNtpTimeEstimator for the receive stream.
-  video_receive_configs_[0].rtp.rtcp_xr.receiver_reference_time_report = true;
-  // Start with realtime video.
-  video_encoder_config_.content_type =
-      VideoEncoderConfig::ContentType::kRealtimeVideo;
-  // Second encoder config for the second part of the test uses screenshare
-  VideoEncoderConfig encoder_config_with_screenshare_ =
-      video_encoder_config_.Copy();
-  encoder_config_with_screenshare_.content_type =
-      VideoEncoderConfig::ContentType::kScreen;
-
-  CreateVideoStreams();
-  CreateFrameGeneratorCapturer(kDefaultFramerate, kDefaultWidth,
-                               kDefaultHeight);
-  Start();
-
-  test.PerformTest();
-
-  // Replace old send stream.
-  sender_call_->DestroyVideoSendStream(video_send_stream_);
-  video_send_stream_ = sender_call_->CreateVideoSendStream(
-      video_send_config_.Copy(), encoder_config_with_screenshare_.Copy());
-  video_send_stream_->SetSource(
-      frame_generator_capturer_.get(),
-      VideoSendStream::DegradationPreference::kBalanced);
-  video_send_stream_->Start();
-
-  // Continue to run test but now with screenshare.
-  test.PerformTest();
-
-  send_transport_->StopSending();
-  receive_transport_->StopSending();
-  Stop();
-  DestroyStreams();
-  DestroyCalls();
-  // Delete the call for Call stats to be reported.
-  sender_call_.reset();
-  receiver_call_.reset();
-
-  // Verify that stats have been updated for both screenshare and video.
-  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayInMs"));
-  EXPECT_EQ(1,
-            metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayInMs"));
-  EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.EndToEndDelayMaxInMs"));
-  EXPECT_EQ(
-      1, metrics::NumSamples("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs"));
-}
-
 TEST_F(EndToEndTest, VerifyHistogramStatsWithRtx) {
   const bool kEnabledRtx = true;
   const bool kEnabledRed = false;
diff --git a/webrtc/video/payload_router.cc b/webrtc/video/payload_router.cc
index 52e9d46..f2f4309 100644
--- a/webrtc/video/payload_router.cc
+++ b/webrtc/video/payload_router.cc
@@ -129,7 +129,6 @@
   if (codec_specific_info)
     CopyCodecSpecific(codec_specific_info, &rtp_video_header);
   rtp_video_header.rotation = encoded_image.rotation_;
-  rtp_video_header.content_type = encoded_image.content_type_;
   rtp_video_header.playout_delay = encoded_image.playout_delay_;
 
   int stream_index = rtp_video_header.simulcastIdx;
diff --git a/webrtc/video/receive_statistics_proxy.cc b/webrtc/video/receive_statistics_proxy.cc
index e40a7ef..2ed2fae 100644
--- a/webrtc/video/receive_statistics_proxy.cc
+++ b/webrtc/video/receive_statistics_proxy.cc
@@ -74,12 +74,9 @@
       render_fps_tracker_(100, 10u),
       render_pixel_tracker_(100, 10u),
       total_byte_tracker_(100, 10u),  // bucket_interval_ms, bucket_count
-      e2e_delay_max_ms_video_(-1),
-      e2e_delay_max_ms_screenshare_(-1),
       freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
       first_report_block_time_ms_(-1),
-      avg_rtt_ms_(0),
-      last_content_type_(VideoContentType::UNSPECIFIED) {
+      avg_rtt_ms_(0) {
   stats_.ssrc = config_.rtp.remote_ssrc;
   // TODO(brandtr): Replace |rtx_stats_| with a single instance of
   // StreamDataCounters.
@@ -172,30 +169,9 @@
   if (delay_ms != -1)
     RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
 
-  int e2e_delay_ms_video = e2e_delay_counter_video_.Avg(kMinRequiredSamples);
-  if (e2e_delay_ms_video != -1) {
-    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs",
-                               e2e_delay_ms_video);
-  }
-
-  int e2e_delay_ms_screenshare =
-      e2e_delay_counter_screenshare_.Avg(kMinRequiredSamples);
-  if (e2e_delay_ms_screenshare != -1) {
-    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.Screenshare.EndToEndDelayInMs",
-                               e2e_delay_ms_screenshare);
-  }
-
-  int e2e_delay_max_ms_video = e2e_delay_max_ms_video_;
-  if (e2e_delay_max_ms_video != -1) {
-    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EndToEndDelayMaxInMs",
-                                e2e_delay_max_ms_video);
-  }
-
-  int e2e_delay_max_ms_screenshare = e2e_delay_max_ms_screenshare_;
-  if (e2e_delay_max_ms_screenshare != -1) {
-    RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.Screenshare.EndToEndDelayMaxInMs",
-                                e2e_delay_max_ms_screenshare);
-  }
+  int e2e_delay_ms = e2e_delay_counter_.Avg(kMinRequiredSamples);
+  if (e2e_delay_ms != -1)
+    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.EndToEndDelayInMs", e2e_delay_ms);
 
   StreamDataCounters rtp = stats_.rtp_stats;
   StreamDataCounters rtx;
@@ -455,8 +431,7 @@
     total_byte_tracker_.AddSamples(total_bytes - last_total_bytes);
 }
 
-void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp,
-                                            VideoContentType content_type) {
+void ReceiveStatisticsProxy::OnDecodedFrame(rtc::Optional<uint8_t> qp) {
   uint64_t now = clock_->TimeInMilliseconds();
 
   rtc::CritScope lock(&crit_);
@@ -476,7 +451,6 @@
         << "QP sum was already set and no QP was given for a frame.";
     stats_.qp_sum = rtc::Optional<uint64_t>();
   }
-  last_content_type_ = content_type;
   decode_fps_estimator_.Update(1, now);
   stats_.decode_frame_rate = decode_fps_estimator_.Rate(now).value_or(0);
 }
@@ -501,16 +475,8 @@
 
   if (frame.ntp_time_ms() > 0) {
     int64_t delay_ms = clock_->CurrentNtpInMilliseconds() - frame.ntp_time_ms();
-    if (delay_ms >= 0) {
-      if (last_content_type_ == VideoContentType::SCREENSHARE) {
-        e2e_delay_max_ms_screenshare_ =
-            std::max(delay_ms, e2e_delay_max_ms_screenshare_);
-        e2e_delay_counter_screenshare_.Add(delay_ms);
-      } else {
-        e2e_delay_max_ms_video_ = std::max(delay_ms, e2e_delay_max_ms_video_);
-        e2e_delay_counter_video_.Add(delay_ms);
-      }
-    }
+    if (delay_ms >= 0)
+      e2e_delay_counter_.Add(delay_ms);
   }
 }
 
diff --git a/webrtc/video/receive_statistics_proxy.h b/webrtc/video/receive_statistics_proxy.h
index e1d0971..07e59b4 100644
--- a/webrtc/video/receive_statistics_proxy.h
+++ b/webrtc/video/receive_statistics_proxy.h
@@ -46,7 +46,7 @@
 
   VideoReceiveStream::Stats GetStats() const;
 
-  void OnDecodedFrame(rtc::Optional<uint8_t> qp, VideoContentType content_type);
+  void OnDecodedFrame(rtc::Optional<uint8_t> qp);
   void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
   void OnRenderedFrame(const VideoFrame& frame);
   void OnIncomingPayloadType(int payload_type);
@@ -140,10 +140,7 @@
   SampleCounter target_delay_counter_ GUARDED_BY(crit_);
   SampleCounter current_delay_counter_ GUARDED_BY(crit_);
   SampleCounter delay_counter_ GUARDED_BY(crit_);
-  SampleCounter e2e_delay_counter_video_ GUARDED_BY(crit_);
-  SampleCounter e2e_delay_counter_screenshare_ GUARDED_BY(crit_);
-  int64_t e2e_delay_max_ms_video_ GUARDED_BY(crit_);
-  int64_t e2e_delay_max_ms_screenshare_ GUARDED_BY(crit_);
+  SampleCounter e2e_delay_counter_ GUARDED_BY(crit_);
   MaxCounter freq_offset_counter_ GUARDED_BY(crit_);
   int64_t first_report_block_time_ms_ GUARDED_BY(crit_);
   ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
@@ -151,7 +148,6 @@
   std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);
   int64_t avg_rtt_ms_ GUARDED_BY(crit_);
   mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
-  VideoContentType last_content_type_ GUARDED_BY(&crit_);
 };
 
 }  // namespace webrtc
diff --git a/webrtc/video/receive_statistics_proxy_unittest.cc b/webrtc/video/receive_statistics_proxy_unittest.cc
index 84943e2..af7ae68 100644
--- a/webrtc/video/receive_statistics_proxy_unittest.cc
+++ b/webrtc/video/receive_statistics_proxy_unittest.cc
@@ -54,8 +54,7 @@
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                      VideoContentType::UNSPECIFIED);
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
 }
@@ -63,47 +62,40 @@
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithQpResetsFramesDecoded) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded);
   for (uint32_t i = 1; i <= 3; ++i) {
-    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                      VideoContentType::UNSPECIFIED);
+    statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_decoded);
   }
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(1u));
   EXPECT_EQ(1u, statistics_proxy_->GetStats().frames_decoded);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(127u));
   EXPECT_EQ(rtc::Optional<uint64_t>(130u),
             statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) {
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(3u));
   EXPECT_EQ(rtc::Optional<uint64_t>(3u), statistics_proxy_->GetStats().qp_sum);
-  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>(),
-                                    VideoContentType::UNSPECIFIED);
+  statistics_proxy_->OnDecodedFrame(rtc::Optional<uint8_t>());
   EXPECT_EQ(rtc::Optional<uint64_t>(), statistics_proxy_->GetStats().qp_sum);
 }
 
 TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
-  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
-                           webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame(
+      webrtc::I420Buffer::Create(1, 1), 0, 0, webrtc::kVideoRotation_0);
   for (uint32_t i = 1; i <= 3; ++i) {
     statistics_proxy_->OnRenderedFrame(frame);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);
diff --git a/webrtc/video/rtp_stream_receiver.cc b/webrtc/video/rtp_stream_receiver.cc
index 00f1c46..90dd0da 100644
--- a/webrtc/video/rtp_stream_receiver.cc
+++ b/webrtc/video/rtp_stream_receiver.cc
@@ -502,10 +502,6 @@
   if (header.extension.hasVideoRotation) {
     rtp_header.type.Video.rotation = header.extension.videoRotation;
   }
-  rtp_header.type.Video.content_type = VideoContentType::UNSPECIFIED;
-  if (header.extension.hasVideoContentType) {
-    rtp_header.type.Video.content_type = header.extension.videoContentType;
-  }
   rtp_header.type.Video.playout_delay = header.extension.playout_delay;
 
   OnReceivedPayloadData(nullptr, 0, &rtp_header);
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index cea8af1..0baf42c 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -1301,8 +1301,6 @@
     video_send_config_.rtp.extensions.push_back(RtpExtension(
         RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
   }
-  video_send_config_.rtp.extensions.push_back(RtpExtension(
-      RtpExtension::kVideoContentTypeUri, test::kVideoContentTypeExtensionId));
 
   video_encoder_config_.min_transmit_bitrate_bps =
       params_.video.min_transmit_bps;
@@ -1330,8 +1328,6 @@
         kSendRtxPayloadType;
     video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
     video_receive_configs_[i].rtp.remb = !params_.call.send_side_bwe;
-    // Enable RTT calculation so NTP time estimator will work.
-    video_receive_configs_[i].rtp.rtcp_xr.receiver_reference_time_report = true;
     // Force fake decoders on non-selected simulcast streams.
     if (i != params_.ss.selected_stream) {
       VideoReceiveStream::Decoder decoder;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index e24cb41..894b840 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -291,43 +291,6 @@
   RunBaseTest(&test);
 }
 
-TEST_F(VideoSendStreamTest, SupportsVideoContentType) {
-  class VideoRotationObserver : public test::SendTest {
-   public:
-    VideoRotationObserver() : SendTest(kDefaultTimeoutMs) {
-      EXPECT_TRUE(parser_->RegisterRtpHeaderExtension(
-          kRtpExtensionVideoContentType, test::kVideoContentTypeExtensionId));
-    }
-
-    Action OnSendRtp(const uint8_t* packet, size_t length) override {
-      RTPHeader header;
-      EXPECT_TRUE(parser_->Parse(packet, length, &header));
-      EXPECT_TRUE(header.extension.hasVideoContentType);
-      EXPECT_EQ(VideoContentType::SCREENSHARE,
-                header.extension.videoContentType);
-      observation_complete_.Set();
-      return SEND_PACKET;
-    }
-
-    void ModifyVideoConfigs(
-        VideoSendStream::Config* send_config,
-        std::vector<VideoReceiveStream::Config>* receive_configs,
-        VideoEncoderConfig* encoder_config) override {
-      send_config->rtp.extensions.clear();
-      send_config->rtp.extensions.push_back(
-          RtpExtension(RtpExtension::kVideoContentTypeUri,
-                       test::kVideoContentTypeExtensionId));
-      encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
-    }
-
-    void PerformTest() override {
-      EXPECT_TRUE(Wait()) << "Timed out while waiting for single RTP packet.";
-    }
-  } test;
-
-  RunBaseTest(&test);
-}
-
 class FakeReceiveStatistics : public NullReceiveStatistics {
  public:
   FakeReceiveStatistics(uint32_t send_ssrc,
diff --git a/webrtc/video/video_stream_decoder.cc b/webrtc/video/video_stream_decoder.cc
index a7688ce..6eea13f 100644
--- a/webrtc/video/video_stream_decoder.cc
+++ b/webrtc/video/video_stream_decoder.cc
@@ -76,10 +76,10 @@
 // thread may have held the lock when calling VideoDecoder::Decode, Reset, or
 // Release. Acquiring the same lock in the path of decode callback can deadlock.
 int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
-                                          rtc::Optional<uint8_t> qp,
-                                          VideoContentType content_type) {
-  receive_stats_callback_->OnDecodedFrame(qp, content_type);
+                                          rtc::Optional<uint8_t> qp) {
+  receive_stats_callback_->OnDecodedFrame(qp);
   incoming_video_stream_->OnFrame(video_frame);
+
   return 0;
 }
 
diff --git a/webrtc/video/video_stream_decoder.h b/webrtc/video/video_stream_decoder.h
index b670b12..4bca3ed 100644
--- a/webrtc/video/video_stream_decoder.h
+++ b/webrtc/video/video_stream_decoder.h
@@ -59,8 +59,7 @@
 
   // Implements VCMReceiveCallback.
   int32_t FrameToRender(VideoFrame& video_frame,
-                        rtc::Optional<uint8_t> qp,
-                        VideoContentType content_type) override;
+                        rtc::Optional<uint8_t> qp) override;
   int32_t ReceivedDecodedReferenceFrame(const uint64_t picture_id) override;
   void OnIncomingPayloadType(int payload_type) override;
   void OnDecoderImplementationName(const char* implementation_name) override;
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 47e58a1..3b0c16c 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -57,7 +57,6 @@
   size_t _length;
   size_t _size;
   VideoRotation rotation_ = kVideoRotation_0;
-  VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
   bool _completeFrame = false;
   AdaptReason adapt_reason_;
   int qp_ = -1;  // Quantizer value.