Don't copy video frame metadata in each encoder/decoder

This is now handled higher up the pipeline, in a single
place for all encoders and decoders.
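
For context, the per-frame metadata that is no longer copied in each codec
(rotation, NTP time, capture time, color space, content type, timing flags)
amounts to something like the sketch below. This is a hypothetical
illustration only; the helper name and include paths are assumptions, not
the actual upstream code that performs the centralized copy.

  #include "api/video/encoded_image.h"
  #include "api/video/video_frame.h"
  #include "api/video_codecs/video_codec.h"

  namespace webrtc {

  // Illustrative sketch: copy per-frame metadata from the raw input frame to
  // the encoded image in one place, so individual encoders do not repeat it.
  void CopyFrameMetadata(const VideoFrame& frame,
                         VideoCodecMode mode,
                         EncodedImage* encoded) {
    encoded->capture_time_ms_ = frame.render_time_ms();
    encoded->ntp_time_ms_ = frame.ntp_time_ms();
    encoded->rotation_ = frame.rotation();
    encoded->SetColorSpace(frame.color_space());
    encoded->content_type_ = (mode == VideoCodecMode::kScreensharing)
                                 ? VideoContentType::SCREENSHARE
                                 : VideoContentType::UNSPECIFIED;
    encoded->timing_.flags = VideoSendTiming::kInvalid;
  }

  }  // namespace webrtc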

Bug: webrtc:10460
Change-Id: I95b0a69aecaf07283c8776ac0d7e85d097e3576b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/139882
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28172}
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index bc86a34..d12de31 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -341,8 +341,6 @@
   VideoFrame decoded_frame = VideoFrame::Builder()
                                  .set_video_frame_buffer(decoded_buffer)
                                  .set_timestamp_rtp(input_image.Timestamp())
-                                 .set_rotation(input_image.rotation_)
-                                 .set_ntp_time_ms(input_image.ntp_time_ms_)
                                  .set_color_space(color_space)
                                  .build();
 
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 88b667c..2fc1ca9 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -501,15 +501,6 @@
     encoded_images_[i]._encodedWidth = configurations_[i].width;
     encoded_images_[i]._encodedHeight = configurations_[i].height;
     encoded_images_[i].SetTimestamp(input_frame.timestamp());
-    encoded_images_[i].ntp_time_ms_ = input_frame.ntp_time_ms();
-    encoded_images_[i].capture_time_ms_ = input_frame.render_time_ms();
-    encoded_images_[i].rotation_ = input_frame.rotation();
-    encoded_images_[i].SetColorSpace(input_frame.color_space());
-    encoded_images_[i].content_type_ =
-            (codec_.mode == VideoCodecMode::kScreensharing)
-            ? VideoContentType::SCREENSHARE
-            : VideoContentType::UNSPECIFIED;
-    encoded_images_[i].timing_.flags = VideoSendTiming::kInvalid;
     encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
     encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);
 
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
index 2d05feb..d4d9e34 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -265,8 +265,7 @@
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  ret = ReturnFrame(img, input_image.Timestamp(), input_image.ntp_time_ms_, qp,
-                    input_image.ColorSpace());
+  ret = ReturnFrame(img, input_image.Timestamp(), qp);
   if (ret != 0) {
     // Reset to avoid requesting key frames too often.
     if (ret < 0 && propagation_cnt_ > 0)
@@ -282,12 +281,9 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int LibvpxVp8Decoder::ReturnFrame(
-    const vpx_image_t* img,
-    uint32_t timestamp,
-    int64_t ntp_time_ms,
-    int qp,
-    const webrtc::ColorSpace* explicit_color_space) {
+int LibvpxVp8Decoder::ReturnFrame(const vpx_image_t* img,
+                                  uint32_t timestamp,
+                                  int qp) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -322,8 +318,6 @@
   VideoFrame decoded_image = VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_timestamp_rtp(timestamp)
-                                 .set_ntp_time_ms(ntp_time_ms)
-                                 .set_color_space(explicit_color_space)
                                  .build();
   decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp);
 
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
index 96c7222..47d54dc 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -48,11 +48,7 @@
 
  private:
   class QpSmoother;
-  int ReturnFrame(const vpx_image_t* img,
-                  uint32_t timeStamp,
-                  int64_t ntp_time_ms,
-                  int qp,
-                  const webrtc::ColorSpace* explicit_color_space);
+  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp, int qp);
   const bool use_postproc_arm_;
 
   I420BufferPool buffer_pool_;
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 61a5aa2..f740b80 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -1135,15 +1135,6 @@
       }
     }
     encoded_images_[encoder_idx].SetTimestamp(input_image.timestamp());
-    encoded_images_[encoder_idx].capture_time_ms_ =
-        input_image.render_time_ms();
-    encoded_images_[encoder_idx].rotation_ = input_image.rotation();
-    encoded_images_[encoder_idx].content_type_ =
-        (codec_.mode == VideoCodecMode::kScreensharing)
-            ? VideoContentType::SCREENSHARE
-            : VideoContentType::UNSPECIFIED;
-    encoded_images_[encoder_idx].timing_.flags = VideoSendTiming::kInvalid;
-    encoded_images_[encoder_idx].SetColorSpace(input_image.color_space());
     encoded_images_[encoder_idx].SetRetransmissionAllowed(
         retransmission_allowed);
 
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index 8648ba5..18acf02 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -1446,20 +1446,13 @@
 
   TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size());
   encoded_image_.SetTimestamp(input_image_->timestamp());
-  encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
-  encoded_image_.rotation_ = input_image_->rotation();
-  encoded_image_.content_type_ = (codec_.mode == VideoCodecMode::kScreensharing)
-                                     ? VideoContentType::SCREENSHARE
-                                     : VideoContentType::UNSPECIFIED;
   encoded_image_._encodedHeight =
       pkt->data.frame.height[layer_id.spatial_layer_id];
   encoded_image_._encodedWidth =
       pkt->data.frame.width[layer_id.spatial_layer_id];
-  encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
   int qp = -1;
   vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
   encoded_image_.qp_ = qp;
-  encoded_image_.SetColorSpace(input_image_->color_space());
 
   if (full_superframe_drop_) {
     const bool end_of_picture = encoded_image_.SpatialIndex().value_or(0) + 1 ==
@@ -1682,20 +1675,16 @@
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  int ret = ReturnFrame(img, input_image.Timestamp(), input_image.ntp_time_ms_,
-                        qp, input_image.ColorSpace());
+  int ret = ReturnFrame(img, input_image.Timestamp(), qp);
   if (ret != 0) {
     return ret;
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP9DecoderImpl::ReturnFrame(
-    const vpx_image_t* img,
-    uint32_t timestamp,
-    int64_t ntp_time_ms,
-    int qp,
-    const webrtc::ColorSpace* explicit_color_space) {
+int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
+                                uint32_t timestamp,
+                                int qp) {
   if (img == nullptr) {
     // Decoder OK and nullptr image => No show frame.
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -1739,16 +1728,9 @@
 
   auto builder = VideoFrame::Builder()
                      .set_video_frame_buffer(img_wrapped_buffer)
-                     .set_timestamp_ms(0)
                      .set_timestamp_rtp(timestamp)
-                     .set_ntp_time_ms(ntp_time_ms)
-                     .set_rotation(webrtc::kVideoRotation_0);
-  if (explicit_color_space) {
-    builder.set_color_space(*explicit_color_space);
-  } else {
-    builder.set_color_space(
-        ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth));
-  }
+                     .set_color_space(ExtractVP9ColorSpace(img->cs, img->range,
+                                                           img->bit_depth));
 
   VideoFrame decoded_image = builder.build();
 
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.h b/modules/video_coding/codecs/vp9/vp9_impl.h
index fb195a7..f58fc34 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.h
+++ b/modules/video_coding/codecs/vp9/vp9_impl.h
@@ -194,11 +194,7 @@
   const char* ImplementationName() const override;
 
  private:
-  int ReturnFrame(const vpx_image_t* img,
-                  uint32_t timestamp,
-                  int64_t ntp_time_ms,
-                  int qp,
-                  const webrtc::ColorSpace* explicit_color_space);
+  int ReturnFrame(const vpx_image_t* img, uint32_t timestamp, int qp);
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool frame_buffer_pool_;
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index 7c24b84..fa7641c 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -126,15 +126,10 @@
     // Write a counter to the image to make each frame unique.
     WriteCounter(encoded.data() + frame_info.layers[i].size - 4, counter);
     encoded.SetTimestamp(input_image.timestamp());
-    encoded.capture_time_ms_ = input_image.render_time_ms();
     encoded._frameType = frame_info.keyframe ? VideoFrameType::kVideoFrameKey
                                              : VideoFrameType::kVideoFrameDelta;
     encoded._encodedWidth = simulcast_streams[i].width;
     encoded._encodedHeight = simulcast_streams[i].height;
-    encoded.rotation_ = input_image.rotation();
-    encoded.content_type_ = (mode == VideoCodecMode::kScreensharing)
-                                ? VideoContentType::SCREENSHARE
-                                : VideoContentType::UNSPECIFIED;
     encoded.SetSpatialIndex(i);
     CodecSpecificInfo codec_specific;
     std::unique_ptr<RTPFragmentationHeader> fragmentation =