Propagate absolute capture time through the video receiving side.

Prototype link:
https://webrtc-review.googlesource.com/c/src/+/158520
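
With this change the receiver runs every received video packet through an
AbsoluteCaptureTimeReceiver before it reaches the packet buffer: when the
abs-capture-time header extension is absent, the timestamp is extrapolated
from the last packet seen for the same source (SSRC/CSRCs) and attached to the
RtpPacketInfo, so it reaches EncodedFrame::PacketInfos(). The estimated
capture clock offset is not propagated yet (see the TODO referencing
webrtc:10739).

As a rough consumer-side sketch (it mirrors the GetAbsoluteCaptureTimestamps()
helper added to rtp_video_stream_receiver_unittest.cc and is not part of the
production change; the function name is illustrative only), the propagated
timestamps can be read off a completed frame like this:

  // Sketch only: collects the absolute capture timestamps (64-bit NTP
  // Q32.32 values) that the receiver attached to the packets of a
  // completed frame.
  std::vector<uint64_t> CaptureTimestamps(
      const video_coding::EncodedFrame* frame) {
    std::vector<uint64_t> timestamps;
    for (const RtpPacketInfo& packet_info : frame->PacketInfos()) {
      if (packet_info.absolute_capture_time()) {
        timestamps.push_back(
            packet_info.absolute_capture_time()->absolute_capture_timestamp);
      }
    }
    return timestamps;
  }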

Bug: webrtc:10739
Change-Id: I8d30b729ac5bca484608af7f0378998987df7d53
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/160341
Commit-Queue: Ruslan Burakov <kuddai@webrtc.org>
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29939}
diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc
index 404d3f3..563aca2 100644
--- a/video/rtp_video_stream_receiver.cc
+++ b/video/rtp_video_stream_receiver.cc
@@ -214,7 +214,8 @@
       rtcp_feedback_buffer_(this, nack_sender, this),
       packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()),
       has_received_frame_(false),
-      frames_decryptable_(false) {
+      frames_decryptable_(false),
+      absolute_capture_time_receiver_(clock) {
   constexpr bool remb_candidate = true;
   if (packet_router_)
     packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate);
@@ -331,6 +332,18 @@
       rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()),
       clock_->TimeInMilliseconds());
 
+  // Try to extrapolate absolute capture time if it is missing.
+  // TODO(bugs.webrtc.org/10739): Add support for estimated capture clock
+  // offset.
+  packet.packet_info.set_absolute_capture_time(
+      absolute_capture_time_receiver_.OnReceivePacket(
+          AbsoluteCaptureTimeReceiver::GetSource(packet.packet_info.ssrc(),
+                                                 packet.packet_info.csrcs()),
+          packet.packet_info.rtp_timestamp(),
+          // Assume the RTP clock frequency is the same for all video frames.
+          kVideoPayloadTypeFrequency,
+          packet.packet_info.absolute_capture_time()));
+
   RTPVideoHeader& video_header = packet.video_header;
   video_header.rotation = kVideoRotation_0;
   video_header.content_type = VideoContentType::UNSPECIFIED;
diff --git a/video/rtp_video_stream_receiver.h b/video/rtp_video_stream_receiver.h
index 7021c3c..5bd5061 100644
--- a/video/rtp_video_stream_receiver.h
+++ b/video/rtp_video_stream_receiver.h
@@ -31,6 +31,7 @@
 #include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h"
 #include "modules/rtp_rtcp/source/rtp_packet_received.h"
 #include "modules/rtp_rtcp/source/rtp_video_header.h"
 #include "modules/video_coding/h264_sps_pps_tracker.h"
@@ -319,6 +320,9 @@
   std::atomic<bool> frames_decryptable_;
   absl::optional<ColorSpace> last_color_space_;
 
+  AbsoluteCaptureTimeReceiver absolute_capture_time_receiver_
+      RTC_GUARDED_BY(worker_task_checker_);
+
   int64_t last_completed_picture_id_ = 0;
 };
 
diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc
index e4a8545..f7e6269 100644
--- a/video/rtp_video_stream_receiver_unittest.cc
+++ b/video/rtp_video_stream_receiver_unittest.cc
@@ -36,6 +36,7 @@
 #include "test/gtest.h"
 
 using ::testing::_;
+using ::testing::ElementsAre;
 using ::testing::Invoke;
 using ::testing::SizeIs;
 using ::testing::Values;
@@ -46,6 +47,18 @@
 
 const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01};
 
+std::vector<uint64_t> GetAbsoluteCaptureTimestamps(
+    const video_coding::EncodedFrame* frame) {
+  std::vector<uint64_t> result;
+  for (const auto& packet_info : frame->PacketInfos()) {
+    if (packet_info.absolute_capture_time()) {
+      result.push_back(
+          packet_info.absolute_capture_time()->absolute_capture_timestamp);
+    }
+  }
+  return result;
+}
+
 class MockTransport : public Transport {
  public:
   MOCK_METHOD3(SendRtp,
@@ -342,6 +355,85 @@
                                                     video_header);
 }
 
+TEST_F(RtpVideoStreamReceiverTest, PacketInfoIsPropagatedIntoVideoFrames) {
+  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+  constexpr int kId0 = 1;
+
+  RtpHeaderExtensionMap extension_map;
+  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+  RtpPacketReceived rtp_packet(&extension_map);
+  RTPVideoHeader video_header;
+  const std::vector<uint8_t> data({1, 2, 3, 4});
+  rtp_packet.SetSequenceNumber(1);
+  rtp_packet.SetTimestamp(1);
+  rtp_packet.SetSsrc(kSsrc);
+  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
+      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+                          /*estimated_capture_clock_offset=*/absl::nullopt});
+
+  video_header.is_first_packet_in_frame = true;
+  video_header.is_last_packet_in_frame = true;
+  video_header.codec = kVideoCodecGeneric;
+  video_header.frame_type = VideoFrameType::kVideoFrameKey;
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+                                                           data.size());
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+      .WillOnce(Invoke(
+          [kAbsoluteCaptureTimestamp](video_coding::EncodedFrame* frame) {
+            EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame),
+                        ElementsAre(kAbsoluteCaptureTimestamp));
+          }));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+                                                    video_header);
+}
+
+TEST_F(RtpVideoStreamReceiverTest,
+       MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) {
+  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
+  constexpr int kId0 = 1;
+
+  RtpHeaderExtensionMap extension_map;
+  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
+  RtpPacketReceived rtp_packet(&extension_map);
+
+  RTPVideoHeader video_header;
+  const std::vector<uint8_t> data({1, 2, 3, 4});
+  uint16_t sequence_number = 1;
+  uint32_t rtp_timestamp = 1;
+  rtp_packet.SetSequenceNumber(sequence_number);
+  rtp_packet.SetTimestamp(rtp_timestamp);
+  rtp_packet.SetSsrc(kSsrc);
+  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
+      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
+                          /*estimated_capture_clock_offset=*/absl::nullopt});
+
+  video_header.is_first_packet_in_frame = true;
+  video_header.is_last_packet_in_frame = true;
+  video_header.codec = kVideoCodecGeneric;
+  video_header.frame_type = VideoFrameType::kVideoFrameKey;
+  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
+                                                           data.size());
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+                                                    video_header);
+
+  // RTP packet without absolute capture time.
+  rtp_packet = RtpPacketReceived(&extension_map);
+  rtp_packet.SetSequenceNumber(++sequence_number);
+  rtp_packet.SetTimestamp(++rtp_timestamp);
+  rtp_packet.SetSsrc(kSsrc);
+
+  // The second packet has no absolute capture time. Expect the RTP video
+  // stream receiver to extrapolate it for the resulting video frame from the
+  // absolute capture time of the previous packet.
+  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
+      .WillOnce(Invoke([](video_coding::EncodedFrame* frame) {
+        EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1));
+      }));
+  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
+                                                    video_header);
+}
+
 TEST_F(RtpVideoStreamReceiverTest, NoInfiniteRecursionOnEncapsulatedRedPacket) {
   const uint8_t kRedPayloadType = 125;
   VideoCodec codec;