Use microsecond timestamp in cricket::VideoFrame.

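cricket::VideoFrame now carries its timestamp in microseconds, on the
same monotonic timebase as rtc::TimeMicros(). The old nanosecond
accessors GetTimeStamp()/SetTimeStamp() remain as deprecated shims that
convert to and from the microsecond value. WebRtcVideoSendStream tracks
the first-frame timestamp with an rtc::Optional<int64_t> instead of a
zero sentinel, and FakeVideoCapturer seeds its timestamps from
rtc::TimeNanos() rather than the Unix epoch.

A rough sketch of the intended use of the new accessors (illustrative
only; dimensions and timestamp arithmetic are made up, and the usual
timeutils/I420Buffer includes are assumed):

  // Preferred constructor: buffer, rotation, then a microsecond
  // timestamp taken from the monotonic clock.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
      new rtc::RefCountedObject<webrtc::I420Buffer>(640, 480));
  cricket::WebRtcVideoFrame frame(buffer, webrtc::kVideoRotation_0,
                                  rtc::TimeMicros());

  int64_t t_us = frame.timestamp_us();

  // The deprecated nanosecond accessors convert through the microsecond
  // value, so sub-microsecond digits are truncated.
  frame.SetTimeStamp(t_us * rtc::kNumNanosecsPerMicrosec + 500);
  // frame.timestamp_us() is back to t_us; the extra 500 ns are dropped.
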
BUG=webrtc:5740

Committed: https://crrev.com/f30ba114bb33dd1d8643bc640dda2e0c86dbbd32
Cr-Commit-Position: refs/heads/master@{#12348}

Review URL: https://codereview.webrtc.org/1865283002

Cr-Original-Commit-Position: refs/heads/master@{#12358}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: b17712ff894b0bb06c36d12354f0a12def11ee81
diff --git a/media/base/fakevideocapturer.h b/media/base/fakevideocapturer.h
index 89dcf65..026bf80 100644
--- a/media/base/fakevideocapturer.h
+++ b/media/base/fakevideocapturer.h
@@ -31,7 +31,7 @@
  public:
   FakeVideoCapturer(bool is_screencast)
       : running_(false),
-        initial_unix_timestamp_(time(NULL) * rtc::kNumNanosecsPerSec),
+        initial_timestamp_(rtc::TimeNanos()),
         next_timestamp_(rtc::kNumNanosecsPerMillisec),
         is_screencast_(is_screencast),
         rotation_(webrtc::kVideoRotation_0) {
@@ -99,7 +99,7 @@
     frame.height = height;
     frame.fourcc = fourcc;
     frame.data_size = size;
-    frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
+    frame.time_stamp = initial_timestamp_ + next_timestamp_;
     next_timestamp_ += timestamp_interval;
 
     std::unique_ptr<char[]> data(new char[size]);
@@ -153,7 +153,7 @@
 
  private:
   bool running_;
-  int64_t initial_unix_timestamp_;
+  int64_t initial_timestamp_;
   int64_t next_timestamp_;
   const bool is_screencast_;
   webrtc::VideoRotation rotation_;
diff --git a/media/base/videobroadcaster.cc b/media/base/videobroadcaster.cc
index c9a2d99..18c38b7 100644
--- a/media/base/videobroadcaster.cc
+++ b/media/base/videobroadcaster.cc
@@ -99,13 +99,13 @@
   if (black_frame_ && black_frame_->width() == frame.width() &&
       black_frame_->height() == frame.height() &&
       black_frame_->rotation() == frame.rotation()) {
-    black_frame_->SetTimeStamp(frame.GetTimeStamp());
+    black_frame_->set_timestamp_us(frame.timestamp_us());
     return *black_frame_;
   }
   black_frame_.reset(new cricket::WebRtcVideoFrame(
-      new rtc::RefCountedObject<webrtc::I420Buffer>(
-          frame.width(), frame.height()),
-      frame.GetTimeStamp(), frame.rotation()));
+      new rtc::RefCountedObject<webrtc::I420Buffer>(frame.width(),
+                                                    frame.height()),
+      frame.rotation(), frame.timestamp_us()));
   black_frame_->SetToBlack();
   return *black_frame_;
 }
diff --git a/media/base/videobroadcaster_unittest.cc b/media/base/videobroadcaster_unittest.cc
index c6a4df0..0299d0e 100644
--- a/media/base/videobroadcaster_unittest.cc
+++ b/media/base/videobroadcaster_unittest.cc
@@ -135,14 +135,14 @@
   broadcaster.AddOrUpdateSink(&sink2, wants2);
 
   cricket::WebRtcVideoFrame frame1;
-  frame1.InitToBlack(100, 200, 10 /*ts*/);
+  frame1.InitToBlack(100, 200, 10000 /*ts*/);
   // Make it not all-black
   frame1.GetUPlane()[0] = 0;
   broadcaster.OnFrame(frame1);
   EXPECT_TRUE(sink1.black_frame());
-  EXPECT_EQ(10, sink1.timestamp());
+  EXPECT_EQ(10000, sink1.timestamp());
   EXPECT_FALSE(sink2.black_frame());
-  EXPECT_EQ(10, sink2.timestamp());
+  EXPECT_EQ(10000, sink2.timestamp());
 
   // Switch the sink wants.
   wants1.black_frames = false;
@@ -151,12 +151,12 @@
   broadcaster.AddOrUpdateSink(&sink2, wants2);
 
   cricket::WebRtcVideoFrame frame2;
-  frame2.InitToBlack(100, 200, 30 /*ts*/);
+  frame2.InitToBlack(100, 200, 30000 /*ts*/);
   // Make it not all-black
   frame2.GetUPlane()[0] = 0;
   broadcaster.OnFrame(frame2);
   EXPECT_FALSE(sink1.black_frame());
-  EXPECT_EQ(30, sink1.timestamp());
+  EXPECT_EQ(30000, sink1.timestamp());
   EXPECT_TRUE(sink2.black_frame());
-  EXPECT_EQ(30, sink2.timestamp());
+  EXPECT_EQ(30000, sink2.timestamp());
 }
diff --git a/media/base/videoframe.h b/media/base/videoframe.h
index d1ae9d7..9e0fbfd 100644
--- a/media/base/videoframe.h
+++ b/media/base/videoframe.h
@@ -57,8 +57,19 @@
   virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
       const = 0;
 
-  virtual int64_t GetTimeStamp() const = 0;
-  virtual void SetTimeStamp(int64_t time_stamp) = 0;
+  // System monotonic clock, same timebase as rtc::TimeMicros().
+  virtual int64_t timestamp_us() const = 0;
+  virtual void set_timestamp_us(int64_t time_us) = 0;
+
+  // Deprecated methods, for backwards compatibility.
+  // TODO(nisse): Delete when all usage in Chrome and other applications
+  // has been replaced.
+  virtual int64_t GetTimeStamp() const {
+    return rtc::kNumNanosecsPerMicrosec * timestamp_us();
+  }
+  virtual void SetTimeStamp(int64_t time_ns) {
+    set_timestamp_us(time_ns / rtc::kNumNanosecsPerMicrosec);
+  }
 
   // Indicates the rotation angle in degrees.
   virtual webrtc::VideoRotation rotation() const = 0;
@@ -137,8 +148,9 @@
                             int32_t dst_pitch_v) const;
 
   // Creates an empty frame.
-  virtual VideoFrame *CreateEmptyFrame(int w, int h,
-                                       int64_t time_stamp) const = 0;
+  virtual VideoFrame* CreateEmptyFrame(int w,
+                                       int h,
+                                       int64_t timestamp_us) const = 0;
   virtual void set_rotation(webrtc::VideoRotation rotation) = 0;
 };
 
diff --git a/media/engine/webrtcvideoengine2.cc b/media/engine/webrtcvideoengine2.cc
index bafa79d..4914d9f 100644
--- a/media/engine/webrtcvideoengine2.cc
+++ b/media/engine/webrtcvideoengine2.cc
@@ -1511,7 +1511,6 @@
       pending_encoder_reconfiguration_(false),
       allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
       sending_(false),
-      first_frame_timestamp_ms_(0),
       last_frame_timestamp_ms_(0) {
   parameters_.config.rtp.max_packet_size = kVideoMtu;
   parameters_.conference_mode = send_params.conference_mode;
@@ -1570,12 +1569,15 @@
   }
 
   int64_t frame_delta_ms = frame.GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
+
   // frame->GetTimeStamp() is essentially a delta, align to webrtc time
-  if (first_frame_timestamp_ms_ == 0) {
-    first_frame_timestamp_ms_ = rtc::Time() - frame_delta_ms;
+  if (!first_frame_timestamp_ms_) {
+    first_frame_timestamp_ms_ =
+        rtc::Optional<int64_t>(rtc::Time() - frame_delta_ms);
   }
 
-  last_frame_timestamp_ms_ = first_frame_timestamp_ms_ + frame_delta_ms;
+  last_frame_timestamp_ms_ = *first_frame_timestamp_ms_ + frame_delta_ms;
+
   video_frame.set_render_time_ms(last_frame_timestamp_ms_);
   // Reconfigure codec if necessary.
   SetDimensions(video_frame.width(), video_frame.height());
@@ -1605,7 +1607,7 @@
 
     // Reset timestamps to realign new incoming frames to a webrtc timestamp. A
     // new capturer may have a different timestamp delta than the previous one.
-    first_frame_timestamp_ms_ = 0;
+    first_frame_timestamp_ms_ = rtc::Optional<int64_t>();
 
     if (source == NULL) {
       if (stream_ != NULL) {
@@ -2389,8 +2391,8 @@
   last_height_ = frame.height();
 
   const WebRtcVideoFrame render_frame(
-      frame.video_frame_buffer(),
-      frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
+      frame.video_frame_buffer(), frame.rotation(),
+      frame.render_time_ms() * rtc::kNumMicrosecsPerMillisec);
   sink_->OnFrame(render_frame);
 }
 
diff --git a/media/engine/webrtcvideoengine2.h b/media/engine/webrtcvideoengine2.h
index b9ab0b7..438953b 100644
--- a/media/engine/webrtcvideoengine2.h
+++ b/media/engine/webrtcvideoengine2.h
@@ -395,7 +395,7 @@
 
     // The timestamp of the first frame received
     // Used to generate the timestamps of subsequent frames
-    int64_t first_frame_timestamp_ms_ GUARDED_BY(lock_);
+    rtc::Optional<int64_t> first_frame_timestamp_ms_ GUARDED_BY(lock_);
 
     // The timestamp of the last frame received
     // Used to generate timestamp for the black frame when source is removed
diff --git a/media/engine/webrtcvideoengine2_unittest.cc b/media/engine/webrtcvideoengine2_unittest.cc
index c0bb461..d765f82 100644
--- a/media/engine/webrtcvideoengine2_unittest.cc
+++ b/media/engine/webrtcvideoengine2_unittest.cc
@@ -512,8 +512,8 @@
   std::unique_ptr<char[]> data(new char[frame.data_size]);
   frame.data = data.get();
   memset(frame.data, 1, frame.data_size);
-  const int kInitialTimestamp = 123456;
-  frame.time_stamp = kInitialTimestamp;
+  int64_t initial_timestamp = rtc::TimeNanos();
+  frame.time_stamp = initial_timestamp;
 
   // Deliver initial frame.
   capturer1.SignalCapturedFrame(&frame);
@@ -531,7 +531,7 @@
   rtc::Thread::Current()->SleepMs(1);
   // Deliver with a timestamp (10 seconds) before the previous initial one,
   // these should not be related at all anymore and it should still work fine.
-  frame.time_stamp = kInitialTimestamp - 10000;
+  frame.time_stamp = initial_timestamp - 10 * rtc::kNumNanosecsPerSec;
   capturer2.SignalCapturedFrame(&frame);
 
   // New timestamp should be at least 1ms in the future and not old.
diff --git a/media/engine/webrtcvideoframe.cc b/media/engine/webrtcvideoframe.cc
index b7d87a5..9c18235 100644
--- a/media/engine/webrtcvideoframe.cc
+++ b/media/engine/webrtcvideoframe.cc
@@ -22,18 +22,24 @@
 
 namespace cricket {
 
-WebRtcVideoFrame::WebRtcVideoFrame():
-    time_stamp_ns_(0),
-    rotation_(webrtc::kVideoRotation_0) {}
+WebRtcVideoFrame::WebRtcVideoFrame()
+    : timestamp_us_(0), rotation_(webrtc::kVideoRotation_0) {}
+
+WebRtcVideoFrame::WebRtcVideoFrame(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+    webrtc::VideoRotation rotation,
+    int64_t timestamp_us)
+    : video_frame_buffer_(buffer),
+      timestamp_us_(timestamp_us),
+      rotation_(rotation) {}
 
 WebRtcVideoFrame::WebRtcVideoFrame(
     const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
     int64_t time_stamp_ns,
     webrtc::VideoRotation rotation)
-    : video_frame_buffer_(buffer),
-      time_stamp_ns_(time_stamp_ns),
-      rotation_(rotation) {
-}
+    : WebRtcVideoFrame(buffer,
+                       rotation,
+                       time_stamp_ns / rtc::kNumNanosecsPerMicrosec) {}
 
 WebRtcVideoFrame::~WebRtcVideoFrame() {}
 
@@ -47,7 +53,7 @@
                             int64_t time_stamp_ns,
                             webrtc::VideoRotation rotation) {
   return Reset(format, w, h, dw, dh, sample, sample_size,
-               time_stamp_ns, rotation,
+               time_stamp_ns / rtc::kNumNanosecsPerMicrosec, rotation,
                true /*apply_rotation*/);
 }
 
@@ -55,7 +61,7 @@
                             bool apply_rotation) {
   return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
                static_cast<uint8_t*>(frame->data), frame->data_size,
-               frame->time_stamp,
+               frame->time_stamp / rtc::kNumNanosecsPerMicrosec,
                frame->rotation, apply_rotation);
 }
 
@@ -126,9 +132,7 @@
 }
 
 VideoFrame* WebRtcVideoFrame::Copy() const {
-  WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
-      video_frame_buffer_, time_stamp_ns_, rotation_);
-  return new_frame;
+  return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_);
 }
 
 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
@@ -147,7 +151,7 @@
                              int dh,
                              uint8_t* sample,
                              size_t sample_size,
-                             int64_t time_stamp_ns,
+                             int64_t timestamp_us,
                              webrtc::VideoRotation rotation,
                              bool apply_rotation) {
   if (!Validate(format, w, h, sample, sample_size)) {
@@ -166,8 +170,7 @@
     new_height = dw;
   }
 
-  InitToEmptyBuffer(new_width, new_height,
-                    time_stamp_ns);
+  InitToEmptyBuffer(new_width, new_height);
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
 
   int horiz_crop = ((w - dw) / 2) & ~1;
@@ -192,21 +195,27 @@
                   << " return code : " << r;
     return false;
   }
+  timestamp_us_ = timestamp_us;
   return true;
 }
 
-VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
-    int w, int h,
-    int64_t time_stamp_ns) const {
+VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(int w,
+                                               int h,
+                                               int64_t timestamp_us) const {
   WebRtcVideoFrame* frame = new WebRtcVideoFrame();
-  frame->InitToEmptyBuffer(w, h, time_stamp_ns);
+  frame->InitToEmptyBuffer(w, h, rtc::kNumNanosecsPerMicrosec * timestamp_us);
   return frame;
 }
 
+void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
+  video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
+  rotation_ = webrtc::kVideoRotation_0;
+}
+
 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h,
                                          int64_t time_stamp_ns) {
   video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
-  time_stamp_ns_ = time_stamp_ns;
+  SetTimeStamp(time_stamp_ns);
   rotation_ = webrtc::kVideoRotation_0;
 }
 
@@ -237,8 +246,8 @@
     rotated_height = orig_width;
   }
 
-  rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
-                                        GetTimeStamp()));
+  rotated_frame_.reset(
+      CreateEmptyFrame(rotated_width, rotated_height, timestamp_us_));
 
   // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
   // VideoRotation to libyuv::RotationMode.
diff --git a/media/engine/webrtcvideoframe.h b/media/engine/webrtcvideoframe.h
index 0e1d809..a2034ec 100644
--- a/media/engine/webrtcvideoframe.h
+++ b/media/engine/webrtcvideoframe.h
@@ -27,6 +27,13 @@
 class WebRtcVideoFrame : public VideoFrame {
  public:
   WebRtcVideoFrame();
+
+  // Preferred construction, with microsecond timestamp.
+  WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+                   webrtc::VideoRotation rotation,
+                   int64_t timestamp_us);
+
+  // TODO(nisse): Deprecate/delete.
   WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
                    int64_t time_stamp_ns,
                    webrtc::VideoRotation rotation);
@@ -47,8 +54,13 @@
             int64_t time_stamp_ns,
             webrtc::VideoRotation rotation);
 
+  // The timestamp of the captured frame is expected to use the same
+  // timescale and epoch as rtc::Time.
+  // TODO(nisse): Consider adding a warning message, or even an RTC_DCHECK, if
+  // the time is too far off.
   bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
 
+  void InitToEmptyBuffer(int w, int h);
   void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
 
   bool InitToBlack(int w, int h, int64_t time_stamp_ns);
@@ -69,10 +81,9 @@
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
       const override;
 
-  int64_t GetTimeStamp() const override { return time_stamp_ns_; }
-  void SetTimeStamp(int64_t time_stamp_ns) override {
-    time_stamp_ns_ = time_stamp_ns;
-  }
+  // System monotonic clock, same timebase as rtc::TimeMicros().
+  int64_t timestamp_us() const override { return timestamp_us_; }
+  void set_timestamp_us(int64_t time_us) override { timestamp_us_ = time_us; }
 
   webrtc::VideoRotation rotation() const override { return rotation_; }
 
@@ -95,15 +106,15 @@
   // |dh| is destination height, like |dw|, but must be a positive number.
   // Returns whether the function succeeded or failed.
   bool Reset(uint32_t format,
-                     int w,
-                     int h,
-                     int dw,
-                     int dh,
-                     uint8_t* sample,
-                     size_t sample_size,
-                     int64_t time_stamp_ns,
-                     webrtc::VideoRotation rotation,
-                     bool apply_rotation);
+             int w,
+             int h,
+             int dw,
+             int dh,
+             uint8_t* sample,
+             size_t sample_size,
+             int64_t timestamp_us,
+             webrtc::VideoRotation rotation,
+             bool apply_rotation);
 
  private:
   VideoFrame* CreateEmptyFrame(int w, int h,
@@ -111,7 +122,7 @@
 
   // An opaque reference counted handle that stores the pixel data.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
-  int64_t time_stamp_ns_;
+  int64_t timestamp_us_;
   webrtc::VideoRotation rotation_;
 
   // This is mutable as the calculation is expensive but once calculated, it
diff --git a/media/engine/webrtcvideoframe_unittest.cc b/media/engine/webrtcvideoframe_unittest.cc
index 12c8bfe..7849de6 100644
--- a/media/engine/webrtcvideoframe_unittest.cc
+++ b/media/engine/webrtcvideoframe_unittest.cc
@@ -20,6 +20,7 @@
 
 class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
  public:
+  // The ApplyRotationToFrame test needs this as a public method.
   using cricket::WebRtcVideoFrame::set_rotation;
 
   virtual VideoFrame* CreateEmptyFrame(int w,
@@ -47,7 +48,7 @@
     // Build the CapturedFrame.
     cricket::CapturedFrame captured_frame;
     captured_frame.fourcc = cricket::FOURCC_I420;
-    captured_frame.time_stamp = 5678;
+    captured_frame.time_stamp = rtc::TimeNanos();
     captured_frame.rotation = frame_rotation;
     captured_frame.width = frame_width;
     captured_frame.height = frame_height;
@@ -66,7 +67,8 @@
                    apply_rotation));
 
     // Verify the new frame.
-    EXPECT_EQ(5678, frame.GetTimeStamp());
+    EXPECT_EQ(captured_frame.time_stamp / rtc::kNumNanosecsPerMicrosec,
+              frame.timestamp_us());
     if (apply_rotation)
       EXPECT_EQ(webrtc::kVideoRotation_0, frame.rotation());
     else
@@ -271,13 +273,16 @@
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
           dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
+  // The ns-to-us conversion drops the last three digits of the timestamp.
+  cricket::WebRtcVideoFrame frame(buffer, 20000, webrtc::kVideoRotation_0);
   EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
   EXPECT_EQ(640, frame.width());
   EXPECT_EQ(480, frame.height());
-  EXPECT_EQ(200, frame.GetTimeStamp());
-  frame.SetTimeStamp(400);
-  EXPECT_EQ(400, frame.GetTimeStamp());
+  EXPECT_EQ(20000, frame.GetTimeStamp());
+  EXPECT_EQ(20, frame.timestamp_us());
+  frame.set_timestamp_us(40);
+  EXPECT_EQ(40000, frame.GetTimeStamp());
+  EXPECT_EQ(40, frame.timestamp_us());
 }
 
 TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
@@ -286,12 +291,14 @@
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
           dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
+  // The ns-to-us conversion drops the last three digits of the timestamp.
+  cricket::WebRtcVideoFrame frame1(buffer, 20000, webrtc::kVideoRotation_0);
   cricket::VideoFrame* frame2 = frame1.Copy();
   EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
   EXPECT_EQ(frame1.width(), frame2->width());
   EXPECT_EQ(frame1.height(), frame2->height());
   EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
+  EXPECT_EQ(frame1.timestamp_us(), frame2->timestamp_us());
   delete frame2;
 }
 
diff --git a/media/engine/webrtcvideoframefactory_unittest.cc b/media/engine/webrtcvideoframefactory_unittest.cc
index 45de453..dd7e7d6 100644
--- a/media/engine/webrtcvideoframefactory_unittest.cc
+++ b/media/engine/webrtcvideoframefactory_unittest.cc
@@ -29,7 +29,7 @@
     captured_frame_.fourcc = cricket::FOURCC_I420;
     captured_frame_.pixel_width = 1;
     captured_frame_.pixel_height = 1;
-    captured_frame_.time_stamp = 5678;
+    captured_frame_.time_stamp = rtc::TimeNanos();
     captured_frame_.rotation = frame_rotation;
     captured_frame_.width = frame_width;
     captured_frame_.height = frame_height;