Change capture time format to nanoseconds in EncodedImage.

The millisecond field (EncodedImage.captureTimeMs) is deprecated in favor of
captureTimeNs and will be removed once the dependencies have been updated.

BUG=webrtc:7760

Review-Url: https://codereview.webrtc.org/3010623002
Cr-Original-Commit-Position: refs/heads/master@{#19622}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: e172d89f73775d81fa72d8aa18604fec30ce60d9
diff --git a/sdk/android/api/org/webrtc/EncodedImage.java b/sdk/android/api/org/webrtc/EncodedImage.java
index ec45f0a..7aef023 100644
--- a/sdk/android/api/org/webrtc/EncodedImage.java
+++ b/sdk/android/api/org/webrtc/EncodedImage.java
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
 
 /**
  * An encoded frame from a video stream. Used as an input for decoders and as an output for
@@ -26,18 +27,20 @@
   public final ByteBuffer buffer;
   public final int encodedWidth;
   public final int encodedHeight;
-  public final long captureTimeMs;
+  public final long captureTimeMs; // Deprecated
+  public final long captureTimeNs;
   public final FrameType frameType;
   public final int rotation;
   public final boolean completeFrame;
   public final Integer qp;
 
-  private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeMs,
+  private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
       FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
     this.buffer = buffer;
     this.encodedWidth = encodedWidth;
     this.encodedHeight = encodedHeight;
-    this.captureTimeMs = captureTimeMs;
+    this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+    this.captureTimeNs = captureTimeNs;
     this.frameType = frameType;
     this.rotation = rotation;
     this.completeFrame = completeFrame;
@@ -52,7 +55,7 @@
     private ByteBuffer buffer;
     private int encodedWidth;
     private int encodedHeight;
-    private long captureTimeMs;
+    private long captureTimeNs;
     private EncodedImage.FrameType frameType;
     private int rotation;
     private boolean completeFrame;
@@ -75,8 +78,14 @@
       return this;
     }
 
+    @Deprecated
     public Builder setCaptureTimeMs(long captureTimeMs) {
-      this.captureTimeMs = captureTimeMs;
+      this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+      return this;
+    }
+
+    public Builder setCaptureTimeNs(long captureTimeNs) {
+      this.captureTimeNs = captureTimeNs;
       return this;
     }
 
@@ -101,7 +110,7 @@
     }
 
     public EncodedImage createEncodedImage() {
-      return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeMs, frameType,
+      return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
           rotation, completeFrame, qp);
     }
   }
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
index 39a9672..b9aead5 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -80,7 +80,7 @@
 
     VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
 
-    final long presentationTimestampUs = 20000;
+    final long presentationTimestampNs = 20000;
     final CountDownLatch encodeDone = new CountDownLatch(1);
 
     VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@@ -89,7 +89,7 @@
         assertTrue(image.buffer.capacity() > 0);
         assertEquals(image.encodedWidth, SETTINGS.width);
         assertEquals(image.encodedHeight, SETTINGS.height);
-        assertEquals(image.captureTimeMs, presentationTimestampUs / 1000);
+        assertEquals(image.captureTimeNs, presentationTimestampNs);
         assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
         assertEquals(image.rotation, 0);
         assertTrue(image.completeFrame);
@@ -101,7 +101,7 @@
     assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
 
     VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
-    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000);
+    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
@@ -130,7 +130,7 @@
 
     VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
 
-    final long presentationTimestampUs = 20000;
+    final long presentationTimestampNs = 20000;
     final CountDownLatch encodeDone = new CountDownLatch(1);
 
     VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@@ -139,7 +139,7 @@
         assertTrue(image.buffer.capacity() > 0);
         assertEquals(image.encodedWidth, SETTINGS.width);
         assertEquals(image.encodedHeight, SETTINGS.height);
-        assertEquals(image.captureTimeMs, presentationTimestampUs / 1000);
+        assertEquals(image.captureTimeNs, presentationTimestampNs);
         assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
         assertEquals(image.rotation, 0);
         assertTrue(image.completeFrame);
@@ -193,7 +193,7 @@
         return null;
       }
     };
-    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000);
+    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
     VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
         new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
 
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
index e6f2cfd..7f7d538 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
@@ -20,6 +20,7 @@
 import java.nio.ByteBuffer;
 import java.util.concurrent.BlockingDeque;
 import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
 import org.webrtc.ThreadUtils.ThreadChecker;
 
 /** Android hardware video decoder. */
@@ -282,8 +283,8 @@
 
     frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
     try {
-      codec.queueInputBuffer(
-          index, 0 /* offset */, size, frame.captureTimeMs * 1000, 0 /* flags */);
+      codec.queueInputBuffer(index, 0 /* offset */, size,
+          TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "queueInputBuffer failed", e);
       frameInfos.pollLast();
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
index fc868c4..7bb3857 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -25,6 +25,7 @@
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.TimeUnit;
 
 /** Android hardware video encoder. */
 @TargetApi(19)
@@ -55,9 +56,9 @@
   private final int keyFrameIntervalSec;
   // Interval at which to force a key frame. Used to reduce color distortions caused by some
   // Qualcomm video encoders.
-  private final long forcedKeyFrameMs;
+  private final long forcedKeyFrameNs;
   // Presentation timestamp of the last requested (or forced) key frame.
-  private long lastKeyFrameMs;
+  private long lastKeyFrameNs;
 
   private final BitrateAdjuster bitrateAdjuster;
   private int adjustedBitrate;
@@ -125,7 +126,7 @@
       this.inputColorFormat = null;
     }
     this.keyFrameIntervalSec = keyFrameIntervalSec;
-    this.forcedKeyFrameMs = forceKeyFrameIntervalMs;
+    this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
     this.bitrateAdjuster = bitrateAdjuster;
     this.outputBuilders = new LinkedBlockingDeque<>();
     this.textureContext = textureContext;
@@ -150,7 +151,7 @@
 
     this.callback = callback;
 
-    lastKeyFrameMs = -1;
+    lastKeyFrameNs = -1;
 
     try {
       codec = MediaCodec.createByCodecName(codecName);
@@ -257,11 +258,8 @@
       }
     }
 
-    // Frame timestamp rounded to the nearest microsecond and millisecond.
-    long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
-    long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
-    if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) {
-      requestKeyFrame(presentationTimestampMs);
+    if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
+      requestKeyFrame(videoFrame.getTimestampNs());
     }
 
     VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
@@ -269,7 +267,7 @@
     // subsampled at one byte per four pixels.
     int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
     EncodedImage.Builder builder = EncodedImage.builder()
-                                       .setCaptureTimeMs(presentationTimestampMs)
+                                       .setCaptureTimeNs(videoFrame.getTimestampNs())
                                        .setCompleteFrame(true)
                                        .setEncodedWidth(videoFrame.getBuffer().getWidth())
                                        .setEncodedHeight(videoFrame.getBuffer().getHeight())
@@ -287,7 +285,7 @@
       if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
         Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
       }
-      return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize, presentationTimestampUs);
+      return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
     }
   }
 
@@ -321,8 +319,11 @@
     return VideoCodecStatus.OK;
   }
 
-  private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame,
-      VideoFrame.Buffer videoFrameBuffer, int bufferSize, long presentationTimestampUs) {
+  private VideoCodecStatus encodeByteBuffer(
+      VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+    // Frame timestamp rounded to the nearest microsecond.
+    long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
+
     // No timeout.  Don't block for an input buffer, drop frames if the encoder falls behind.
     int index;
     try {
@@ -397,11 +398,11 @@
     return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
   }
 
-  private boolean shouldForceKeyFrame(long presentationTimestampMs) {
-    return forcedKeyFrameMs > 0 && presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs;
+  private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+    return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
   }
 
-  private void requestKeyFrame(long presentationTimestampMs) {
+  private void requestKeyFrame(long presentationTimestampNs) {
     // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
     // indicate this in queueInputBuffer() below and guarantee _this_ frame
     // be encoded as a key frame, but sadly that flag is ignored.  Instead,
@@ -414,7 +415,7 @@
       Logging.e(TAG, "requestKeyFrame failed", e);
       return;
     }
-    lastKeyFrameMs = presentationTimestampMs;
+    lastKeyFrameNs = presentationTimestampNs;
   }
 
   private Thread createOutputThread() {
diff --git a/sdk/android/src/jni/videodecoderwrapper.cc b/sdk/android/src/jni/videodecoderwrapper.cc
index 536e874..23a8314 100644
--- a/sdk/android/src/jni/videodecoderwrapper.cc
+++ b/sdk/android/src/jni/videodecoderwrapper.cc
@@ -15,6 +15,7 @@
 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
 #include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
 #include "webrtc/rtc_base/logging.h"
+#include "webrtc/rtc_base/timeutils.h"
 #include "webrtc/sdk/android/src/jni/classreferenceholder.h"
 
 namespace webrtc {
@@ -127,7 +128,8 @@
   ScopedLocalRefFrame local_ref_frame(jni);
 
   FrameExtraInfo frame_extra_info;
-  frame_extra_info.capture_time_ms = input_image.capture_time_ms_;
+  frame_extra_info.capture_time_ns =
+      input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
   frame_extra_info.timestamp_rtp = input_image._timeStamp;
   frame_extra_info.qp =
       qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>();
@@ -174,7 +176,6 @@
                                          jobject jqp) {
   const jlong capture_time_ns =
       jni->CallLongMethod(jframe, video_frame_get_timestamp_ns_method_);
-  const uint32_t capture_time_ms = capture_time_ns / 1000 / 1000;
   FrameExtraInfo frame_extra_info;
   do {
     if (frame_extra_infos_.empty()) {
@@ -186,7 +187,7 @@
     frame_extra_infos_.pop_front();
     // If the decoder might drop frames so iterate through the queue until we
     // find a matching timestamp.
-  } while (frame_extra_info.capture_time_ms != capture_time_ms);
+  } while (frame_extra_info.capture_time_ns != capture_time_ns);
 
   VideoFrame frame = android_video_buffer_factory_.CreateFrame(
       jni, jframe, frame_extra_info.timestamp_rtp);
@@ -237,12 +238,12 @@
   if (image.qp_ != -1) {
     qp = jni->NewObject(*integer_class_, integer_constructor_, image.qp_);
   }
-  return jni->NewObject(*encoded_image_class_, encoded_image_constructor_,
-                        buffer, static_cast<jint>(image._encodedWidth),
-                        static_cast<jint>(image._encodedHeight),
-                        static_cast<jlong>(image.capture_time_ms_), frame_type,
-                        static_cast<jint>(image.rotation_),
-                        image._completeFrame, qp);
+  return jni->NewObject(
+      *encoded_image_class_, encoded_image_constructor_, buffer,
+      static_cast<jint>(image._encodedWidth),
+      static_cast<jint>(image._encodedHeight),
+      static_cast<jlong>(image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec),
+      frame_type, static_cast<jint>(image.rotation_), image._completeFrame, qp);
 }
 
 int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {
diff --git a/sdk/android/src/jni/videodecoderwrapper.h b/sdk/android/src/jni/videodecoderwrapper.h
index 19cf92f..c737456 100644
--- a/sdk/android/src/jni/videodecoderwrapper.h
+++ b/sdk/android/src/jni/videodecoderwrapper.h
@@ -58,7 +58,7 @@
 
  private:
   struct FrameExtraInfo {
-    uint32_t capture_time_ms;  // Used as an identifier of the frame.
+    uint64_t capture_time_ns;  // Used as an identifier of the frame.
 
     uint32_t timestamp_rtp;
     rtc::Optional<uint8_t> qp;