Introduce VideoFrame::id to keep track of frames inside the application.

Also switch WebRTC code from the deprecated VideoFrame constructors to the builder API.
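
For reference, a minimal sketch of the builder path introduced here, including the new set_id() setter. The buffer dimensions and the id value 42 are arbitrary; it assumes api/video/video_frame.h and api/video/i420_buffer.h are included:

  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(640, 480);
  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                 .set_video_frame_buffer(buffer)
                                 .set_rotation(webrtc::kVideoRotation_0)
                                 .set_timestamp_us(rtc::TimeMicros())
                                 .set_id(42)  // Application-chosen id.
                                 .build();
  // frame.id() now returns 42; it returns 0 when no id was set.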

Change-Id: Ie325bf1e9b4ff1e413fef3431ced8ed9ff725107
Bug: webrtc:10138
Reviewed-on: https://webrtc-review.googlesource.com/c/114422
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26132}
diff --git a/api/video/video_frame.cc b/api/video/video_frame.cc
index eaae33b..c80ee8b 100644
--- a/api/video/video_frame.cc
+++ b/api/video/video_frame.cc
@@ -20,7 +20,7 @@
 VideoFrame::Builder::~Builder() = default;
 
 VideoFrame VideoFrame::Builder::build() {
-  return VideoFrame(video_frame_buffer_, timestamp_us_, timestamp_rtp_,
+  return VideoFrame(id_, video_frame_buffer_, timestamp_us_, timestamp_rtp_,
                     ntp_time_ms_, rotation_, color_space_);
 }
 
@@ -71,6 +71,11 @@
   return *this;
 }
 
+VideoFrame::Builder& VideoFrame::Builder::set_id(uint16_t id) {
+  id_ = id;
+  return *this;
+}
+
 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                        webrtc::VideoRotation rotation,
                        int64_t timestamp_us)
@@ -92,13 +97,15 @@
   RTC_DCHECK(buffer);
 }
 
-VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+VideoFrame::VideoFrame(uint16_t id,
+                       const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                        int64_t timestamp_us,
                        uint32_t timestamp_rtp,
                        int64_t ntp_time_ms,
                        VideoRotation rotation,
                        const absl::optional<ColorSpace>& color_space)
-    : video_frame_buffer_(buffer),
+    : id_(id),
+      video_frame_buffer_(buffer),
       timestamp_rtp_(timestamp_rtp),
       ntp_time_ms_(ntp_time_ms),
       timestamp_us_(timestamp_us),
diff --git a/api/video/video_frame.h b/api/video/video_frame.h
index cc61fee..728d818 100644
--- a/api/video/video_frame.h
+++ b/api/video/video_frame.h
@@ -41,8 +41,10 @@
     Builder& set_rotation(VideoRotation rotation);
     Builder& set_color_space(const ColorSpace& color_space);
     Builder& set_color_space(const ColorSpace* color_space);
+    Builder& set_id(uint16_t id);
 
    private:
+    uint16_t id_ = 0;
     rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
     int64_t timestamp_us_ = 0;
     uint32_t timestamp_rtp_ = 0;
@@ -75,6 +77,15 @@
   // Get frame size in pixels.
   uint32_t size() const;
 
+  // Get frame ID. Returns 0 if ID is not set. The ID is not guaranteed to be
+  // transferred from the sender to the receiver, but it is preserved within a
+  // single side. It should be propagated through all modifications of the
+  // frame during its lifetime, from capture to sending as an encoded image.
+  // It is intended to be unique over a time window of a few minutes for the
+  // peer connection to which the corresponding video stream belongs.
+  uint16_t id() const { return id_; }
+  void set_id(uint16_t id) { id_ = id; }
+
   // System monotonic clock, same timebase as rtc::TimeMicros().
   int64_t timestamp_us() const { return timestamp_us_; }
   void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; }
@@ -138,13 +149,15 @@
   }
 
  private:
-  VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+  VideoFrame(uint16_t id,
+             const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
              int64_t timestamp_us,
              uint32_t timestamp_rtp,
              int64_t ntp_time_ms,
              VideoRotation rotation,
              const absl::optional<ColorSpace>& color_space);
 
+  uint16_t id_;
   // An opaque reference counted handle that stores the pixel data.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
   uint32_t timestamp_rtp_;
diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn
index fced560..acb77cc 100644
--- a/api/video_codecs/test/BUILD.gn
+++ b/api/video_codecs/test/BUILD.gn
@@ -37,6 +37,7 @@
       "../../video:video_frame",
       "../../video:video_frame_i420",
       "//testing/gtest",
+      "//third_party/abseil-cpp/absl/memory:memory",
     ]
   }
 }
diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
index ddaabd0..9fd5aed 100644
--- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
+++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
@@ -14,6 +14,7 @@
 #include <string>
 #include <vector>
 
+#include "absl/memory/memory.h"
 #include "absl/types/optional.h"
 #include "api/test/mock_video_encoder.h"
 #include "api/video/encoded_image.h"
@@ -182,8 +183,12 @@
   I420Buffer::SetBlack(buffer);
   std::vector<FrameType> types(1, kVideoFrameKey);
 
-  frame_.reset(
-      new VideoFrame(buffer, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
+  frame_ =
+      absl::make_unique<VideoFrame>(VideoFrame::Builder()
+                                        .set_video_frame_buffer(buffer)
+                                        .set_rotation(webrtc::kVideoRotation_0)
+                                        .set_timestamp_us(0)
+                                        .build());
   EXPECT_EQ(expected_ret, fallback_wrapper_->Encode(*frame_, nullptr, &types));
 }
 
diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn
index 14755af..fc36ff8 100644
--- a/common_video/BUILD.gn
+++ b/common_video/BUILD.gn
@@ -102,6 +102,7 @@
       "../test:test_support",
       "../test:video_test_common",
       "//testing/gtest",
+      "//third_party/abseil-cpp/absl/memory:memory",
       "//third_party/libyuv",
     ]
 
diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc
index 79e7cb6..9f2c241 100644
--- a/common_video/libyuv/libyuv_unittest.cc
+++ b/common_video/libyuv/libyuv_unittest.cc
@@ -13,6 +13,7 @@
 
 #include <memory>
 
+#include "absl/memory/memory.h"
 #include "api/video/i420_buffer.h"
 #include "api/video/video_frame.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
@@ -66,7 +67,12 @@
   rtc::scoped_refptr<I420BufferInterface> buffer(
       test::ReadI420Buffer(width_, height_, source_file_));
 
-  orig_frame_.reset(new VideoFrame(buffer, kVideoRotation_0, 0));
+  orig_frame_ =
+      absl::make_unique<VideoFrame>(VideoFrame::Builder()
+                                        .set_video_frame_buffer(buffer)
+                                        .set_rotation(webrtc::kVideoRotation_0)
+                                        .set_timestamp_us(0)
+                                        .build());
 }
 
 void TestLibYuv::TearDown() {
diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc
index 9d01339..6a2c452 100644
--- a/common_video/video_frame_unittest.cc
+++ b/common_video/video_frame_unittest.cc
@@ -270,9 +270,12 @@
 }  // namespace
 
 TEST(TestVideoFrame, WidthHeightValues) {
-  VideoFrame frame(I420Buffer::Create(10, 10, 10, 14, 90),
-                   webrtc::kVideoRotation_0,
-                   789 * rtc::kNumMicrosecsPerMillisec);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(10, 10, 10, 14, 90))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_ms(789)
+          .build();
   const int valid_value = 10;
   EXPECT_EQ(valid_value, frame.width());
   EXPECT_EQ(valid_value, frame.height());
@@ -304,9 +307,13 @@
   memset(buffer_u, 8, kSizeU);
   memset(buffer_v, 4, kSizeV);
 
-  VideoFrame frame1(I420Buffer::Copy(width, height, buffer_y, stride_y,
-                                     buffer_u, stride_u, buffer_v, stride_v),
-                    kRotation, 0);
+  VideoFrame frame1 = VideoFrame::Builder()
+                          .set_video_frame_buffer(I420Buffer::Copy(
+                              width, height, buffer_y, stride_y, buffer_u,
+                              stride_u, buffer_v, stride_v))
+                          .set_rotation(kRotation)
+                          .set_timestamp_us(0)
+                          .build();
   frame1.set_timestamp(timestamp);
   frame1.set_ntp_time_ms(ntp_time_ms);
   frame1.set_timestamp_us(timestamp_us);
diff --git a/media/base/adaptedvideotracksource.cc b/media/base/adaptedvideotracksource.cc
index ff7a1dd..c0a4d36 100644
--- a/media/base/adaptedvideotracksource.cc
+++ b/media/base/adaptedvideotracksource.cc
@@ -54,9 +54,15 @@
   if (apply_rotation() && frame.rotation() != webrtc::kVideoRotation_0 &&
       buffer->type() == webrtc::VideoFrameBuffer::Type::kI420) {
     /* Apply pending rotation. */
-    broadcaster_.OnFrame(webrtc::VideoFrame(
-        webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
-        webrtc::kVideoRotation_0, frame.timestamp_us()));
+    webrtc::VideoFrame rotated_frame =
+        webrtc::VideoFrame::Builder()
+            .set_video_frame_buffer(webrtc::I420Buffer::Rotate(
+                *buffer->GetI420(), frame.rotation()))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(frame.timestamp_us())
+            .set_id(frame.id())
+            .build();
+    broadcaster_.OnFrame(rotated_frame);
   } else {
     broadcaster_.OnFrame(frame);
   }
diff --git a/media/base/fakeframesource.cc b/media/base/fakeframesource.cc
index 7665c14..5f829af 100644
--- a/media/base/fakeframesource.cc
+++ b/media/base/fakeframesource.cc
@@ -77,8 +77,11 @@
       webrtc::I420Buffer::Create(width, height));
 
   buffer->InitializeData();
-  webrtc::VideoFrame frame =
-      webrtc::VideoFrame(buffer, rotation, next_timestamp_us_);
+  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+                                 .set_video_frame_buffer(buffer)
+                                 .set_rotation(rotation)
+                                 .set_timestamp_us(next_timestamp_us_)
+                                 .build();
 
   next_timestamp_us_ += interval_us;
   return frame;
diff --git a/media/base/fakevideocapturer.cc b/media/base/fakevideocapturer.cc
index 46eb16a..c604387 100644
--- a/media/base/fakevideocapturer.cc
+++ b/media/base/fakevideocapturer.cc
@@ -90,8 +90,14 @@
         webrtc::I420Buffer::Create(adapted_width, adapted_height));
     buffer->InitializeData();
 
-    OnFrame(webrtc::VideoFrame(buffer, frame.rotation(), frame.timestamp_us()),
-            frame.width(), frame.height());
+    webrtc::VideoFrame adapted_frame =
+        webrtc::VideoFrame::Builder()
+            .set_video_frame_buffer(buffer)
+            .set_rotation(frame.rotation())
+            .set_timestamp_us(frame.timestamp_us())
+            .set_id(frame.id())
+            .build();
+    OnFrame(adapted_frame, frame.width(), frame.height());
   }
 
   return true;
diff --git a/media/base/videobroadcaster.cc b/media/base/videobroadcaster.cc
index 125cf17..4ee6045 100644
--- a/media/base/videobroadcaster.cc
+++ b/media/base/videobroadcaster.cc
@@ -63,9 +63,15 @@
       continue;
     }
     if (sink_pair.wants.black_frames) {
-      sink_pair.sink->OnFrame(
-          webrtc::VideoFrame(GetBlackFrameBuffer(frame.width(), frame.height()),
-                             frame.rotation(), frame.timestamp_us()));
+      webrtc::VideoFrame black_frame =
+          webrtc::VideoFrame::Builder()
+              .set_video_frame_buffer(
+                  GetBlackFrameBuffer(frame.width(), frame.height()))
+              .set_rotation(frame.rotation())
+              .set_timestamp_us(frame.timestamp_us())
+              .set_id(frame.id())
+              .build();
+      sink_pair.sink->OnFrame(black_frame);
     } else {
       sink_pair.sink->OnFrame(frame);
     }
diff --git a/media/base/videobroadcaster_unittest.cc b/media/base/videobroadcaster_unittest.cc
index 66cdbba..5b191a7 100644
--- a/media/base/videobroadcaster_unittest.cc
+++ b/media/base/videobroadcaster_unittest.cc
@@ -49,7 +49,11 @@
   // Initialize, to avoid warnings on use of initialized values.
   webrtc::I420Buffer::SetBlack(buffer);
 
-  webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
+  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+                                 .set_video_frame_buffer(buffer)
+                                 .set_rotation(webrtc::kVideoRotation_0)
+                                 .set_timestamp_us(0)
+                                 .build();
 
   broadcaster.OnFrame(frame);
   EXPECT_EQ(1, sink1.num_rendered_frames());
@@ -172,8 +176,11 @@
   // Makes it not all black.
   buffer->InitializeData();
 
-  webrtc::VideoFrame frame1(buffer, webrtc::kVideoRotation_0,
-                            10 /* timestamp_us */);
+  webrtc::VideoFrame frame1 = webrtc::VideoFrame::Builder()
+                                  .set_video_frame_buffer(buffer)
+                                  .set_rotation(webrtc::kVideoRotation_0)
+                                  .set_timestamp_us(10)
+                                  .build();
   broadcaster.OnFrame(frame1);
   EXPECT_TRUE(sink1.black_frame());
   EXPECT_EQ(10, sink1.timestamp_us());
@@ -186,8 +193,11 @@
   wants2.black_frames = true;
   broadcaster.AddOrUpdateSink(&sink2, wants2);
 
-  webrtc::VideoFrame frame2(buffer, webrtc::kVideoRotation_0,
-                            30 /* timestamp_us */);
+  webrtc::VideoFrame frame2 = webrtc::VideoFrame::Builder()
+                                  .set_video_frame_buffer(buffer)
+                                  .set_rotation(webrtc::kVideoRotation_0)
+                                  .set_timestamp_us(30)
+                                  .build();
   broadcaster.OnFrame(frame2);
   EXPECT_FALSE(sink1.black_frame());
   EXPECT_EQ(30, sink1.timestamp_us());
diff --git a/media/base/videocapturer.cc b/media/base/videocapturer.cc
index 03a6b96..20dcf4e 100644
--- a/media/base/videocapturer.cc
+++ b/media/base/videocapturer.cc
@@ -227,9 +227,15 @@
       RTC_LOG(LS_WARNING) << "Non-I420 frame requiring rotation. Discarding.";
       return;
     }
-    broadcaster_.OnFrame(webrtc::VideoFrame(
-        webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
-        webrtc::kVideoRotation_0, frame.timestamp_us()));
+    webrtc::VideoFrame rotated_frame =
+        webrtc::VideoFrame::Builder()
+            .set_video_frame_buffer(webrtc::I420Buffer::Rotate(
+                *buffer->GetI420(), frame.rotation()))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(frame.timestamp_us())
+            .set_id(frame.id())
+            .build();
+    broadcaster_.OnFrame(rotated_frame);
   } else {
     broadcaster_.OnFrame(frame);
   }
diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc
index ca35362..563ca45 100644
--- a/media/engine/simulcast_encoder_adapter.cc
+++ b/media/engine/simulcast_encoder_adapter.cc
@@ -409,10 +409,14 @@
                         dst_buffer->StrideV(), dst_width, dst_height,
                         libyuv::kFilterBilinear);
 
+      VideoFrame frame = VideoFrame::Builder()
+                             .set_video_frame_buffer(dst_buffer)
+                             .set_timestamp_rtp(input_image.timestamp())
+                             .set_rotation(webrtc::kVideoRotation_0)
+                             .set_timestamp_ms(input_image.render_time_ms())
+                             .build();
       int ret = streaminfos_[stream_idx].encoder->Encode(
-          VideoFrame(dst_buffer, input_image.timestamp(),
-                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
-          codec_specific_info, &stream_frame_types);
+          frame, codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index 02f40c6..741e3fc 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -539,7 +539,12 @@
 
   // Input data.
   rtc::scoped_refptr<VideoFrameBuffer> buffer(I420Buffer::Create(1280, 720));
-  VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(buffer)
+                               .set_timestamp_rtp(100)
+                               .set_timestamp_ms(1000)
+                               .set_rotation(kVideoRotation_180)
+                               .build();
   std::vector<FrameType> frame_types;
 
   // Encode with three streams.
@@ -864,7 +869,12 @@
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer(
       new rtc::RefCountedObject<FakeNativeBufferNoI420>(1280, 720));
-  VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(buffer)
+                               .set_timestamp_rtp(100)
+                               .set_timestamp_ms(1000)
+                               .set_rotation(kVideoRotation_180)
+                               .build();
   // Expect calls with the given video frame verbatim, since it's a texture
   // frame and can't otherwise be modified/resized.
   for (MockVideoEncoder* encoder : helper_->factory()->encoders())
@@ -889,7 +899,12 @@
   rtc::scoped_refptr<I420Buffer> input_buffer =
       I420Buffer::Create(kDefaultWidth, kDefaultHeight);
   input_buffer->InitializeData();
-  VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(input_buffer)
+                               .set_timestamp_rtp(0)
+                               .set_timestamp_us(0)
+                               .set_rotation(kVideoRotation_0)
+                               .build();
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
             adapter_->Encode(input_frame, nullptr, &frame_types));
@@ -954,7 +969,12 @@
 
   // Input data.
   rtc::scoped_refptr<VideoFrameBuffer> buffer(I420Buffer::Create(1280, 720));
-  VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(buffer)
+                               .set_timestamp_rtp(100)
+                               .set_timestamp_ms(1000)
+                               .set_rotation(kVideoRotation_180)
+                               .build();
 
   // Encode with three streams.
   EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
@@ -987,7 +1007,12 @@
 
   // Input data.
   rtc::scoped_refptr<VideoFrameBuffer> buffer(I420Buffer::Create(1280, 720));
-  VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(buffer)
+                               .set_timestamp_rtp(100)
+                               .set_timestamp_ms(1000)
+                               .set_rotation(kVideoRotation_180)
+                               .build();
 
   // No encoder trusted, so simulcast adapter should not be either.
   EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200));
diff --git a/media/engine/webrtcvideoengine_unittest.cc b/media/engine/webrtcvideoengine_unittest.cc
index 31dfb55..590d8bc 100644
--- a/media/engine/webrtcvideoengine_unittest.cc
+++ b/media/engine/webrtcvideoengine_unittest.cc
@@ -3555,9 +3555,13 @@
   cricket::FakeVideoRenderer renderer;
   EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));
 
-  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
-                                 kInitialTimestamp, 0,
-                                 webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame =
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4))
+          .set_timestamp_rtp(kInitialTimestamp)
+          .set_timestamp_us(0)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .build();
   // Initial NTP time is not available on the first frame, but should still be
   // able to be estimated.
   stream->InjectFrame(video_frame);
@@ -5326,8 +5330,13 @@
   FakeVideoReceiveStream* recv_stream = fake_call_->GetVideoReceiveStreams()[0];
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
-  webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4), 100, 0,
-                                 webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame =
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4))
+          .set_timestamp_rtp(100)
+          .set_timestamp_us(0)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .build();
   recv_stream->InjectFrame(video_frame);
   EXPECT_EQ(1, renderer.num_rendered_frames());
 
@@ -5342,8 +5351,13 @@
   recv_stream = fake_call_->GetVideoReceiveStreams()[0];
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
-  webrtc::VideoFrame video_frame2(CreateBlackFrameBuffer(4, 4), 200, 0,
-                                  webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame2 =
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4))
+          .set_timestamp_rtp(200)
+          .set_timestamp_us(0)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .build();
   recv_stream->InjectFrame(video_frame2);
   EXPECT_EQ(2, renderer.num_rendered_frames());
 
@@ -5359,8 +5373,13 @@
   recv_stream = fake_call_->GetVideoReceiveStreams()[0];
   EXPECT_EQ(rtpHeader.ssrc, recv_stream->GetConfig().rtp.remote_ssrc);
   // Verify that the receive stream sinks to a renderer.
-  webrtc::VideoFrame video_frame3(CreateBlackFrameBuffer(4, 4), 300, 0,
-                                  webrtc::kVideoRotation_0);
+  webrtc::VideoFrame video_frame3 =
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4))
+          .set_timestamp_rtp(300)
+          .set_timestamp_us(0)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .build();
   recv_stream->InjectFrame(video_frame3);
   EXPECT_EQ(3, renderer.num_rendered_frames());
 #endif
diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn
index 44db2eb..6ff80d4 100644
--- a/modules/video_capture/BUILD.gn
+++ b/modules/video_capture/BUILD.gn
@@ -195,6 +195,7 @@
         "../../test:video_test_common",
         "../utility",
         "//testing/gtest",
+        "//third_party/abseil-cpp/absl/memory:memory",
       ]
       deps += [ "../../test:test_main" ]
     }
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index 1fa7144..8238c20 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -14,6 +14,7 @@
 #include <memory>
 #include <sstream>
 
+#include "absl/memory/memory.h"
 #include "api/video/i420_buffer.h"
 #include "api/video/video_frame.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
@@ -366,8 +367,12 @@
            buffer->ChromaHeight() * buffer->StrideU());
     memset(buffer->MutableDataV(), 127,
            buffer->ChromaHeight() * buffer->StrideV());
-    test_frame_.reset(new webrtc::VideoFrame(buffer, webrtc::kVideoRotation_0,
-                                             0 /* timestamp_us */));
+    test_frame_ = absl::make_unique<webrtc::VideoFrame>(
+        webrtc::VideoFrame::Builder()
+            .set_video_frame_buffer(buffer)
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build());
 
     SleepMs(1);  // Wait 1ms so that two tests can't have the same timestamp.
 
diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc
index e434d0f..bd2ecf4 100644
--- a/modules/video_capture/video_capture_impl.cc
+++ b/modules/video_capture/video_capture_impl.cc
@@ -196,8 +196,13 @@
     return -1;
   }
 
-  VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(),
-                          !apply_rotation ? _rotateFrame : kVideoRotation_0);
+  VideoFrame captureFrame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(buffer)
+          .set_timestamp_rtp(0)
+          .set_timestamp_ms(rtc::TimeMillis())
+          .set_rotation(!apply_rotation ? _rotateFrame : kVideoRotation_0)
+          .build();
   captureFrame.set_ntp_time_ms(captureTime);
 
   DeliverCapturedFrame(captureFrame);
diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn
index 80f3d6d..9f6d9f3 100644
--- a/modules/video_coding/BUILD.gn
+++ b/modules/video_coding/BUILD.gn
@@ -615,6 +615,7 @@
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base_approved",
       "../../test:test_support",
+      "//third_party/abseil-cpp/absl/memory:memory",
     ]
   }
 
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index ea74cc4..0853d8f 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -20,6 +20,7 @@
 #include "third_party/ffmpeg/libavutil/imgutils.h"
 }  // extern "C"
 
+#include "absl/memory/memory.h"
 #include "api/video/color_space.h"
 #include "api/video/i420_buffer.h"
 #include "common_video/include/video_frame_buffer.h"
@@ -120,12 +121,14 @@
   // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
   // Refactor to do not use a VideoFrame object at all.
   av_frame->buf[0] = av_buffer_create(
-      av_frame->data[kYPlaneIndex],
-      total_size,
-      AVFreeBuffer2,
-      static_cast<void*>(new VideoFrame(frame_buffer,
-                                        kVideoRotation_0,
-                                        0 /* timestamp_us */)),
+      av_frame->data[kYPlaneIndex], total_size, AVFreeBuffer2,
+      static_cast<void*>(absl::make_unique<VideoFrame>(
+                             VideoFrame::Builder()
+                                 .set_video_frame_buffer(frame_buffer)
+                                 .set_rotation(kVideoRotation_0)
+                                 .set_timestamp_us(0)
+                                 .build())
+                             .release()),
       0);
   RTC_CHECK(av_frame->buf[0]);
   return 0;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
index a27bc8d..0facbe4 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc
@@ -58,10 +58,14 @@
 struct MultiplexDecoderAdapter::DecodedImageData {
   explicit DecodedImageData(AlphaCodecStream stream_idx)
       : stream_idx_(stream_idx),
-        decoded_image_(I420Buffer::Create(1 /* width */, 1 /* height */),
-                       0,
-                       0,
-                       kVideoRotation_0) {
+        decoded_image_(
+            VideoFrame::Builder()
+                .set_video_frame_buffer(
+                    I420Buffer::Create(1 /* width */, 1 /* height */))
+                .set_timestamp_rtp(0)
+                .set_timestamp_us(0)
+                .set_rotation(kVideoRotation_0)
+                .build()) {
     RTC_DCHECK_EQ(kAXXStream, stream_idx);
   }
   DecodedImageData(AlphaCodecStream stream_idx,
@@ -253,8 +257,13 @@
             merged_buffer, std::move(augmenting_data), augmenting_data_length));
   }
 
-  VideoFrame merged_image(merged_buffer, decoded_image->timestamp(),
-                          0 /* render_time_ms */, decoded_image->rotation());
+  VideoFrame merged_image = VideoFrame::Builder()
+                                .set_video_frame_buffer(merged_buffer)
+                                .set_timestamp_rtp(decoded_image->timestamp())
+                                .set_timestamp_us(0)
+                                .set_rotation(decoded_image->rotation())
+                                .set_id(decoded_image->id())
+                                .build();
   decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
 }
 
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index 582549e..4aabec0 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -197,8 +197,13 @@
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
                      rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
-  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
-                         input_image.render_time_ms(), input_image.rotation());
+  VideoFrame alpha_image = VideoFrame::Builder()
+                               .set_video_frame_buffer(alpha_buffer)
+                               .set_timestamp_rtp(input_image.timestamp())
+                               .set_timestamp_ms(input_image.render_time_ms())
+                               .set_rotation(input_image.rotation())
+                               .set_id(input_image.id())
+                               .build();
   rv = encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info,
                                      &adjusted_frame_types);
   return rv;
diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
index 8ced0a9..4c904c1 100644
--- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
+++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
@@ -95,9 +95,14 @@
     rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
         new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
             video_buffer, std::move(data), 16);
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(augmented_video_frame_buffer, video_frame->timestamp(),
-                       video_frame->render_time_ms(), video_frame->rotation()));
+    return absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(augmented_video_frame_buffer)
+            .set_timestamp_rtp(video_frame->timestamp())
+            .set_timestamp_ms(video_frame->render_time_ms())
+            .set_rotation(video_frame->rotation())
+            .set_id(video_frame->id())
+            .build());
   }
 
   std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
@@ -109,9 +114,13 @@
         yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
         yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(yuva_buffer, 123 /* RTP timestamp */,
-                       345 /* render_time_ms */, kVideoRotation_0));
+    return absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(yuva_buffer)
+            .set_timestamp_rtp(123)
+            .set_timestamp_ms(345)
+            .set_rotation(kVideoRotation_0)
+            .build());
   }
 
   std::unique_ptr<VideoFrame> CreateInputFrame(bool contains_alpha) {
@@ -120,9 +129,14 @@
       video_frame = CreateI420AInputFrame();
     } else {
       VideoFrame* next_frame = NextInputFrame();
-      video_frame = absl::WrapUnique<VideoFrame>(new VideoFrame(
-          next_frame->video_frame_buffer(), next_frame->timestamp(),
-          next_frame->render_time_ms(), next_frame->rotation()));
+      video_frame = absl::make_unique<VideoFrame>(
+          VideoFrame::Builder()
+              .set_video_frame_buffer(next_frame->video_frame_buffer())
+              .set_timestamp_rtp(next_frame->timestamp())
+              .set_timestamp_ms(next_frame->render_time_ms())
+              .set_rotation(next_frame->rotation())
+              .set_id(next_frame->id())
+              .build());
     }
     if (supports_augmenting_data_) {
       video_frame = CreateDataAugmentedInputFrame(video_frame.get());
@@ -158,9 +172,12 @@
         yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
         yuva_buffer->DataV(), yuva_buffer->StrideV(),
         rtc::KeepRefUntilDone(video_frame_buffer));
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(axx_buffer, 123 /* RTP timestamp */,
-                       345 /* render_time_ms */, kVideoRotation_0));
+    return absl::make_unique<VideoFrame>(VideoFrame::Builder()
+                                             .set_video_frame_buffer(axx_buffer)
+                                             .set_timestamp_rtp(123)
+                                             .set_timestamp_ms(345)
+                                             .set_rotation(kVideoRotation_0)
+                                             .build());
   }
 
  private:
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index ac13f94..83181a0 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -267,9 +267,13 @@
   RTC_CHECK(buffer) << "Tried to read too many frames from the file.";
   const size_t timestamp =
       last_inputed_timestamp_ + kVideoPayloadTypeFrequency / framerate_fps_;
-  VideoFrame input_frame(buffer, static_cast<uint32_t>(timestamp),
-                         static_cast<int64_t>(timestamp / kMsToRtpTimestamp),
-                         webrtc::kVideoRotation_0);
+  VideoFrame input_frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(buffer)
+          .set_timestamp_rtp(static_cast<uint32_t>(timestamp))
+          .set_timestamp_ms(static_cast<int64_t>(timestamp / kMsToRtpTimestamp))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .build();
   // Store input frame as a reference for quality calculations.
   if (config_.decode && !config_.measure_cpu) {
     if (input_frames_.size() == kMaxBufferedInputFrames) {
@@ -323,8 +327,13 @@
   if (!task_queue_->IsCurrent()) {
     // There might be a limited amount of output buffers, make a copy to make
     // sure we don't block the decoder.
-    VideoFrame copy(I420Buffer::Copy(*image.video_frame_buffer()->ToI420()),
-                    image.rotation(), image.timestamp_us());
+    VideoFrame copy = VideoFrame::Builder()
+                          .set_video_frame_buffer(I420Buffer::Copy(
+                              *image.video_frame_buffer()->ToI420()))
+                          .set_rotation(image.rotation())
+                          .set_timestamp_us(image.timestamp_us())
+                          .set_id(image.id())
+                          .build();
     copy.set_timestamp(image.timestamp());
 
     task_queue_->PostTask([this, copy]() {
diff --git a/modules/video_coding/generic_encoder.cc b/modules/video_coding/generic_encoder.cc
index 1f24159..9dd07bc 100644
--- a/modules/video_coding/generic_encoder.cc
+++ b/modules/video_coding/generic_encoder.cc
@@ -146,9 +146,12 @@
   // VideoSendStreamTest.VideoSendStreamStopSetEncoderRateToZero, set
   // internal_source to true and use FakeEncoder. And the latter will
   // happily encode this 1x1 frame and pass it on down the pipeline.
-  return encoder_->Encode(
-      VideoFrame(I420Buffer::Create(1, 1), kVideoRotation_0, 0), NULL,
-      &frame_types);
+  return encoder_->Encode(VideoFrame::Builder()
+                              .set_video_frame_buffer(I420Buffer::Create(1, 1))
+                              .set_rotation(kVideoRotation_0)
+                              .set_timestamp_us(0)
+                              .build(),
+                          NULL, &frame_types);
 }
 
 bool VCMGenericEncoder::InternalSource() const {
diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
index 7199dad..0230316 100644
--- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc
+++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
@@ -15,6 +15,7 @@
 #include <memory>
 #include <vector>
 
+#include "absl/memory/memory.h"
 #include "api/video/encoded_image.h"
 #include "api/video_codecs/sdp_video_format.h"
 #include "common_video/libyuv/include/webrtc_libyuv.h"
@@ -273,8 +274,12 @@
   EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
   input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight);
   input_buffer_->InitializeData();
-  input_frame_.reset(new VideoFrame(input_buffer_, webrtc::kVideoRotation_0,
-                                    0 /* timestamp_us */));
+  input_frame_ = absl::make_unique<webrtc::VideoFrame>(
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(input_buffer_)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build());
 }
 
 void SimulcastTestFixtureImpl::SetUpRateAllocator() {
@@ -591,8 +596,12 @@
   input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
   input_buffer_->InitializeData();
 
-  input_frame_.reset(new VideoFrame(input_buffer_, webrtc::kVideoRotation_0,
-                                    0 /* timestamp_us */));
+  input_frame_ = absl::make_unique<webrtc::VideoFrame>(
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(input_buffer_)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build());
 
   // The for loop above did not set the bitrate of the highest layer.
   settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].maxBitrate =
@@ -631,8 +640,12 @@
   // Resize |input_frame_| to the new resolution.
   input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
   input_buffer_->InitializeData();
-  input_frame_.reset(new VideoFrame(input_buffer_, webrtc::kVideoRotation_0,
-                                    0 /* timestamp_us */));
+  input_frame_ = absl::make_unique<webrtc::VideoFrame>(
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(input_buffer_)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build());
   EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
 }
 
@@ -791,8 +804,12 @@
   int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
   input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
                                      stride_uv, stride_uv);
-  input_frame_.reset(new VideoFrame(input_buffer_, webrtc::kVideoRotation_0,
-                                    0 /* timestamp_us */));
+  input_frame_ = absl::make_unique<webrtc::VideoFrame>(
+      webrtc::VideoFrame::Builder()
+          .set_video_frame_buffer(input_buffer_)
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build());
 
   // Set color.
   int plane_offset[kNumOfPlanes];
diff --git a/modules/video_coding/video_sender.cc b/modules/video_coding/video_sender.cc
index da59fe9..eaf270d 100644
--- a/modules/video_coding/video_sender.cc
+++ b/modules/video_coding/video_sender.cc
@@ -325,10 +325,13 @@
       RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
       return VCM_PARAMETER_ERROR;
     }
-    converted_frame = VideoFrame(converted_buffer,
-                                 converted_frame.timestamp(),
-                                 converted_frame.render_time_ms(),
-                                 converted_frame.rotation());
+    converted_frame = VideoFrame::Builder()
+                          .set_video_frame_buffer(converted_buffer)
+                          .set_timestamp_rtp(converted_frame.timestamp())
+                          .set_timestamp_ms(converted_frame.render_time_ms())
+                          .set_rotation(converted_frame.rotation())
+                          .set_id(converted_frame.id())
+                          .build();
   }
   int32_t ret =
       _encoder->Encode(converted_frame, codecSpecificInfo, next_frame_types);
diff --git a/modules/video_coding/video_sender_unittest.cc b/modules/video_coding/video_sender_unittest.cc
index 8e727df..986bd3f 100644
--- a/modules/video_coding/video_sender_unittest.cc
+++ b/modules/video_coding/video_sender_unittest.cc
@@ -11,6 +11,7 @@
 #include <memory>
 #include <vector>
 
+#include "absl/memory/memory.h"
 #include "api/test/mock_video_encoder.h"
 #include "api/video/i420_buffer.h"
 #include "api/video_codecs/vp8_temporal_layers.h"
@@ -76,8 +77,12 @@
  public:
   EmptyFrameGenerator(int width, int height) : width_(width), height_(height) {}
   VideoFrame* NextFrame() override {
-    frame_.reset(new VideoFrame(I420Buffer::Create(width_, height_),
-                                webrtc::kVideoRotation_0, 0));
+    frame_ = absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width_, height_))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build());
     return frame_.get();
   }
 
diff --git a/sdk/android/src/jni/androidmediadecoder.cc b/sdk/android/src/jni/androidmediadecoder.cc
index 61a38d8..70d5b58 100644
--- a/sdk/android/src/jni/androidmediadecoder.cc
+++ b/sdk/android/src/jni/androidmediadecoder.cc
@@ -729,7 +729,12 @@
 
   // If the frame was dropped, frame_buffer is left as nullptr.
   if (frame_buffer) {
-    VideoFrame decoded_frame(frame_buffer, 0, 0, kVideoRotation_0);
+    VideoFrame decoded_frame = VideoFrame::Builder()
+                                   .set_video_frame_buffer(frame_buffer)
+                                   .set_timestamp_rtp(0)
+                                   .set_timestamp_ms(0)
+                                   .set_rotation(kVideoRotation_0)
+                                   .build();
     decoded_frame.set_timestamp(output_timestamps_ms);
     decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
 
diff --git a/sdk/android/src/jni/androidmediaencoder.cc b/sdk/android/src/jni/androidmediaencoder.cc
index aeb510f..0bf7186 100644
--- a/sdk/android/src/jni/androidmediaencoder.cc
+++ b/sdk/android/src/jni/androidmediaencoder.cc
@@ -670,8 +670,13 @@
 
   rtc::scoped_refptr<VideoFrameBuffer> input_buffer(frame.video_frame_buffer());
 
-  VideoFrame input_frame(input_buffer, frame.timestamp(),
-                         frame.render_time_ms(), frame.rotation());
+  VideoFrame input_frame = VideoFrame::Builder()
+                               .set_video_frame_buffer(input_buffer)
+                               .set_timestamp_rtp(frame.timestamp())
+                               .set_timestamp_ms(frame.render_time_ms())
+                               .set_rotation(frame.rotation())
+                               .set_id(frame.id())
+                               .build();
 
   if (!MaybeReconfigureEncoder(jni, input_frame)) {
     ALOGE << "Failed to reconfigure encoder.";
diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc
index e72850f..f573eda 100644
--- a/sdk/android/src/jni/androidvideotracksource.cc
+++ b/sdk/android/src/jni/androidvideotracksource.cc
@@ -113,7 +113,11 @@
     buffer = buffer->ToI420();
   }
 
-  OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
+  OnFrame(VideoFrame::Builder()
+              .set_video_frame_buffer(buffer)
+              .set_rotation(rotation)
+              .set_timestamp_us(translated_camera_time_us)
+              .build());
 }
 
 void AndroidVideoTrackSource::OnOutputFormatRequest(int landscape_width,
diff --git a/sdk/android/src/jni/videoframe.cc b/sdk/android/src/jni/videoframe.cc
index dd13cb3..1d52433 100644
--- a/sdk/android/src/jni/videoframe.cc
+++ b/sdk/android/src/jni/videoframe.cc
@@ -196,9 +196,12 @@
   int64_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
   rtc::scoped_refptr<AndroidVideoBuffer> buffer =
       AndroidVideoBuffer::Create(jni, j_video_frame_buffer);
-  return VideoFrame(buffer, timestamp_rtp,
-                    timestamp_ns / rtc::kNumNanosecsPerMillisec,
-                    static_cast<VideoRotation>(rotation));
+  return VideoFrame::Builder()
+      .set_video_frame_buffer(buffer)
+      .set_timestamp_rtp(timestamp_rtp)
+      .set_timestamp_ms(timestamp_ns / rtc::kNumNanosecsPerMillisec)
+      .set_rotation(static_cast<VideoRotation>(rotation))
+      .build();
 }
 
 ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm
index 0af3582..6e0ff27 100644
--- a/sdk/objc/native/src/objc_video_decoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -77,10 +77,13 @@
     [decoder_ setCallback:^(RTCVideoFrame *frame) {
       const rtc::scoped_refptr<VideoFrameBuffer> buffer =
           new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
-      VideoFrame videoFrame(buffer,
-                            (uint32_t)(frame.timeStampNs / rtc::kNumNanosecsPerMicrosec),
-                            0,
-                            (VideoRotation)frame.rotation);
+      VideoFrame videoFrame =
+          VideoFrame::Builder()
+              .set_video_frame_buffer(buffer)
+              .set_timestamp_rtp((uint32_t)(frame.timeStampNs / rtc::kNumNanosecsPerMicrosec))
+              .set_timestamp_ms(0)
+              .set_rotation((VideoRotation)frame.rotation)
+              .build();
       videoFrame.set_timestamp(frame.timeStamp);
 
       callback->Decoded(videoFrame);
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm
index 49736fe..053158a 100644
--- a/sdk/objc/native/src/objc_video_track_source.mm
+++ b/sdk/objc/native/src/objc_video_track_source.mm
@@ -116,7 +116,11 @@
     rotation = kVideoRotation_0;
   }
 
-  OnFrame(VideoFrame(buffer, rotation, translated_timestamp_us));
+  OnFrame(VideoFrame::Builder()
+              .set_video_frame_buffer(buffer)
+              .set_rotation(rotation)
+              .set_timestamp_us(translated_timestamp_us)
+              .build());
 }
 
 }  // namespace webrtc
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 50c30c0..dd19a16 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -79,7 +79,11 @@
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
           [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
-  webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
+  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+                                 .set_video_frame_buffer(buffer)
+                                 .set_rotation(webrtc::kVideoRotation_0)
+                                 .set_timestamp_us(0)
+                                 .build();
   webrtc::CodecSpecificInfo info;
   info.codecType = webrtc::kVideoCodecH264;
   std::vector<webrtc::FrameType> frame_types;
@@ -95,7 +99,11 @@
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
           [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
-  webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
+  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
+                                 .set_video_frame_buffer(buffer)
+                                 .set_rotation(webrtc::kVideoRotation_0)
+                                 .set_timestamp_us(0)
+                                 .build();
   webrtc::CodecSpecificInfo info;
   info.codecType = webrtc::kVideoCodecH264;
   std::vector<webrtc::FrameType> frame_types;
diff --git a/test/fake_decoder.cc b/test/fake_decoder.cc
index c0a2ed6..2114c64 100644
--- a/test/fake_decoder.cc
+++ b/test/fake_decoder.cc
@@ -46,9 +46,12 @@
     height_ = input._encodedHeight;
   }
 
-  VideoFrame frame(I420Buffer::Create(width_, height_),
-                   webrtc::kVideoRotation_0,
-                   render_time_ms * rtc::kNumMicrosecsPerMillisec);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(width_, height_))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_ms(render_time_ms)
+          .build();
   frame.set_timestamp(input.Timestamp());
   frame.set_ntp_time_ms(input.ntp_time_ms_);
 
diff --git a/test/fake_texture_frame.cc b/test/fake_texture_frame.cc
index 68d76dc..45d6130 100644
--- a/test/fake_texture_frame.cc
+++ b/test/fake_texture_frame.cc
@@ -21,8 +21,13 @@
                                          uint32_t timestamp,
                                          int64_t render_time_ms,
                                          VideoRotation rotation) {
-  return VideoFrame(new rtc::RefCountedObject<FakeNativeBuffer>(width, height),
-                    timestamp, render_time_ms, rotation);
+  return VideoFrame::Builder()
+      .set_video_frame_buffer(
+          new rtc::RefCountedObject<FakeNativeBuffer>(width, height))
+      .set_timestamp_rtp(timestamp)
+      .set_timestamp_ms(render_time_ms)
+      .set_rotation(rotation)
+      .build();
 }
 
 VideoFrameBuffer::Type FakeNativeBuffer::type() const {
diff --git a/test/fake_vp8_decoder.cc b/test/fake_vp8_decoder.cc
index 1cf6f29..c5ece83 100644
--- a/test/fake_vp8_decoder.cc
+++ b/test/fake_vp8_decoder.cc
@@ -54,9 +54,12 @@
   }
   ParseFakeVp8(input._buffer, &width_, &height_);
 
-  VideoFrame frame(I420Buffer::Create(width_, height_),
-                   webrtc::kVideoRotation_0,
-                   render_time_ms * rtc::kNumMicrosecsPerMillisec);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(width_, height_))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_ms(render_time_ms)
+          .build();
   frame.set_timestamp(input.Timestamp());
   frame.set_ntp_time_ms(input.ntp_time_ms_);
 
diff --git a/test/frame_generator.cc b/test/frame_generator.cc
index e400504..9acffb1 100644
--- a/test/frame_generator.cc
+++ b/test/frame_generator.cc
@@ -14,6 +14,7 @@
 #include <cstdio>
 #include <memory>
 
+#include "absl/memory/memory.h"
 #include "api/video/i010_buffer.h"
 #include "api/video/i420_buffer.h"
 #include "api/video/video_frame_buffer.h"
@@ -100,8 +101,12 @@
       buffer = I010Buffer::Copy(*buffer->ToI420());
     }
 
-    frame_.reset(
-        new VideoFrame(buffer, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
+    frame_ = absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(buffer)
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build());
     return frame_.get();
   }
 
@@ -203,8 +208,12 @@
     if (++current_display_count_ >= frame_display_count_)
       current_display_count_ = 0;
 
-    temp_frame_.reset(new VideoFrame(
-        last_read_buffer_, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
+    temp_frame_ = absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(last_read_buffer_)
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build());
     return temp_frame_.get();
   }
 
@@ -260,8 +269,12 @@
     if (++current_display_count_ >= frame_display_count_)
       current_display_count_ = 0;
 
-    frame_.reset(new VideoFrame(buffer_, webrtc::kVideoRotation_0,
-                                0 /* timestamp_us */));
+    frame_ = absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(buffer_)
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build());
     return frame_.get();
   }
 
@@ -393,13 +406,16 @@
     int offset_v = (i420_buffer->StrideV() * (pixels_scrolled_y / 2)) +
                    (pixels_scrolled_x / 2);
 
-    current_frame_ = webrtc::VideoFrame(
-        WrapI420Buffer(target_width_, target_height_,
-                       &i420_buffer->DataY()[offset_y], i420_buffer->StrideY(),
-                       &i420_buffer->DataU()[offset_u], i420_buffer->StrideU(),
-                       &i420_buffer->DataV()[offset_v], i420_buffer->StrideV(),
-                       KeepRefUntilDone(i420_buffer)),
-        kVideoRotation_0, 0);
+    current_frame_ =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(WrapI420Buffer(
+                target_width_, target_height_, &i420_buffer->DataY()[offset_y],
+                i420_buffer->StrideY(), &i420_buffer->DataU()[offset_u],
+                i420_buffer->StrideU(), &i420_buffer->DataV()[offset_v],
+                i420_buffer->StrideV(), KeepRefUntilDone(i420_buffer)))
+            .set_rotation(kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
   }
 
   Clock* const clock_;
diff --git a/test/test_video_capturer.cc b/test/test_video_capturer.cc
index 0d57715..ef3d858 100644
--- a/test/test_video_capturer.cc
+++ b/test/test_video_capturer.cc
@@ -41,8 +41,12 @@
     rtc::scoped_refptr<I420Buffer> scaled_buffer =
         I420Buffer::Create(out_width, out_height);
     scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420());
-    broadcaster_.OnFrame(
-        VideoFrame(scaled_buffer, kVideoRotation_0, frame.timestamp_us()));
+    broadcaster_.OnFrame(VideoFrame::Builder()
+                             .set_video_frame_buffer(scaled_buffer)
+                             .set_rotation(kVideoRotation_0)
+                             .set_timestamp_us(frame.timestamp_us())
+                             .set_id(frame.id())
+                             .build());
   } else {
     // No adaptations needed, just return the frame as is.
     broadcaster_.OnFrame(frame);
diff --git a/video/overuse_frame_detector_unittest.cc b/video/overuse_frame_detector_unittest.cc
index 91050d5..f6fa364 100644
--- a/video/overuse_frame_detector_unittest.cc
+++ b/video/overuse_frame_detector_unittest.cc
@@ -93,8 +93,12 @@
                                                int width,
                                                int height,
                                                int delay_us) {
-    VideoFrame frame(I420Buffer::Create(width, height),
-                     webrtc::kVideoRotation_0, 0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
     uint32_t timestamp = 0;
     while (num_frames-- > 0) {
       frame.set_timestamp(timestamp);
@@ -115,8 +119,12 @@
       int height,
       // One element per layer
       rtc::ArrayView<const int> delays_us) {
-    VideoFrame frame(I420Buffer::Create(width, height),
-                     webrtc::kVideoRotation_0, 0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
     uint32_t timestamp = 0;
     while (num_frames-- > 0) {
       frame.set_timestamp(timestamp);
@@ -146,8 +154,12 @@
                                                      int delay_us) {
     webrtc::Random random(17);
 
-    VideoFrame frame(I420Buffer::Create(width, height),
-                     webrtc::kVideoRotation_0, 0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
     uint32_t timestamp = 0;
     while (num_frames-- > 0) {
       frame.set_timestamp(timestamp);
@@ -359,8 +371,12 @@
   EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const size_t kNumFramesEncodingDelay = 3;
-  VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
-                   webrtc::kVideoRotation_0, 0);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build();
   for (size_t i = 0; i < 1000; ++i) {
     // Unique timestamps.
     frame.set_timestamp(static_cast<uint32_t>(i));
@@ -382,8 +398,12 @@
   EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
-  VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
-                   webrtc::kVideoRotation_0, 0);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build();
   uint32_t timestamp = 0;
   for (size_t i = 0; i < 1000; ++i) {
     frame.set_timestamp(timestamp);
@@ -642,8 +662,12 @@
                                        int width,
                                        int height,
                                        int delay_us) override {
-    VideoFrame frame(I420Buffer::Create(width, height),
-                     webrtc::kVideoRotation_0, 0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
     while (num_frames-- > 0) {
       int64_t capture_time_us = rtc::TimeMicros();
       overuse_detector_->FrameCaptured(frame, capture_time_us /* ignored */);
@@ -662,8 +686,12 @@
                                              int delay_us) override {
     webrtc::Random random(17);
 
-    VideoFrame frame(I420Buffer::Create(width, height),
-                     webrtc::kVideoRotation_0, 0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_rotation(webrtc::kVideoRotation_0)
+            .set_timestamp_us(0)
+            .build();
     for (int i = 0; i < num_frames; i++) {
       int interval_us = random.Rand(min_interval_us, max_interval_us);
       int64_t capture_time_us = rtc::TimeMicros();
@@ -823,8 +851,12 @@
   EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const size_t kNumFramesEncodingDelay = 3;
-  VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
-                   webrtc::kVideoRotation_0, 0);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build();
   for (size_t i = 0; i < 1000; ++i) {
     // Unique timestamps.
     frame.set_timestamp(static_cast<uint32_t>(i));
@@ -846,8 +878,12 @@
   EXPECT_CALL(mock_observer_, AdaptDown(reason_)).Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
-  VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
-                   webrtc::kVideoRotation_0, 0);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight))
+          .set_rotation(webrtc::kVideoRotation_0)
+          .set_timestamp_us(0)
+          .build();
   uint32_t timestamp = 0;
   for (size_t i = 0; i < 1000; ++i) {
     frame.set_timestamp(timestamp);
diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc
index 08d899c..a0f515b 100644
--- a/video/receive_statistics_proxy_unittest.cc
+++ b/video/receive_statistics_proxy_unittest.cc
@@ -68,8 +68,13 @@
   }
 
   VideoFrame CreateVideoFrame(int width, int height, int64_t render_time_ms) {
-    VideoFrame frame(I420Buffer::Create(width, height), 0, render_time_ms,
-                     kVideoRotation_0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(I420Buffer::Create(width, height))
+            .set_timestamp_rtp(0)
+            .set_timestamp_ms(render_time_ms)
+            .set_rotation(kVideoRotation_0)
+            .build();
     frame.set_ntp_time_ms(fake_clock_.CurrentNtpInMilliseconds());
     return frame;
   }
@@ -248,8 +253,13 @@
 
 TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) {
   EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_rendered);
-  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
-                           webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(webrtc::I420Buffer::Create(1, 1))
+          .set_timestamp_rtp(0)
+          .set_timestamp_us(0)
+          .set_rotation(kVideoRotation_0)
+          .build();
   for (uint32_t i = 1; i <= 3; ++i) {
     statistics_proxy_->OnRenderedFrame(frame);
     EXPECT_EQ(i, statistics_proxy_->GetStats().frames_rendered);
@@ -724,7 +734,11 @@
   const int kDefaultFps = 30;
   rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer(
       I420Buffer::Create(kWidth, kHeight));
-  VideoFrame frame(video_frame_buffer, kVideoRotation_0, 0);
+  VideoFrame frame = VideoFrame::Builder()
+                         .set_video_frame_buffer(video_frame_buffer)
+                         .set_rotation(kVideoRotation_0)
+                         .set_timestamp_us(0)
+                         .build();
 
   for (int i = 0; i < kDefaultFps; ++i) {
     // Since OnRenderedFrame is never called the fps in each sample will be 0,
@@ -1058,8 +1072,13 @@
   const int kFreezeDelayMs = 200;
   const int kCallDurationMs =
       kMinRequiredSamples * kInterFrameDelayMs + kFreezeDelayMs;
-  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
-                           webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(webrtc::I420Buffer::Create(1, 1))
+          .set_timestamp_rtp(0)
+          .set_timestamp_us(0)
+          .set_rotation(kVideoRotation_0)
+          .build();
   for (int i = 0; i < kMinRequiredSamples; ++i) {
     statistics_proxy_->OnDecodedFrame(absl::nullopt, kWidth, kHeight,
                                       content_type);
@@ -1100,8 +1119,13 @@
   const VideoContentType content_type = GetParam();
   const int kInterFrameDelayMs = 33;
   const int kPauseDurationMs = 10000;
-  webrtc::VideoFrame frame(webrtc::I420Buffer::Create(1, 1), 0, 0,
-                           webrtc::kVideoRotation_0);
+  webrtc::VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(webrtc::I420Buffer::Create(1, 1))
+          .set_timestamp_rtp(0)
+          .set_timestamp_us(0)
+          .set_rotation(kVideoRotation_0)
+          .build();
   for (int i = 0; i <= kMinRequiredSamples; ++i) {
     statistics_proxy_->OnDecodedFrame(absl::nullopt, kWidth, kHeight,
                                       content_type);
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index 37613e8..46a8079 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -2242,7 +2242,12 @@
   const int kSizeY = width * height * 2;
   std::unique_ptr<uint8_t[]> buffer(new uint8_t[kSizeY]);
   memset(buffer.get(), data, kSizeY);
-  VideoFrame frame(I420Buffer::Create(width, height), kVideoRotation_0, data);
+  VideoFrame frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(I420Buffer::Create(width, height))
+          .set_rotation(kVideoRotation_0)
+          .set_timestamp_us(data)
+          .build();
   frame.set_timestamp(data);
   // Use data as a ms timestamp.
   frame.set_timestamp_us(data * rtc::kNumMicrosecsPerMillisec);
diff --git a/video/video_stream_decoder_impl.cc b/video/video_stream_decoder_impl.cc
index 2e8fc1a..d3ecf57 100644
--- a/video/video_stream_decoder_impl.cc
+++ b/video/video_stream_decoder_impl.cc
@@ -272,8 +272,12 @@
                             frame_timestamps->render_time_us / 1000);
 
     callbacks_->OnDecodedFrame(
-        VideoFrame(decoded_image.video_frame_buffer(), decoded_image.rotation(),
-                   frame_timestamps->render_time_us),
+        VideoFrame::Builder()
+            .set_video_frame_buffer(decoded_image.video_frame_buffer())
+            .set_rotation(decoded_image.rotation())
+            .set_timestamp_us(frame_timestamps->render_time_us)
+            .set_id(decoded_image.id())
+            .build(),
         casted_decode_time_ms, casted_qp);
   });
 }
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index dfacb21..8484708 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -873,9 +873,13 @@
       cropped_buffer->ScaleFrom(
           *video_frame.video_frame_buffer()->ToI420().get());
     }
-    out_frame =
-        VideoFrame(cropped_buffer, video_frame.timestamp(),
-                   video_frame.render_time_ms(), video_frame.rotation());
+    out_frame = VideoFrame::Builder()
+                    .set_video_frame_buffer(cropped_buffer)
+                    .set_timestamp_rtp(video_frame.timestamp())
+                    .set_timestamp_ms(video_frame.render_time_ms())
+                    .set_rotation(video_frame.rotation())
+                    .set_id(video_frame.id())
+                    .build();
     out_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
   }
 
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index c45c65f..84521fb 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -191,9 +191,14 @@
               video_frame.width(), video_frame.height(),
               video_frame.timestamp_us() * 1000, &cropped_width,
               &cropped_height, &out_width, &out_height)) {
-        VideoFrame adapted_frame(new rtc::RefCountedObject<TestBuffer>(
-                                     nullptr, out_width, out_height),
-                                 99, 99, kVideoRotation_0);
+        VideoFrame adapted_frame =
+            VideoFrame::Builder()
+                .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+                    nullptr, out_width, out_height))
+                .set_timestamp_rtp(99)
+                .set_timestamp_ms(99)
+                .set_rotation(kVideoRotation_0)
+                .build();
         adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
         test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
         last_width_.emplace(adapted_frame.width());
@@ -354,17 +359,27 @@
 
   VideoFrame CreateFrame(int64_t ntp_time_ms,
                          rtc::Event* destruction_event) const {
-    VideoFrame frame(new rtc::RefCountedObject<TestBuffer>(
-                         destruction_event, codec_width_, codec_height_),
-                     99, 99, kVideoRotation_0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+                destruction_event, codec_width_, codec_height_))
+            .set_timestamp_rtp(99)
+            .set_timestamp_ms(99)
+            .set_rotation(kVideoRotation_0)
+            .build();
     frame.set_ntp_time_ms(ntp_time_ms);
     return frame;
   }
 
   VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const {
-    VideoFrame frame(
-        new rtc::RefCountedObject<TestBuffer>(nullptr, width, height), 99, 99,
-        kVideoRotation_0);
+    VideoFrame frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(
+                new rtc::RefCountedObject<TestBuffer>(nullptr, width, height))
+            .set_timestamp_rtp(99)
+            .set_timestamp_ms(99)
+            .set_rotation(kVideoRotation_0)
+            .build();
     frame.set_ntp_time_ms(ntp_time_ms);
     frame.set_timestamp_us(ntp_time_ms * 1000);
     return frame;