Use propagated clock instead of global time functions in video/

Bug: webrtc:42223992
Change-Id: I3491df6617208bd8b3ff25c4418512d09509bb68
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/404300
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#45338}
diff --git a/video/BUILD.gn b/video/BUILD.gn
index b9bbf1c..e3122cb 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -230,6 +230,7 @@
     "../api:fec_controller_api",
     "../api:field_trials_view",
     "../api:sequence_checker",
+    "../api/environment",
     "../api/video:encoded_frame",
     "../api/video:encoded_image",
     "../api/video:video_frame",
@@ -240,7 +241,6 @@
     "../modules/video_coding:video_coding_utility",
     "../rtc_base:macromagic",
     "../rtc_base:stringutils",
-    "../rtc_base:timeutils",
     "../rtc_base/synchronization:mutex",
     "../rtc_base/system:file_wrapper",
     "//third_party/abseil-cpp/absl/algorithm:container",
@@ -271,7 +271,6 @@
     "../rtc_base:macromagic",
     "../rtc_base:race_checker",
     "../rtc_base:rate_statistics",
-    "../rtc_base:timeutils",
     "../rtc_base/synchronization:mutex",
     "../rtc_base/system:no_unique_address",
     "../rtc_base/system:unused",
@@ -1021,6 +1020,7 @@
       "../rtc_base/synchronization:mutex",
       "../system_wrappers",
       "../system_wrappers:metrics",
+      "../test:create_test_environment",
       "../test:create_test_field_trials",
       "../test:direct_transport",
       "../test:encoder_settings",
diff --git a/video/adaptation/overuse_frame_detector.cc b/video/adaptation/overuse_frame_detector.cc
index 704c94c..827b0a5 100644
--- a/video/adaptation/overuse_frame_detector.cc
+++ b/video/adaptation/overuse_frame_detector.cc
@@ -340,11 +340,13 @@
 // Class used for manual testing of overuse, enabled via field trial flag.
 class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage {
  public:
-  OverdoseInjector(std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage,
+  OverdoseInjector(const Environment& env,
+                   std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage,
                    int64_t normal_period_ms,
                    int64_t overuse_period_ms,
                    int64_t underuse_period_ms)
-      : usage_(std::move(usage)),
+      : env_(env),
+        usage_(std::move(usage)),
         normal_period_ms_(normal_period_ms),
         overuse_period_ms_(overuse_period_ms),
         underuse_period_ms_(underuse_period_ms),
@@ -383,7 +385,7 @@
   }
 
   int Value() override {
-    int64_t now_ms = TimeMillis();
+    int64_t now_ms = env_.clock().TimeInMilliseconds();
     if (last_toggling_ms_ == -1) {
       last_toggling_ms_ = now_ms;
     } else {
@@ -428,6 +430,7 @@
   }
 
  private:
+  const Environment env_;
   const std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage_;
   const int64_t normal_period_ms_;
   const int64_t overuse_period_ms_;
@@ -436,19 +439,17 @@
   int64_t last_toggling_ms_;
 };
 
-}  // namespace
-
-std::unique_ptr<OveruseFrameDetector::ProcessingUsage>
-OveruseFrameDetector::CreateProcessingUsage(const FieldTrialsView& field_trials,
-                                            const CpuOveruseOptions& options) {
-  std::unique_ptr<ProcessingUsage> instance;
+std::unique_ptr<OveruseFrameDetector::ProcessingUsage> CreateProcessingUsage(
+    const Environment& env,
+    const CpuOveruseOptions& options) {
+  std::unique_ptr<OveruseFrameDetector::ProcessingUsage> instance;
   if (options.filter_time_ms > 0) {
     instance = std::make_unique<SendProcessingUsage2>(options);
   } else {
     instance = std::make_unique<SendProcessingUsage1>(options);
   }
   std::string toggling_interval =
-      field_trials.Lookup("WebRTC-ForceSimulatedOveruseIntervalMs");
+      env.field_trials().Lookup("WebRTC-ForceSimulatedOveruseIntervalMs");
   if (!toggling_interval.empty()) {
     int normal_period_ms = 0;
     int overuse_period_ms = 0;
@@ -458,7 +459,7 @@
       if (normal_period_ms > 0 && overuse_period_ms > 0 &&
           underuse_period_ms > 0) {
         instance = std::make_unique<OverdoseInjector>(
-            std::move(instance), normal_period_ms, overuse_period_ms,
+            env, std::move(instance), normal_period_ms, overuse_period_ms,
             underuse_period_ms);
       } else {
         RTC_LOG(LS_WARNING)
@@ -474,6 +475,8 @@
   return instance;
 }
 
+}  // namespace
+
 OveruseFrameDetector::OveruseFrameDetector(
     const Environment& env,
     CpuOveruseMetricsObserver* metrics_observer)
@@ -654,7 +657,7 @@
   }
   // Force reset with next frame.
   num_pixels_ = 0;
-  usage_ = CreateProcessingUsage(env_.field_trials(), options);
+  usage_ = CreateProcessingUsage(env_, options);
 }
 
 bool OveruseFrameDetector::IsOverusing(int usage_percent) {
diff --git a/video/adaptation/overuse_frame_detector.h b/video/adaptation/overuse_frame_detector.h
index 719f1fa..ecd39fc 100644
--- a/video/adaptation/overuse_frame_detector.h
+++ b/video/adaptation/overuse_frame_detector.h
@@ -16,7 +16,6 @@
 #include <optional>
 
 #include "api/environment/environment.h"
-#include "api/field_trials_view.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_base.h"
 #include "api/units/time_delta.h"
@@ -145,10 +144,6 @@
 
   void ResetAll(int num_pixels);
 
-  static std::unique_ptr<ProcessingUsage> CreateProcessingUsage(
-      const FieldTrialsView& field_trials,
-      const CpuOveruseOptions& options);
-
   const Environment env_;
   RTC_NO_UNIQUE_ADDRESS SequenceChecker task_checker_;
   // Owned by the task queue from where StartCheckForOveruse is called.
diff --git a/video/frame_dumping_encoder.cc b/video/frame_dumping_encoder.cc
index dcd5ed6..6701763 100644
--- a/video/frame_dumping_encoder.cc
+++ b/video/frame_dumping_encoder.cc
@@ -18,6 +18,7 @@
 #include <vector>
 
 #include "absl/algorithm/container.h"
+#include "api/environment/environment.h"
 #include "api/fec_controller_override.h"
 #include "api/field_trials_view.h"
 #include "api/video/encoded_image.h"
@@ -30,7 +31,6 @@
 #include "rtc_base/synchronization/mutex.h"
 #include "rtc_base/system/file_wrapper.h"
 #include "rtc_base/thread_annotations.h"
-#include "rtc_base/time_utils.h"
 
 namespace webrtc {
 namespace {
@@ -138,16 +138,16 @@
 }  // namespace
 
 std::unique_ptr<VideoEncoder> MaybeCreateFrameDumpingEncoderWrapper(
-    std::unique_ptr<VideoEncoder> encoder,
-    const FieldTrialsView& field_trials) {
+    const Environment& env,
+    std::unique_ptr<VideoEncoder> encoder) {
   auto output_directory =
-      field_trials.Lookup(kEncoderDataDumpDirectoryFieldTrial);
+      env.field_trials().Lookup(kEncoderDataDumpDirectoryFieldTrial);
   if (output_directory.empty() || !encoder) {
     return encoder;
   }
   absl::c_replace(output_directory, ';', '/');
-  return std::make_unique<FrameDumpingEncoder>(std::move(encoder), TimeMicros(),
-                                               output_directory);
+  return std::make_unique<FrameDumpingEncoder>(
+      std::move(encoder), env.clock().TimeInMicroseconds(), output_directory);
 }
 
 }  // namespace webrtc
diff --git a/video/frame_dumping_encoder.h b/video/frame_dumping_encoder.h
index 2fd543a..b9744cc 100644
--- a/video/frame_dumping_encoder.h
+++ b/video/frame_dumping_encoder.h
@@ -13,7 +13,7 @@
 
 #include <memory>
 
-#include "api/field_trials_view.h"
+#include "api/environment/environment.h"
 #include "api/video_codecs/video_encoder.h"
 
 namespace webrtc {
@@ -26,8 +26,8 @@
 // passed encoder. The directory specified by the field trial parameter should
 // be delimited by ';'.
 std::unique_ptr<VideoEncoder> MaybeCreateFrameDumpingEncoderWrapper(
-    std::unique_ptr<VideoEncoder> encoder,
-    const FieldTrialsView& field_trials);
+    const Environment& env,
+    std::unique_ptr<VideoEncoder> encoder);
 
 }  // namespace webrtc
 
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index 32c15b0..04b85f8 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -17,6 +17,7 @@
 #include <optional>
 #include <utility>
 
+#include "api/environment/environment.h"
 #include "api/make_ref_counted.h"
 #include "api/video/encoded_image.h"
 #include "api/video/video_bitrate_allocation.h"
@@ -65,6 +66,17 @@
     default;
 
 FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
+    const Environment& /*env*/,
+    EncodedImageCallback* frame_drop_callback)
+// TODO: bugs.webrtc.org/42223992 - Save `Environment` into a member and use
+// it to query the current time once the deprecated constructor is removed.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+    : FrameEncodeMetadataWriter(frame_drop_callback) {
+}
+#pragma clang diagnostic pop
+
+FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
     EncodedImageCallback* frame_drop_callback)
     : frame_drop_callback_(frame_drop_callback),
       framerate_fps_(0),
diff --git a/video/frame_encode_metadata_writer.h b/video/frame_encode_metadata_writer.h
index 1f91568..fb45487 100644
--- a/video/frame_encode_metadata_writer.h
+++ b/video/frame_encode_metadata_writer.h
@@ -17,6 +17,7 @@
 #include <optional>
 #include <vector>
 
+#include "api/environment/environment.h"
 #include "api/rtp_packet_infos.h"
 #include "api/video/color_space.h"
 #include "api/video/encoded_image.h"
@@ -33,7 +34,10 @@
 
 class FrameEncodeMetadataWriter {
  public:
+  [[deprecated("bugs.webrtc.org/42223992")]]
   explicit FrameEncodeMetadataWriter(EncodedImageCallback* frame_drop_callback);
+  FrameEncodeMetadataWriter(const Environment& env,
+                            EncodedImageCallback* frame_drop_callback);
   ~FrameEncodeMetadataWriter();
 
   void OnEncoderInit(const VideoCodec& codec);
diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc
index 4580caa..fca9921 100644
--- a/video/frame_encode_metadata_writer_unittest.cc
+++ b/video/frame_encode_metadata_writer_unittest.cc
@@ -32,6 +32,7 @@
 #include "common_video/test/utilities.h"
 #include "modules/video_coding/include/video_codec_interface.h"
 #include "modules/video_coding/include/video_coding_defines.h"
+#include "test/create_test_environment.h"
 #include "test/gmock.h"
 #include "test/gtest.h"
 
@@ -85,7 +86,7 @@
     const int num_streams,
     const int num_frames) {
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   VideoCodec codec_settings;
   codec_settings.numberOfSimulcastStreams = num_streams;
   codec_settings.timing_frame_thresholds = {delay_ms,
@@ -213,7 +214,7 @@
   image.SetRtpTimestamp(static_cast<uint32_t>(timestamp * 90));
 
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   VideoCodec codec_settings;
   // Make all frames timing frames.
   codec_settings.timing_frame_thresholds.delay_ms = 1;
@@ -247,7 +248,7 @@
   const int64_t kTimestampMs4 = 47721870;
 
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -304,7 +305,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -329,7 +330,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -353,7 +354,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   VideoCodec codec;
   codec.mode = VideoCodecMode::kScreensharing;
   encode_timer.OnEncoderInit(codec);
@@ -379,7 +380,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -405,7 +406,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -440,7 +441,7 @@
   const int64_t kTimestampMs = 123456;
   FakeEncodedImageCallback sink;
 
-  FrameEncodeMetadataWriter encode_timer(&sink);
+  FrameEncodeMetadataWriter encode_timer(CreateTestEnvironment(), &sink);
   encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
@@ -467,7 +468,8 @@
   image.SetEncodedData(image_buffer);
 
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_metadata_writer(&sink);
+  FrameEncodeMetadataWriter encode_metadata_writer(CreateTestEnvironment(),
+                                                   &sink);
   encode_metadata_writer.UpdateBitstream(nullptr, &image);
   EXPECT_EQ(image.GetEncodedData(), image_buffer);
   EXPECT_EQ(image.size(), sizeof(buffer));
@@ -482,7 +484,8 @@
   codec_specific_info.codecType = kVideoCodecVP8;
 
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_metadata_writer(&sink);
+  FrameEncodeMetadataWriter encode_metadata_writer(CreateTestEnvironment(),
+                                                   &sink);
   encode_metadata_writer.UpdateBitstream(&codec_specific_info, &image);
   EXPECT_EQ(image.GetEncodedData(), image_buffer);
   EXPECT_EQ(image.size(), sizeof(buffer));
@@ -506,7 +509,8 @@
   codec_specific_info.codecType = kVideoCodecH264;
 
   FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_metadata_writer(&sink);
+  FrameEncodeMetadataWriter encode_metadata_writer(CreateTestEnvironment(),
+                                                   &sink);
   encode_metadata_writer.UpdateBitstream(&codec_specific_info, &image);
 
   EXPECT_THAT(std::vector<uint8_t>(image.data(), image.data() + image.size()),
diff --git a/video/render/BUILD.gn b/video/render/BUILD.gn
index 3f49506..23797c7 100644
--- a/video/render/BUILD.gn
+++ b/video/render/BUILD.gn
@@ -19,6 +19,7 @@
   deps = [
     ":video_render_frames",
     "../../api:sequence_checker",
+    "../../api/environment",
     "../../api/task_queue",
     "../../api/units:time_delta",
     "../../api/video:video_frame",
@@ -38,10 +39,10 @@
   ]
 
   deps = [
+    "../../api/environment",
     "../../api/video:video_frame",
     "../../rtc_base:checks",
     "../../rtc_base:logging",
-    "../../rtc_base:timeutils",
     "../../system_wrappers:metrics",
   ]
 }
diff --git a/video/render/incoming_video_stream.cc b/video/render/incoming_video_stream.cc
index 970f303..ed16953 100644
--- a/video/render/incoming_video_stream.cc
+++ b/video/render/incoming_video_stream.cc
@@ -15,6 +15,7 @@
 #include <optional>
 #include <utility>
 
+#include "api/environment/environment.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_factory.h"
 #include "api/units/time_delta.h"
@@ -28,12 +29,12 @@
 namespace webrtc {
 
 IncomingVideoStream::IncomingVideoStream(
-    TaskQueueFactory* task_queue_factory,
+    const Environment& env,
     int32_t delay_ms,
     VideoSinkInterface<VideoFrame>* callback)
-    : render_buffers_(delay_ms),
+    : render_buffers_(env, delay_ms),
       callback_(callback),
-      incoming_render_queue_(task_queue_factory->CreateTaskQueue(
+      incoming_render_queue_(env.task_queue_factory().CreateTaskQueue(
           "IncomingVideoStream",
           TaskQueueFactory::Priority::HIGH)) {}
 
diff --git a/video/render/incoming_video_stream.h b/video/render/incoming_video_stream.h
index c61e4db..c265562 100644
--- a/video/render/incoming_video_stream.h
+++ b/video/render/incoming_video_stream.h
@@ -15,9 +15,9 @@
 
 #include <memory>
 
+#include "api/environment/environment.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_base.h"
-#include "api/task_queue/task_queue_factory.h"
 #include "api/video/video_frame.h"
 #include "api/video/video_sink_interface.h"
 #include "rtc_base/race_checker.h"
@@ -28,7 +28,7 @@
 
 class IncomingVideoStream : public VideoSinkInterface<VideoFrame> {
  public:
-  IncomingVideoStream(TaskQueueFactory* task_queue_factory,
+  IncomingVideoStream(const Environment& env,
                       int32_t delay_ms,
                       VideoSinkInterface<VideoFrame>* callback);
   ~IncomingVideoStream() override;
diff --git a/video/render/video_render_frames.cc b/video/render/video_render_frames.cc
index ca5385b..4ec472a 100644
--- a/video/render/video_render_frames.cc
+++ b/video/render/video_render_frames.cc
@@ -15,9 +15,9 @@
 #include <optional>
 #include <utility>
 
+#include "api/environment/environment.h"
 #include "api/video/video_frame.h"
 #include "rtc_base/logging.h"
-#include "rtc_base/time_utils.h"
 #include "system_wrappers/include/metrics.h"
 
 namespace webrtc {
@@ -39,8 +39,9 @@
 }
 }  // namespace
 
-VideoRenderFrames::VideoRenderFrames(uint32_t render_delay_ms)
-    : render_delay_ms_(EnsureValidRenderDelay(render_delay_ms)) {}
+VideoRenderFrames::VideoRenderFrames(const Environment& env,
+                                     uint32_t render_delay_ms)
+    : env_(env), render_delay_ms_(EnsureValidRenderDelay(render_delay_ms)) {}
 
 VideoRenderFrames::~VideoRenderFrames() {
   frames_dropped_ += incoming_frames_.size();
@@ -51,7 +52,7 @@
 }
 
 int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) {
-  const int64_t time_now = TimeMillis();
+  const int64_t time_now = env_.clock().TimeInMilliseconds();
 
   // Drop old frames only when there are other frames in the queue, otherwise, a
   // really slow system never renders any frames.
@@ -108,7 +109,8 @@
     return kEventMaxWaitTimeMs;
   }
   const int64_t time_to_release = incoming_frames_.front().render_time_ms() -
-                                  render_delay_ms_ - TimeMillis();
+                                  render_delay_ms_ -
+                                  env_.clock().TimeInMilliseconds();
   return time_to_release < 0 ? 0u : static_cast<uint32_t>(time_to_release);
 }
 
diff --git a/video/render/video_render_frames.h b/video/render/video_render_frames.h
index c69f9df..9547de0 100644
--- a/video/render/video_render_frames.h
+++ b/video/render/video_render_frames.h
@@ -17,6 +17,7 @@
 #include <list>
 #include <optional>
 
+#include "api/environment/environment.h"
 #include "api/video/video_frame.h"
 
 namespace webrtc {
@@ -24,7 +25,7 @@
 // Class definitions
 class VideoRenderFrames {
  public:
-  explicit VideoRenderFrames(uint32_t render_delay_ms);
+  VideoRenderFrames(const Environment& env, uint32_t render_delay_ms);
   VideoRenderFrames(const VideoRenderFrames&) = delete;
   ~VideoRenderFrames();
 
@@ -40,6 +41,8 @@
   bool HasPendingFrames() const;
 
  private:
+  const Environment env_;
+
   // Sorted list with framed to be rendered, oldest first.
   std::list<VideoFrame> incoming_frames_;
 
diff --git a/video/rtp_streams_synchronizer2.cc b/video/rtp_streams_synchronizer2.cc
index a7b655e..d953ee0 100644
--- a/video/rtp_streams_synchronizer2.cc
+++ b/video/rtp_streams_synchronizer2.cc
@@ -13,6 +13,7 @@
 #include <cstdint>
 #include <optional>
 
+#include "api/environment/environment.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_base.h"
 #include "api/units/time_delta.h"
@@ -22,7 +23,6 @@
 #include "rtc_base/logging.h"
 #include "rtc_base/rtp_to_ntp_estimator.h"
 #include "rtc_base/task_utils/repeating_task.h"
-#include "rtc_base/time_utils.h"
 #include "rtc_base/trace_event.h"
 #include "system_wrappers/include/ntp_time.h"
 #include "video/stream_synchronization.h"
@@ -45,11 +45,13 @@
 
 }  // namespace
 
-RtpStreamsSynchronizer::RtpStreamsSynchronizer(TaskQueueBase* main_queue,
+RtpStreamsSynchronizer::RtpStreamsSynchronizer(const Environment& env,
+                                               TaskQueueBase* main_queue,
                                                Syncable* syncable_video)
-    : task_queue_(main_queue),
+    : env_(env),
+      task_queue_(main_queue),
       syncable_video_(syncable_video),
-      last_stats_log_ms_(TimeMillis()) {
+      last_stats_log_ms_(env_.clock().TimeInMilliseconds()) {
   RTC_DCHECK(syncable_video);
 }
 
@@ -94,7 +96,7 @@
   RTC_DCHECK(sync_.get());
 
   bool log_stats = false;
-  const int64_t now_ms = TimeMillis();
+  const int64_t now_ms = env_.clock().TimeInMilliseconds();
   if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) {
     last_stats_log_ms_ = now_ms;
     log_stats = true;
@@ -210,7 +212,7 @@
   int64_t latest_video_ntp_ms = latest_video_ntp.ToMs();
 
   // Current audio ntp.
-  Timestamp now = Timestamp::Millis(TimeMillis());
+  Timestamp now = env_.clock().CurrentTime();
   latest_audio_ntp_ms += (now - audio->time).ms();
 
   // Remove video playout delay.
diff --git a/video/rtp_streams_synchronizer2.h b/video/rtp_streams_synchronizer2.h
index ec04a7c..41438c6 100644
--- a/video/rtp_streams_synchronizer2.h
+++ b/video/rtp_streams_synchronizer2.h
@@ -14,6 +14,7 @@
 #include <cstdint>
 #include <memory>
 
+#include "api/environment/environment.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_base.h"
 #include "rtc_base/system/no_unique_address.h"
@@ -31,7 +32,9 @@
 // a given audio receive stream and video receive stream.
 class RtpStreamsSynchronizer {
  public:
-  RtpStreamsSynchronizer(TaskQueueBase* main_queue, Syncable* syncable_video);
+  RtpStreamsSynchronizer(const Environment& env,
+                         TaskQueueBase* main_queue,
+                         Syncable* syncable_video);
   ~RtpStreamsSynchronizer();
 
   void ConfigureSync(Syncable* syncable_audio);
@@ -50,6 +53,7 @@
  private:
   void UpdateDelay();
 
+  const Environment env_;
   TaskQueueBase* const task_queue_;
 
   // Used to check if we're running on the main thread/task queue.
diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc
index 170600c..eccec79 100644
--- a/video/video_receive_stream2.cc
+++ b/video/video_receive_stream2.cc
@@ -259,7 +259,7 @@
                                  this,  // OnCompleteFrameCallback
                                  std::move(config_.frame_decryptor),
                                  std::move(config_.frame_transformer)),
-      rtp_stream_sync_(call->worker_thread(), this),
+      rtp_stream_sync_(env_, call->worker_thread(), this),
       max_wait_for_keyframe_(DetermineMaxWaitForFrame(
           TimeDelta::Millis(config_.rtp.nack.rtp_history_ms),
           true)),
@@ -387,8 +387,8 @@
   transport_adapter_.Enable();
   VideoSinkInterface<VideoFrame>* renderer = nullptr;
   if (config_.enable_prerenderer_smoothing) {
-    incoming_video_stream_.reset(new IncomingVideoStream(
-        &env_.task_queue_factory(), config_.render_delay_ms, this));
+    incoming_video_stream_ = std::make_unique<IncomingVideoStream>(
+        env_, config_.render_delay_ms, this);
     renderer = incoming_video_stream_.get();
   } else {
     renderer = this;
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 7961f64..9464b2b 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -1028,8 +1028,8 @@
     encoder_.reset();
 
     encoder_ = MaybeCreateFrameDumpingEncoderWrapper(
-        settings_.encoder_factory->Create(env_, encoder_config_.video_format),
-        env_.field_trials());
+        env_,
+        settings_.encoder_factory->Create(env_, encoder_config_.video_format));
     if (!encoder_) {
       RTC_LOG(LS_ERROR) << "CreateVideoEncoder failed, failing encoder format: "
                         << encoder_config_.video_format.ToString();
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index 4d2d49e..8cee4f2 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -382,7 +382,7 @@
   // turn this into a simple bool `pending_keyframe_request_`.
   std::vector<VideoFrameType> next_frame_types_ RTC_GUARDED_BY(encoder_queue_);
 
-  FrameEncodeMetadataWriter frame_encode_metadata_writer_{this};
+  FrameEncodeMetadataWriter frame_encode_metadata_writer_{env_, this};
 
   // Provides video stream input states: current resolution and frame rate.
   VideoStreamInputStateProvider input_state_provider_;