Use webrtc namespace specifier instead of rtc/cricket in rtc_tools

WebRTC has unified all of its namespaces under webrtc::, so the rtc:: and
cricket:: namespace specifiers need to be replaced with webrtc::. This change
was generated using a combination of clang AST rewriting tools and sed.
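As a side note on why a purely textual rewrite is behavior-preserving: at this
stage of the migration the legacy rtc:: names are expected to be plain aliases
of their webrtc:: counterparts, so both qualifiers name the same entity. A
minimal standalone sketch of that aliasing pattern (scoped_refptr_like is a
hypothetical stand-in, not the real WebRTC type):

  #include <type_traits>

  namespace webrtc {
  template <typename T>
  class scoped_refptr_like {};  // hypothetical stand-in for scoped_refptr
  }  // namespace webrtc

  namespace rtc {
  // The legacy namespace simply re-exports the webrtc:: type, so rtc::X and
  // webrtc::X refer to the same class and the qualifier rewrite changes
  // nothing for the type system or the ABI.
  using ::webrtc::scoped_refptr_like;
  }  // namespace rtc

  static_assert(std::is_same_v<rtc::scoped_refptr_like<int>,
                               webrtc::scoped_refptr_like<int>>,
                "rtc:: and webrtc:: spellings name the same type");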

This CL was uploaded by git cl split.

Bug: webrtc:42232595
Change-Id: Ic40fffd7ebe551c1f9838d92322aa93e71b2684c
No-Iwyu: LSC
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/386644
Commit-Queue: Björn Terelius <terelius@webrtc.org>
Reviewed-by: Björn Terelius <terelius@webrtc.org>
Auto-Submit: Evan Shrubsole <eshr@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#44397}
diff --git a/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc b/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc
index 876bfe4..7c5d05e 100644
--- a/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc
+++ b/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc
@@ -78,7 +78,7 @@
 
   static SetupMessage FromString(absl::string_view sv) {
     SetupMessage result;
-    auto parameters = rtc::split(sv, ',');
+    auto parameters = webrtc::split(sv, ',');
     std::from_chars(parameters[0].data(),
                     parameters[0].data() + parameters[0].size(),
                     result.packet_size, 10);
@@ -143,7 +143,7 @@
     RTC_CHECK(remaining_data_) << "Error: no data to send";
     std::string data(std::min(setup_.packet_size, remaining_data_), '0');
     webrtc::DataBuffer* data_buffer =
-        new webrtc::DataBuffer(rtc::CopyOnWriteBuffer(data), true);
+        new webrtc::DataBuffer(webrtc::CopyOnWriteBuffer(data), true);
     total_queued_up_ = data_buffer->size();
     dc_->SendAsync(*data_buffer,
                    [this, data_buffer = data_buffer](webrtc::RTCError err) {
@@ -252,8 +252,9 @@
         signaling_thread.get());
 
     auto grpc_server = webrtc::GrpcSignalingServerInterface::Create(
-        [factory = rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>(
-             factory),
+        [factory =
+             webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>(
+                 factory),
          signaling_thread =
              signaling_thread.get()](webrtc::SignalingInterface* signaling) {
           webrtc::PeerConnectionClient client(factory.get(), signaling);
@@ -328,10 +329,10 @@
     std::unique_ptr<DataChannelClientObserverImpl> observer;
 
     // Set up the callback to receive the data channel from the sender.
-    rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel;
+    webrtc::scoped_refptr<webrtc::DataChannelInterface> data_channel;
     webrtc::Event got_data_channel;
     client.SetOnDataChannel(
-        [&](rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
+        [&](webrtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
           data_channel = std::move(channel);
           // DataChannel needs an observer to drain the read queue.
           observer = std::make_unique<DataChannelClientObserverImpl>(
@@ -382,11 +383,11 @@
   absl::ParseCommandLine(argc, argv);
 
   // Make sure that higher severity number means more logs by reversing the
-  // rtc::LoggingSeverity values.
+  // webrtc::LoggingSeverity values.
   auto logging_severity =
-      std::max(0, rtc::LS_NONE - absl::GetFlag(FLAGS_verbose));
-  rtc::LogMessage::LogToDebug(
-      static_cast<rtc::LoggingSeverity>(logging_severity));
+      std::max(0, webrtc::LS_NONE - absl::GetFlag(FLAGS_verbose));
+  webrtc::LogMessage::LogToDebug(
+      static_cast<webrtc::LoggingSeverity>(logging_severity));
 
   bool is_server = absl::GetFlag(FLAGS_server);
   std::string field_trials = absl::GetFlag(FLAGS_force_fieldtrials);
diff --git a/rtc_tools/data_channel_benchmark/peer_connection_client.cc b/rtc_tools/data_channel_benchmark/peer_connection_client.cc
index 8bafba0..74175bd 100644
--- a/rtc_tools/data_channel_benchmark/peer_connection_client.cc
+++ b/rtc_tools/data_channel_benchmark/peer_connection_client.cc
@@ -50,10 +50,10 @@
     : public webrtc::SetLocalDescriptionObserverInterface {
  public:
   using Callback = std::function<void(webrtc::RTCError)>;
-  static rtc::scoped_refptr<SetLocalDescriptionObserverAdapter> Create(
+  static webrtc::scoped_refptr<SetLocalDescriptionObserverAdapter> Create(
       Callback callback) {
-    return rtc::scoped_refptr<SetLocalDescriptionObserverAdapter>(
-        new rtc::RefCountedObject<SetLocalDescriptionObserverAdapter>(
+    return webrtc::scoped_refptr<SetLocalDescriptionObserverAdapter>(
+        new webrtc::RefCountedObject<SetLocalDescriptionObserverAdapter>(
             std::move(callback)));
   }
 
@@ -73,10 +73,10 @@
     : public webrtc::SetRemoteDescriptionObserverInterface {
  public:
   using Callback = std::function<void(webrtc::RTCError)>;
-  static rtc::scoped_refptr<SetRemoteDescriptionObserverAdapter> Create(
+  static webrtc::scoped_refptr<SetRemoteDescriptionObserverAdapter> Create(
       Callback callback) {
-    return rtc::scoped_refptr<SetRemoteDescriptionObserverAdapter>(
-        new rtc::RefCountedObject<SetRemoteDescriptionObserverAdapter>(
+    return webrtc::scoped_refptr<SetRemoteDescriptionObserverAdapter>(
+        new webrtc::RefCountedObject<SetRemoteDescriptionObserverAdapter>(
             std::move(callback)));
   }
 
@@ -98,11 +98,11 @@
   using Success = std::function<void(webrtc::SessionDescriptionInterface*)>;
   using Failure = std::function<void(webrtc::RTCError)>;
 
-  static rtc::scoped_refptr<CreateSessionDescriptionObserverAdapter> Create(
+  static webrtc::scoped_refptr<CreateSessionDescriptionObserverAdapter> Create(
       Success success,
       Failure failure) {
-    return rtc::scoped_refptr<CreateSessionDescriptionObserverAdapter>(
-        new rtc::RefCountedObject<CreateSessionDescriptionObserverAdapter>(
+    return webrtc::scoped_refptr<CreateSessionDescriptionObserverAdapter>(
+        new webrtc::RefCountedObject<CreateSessionDescriptionObserverAdapter>(
             std::move(success), std::move(failure)));
   }
 
@@ -146,7 +146,7 @@
   Disconnect();
 }
 
-rtc::scoped_refptr<PeerConnectionFactoryInterface>
+scoped_refptr<PeerConnectionFactoryInterface>
 PeerConnectionClient::CreateDefaultFactory(Thread* signaling_thread) {
   auto factory = webrtc::CreatePeerConnectionFactory(
       /*network_thread=*/nullptr, /*worker_thread=*/nullptr,
@@ -258,7 +258,7 @@
 }
 
 void PeerConnectionClient::OnDataChannel(
-    rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
+    scoped_refptr<webrtc::DataChannelInterface> channel) {
   RTC_LOG(LS_INFO) << __FUNCTION__ << " remote datachannel created";
   if (on_data_channel_callback_)
     on_data_channel_callback_(channel);
@@ -266,7 +266,7 @@
 }
 
 void PeerConnectionClient::SetOnDataChannel(
-    std::function<void(rtc::scoped_refptr<webrtc::DataChannelInterface>)>
+    std::function<void(webrtc::scoped_refptr<webrtc::DataChannelInterface>)>
         callback) {
   on_data_channel_callback_ = callback;
 }
diff --git a/rtc_tools/data_channel_benchmark/peer_connection_client.h b/rtc_tools/data_channel_benchmark/peer_connection_client.h
index a72f837..f8f42e6 100644
--- a/rtc_tools/data_channel_benchmark/peer_connection_client.h
+++ b/rtc_tools/data_channel_benchmark/peer_connection_client.h
@@ -51,22 +51,21 @@
   // Disconnect from the call.
   void Disconnect();
 
-  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection() {
+  scoped_refptr<webrtc::PeerConnectionInterface> peerConnection() {
     return peer_connection_;
   }
 
   // Set a callback to run when a DataChannel is created by the remote peer.
   void SetOnDataChannel(
-      std::function<void(rtc::scoped_refptr<webrtc::DataChannelInterface>)>
+      std::function<void(webrtc::scoped_refptr<webrtc::DataChannelInterface>)>
           callback);
 
-  std::vector<rtc::scoped_refptr<webrtc::DataChannelInterface>>&
-  dataChannels() {
+  std::vector<scoped_refptr<webrtc::DataChannelInterface>>& dataChannels() {
     return data_channels_;
   }
 
   // Creates a default PeerConnectionFactory object.
-  static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+  static scoped_refptr<webrtc::PeerConnectionFactoryInterface>
   CreateDefaultFactory(Thread* signaling_thread);
 
  private:
@@ -86,7 +85,7 @@
     RTC_LOG(LS_INFO) << __FUNCTION__ << " new state: " << new_state;
   }
   void OnDataChannel(
-      rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override;
+      scoped_refptr<webrtc::DataChannelInterface> channel) override;
   void OnNegotiationNeededEvent(uint32_t event_id) override;
   void OnIceConnectionChange(
       webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
@@ -97,10 +96,10 @@
     RTC_LOG(LS_INFO) << __FUNCTION__ << " receiving? " << receiving;
   }
 
-  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
-  std::function<void(rtc::scoped_refptr<webrtc::DataChannelInterface>)>
+  scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  std::function<void(webrtc::scoped_refptr<webrtc::DataChannelInterface>)>
       on_data_channel_callback_;
-  std::vector<rtc::scoped_refptr<webrtc::DataChannelInterface>> data_channels_;
+  std::vector<scoped_refptr<webrtc::DataChannelInterface>> data_channels_;
   webrtc::SignalingInterface* signaling_;
 };
 
diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc
index 2f4bcb3..ca59f6c 100644
--- a/rtc_tools/frame_analyzer/frame_analyzer.cc
+++ b/rtc_tools/frame_analyzer/frame_analyzer.cc
@@ -110,9 +110,9 @@
 
   webrtc::test::ResultsContainer results;
 
-  rtc::scoped_refptr<webrtc::test::Video> reference_video =
+  webrtc::scoped_refptr<webrtc::test::Video> reference_video =
       webrtc::test::OpenYuvOrY4mFile(reference_file_name, width, height);
-  rtc::scoped_refptr<webrtc::test::Video> test_video =
+  webrtc::scoped_refptr<webrtc::test::Video> test_video =
       webrtc::test::OpenYuvOrY4mFile(test_file_name, width, height);
 
   if (!reference_video || !test_video) {
@@ -126,7 +126,7 @@
   // Align the reference video both temporally and geometrically. I.e. align the
   // frames to match up in order to the test video, and align a crop region of
   // the reference video to match up to the test video.
-  const rtc::scoped_refptr<webrtc::test::Video> aligned_reference_video =
+  const webrtc::scoped_refptr<webrtc::test::Video> aligned_reference_video =
       AdjustCropping(ReorderVideo(reference_video, matching_indices),
                      test_video);
 
@@ -147,7 +147,7 @@
 
   // Adjust all frames in the test video with the calculated color
   // transformation.
-  const rtc::scoped_refptr<webrtc::test::Video> color_adjusted_test_video =
+  const webrtc::scoped_refptr<webrtc::test::Video> color_adjusted_test_video =
       AdjustColors(color_transformation, test_video);
 
   results.frames = webrtc::test::RunAnalysis(
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
index ebfc665..a0bc848 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
@@ -105,13 +105,13 @@
   printf("\n");
 }
 
-void compute_metrics(const rtc::scoped_refptr<webrtc::test::Video>& video,
+void compute_metrics(const webrtc::scoped_refptr<webrtc::test::Video>& video,
                      std::vector<double>* psnr_per_frame,
                      std::vector<double>* ssim_per_frame) {
   for (size_t i = 0; i < video->number_of_frames() - 1; ++i) {
-    const rtc::scoped_refptr<webrtc::I420BufferInterface> current_frame =
+    const webrtc::scoped_refptr<webrtc::I420BufferInterface> current_frame =
         video->GetFrame(i);
-    const rtc::scoped_refptr<webrtc::I420BufferInterface> next_frame =
+    const webrtc::scoped_refptr<webrtc::I420BufferInterface> next_frame =
         video->GetFrame(i + 1);
     double result_psnr = webrtc::test::Psnr(current_frame, next_frame);
     double result_ssim = webrtc::test::Ssim(current_frame, next_frame);
@@ -124,7 +124,7 @@
 int run_analysis(const std::string& video_file) {
   std::vector<double> psnr_per_frame;
   std::vector<double> ssim_per_frame;
-  rtc::scoped_refptr<webrtc::test::Video> video =
+  webrtc::scoped_refptr<webrtc::test::Video> video =
       webrtc::test::OpenY4mFile(video_file);
   if (video) {
     compute_metrics(video, &psnr_per_frame, &ssim_per_frame);
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
index 3c93119..b9f8281 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
@@ -37,7 +37,7 @@
 
 // Compute the metrics like freezing score based on PSNR and SSIM values for a
 // given video file.
-void compute_metrics(const rtc::scoped_refptr<webrtc::test::Video>& video,
+void compute_metrics(const webrtc::scoped_refptr<webrtc::test::Video>& video,
                      std::vector<double>* psnr_per_frame,
                      std::vector<double>* ssim_per_frame);
 
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
index b98a014..37cfaaf 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
@@ -23,7 +23,7 @@
     ASSERT_TRUE(video);
   }
 
-  rtc::scoped_refptr<webrtc::test::Video> video;
+  webrtc::scoped_refptr<webrtc::test::Video> video;
   std::vector<double> psnr_per_frame;
   std::vector<double> ssim_per_frame;
 };
diff --git a/rtc_tools/frame_analyzer/video_color_aligner.cc b/rtc_tools/frame_analyzer/video_color_aligner.cc
index 5983e47..54f7fd4 100644
--- a/rtc_tools/frame_analyzer/video_color_aligner.cc
+++ b/rtc_tools/frame_analyzer/video_color_aligner.cc
@@ -33,11 +33,11 @@
 // Helper function for AdjustColors(). This functions calculates a single output
 // row for y with the given color coefficients. The u/v channels are assumed to
 // be subsampled by a factor of 2, which is the case of I420.
-void CalculateYChannel(rtc::ArrayView<const uint8_t> y_data,
-                       rtc::ArrayView<const uint8_t> u_data,
-                       rtc::ArrayView<const uint8_t> v_data,
+void CalculateYChannel(ArrayView<const uint8_t> y_data,
+                       ArrayView<const uint8_t> u_data,
+                       ArrayView<const uint8_t> v_data,
                        const std::array<float, 4>& coeff,
-                       rtc::ArrayView<uint8_t> output) {
+                       ArrayView<uint8_t> output) {
   RTC_CHECK_EQ(y_data.size(), output.size());
   // Each u/v element represents two y elements. Make sure we have enough to
   // cover the Y values.
@@ -72,11 +72,11 @@
 // Helper function for AdjustColors(). This functions calculates a single output
 // row for either u or v, with the given color coefficients. Y, U, and V are
 // assumed to be the same size, i.e. no subsampling.
-void CalculateUVChannel(rtc::ArrayView<const uint8_t> y_data,
-                        rtc::ArrayView<const uint8_t> u_data,
-                        rtc::ArrayView<const uint8_t> v_data,
+void CalculateUVChannel(ArrayView<const uint8_t> y_data,
+                        ArrayView<const uint8_t> u_data,
+                        ArrayView<const uint8_t> v_data,
                         const std::array<float, 4>& coeff,
-                        rtc::ArrayView<uint8_t> output) {
+                        ArrayView<uint8_t> output) {
   RTC_CHECK_EQ(y_data.size(), u_data.size());
   RTC_CHECK_EQ(y_data.size(), v_data.size());
   RTC_CHECK_EQ(y_data.size(), output.size());
@@ -92,7 +92,7 @@
 
 // Convert a frame to four vectors consisting of [y, u, v, 1].
 std::vector<std::vector<uint8_t>> FlattenYuvData(
-    const rtc::scoped_refptr<I420BufferInterface>& frame) {
+    const scoped_refptr<I420BufferInterface>& frame) {
   std::vector<std::vector<uint8_t>> result(
       4, std::vector<uint8_t>(frame->ChromaWidth() * frame->ChromaHeight()));
 
@@ -128,8 +128,8 @@
 }  // namespace
 
 ColorTransformationMatrix CalculateColorTransformationMatrix(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame) {
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame) {
   IncrementalLinearLeastSquares incremental_lls;
   incremental_lls.AddObservations(FlattenYuvData(test_frame),
                                   FlattenYuvData(reference_frame));
@@ -137,8 +137,8 @@
 }
 
 ColorTransformationMatrix CalculateColorTransformationMatrix(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video) {
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video) {
   RTC_CHECK_GE(reference_video->number_of_frames(),
                test_video->number_of_frames());
 
@@ -152,13 +152,13 @@
   return VectorToColorMatrix(incremental_lls.GetBestSolution());
 }
 
-rtc::scoped_refptr<Video> AdjustColors(
+scoped_refptr<Video> AdjustColors(
     const ColorTransformationMatrix& color_transformation,
-    const rtc::scoped_refptr<Video>& video) {
+    const scoped_refptr<Video>& video) {
   class ColorAdjustedVideo : public Video {
    public:
     ColorAdjustedVideo(const ColorTransformationMatrix& color_transformation,
-                       const rtc::scoped_refptr<Video>& video)
+                       const scoped_refptr<Video>& video)
         : color_transformation_(color_transformation), video_(video) {}
 
     int width() const override { return video_->width(); }
@@ -167,24 +167,23 @@
       return video_->number_of_frames();
     }
 
-    rtc::scoped_refptr<I420BufferInterface> GetFrame(
-        size_t index) const override {
+    scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
       return AdjustColors(color_transformation_, video_->GetFrame(index));
     }
 
    private:
     const ColorTransformationMatrix color_transformation_;
-    const rtc::scoped_refptr<Video> video_;
+    const scoped_refptr<Video> video_;
   };
 
-  return rtc::make_ref_counted<ColorAdjustedVideo>(color_transformation, video);
+  return make_ref_counted<ColorAdjustedVideo>(color_transformation, video);
 }
 
-rtc::scoped_refptr<I420BufferInterface> AdjustColors(
+scoped_refptr<I420BufferInterface> AdjustColors(
     const ColorTransformationMatrix& color_matrix,
-    const rtc::scoped_refptr<I420BufferInterface>& frame) {
+    const scoped_refptr<I420BufferInterface>& frame) {
   // Allocate I420 buffer that will hold the color adjusted frame.
-  rtc::scoped_refptr<I420Buffer> adjusted_frame =
+  scoped_refptr<I420Buffer> adjusted_frame =
       I420Buffer::Create(frame->width(), frame->height());
 
   // Create a downscaled Y plane with the same size as the U/V planes to
@@ -199,13 +198,13 @@
   // Fill in the adjusted data row by row.
   for (int y = 0; y < frame->height(); ++y) {
     const int half_y = y / 2;
-    rtc::ArrayView<const uint8_t> y_row(frame->DataY() + frame->StrideY() * y,
-                                        frame->width());
-    rtc::ArrayView<const uint8_t> u_row(
-        frame->DataU() + frame->StrideU() * half_y, frame->ChromaWidth());
-    rtc::ArrayView<const uint8_t> v_row(
-        frame->DataV() + frame->StrideV() * half_y, frame->ChromaWidth());
-    rtc::ArrayView<uint8_t> output_y_row(
+    ArrayView<const uint8_t> y_row(frame->DataY() + frame->StrideY() * y,
+                                   frame->width());
+    ArrayView<const uint8_t> u_row(frame->DataU() + frame->StrideU() * half_y,
+                                   frame->ChromaWidth());
+    ArrayView<const uint8_t> v_row(frame->DataV() + frame->StrideV() * half_y,
+                                   frame->ChromaWidth());
+    ArrayView<uint8_t> output_y_row(
         adjusted_frame->MutableDataY() + adjusted_frame->StrideY() * y,
         frame->width());
 
@@ -213,13 +212,13 @@
 
     // Chroma channels only exist every second row for I420.
     if (y % 2 == 0) {
-      rtc::ArrayView<const uint8_t> downscaled_y_row(
+      ArrayView<const uint8_t> downscaled_y_row(
           downscaled_y_plane.data() + frame->ChromaWidth() * half_y,
           frame->ChromaWidth());
-      rtc::ArrayView<uint8_t> output_u_row(
+      ArrayView<uint8_t> output_u_row(
           adjusted_frame->MutableDataU() + adjusted_frame->StrideU() * half_y,
           frame->ChromaWidth());
-      rtc::ArrayView<uint8_t> output_v_row(
+      ArrayView<uint8_t> output_v_row(
           adjusted_frame->MutableDataV() + adjusted_frame->StrideV() * half_y,
           frame->ChromaWidth());
 
diff --git a/rtc_tools/frame_analyzer/video_color_aligner.h b/rtc_tools/frame_analyzer/video_color_aligner.h
index b51e060..ee6157b 100644
--- a/rtc_tools/frame_analyzer/video_color_aligner.h
+++ b/rtc_tools/frame_analyzer/video_color_aligner.h
@@ -27,23 +27,22 @@
 // Calculate the optimal color transformation that should be applied to the test
 // video to match as closely as possible to the reference video.
 ColorTransformationMatrix CalculateColorTransformationMatrix(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video);
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video);
 
 // Calculate color transformation for a single I420 frame.
 ColorTransformationMatrix CalculateColorTransformationMatrix(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame);
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame);
 
 // Apply a color transformation to a video.
-rtc::scoped_refptr<Video> AdjustColors(
-    const ColorTransformationMatrix& color_matrix,
-    const rtc::scoped_refptr<Video>& video);
+scoped_refptr<Video> AdjustColors(const ColorTransformationMatrix& color_matrix,
+                                  const scoped_refptr<Video>& video);
 
 // Apply a color transformation to a single I420 frame.
-rtc::scoped_refptr<I420BufferInterface> AdjustColors(
+scoped_refptr<I420BufferInterface> AdjustColors(
     const ColorTransformationMatrix& color_matrix,
-    const rtc::scoped_refptr<I420BufferInterface>& frame);
+    const scoped_refptr<I420BufferInterface>& frame);
 
 }  // namespace test
 }  // namespace webrtc
diff --git a/rtc_tools/frame_analyzer/video_color_aligner_unittest.cc b/rtc_tools/frame_analyzer/video_color_aligner_unittest.cc
index 980898b..90b2f92 100644
--- a/rtc_tools/frame_analyzer/video_color_aligner_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_color_aligner_unittest.cc
@@ -50,11 +50,11 @@
     ASSERT_TRUE(reference_video_);
   }
 
-  rtc::scoped_refptr<Video> reference_video_;
+  scoped_refptr<Video> reference_video_;
 };
 
 TEST_F(VideoColorAlignerTest, AdjustColorsFrameIdentity) {
-  const rtc::scoped_refptr<I420BufferInterface> test_frame =
+  const scoped_refptr<I420BufferInterface> test_frame =
       reference_video_->GetFrame(0);
 
   // Assume perfect match, i.e. ssim == 1.
@@ -69,11 +69,11 @@
   const uint8_t data_y[] = {2};
   const uint8_t data_u[] = {6};
   const uint8_t data_v[] = {7};
-  const rtc::scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
+  const scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
       /* width= */ 1, /* height= */ 1, data_y, /* stride_y= */ 1, data_u,
       /* stride_u= */ 1, data_v, /* stride_v= */ 1);
 
-  const rtc::scoped_refptr<I420BufferInterface> adjusted_buffer =
+  const scoped_refptr<I420BufferInterface> adjusted_buffer =
       AdjustColors(color_matrix, i420_buffer);
 
   EXPECT_EQ(2 * 1 + 6 * 2 + 7 * 3 + 4, adjusted_buffer->DataY()[0]);
@@ -88,11 +88,11 @@
   const uint8_t data_y[] = {2};
   const uint8_t data_u[] = {6};
   const uint8_t data_v[] = {7};
-  const rtc::scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
+  const scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
       /* width= */ 1, /* height= */ 1, data_y, /* stride_y= */ 1, data_u,
       /* stride_u= */ 1, data_v, /* stride_v= */ 1);
 
-  const rtc::scoped_refptr<I420BufferInterface> adjusted_buffer =
+  const scoped_refptr<I420BufferInterface> adjusted_buffer =
       AdjustColors(color_matrix, i420_buffer);
 
   EXPECT_EQ(255 - 2, adjusted_buffer->DataY()[0]);
@@ -107,11 +107,11 @@
   const uint8_t data_y[] = {0, 1, 3, 4};
   const uint8_t data_u[] = {6};
   const uint8_t data_v[] = {7};
-  const rtc::scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
+  const scoped_refptr<I420BufferInterface> i420_buffer = I420Buffer::Copy(
       /* width= */ 2, /* height= */ 2, data_y, /* stride_y= */ 2, data_u,
       /* stride_u= */ 1, data_v, /* stride_v= */ 1);
 
-  const rtc::scoped_refptr<I420BufferInterface> adjusted_buffer =
+  const scoped_refptr<I420BufferInterface> adjusted_buffer =
       AdjustColors(color_matrix, i420_buffer);
 
   EXPECT_EQ(0 * 1 + 6 * 2 + 7 * 3 + 4, adjusted_buffer->DataY()[0]);
@@ -133,10 +133,9 @@
                                   8, 9, 10, 11, 12, 13, 14, 15};
   const uint8_t small_data_u[] = {15, 13, 17, 29};
   const uint8_t small_data_v[] = {3, 200, 170, 29};
-  const rtc::scoped_refptr<I420BufferInterface> small_i420_buffer =
-      I420Buffer::Copy(
-          /* width= */ 4, /* height= */ 4, small_data_y, /* stride_y= */ 4,
-          small_data_u, /* stride_u= */ 2, small_data_v, /* stride_v= */ 2);
+  const scoped_refptr<I420BufferInterface> small_i420_buffer = I420Buffer::Copy(
+      /* width= */ 4, /* height= */ 4, small_data_y, /* stride_y= */ 4,
+      small_data_u, /* stride_u= */ 2, small_data_v, /* stride_v= */ 2);
 
   uint8_t big_data_y[16];
   uint8_t big_data_u[4];
@@ -149,10 +148,9 @@
   for (int i = 0; i < 4; ++i)
     big_data_v[i] = small_data_v[i] + 10;
 
-  const rtc::scoped_refptr<I420BufferInterface> big_i420_buffer =
-      I420Buffer::Copy(
-          /* width= */ 4, /* height= */ 4, big_data_y, /* stride_y= */ 4,
-          big_data_u, /* stride_u= */ 2, big_data_v, /* stride_v= */ 2);
+  const scoped_refptr<I420BufferInterface> big_i420_buffer = I420Buffer::Copy(
+      /* width= */ 4, /* height= */ 4, big_data_y, /* stride_y= */ 4,
+      big_data_u, /* stride_u= */ 2, big_data_v, /* stride_v= */ 2);
 
   const ColorTransformationMatrix color_matrix =
       CalculateColorTransformationMatrix(big_i420_buffer, small_i420_buffer);
diff --git a/rtc_tools/frame_analyzer/video_geometry_aligner.cc b/rtc_tools/frame_analyzer/video_geometry_aligner.cc
index efb0333..c029a0c 100644
--- a/rtc_tools/frame_analyzer/video_geometry_aligner.cc
+++ b/rtc_tools/frame_analyzer/video_geometry_aligner.cc
@@ -24,7 +24,7 @@
 namespace {
 
 bool IsValidRegion(const CropRegion& region,
-                   const rtc::scoped_refptr<I420BufferInterface>& frame) {
+                   const scoped_refptr<I420BufferInterface>& frame) {
   return region.left >= 0 && region.right >= 0 && region.top >= 0 &&
          region.bottom >= 0 && region.left + region.right < frame->width() &&
          region.top + region.bottom < frame->height();
@@ -32,9 +32,9 @@
 
 }  // namespace
 
-rtc::scoped_refptr<I420BufferInterface> CropAndZoom(
+scoped_refptr<I420BufferInterface> CropAndZoom(
     const CropRegion& crop_region,
-    const rtc::scoped_refptr<I420BufferInterface>& frame) {
+    const scoped_refptr<I420BufferInterface>& frame) {
   RTC_CHECK(IsValidRegion(crop_region, frame));
 
   const int uv_crop_left = crop_region.left / 2;
@@ -54,7 +54,7 @@
       frame->DataV() + frame->StrideV() * uv_crop_top + uv_crop_left;
 
   // Stretch the cropped frame to the original size using libyuv.
-  rtc::scoped_refptr<I420Buffer> adjusted_frame =
+  scoped_refptr<I420Buffer> adjusted_frame =
       I420Buffer::Create(frame->width(), frame->height());
   libyuv::I420Scale(y_plane, frame->StrideY(), u_plane, frame->StrideU(),
                     v_plane, frame->StrideV(), cropped_width, cropped_height,
@@ -67,8 +67,8 @@
 }
 
 CropRegion CalculateCropRegion(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame) {
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame) {
   RTC_CHECK_EQ(reference_frame->width(), test_frame->width());
   RTC_CHECK_EQ(reference_frame->height(), test_frame->height());
 
@@ -122,20 +122,19 @@
   return best_region;
 }
 
-rtc::scoped_refptr<I420BufferInterface> AdjustCropping(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame) {
+scoped_refptr<I420BufferInterface> AdjustCropping(
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame) {
   return CropAndZoom(CalculateCropRegion(reference_frame, test_frame),
                      reference_frame);
 }
 
-rtc::scoped_refptr<Video> AdjustCropping(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video) {
+scoped_refptr<Video> AdjustCropping(const scoped_refptr<Video>& reference_video,
+                                    const scoped_refptr<Video>& test_video) {
   class CroppedVideo : public Video {
    public:
-    CroppedVideo(const rtc::scoped_refptr<Video>& reference_video,
-                 const rtc::scoped_refptr<Video>& test_video)
+    CroppedVideo(const scoped_refptr<Video>& reference_video,
+                 const scoped_refptr<Video>& test_video)
         : reference_video_(reference_video), test_video_(test_video) {
       RTC_CHECK_EQ(reference_video->number_of_frames(),
                    test_video->number_of_frames());
@@ -149,9 +148,8 @@
       return test_video_->number_of_frames();
     }
 
-    rtc::scoped_refptr<I420BufferInterface> GetFrame(
-        size_t index) const override {
-      const rtc::scoped_refptr<I420BufferInterface> reference_frame =
+    scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
+      const scoped_refptr<I420BufferInterface> reference_frame =
           reference_video_->GetFrame(index);
 
       // Only calculate cropping region once per frame since it's expensive.
@@ -164,14 +162,14 @@
     }
 
    private:
-    const rtc::scoped_refptr<Video> reference_video_;
-    const rtc::scoped_refptr<Video> test_video_;
+    const scoped_refptr<Video> reference_video_;
+    const scoped_refptr<Video> test_video_;
     // Mutable since this is a cache that affects performance and not logical
     // behavior.
     mutable std::map<size_t, CropRegion> crop_regions_;
   };
 
-  return rtc::make_ref_counted<CroppedVideo>(reference_video, test_video);
+  return make_ref_counted<CroppedVideo>(reference_video, test_video);
 }
 
 }  // namespace test
diff --git a/rtc_tools/frame_analyzer/video_geometry_aligner.h b/rtc_tools/frame_analyzer/video_geometry_aligner.h
index 47667b0..73a80bc 100644
--- a/rtc_tools/frame_analyzer/video_geometry_aligner.h
+++ b/rtc_tools/frame_analyzer/video_geometry_aligner.h
@@ -28,28 +28,27 @@
 
 // Crops and zooms in on the cropped region so that the returned frame has the
 // same resolution as the input frame.
-rtc::scoped_refptr<I420BufferInterface> CropAndZoom(
+scoped_refptr<I420BufferInterface> CropAndZoom(
     const CropRegion& crop_region,
-    const rtc::scoped_refptr<I420BufferInterface>& frame);
+    const scoped_refptr<I420BufferInterface>& frame);
 
 // Calculate the optimal cropping region on the reference frame to maximize SSIM
 // to the test frame.
 CropRegion CalculateCropRegion(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame);
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame);
 
 // Returns a cropped and zoomed version of the reference frame that matches up
 // to the test frame. This is a simple helper function on top of
 // CalculateCropRegion() and CropAndZoom().
-rtc::scoped_refptr<I420BufferInterface> AdjustCropping(
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame,
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame);
+scoped_refptr<I420BufferInterface> AdjustCropping(
+    const scoped_refptr<I420BufferInterface>& reference_frame,
+    const scoped_refptr<I420BufferInterface>& test_frame);
 
 // Returns a cropped and zoomed version of the reference video that matches up
 // to the test video. Frames are individually adjusted for cropping.
-rtc::scoped_refptr<Video> AdjustCropping(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video);
+scoped_refptr<Video> AdjustCropping(const scoped_refptr<Video>& reference_video,
+                                    const scoped_refptr<Video>& test_video);
 
 }  // namespace test
 }  // namespace webrtc
diff --git a/rtc_tools/frame_analyzer/video_geometry_aligner_unittest.cc b/rtc_tools/frame_analyzer/video_geometry_aligner_unittest.cc
index a86b8c5..62f853c 100644
--- a/rtc_tools/frame_analyzer/video_geometry_aligner_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_geometry_aligner_unittest.cc
@@ -38,8 +38,8 @@
         /* stride_u= */ 2, data_v, /* stride_v= */ 2);
   }
 
-  rtc::scoped_refptr<Video> reference_video_;
-  rtc::scoped_refptr<I420BufferInterface> test_frame_;
+  scoped_refptr<Video> reference_video_;
+  scoped_refptr<I420BufferInterface> test_frame_;
 };
 
 // Teach gtest how to compare CropRegions.
@@ -49,7 +49,7 @@
 }
 
 TEST_F(VideoGeometryAlignerTest, CropAndZoomIdentity) {
-  const rtc::scoped_refptr<I420BufferInterface> frame =
+  const scoped_refptr<I420BufferInterface> frame =
       reference_video_->GetFrame(0);
 
   // Assume perfect match, i.e. SSIM == 1.
@@ -60,7 +60,7 @@
 TEST_F(VideoGeometryAlignerTest, CropAndZoomLeft) {
   CropRegion region;
   region.left = 2;
-  const rtc::scoped_refptr<I420BufferInterface> cropped_frame =
+  const scoped_refptr<I420BufferInterface> cropped_frame =
       CropAndZoom(region, test_frame_);
   EXPECT_EQ(std::vector<uint8_t>(
                 {2, 2, 3, 3, 6, 6, 7, 7, 10, 10, 11, 11, 14, 14, 15, 15}),
@@ -78,7 +78,7 @@
 TEST_F(VideoGeometryAlignerTest, DISABLED_CropAndZoomTop) {
   CropRegion region;
   region.top = 2;
-  const rtc::scoped_refptr<I420BufferInterface> cropped_frame =
+  const scoped_refptr<I420BufferInterface> cropped_frame =
       CropAndZoom(region, test_frame_);
   EXPECT_EQ(std::vector<uint8_t>(
                 {8, 9, 10, 11, 10, 11, 12, 13, 12, 13, 14, 15, 12, 13, 14, 15}),
@@ -95,7 +95,7 @@
 TEST_F(VideoGeometryAlignerTest, CropAndZoomRight) {
   CropRegion region;
   region.right = 2;
-  const rtc::scoped_refptr<I420BufferInterface> cropped_frame =
+  const scoped_refptr<I420BufferInterface> cropped_frame =
       CropAndZoom(region, test_frame_);
   EXPECT_EQ(std::vector<uint8_t>(
                 {0, 0, 1, 1, 4, 4, 5, 5, 8, 8, 9, 9, 12, 12, 13, 13}),
@@ -113,7 +113,7 @@
 TEST_F(VideoGeometryAlignerTest, DISABLED_CropAndZoomBottom) {
   CropRegion region;
   region.bottom = 2;
-  const rtc::scoped_refptr<I420BufferInterface> cropped_frame =
+  const scoped_refptr<I420BufferInterface> cropped_frame =
       CropAndZoom(region, test_frame_);
   EXPECT_EQ(
       std::vector<uint8_t>({0, 1, 2, 3, 2, 3, 4, 5, 4, 5, 6, 7, 4, 5, 6, 7}),
@@ -128,7 +128,7 @@
 }
 
 TEST_F(VideoGeometryAlignerTest, CalculateCropRegionIdentity) {
-  const rtc::scoped_refptr<I420BufferInterface> frame =
+  const scoped_refptr<I420BufferInterface> frame =
       reference_video_->GetFrame(0);
   CropRegion identity_region;
   EXPECT_EQ(identity_region, CalculateCropRegion(frame, frame));
@@ -142,7 +142,7 @@
   crop_region.right = 5;
   crop_region.bottom = 3;
 
-  const rtc::scoped_refptr<I420BufferInterface> frame =
+  const scoped_refptr<I420BufferInterface> frame =
       reference_video_->GetFrame(0);
 
   EXPECT_EQ(crop_region,
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.cc b/rtc_tools/frame_analyzer/video_quality_analysis.cc
index 1832438..1229d0a 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.cc
@@ -29,8 +29,8 @@
 template <typename FrameMetricFunction>
 static double CalculateMetric(
     const FrameMetricFunction& frame_metric_function,
-    const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
-    const rtc::scoped_refptr<I420BufferInterface>& test_buffer) {
+    const scoped_refptr<I420BufferInterface>& ref_buffer,
+    const scoped_refptr<I420BufferInterface>& test_buffer) {
   RTC_CHECK_EQ(ref_buffer->width(), test_buffer->width());
   RTC_CHECK_EQ(ref_buffer->height(), test_buffer->height());
   return frame_metric_function(
@@ -41,28 +41,28 @@
       test_buffer->width(), test_buffer->height());
 }
 
-double Psnr(const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
-            const rtc::scoped_refptr<I420BufferInterface>& test_buffer) {
+double Psnr(const scoped_refptr<I420BufferInterface>& ref_buffer,
+            const scoped_refptr<I420BufferInterface>& test_buffer) {
   // LibYuv sets the max psnr value to 128, we restrict it to 48.
   // In case of 0 mse in one frame, 128 can skew the results significantly.
   return std::min(48.0,
                   CalculateMetric(&libyuv::I420Psnr, ref_buffer, test_buffer));
 }
 
-double Ssim(const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
-            const rtc::scoped_refptr<I420BufferInterface>& test_buffer) {
+double Ssim(const scoped_refptr<I420BufferInterface>& ref_buffer,
+            const scoped_refptr<I420BufferInterface>& test_buffer) {
   return CalculateMetric(&libyuv::I420Ssim, ref_buffer, test_buffer);
 }
 
 std::vector<AnalysisResult> RunAnalysis(
-    const rtc::scoped_refptr<webrtc::test::Video>& reference_video,
-    const rtc::scoped_refptr<webrtc::test::Video>& test_video,
+    const scoped_refptr<webrtc::test::Video>& reference_video,
+    const scoped_refptr<webrtc::test::Video>& test_video,
     const std::vector<size_t>& test_frame_indices) {
   std::vector<AnalysisResult> results;
   for (size_t i = 0; i < test_video->number_of_frames(); ++i) {
-    const rtc::scoped_refptr<I420BufferInterface>& test_frame =
+    const scoped_refptr<I420BufferInterface>& test_frame =
         test_video->GetFrame(i);
-    const rtc::scoped_refptr<I420BufferInterface>& reference_frame =
+    const scoped_refptr<I420BufferInterface>& reference_frame =
         reference_video->GetFrame(i);
 
     // Fill in the result struct.
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.h b/rtc_tools/frame_analyzer/video_quality_analysis.h
index 701b585..0231aea 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.h
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.h
@@ -53,19 +53,19 @@
 // position in the original video. We also need to provide a map from test frame
 // indices to reference frame indices.
 std::vector<AnalysisResult> RunAnalysis(
-    const rtc::scoped_refptr<webrtc::test::Video>& reference_video,
-    const rtc::scoped_refptr<webrtc::test::Video>& test_video,
+    const scoped_refptr<webrtc::test::Video>& reference_video,
+    const scoped_refptr<webrtc::test::Video>& test_video,
     const std::vector<size_t>& test_frame_indices);
 
 // Compute PSNR for an I420 buffer (all planes). The max return value (in the
 // case where the test and reference frames are exactly the same) will be 48.
-double Psnr(const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
-            const rtc::scoped_refptr<I420BufferInterface>& test_buffer);
+double Psnr(const scoped_refptr<I420BufferInterface>& ref_buffer,
+            const scoped_refptr<I420BufferInterface>& test_buffer);
 
 // Compute SSIM for an I420 buffer (all planes). The max return value (in the
 // case where the test and reference frames are exactly the same) will be 1.
-double Ssim(const rtc::scoped_refptr<I420BufferInterface>& ref_buffer,
-            const rtc::scoped_refptr<I420BufferInterface>& test_buffer);
+double Ssim(const scoped_refptr<I420BufferInterface>& ref_buffer,
+            const scoped_refptr<I420BufferInterface>& test_buffer);
 
 // Prints the result from the analysis in Chromium performance
 // numbers compatible format to stdout. If the results object contains no frames
diff --git a/rtc_tools/frame_analyzer/video_temporal_aligner.cc b/rtc_tools/frame_analyzer/video_temporal_aligner.cc
index 4b940d0..84afee1 100644
--- a/rtc_tools/frame_analyzer/video_temporal_aligner.cc
+++ b/rtc_tools/frame_analyzer/video_temporal_aligner.cc
@@ -39,8 +39,7 @@
 // Helper class that takes a video and generates an infinite looping video.
 class LoopingVideo : public Video {
  public:
-  explicit LoopingVideo(const rtc::scoped_refptr<Video>& video)
-      : video_(video) {}
+  explicit LoopingVideo(const scoped_refptr<Video>& video) : video_(video) {}
 
   int width() const override { return video_->width(); }
   int height() const override { return video_->height(); }
@@ -48,20 +47,19 @@
     return std::numeric_limits<size_t>::max();
   }
 
-  rtc::scoped_refptr<I420BufferInterface> GetFrame(
-      size_t index) const override {
+  scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
     return video_->GetFrame(index % video_->number_of_frames());
   }
 
  private:
-  const rtc::scoped_refptr<Video> video_;
+  const scoped_refptr<Video> video_;
 };
 
 // Helper class that take a vector of frame indices and a video and produces a
 // new video where the frames have been reshuffled.
 class ReorderedVideo : public Video {
  public:
-  ReorderedVideo(const rtc::scoped_refptr<Video>& video,
+  ReorderedVideo(const scoped_refptr<Video>& video,
                  const std::vector<size_t>& indices)
       : video_(video), indices_(indices) {}
 
@@ -69,20 +67,19 @@
   int height() const override { return video_->height(); }
   size_t number_of_frames() const override { return indices_.size(); }
 
-  rtc::scoped_refptr<I420BufferInterface> GetFrame(
-      size_t index) const override {
+  scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
     return video_->GetFrame(indices_.at(index));
   }
 
  private:
-  const rtc::scoped_refptr<Video> video_;
+  const scoped_refptr<Video> video_;
   const std::vector<size_t> indices_;
 };
 
 // Helper class that takes a video and produces a downscaled video.
 class DownscaledVideo : public Video {
  public:
-  DownscaledVideo(float scale_factor, const rtc::scoped_refptr<Video>& video)
+  DownscaledVideo(float scale_factor, const scoped_refptr<Video>& video)
       : downscaled_width_(
             static_cast<int>(std::round(scale_factor * video->width()))),
         downscaled_height_(
@@ -95,11 +92,9 @@
     return video_->number_of_frames();
   }
 
-  rtc::scoped_refptr<I420BufferInterface> GetFrame(
-      size_t index) const override {
-    const rtc::scoped_refptr<I420BufferInterface> frame =
-        video_->GetFrame(index);
-    rtc::scoped_refptr<I420Buffer> downscaled_frame =
+  scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
+    const scoped_refptr<I420BufferInterface> frame = video_->GetFrame(index);
+    scoped_refptr<I420Buffer> downscaled_frame =
         I420Buffer::Create(downscaled_width_, downscaled_height_);
     downscaled_frame->ScaleFrom(*frame);
     return downscaled_frame;
@@ -108,14 +103,14 @@
  private:
   const int downscaled_width_;
   const int downscaled_height_;
-  const rtc::scoped_refptr<Video> video_;
+  const scoped_refptr<Video> video_;
 };
 
 // Helper class that takes a video and caches the latest frame access. This
 // improves performance a lot since the original source is often from a file.
 class CachedVideo : public Video {
  public:
-  CachedVideo(int max_cache_size, const rtc::scoped_refptr<Video>& video)
+  CachedVideo(int max_cache_size, const scoped_refptr<Video>& video)
       : max_cache_size_(max_cache_size), video_(video) {}
 
   int width() const override { return video_->width(); }
@@ -124,14 +119,13 @@
     return video_->number_of_frames();
   }
 
-  rtc::scoped_refptr<I420BufferInterface> GetFrame(
-      size_t index) const override {
+  scoped_refptr<I420BufferInterface> GetFrame(size_t index) const override {
     for (const CachedFrame& cached_frame : cache_) {
       if (cached_frame.index == index)
         return cached_frame.frame;
     }
 
-    rtc::scoped_refptr<I420BufferInterface> frame = video_->GetFrame(index);
+    scoped_refptr<I420BufferInterface> frame = video_->GetFrame(index);
     cache_.push_front({index, frame});
     if (cache_.size() > max_cache_size_)
       cache_.pop_back();
@@ -142,17 +136,17 @@
  private:
   struct CachedFrame {
     size_t index;
-    rtc::scoped_refptr<I420BufferInterface> frame;
+    scoped_refptr<I420BufferInterface> frame;
   };
 
   const size_t max_cache_size_;
-  const rtc::scoped_refptr<Video> video_;
+  const scoped_refptr<Video> video_;
   mutable std::deque<CachedFrame> cache_;
 };
 
 // Try matching the test frame against all frames in the reference video and
 // return the index of the best matching frame.
-size_t FindBestMatch(const rtc::scoped_refptr<I420BufferInterface>& test_frame,
+size_t FindBestMatch(const scoped_refptr<I420BufferInterface>& test_frame,
                      const Video& reference_video) {
   std::vector<double> ssim;
   for (const auto& ref_frame : reference_video)
@@ -164,7 +158,7 @@
 // Find and return the index of the frame matching the test frame. The search
 // starts at the starting index and continues until there is no better match
 // within the next kNumberOfFramesLookAhead frames.
-size_t FindNextMatch(const rtc::scoped_refptr<I420BufferInterface>& test_frame,
+size_t FindNextMatch(const scoped_refptr<I420BufferInterface>& test_frame,
                      const Video& reference_video,
                      size_t start_index) {
   const double start_ssim =
@@ -182,25 +176,25 @@
 }  // namespace
 
 std::vector<size_t> FindMatchingFrameIndices(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video) {
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video) {
   // This is done to get a 10x speedup. We don't need the full resolution in
   // order to match frames, and we should limit file access and not read the
   // same memory tens of times.
   const float kScaleFactor = 0.25f;
-  const rtc::scoped_refptr<Video> cached_downscaled_reference_video =
-      rtc::make_ref_counted<CachedVideo>(kNumberOfFramesLookAhead,
-                                         rtc::make_ref_counted<DownscaledVideo>(
-                                             kScaleFactor, reference_video));
-  const rtc::scoped_refptr<Video> downscaled_test_video =
-      rtc::make_ref_counted<DownscaledVideo>(kScaleFactor, test_video);
+  const scoped_refptr<Video> cached_downscaled_reference_video =
+      make_ref_counted<CachedVideo>(
+          kNumberOfFramesLookAhead,
+          make_ref_counted<DownscaledVideo>(kScaleFactor, reference_video));
+  const scoped_refptr<Video> downscaled_test_video =
+      make_ref_counted<DownscaledVideo>(kScaleFactor, test_video);
 
   // Assume the video is looping around.
-  const rtc::scoped_refptr<Video> looping_reference_video =
-      rtc::make_ref_counted<LoopingVideo>(cached_downscaled_reference_video);
+  const scoped_refptr<Video> looping_reference_video =
+      make_ref_counted<LoopingVideo>(cached_downscaled_reference_video);
 
   std::vector<size_t> match_indices;
-  for (const rtc::scoped_refptr<I420BufferInterface>& test_frame :
+  for (const scoped_refptr<I420BufferInterface>& test_frame :
        *downscaled_test_video) {
     if (match_indices.empty()) {
       // First frame.
@@ -215,21 +209,21 @@
   return match_indices;
 }
 
-rtc::scoped_refptr<Video> ReorderVideo(const rtc::scoped_refptr<Video>& video,
-                                       const std::vector<size_t>& indices) {
-  return rtc::make_ref_counted<ReorderedVideo>(
-      rtc::make_ref_counted<LoopingVideo>(video), indices);
+scoped_refptr<Video> ReorderVideo(const scoped_refptr<Video>& video,
+                                  const std::vector<size_t>& indices) {
+  return make_ref_counted<ReorderedVideo>(make_ref_counted<LoopingVideo>(video),
+                                          indices);
 }
 
-rtc::scoped_refptr<Video> GenerateAlignedReferenceVideo(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video) {
+scoped_refptr<Video> GenerateAlignedReferenceVideo(
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video) {
   return GenerateAlignedReferenceVideo(
       reference_video, FindMatchingFrameIndices(reference_video, test_video));
 }
 
-rtc::scoped_refptr<Video> GenerateAlignedReferenceVideo(
-    const rtc::scoped_refptr<Video>& reference_video,
+scoped_refptr<Video> GenerateAlignedReferenceVideo(
+    const scoped_refptr<Video>& reference_video,
     const std::vector<size_t>& indices) {
   return ReorderVideo(reference_video, indices);
 }
diff --git a/rtc_tools/frame_analyzer/video_temporal_aligner.h b/rtc_tools/frame_analyzer/video_temporal_aligner.h
index 26a4088..8e5cf54 100644
--- a/rtc_tools/frame_analyzer/video_temporal_aligner.h
+++ b/rtc_tools/frame_analyzer/video_temporal_aligner.h
@@ -28,15 +28,15 @@
 // video and they should be interpreted modulo that size. The matching frames
 // will be determined by maximizing SSIM.
 std::vector<size_t> FindMatchingFrameIndices(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video);
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video);
 
 // Generate a new video using the frames from the original video. The returned
 // video will have the same number of frames as the size of `indices`, and
 // frame nr i in the returned video will point to frame nr indices[i] in the
 // original video.
-rtc::scoped_refptr<Video> ReorderVideo(const rtc::scoped_refptr<Video>& video,
-                                       const std::vector<size_t>& indices);
+scoped_refptr<Video> ReorderVideo(const scoped_refptr<Video>& video,
+                                  const std::vector<size_t>& indices);
 
 // Returns a modified version of the reference video where the frames have
 // been aligned to the test video. The test video is assumed to be captured
@@ -46,13 +46,13 @@
 // over when it reaches the end. The returned result is a version of the
 // reference video where the missing frames are left out so it aligns to the
 // test video.
-rtc::scoped_refptr<Video> GenerateAlignedReferenceVideo(
-    const rtc::scoped_refptr<Video>& reference_video,
-    const rtc::scoped_refptr<Video>& test_video);
+scoped_refptr<Video> GenerateAlignedReferenceVideo(
+    const scoped_refptr<Video>& reference_video,
+    const scoped_refptr<Video>& test_video);
 
 // As above, but using precalculated indices.
-rtc::scoped_refptr<Video> GenerateAlignedReferenceVideo(
-    const rtc::scoped_refptr<Video>& reference_video,
+scoped_refptr<Video> GenerateAlignedReferenceVideo(
+    const scoped_refptr<Video>& reference_video,
     const std::vector<size_t>& indices);
 
 }  // namespace test
diff --git a/rtc_tools/frame_analyzer/video_temporal_aligner_unittest.cc b/rtc_tools/frame_analyzer/video_temporal_aligner_unittest.cc
index 9519a74..1fcbd9c 100644
--- a/rtc_tools/frame_analyzer/video_temporal_aligner_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_temporal_aligner_unittest.cc
@@ -28,11 +28,11 @@
     ASSERT_TRUE(reference_video);
   }
 
-  rtc::scoped_refptr<Video> reference_video;
+  scoped_refptr<Video> reference_video;
 };
 
 TEST_F(VideoTemporalAlignerTest, FindMatchingFrameIndicesEmpty) {
-  rtc::scoped_refptr<Video> empty_test_video =
+  scoped_refptr<Video> empty_test_video =
       ReorderVideo(reference_video, std::vector<size_t>());
 
   const std::vector<size_t> matched_indices =
@@ -54,7 +54,7 @@
   const std::vector<size_t> indices = {2, 2, 2, 2};
 
   // Generate a test video based on this sequence.
-  rtc::scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
+  scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
 
   const std::vector<size_t> matched_indices =
       FindMatchingFrameIndices(reference_video, test_video);
@@ -68,7 +68,7 @@
     indices.push_back(i % reference_video->number_of_frames());
 
   // Generate a test video based on this sequence.
-  rtc::scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
+  scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
 
   const std::vector<size_t> matched_indices =
       FindMatchingFrameIndices(reference_video, test_video);
@@ -93,7 +93,7 @@
   indices.push_back((start_index + 32) % reference_video->number_of_frames());
 
   // Generate a test video based on this sequence.
-  rtc::scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
+  scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
 
   const std::vector<size_t> matched_indices =
       FindMatchingFrameIndices(reference_video, test_video);
@@ -113,9 +113,9 @@
   }
 
   // Generate a test video based on this sequence.
-  rtc::scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
+  scoped_refptr<Video> test_video = ReorderVideo(reference_video, indices);
 
-  rtc::scoped_refptr<Video> aligned_reference_video =
+  scoped_refptr<Video> aligned_reference_video =
       GenerateAlignedReferenceVideo(reference_video, test_video);
 
   // Assume perfect match, i.e. ssim == 1, for all frames.
diff --git a/rtc_tools/network_tester/jni.cc b/rtc_tools/network_tester/jni.cc
index f192739..a0950f1 100644
--- a/rtc_tools/network_tester/jni.cc
+++ b/rtc_tools/network_tester/jni.cc
@@ -21,7 +21,7 @@
 Java_com_google_media_networktester_NetworkTester_CreateTestController(
     JNIEnv* jni,
     jclass) {
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
   return reinterpret_cast<intptr_t>(new webrtc::TestController(
       0, 0, "/mnt/sdcard/network_tester_client_config.dat",
       "/mnt/sdcard/network_tester_client_packet_log.dat"));
@@ -51,7 +51,7 @@
     jclass,
     jlong native_pointer) {
   // 100 ms arbitrary chosen, but it works well.
-  rtc::Thread::Current()->ProcessMessages(/*cms=*/100);
+  webrtc::Thread::Current()->ProcessMessages(/*cms=*/100);
 }
 
 extern "C" JNIEXPORT void JNICALL
@@ -64,5 +64,5 @@
   if (test_controller) {
     delete test_controller;
   }
-  rtc::ThreadManager::Instance()->UnwrapCurrentThread();
+  webrtc::ThreadManager::Instance()->UnwrapCurrentThread();
 }
diff --git a/rtc_tools/network_tester/network_tester_unittest.cc b/rtc_tools/network_tester/network_tester_unittest.cc
index 1596ad5..8f522af 100644
--- a/rtc_tools/network_tester/network_tester_unittest.cc
+++ b/rtc_tools/network_tester/network_tester_unittest.cc
@@ -27,9 +27,9 @@
   // running the test in parallel in stress runs. Skipping all reserved ports.
   const int MIN_PORT = 49152;
   const int MAX_PORT = 65535;
-  int port = webrtc::Random(rtc::TimeMicros()).Rand(MIN_PORT, MAX_PORT);
+  int port = webrtc::Random(webrtc::TimeMicros()).Rand(MIN_PORT, MAX_PORT);
 
-  rtc::AutoThread main_thread;
+  webrtc::AutoThread main_thread;
 
   TestController client(
       0, 0, webrtc::test::ResourcePath("network_tester/client_config", "dat"),
diff --git a/rtc_tools/network_tester/packet_sender.cc b/rtc_tools/network_tester/packet_sender.cc
index f4b93c1..0ab5b77 100644
--- a/rtc_tools/network_tester/packet_sender.cc
+++ b/rtc_tools/network_tester/packet_sender.cc
@@ -28,7 +28,7 @@
 
 absl::AnyInvocable<void() &&> SendPacketTask(
     PacketSender* packet_sender,
-    rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
+    scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
     int64_t target_time_ms = TimeMillis()) {
   return [target_time_ms, packet_sender,
           task_safety_flag = std::move(task_safety_flag)]() mutable {
@@ -48,7 +48,7 @@
 absl::AnyInvocable<void() &&> UpdateTestSettingTask(
     PacketSender* packet_sender,
     std::unique_ptr<ConfigReader> config_reader,
-    rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag) {
+    scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag) {
   return [packet_sender, config_reader = std::move(config_reader),
           task_safety_flag = std::move(task_safety_flag)]() mutable {
     if (!task_safety_flag->alive()) {
@@ -73,7 +73,7 @@
 PacketSender::PacketSender(
     TestController* test_controller,
     webrtc::TaskQueueBase* worker_queue,
-    rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
+    scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
     const std::string& config_file_path)
     : packet_size_(0),
       send_interval_ms_(0),
diff --git a/rtc_tools/network_tester/packet_sender.h b/rtc_tools/network_tester/packet_sender.h
index caa9249..832ee49 100644
--- a/rtc_tools/network_tester/packet_sender.h
+++ b/rtc_tools/network_tester/packet_sender.h
@@ -32,11 +32,10 @@
 
 class PacketSender {
  public:
-  PacketSender(
-      TestController* test_controller,
-      webrtc::TaskQueueBase* worker_queue,
-      rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
-      const std::string& config_file_path);
+  PacketSender(TestController* test_controller,
+               webrtc::TaskQueueBase* worker_queue,
+               scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag,
+               const std::string& config_file_path);
   ~PacketSender();
 
   PacketSender(const PacketSender&) = delete;
@@ -60,7 +59,7 @@
   const std::string config_file_path_;
   TestController* const test_controller_;
   webrtc::TaskQueueBase* worker_queue_;
-  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag_;
+  scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag_;
 };
 
 }  // namespace webrtc
diff --git a/rtc_tools/network_tester/test_controller.cc b/rtc_tools/network_tester/test_controller.cc
index 7f71459..c076f15 100644
--- a/rtc_tools/network_tester/test_controller.cc
+++ b/rtc_tools/network_tester/test_controller.cc
@@ -56,7 +56,7 @@
             SocketAddress(GetAnyIP(AF_INET), 0), min_port, max_port));
     RTC_CHECK(udp_socket_ != nullptr);
     udp_socket_->RegisterReceivedPacketCallback(
-        [&](rtc::AsyncPacketSocket* socket, const rtc::ReceivedPacket& packet) {
+        [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) {
           OnReadPacket(socket, packet);
         });
   });
@@ -98,7 +98,7 @@
   if (data_size && *data_size > packet_size)
     packet_size = *data_size;
   udp_socket_->SendTo((const void*)send_data_.data(), packet_size,
-                      remote_address_, rtc::PacketOptions());
+                      remote_address_, AsyncSocketPacketOptions());
 }
 
 void TestController::OnTestDone() {
@@ -117,7 +117,7 @@
 }
 
 void TestController::OnReadPacket(AsyncPacketSocket* socket,
-                                  const rtc::ReceivedPacket& received_packet) {
+                                  const ReceivedIpPacket& received_packet) {
   RTC_DCHECK_RUN_ON(packet_sender_thread_.get());
   RTC_LOG(LS_VERBOSE) << "OnReadPacket";
   size_t packet_size = received_packet.payload()[0];
diff --git a/rtc_tools/network_tester/test_controller.h b/rtc_tools/network_tester/test_controller.h
index 484f78a..8a29c5d 100644
--- a/rtc_tools/network_tester/test_controller.h
+++ b/rtc_tools/network_tester/test_controller.h
@@ -65,9 +65,9 @@
 
  private:
   void OnReadPacket(AsyncPacketSocket* socket,
-                    const rtc::ReceivedPacket& received_packet);
+                    const ReceivedIpPacket& received_packet);
   RTC_NO_UNIQUE_ADDRESS SequenceChecker test_controller_thread_checker_;
-  std::unique_ptr<rtc::SocketServer> socket_server_;
+  std::unique_ptr<SocketServer> socket_server_;
   std::unique_ptr<Thread> packet_sender_thread_;
   BasicPacketSocketFactory socket_factory_
       RTC_GUARDED_BY(packet_sender_thread_);
@@ -83,7 +83,7 @@
   SocketAddress remote_address_;
   std::unique_ptr<PacketSender> packet_sender_
       RTC_GUARDED_BY(packet_sender_thread_);
-  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag_;
+  scoped_refptr<webrtc::PendingTaskSafetyFlag> task_safety_flag_;
 };
 
 }  // namespace webrtc
diff --git a/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc b/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
index bcbb1bb..38ff935 100644
--- a/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
+++ b/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
@@ -36,17 +36,17 @@
           "The test YUV file to run the analysis for");
 
 void CompareFiles(
-    const rtc::scoped_refptr<webrtc::test::Video>& reference_video,
-    const rtc::scoped_refptr<webrtc::test::Video>& test_video,
+    const webrtc::scoped_refptr<webrtc::test::Video>& reference_video,
+    const webrtc::scoped_refptr<webrtc::test::Video>& test_video,
     const char* results_file_name) {
   FILE* results_file = fopen(results_file_name, "w");
 
   const size_t num_frames = std::min(reference_video->number_of_frames(),
                                      test_video->number_of_frames());
   for (size_t i = 0; i < num_frames; ++i) {
-    const rtc::scoped_refptr<webrtc::I420BufferInterface> ref_buffer =
+    const webrtc::scoped_refptr<webrtc::I420BufferInterface> ref_buffer =
         reference_video->GetFrame(i);
-    const rtc::scoped_refptr<webrtc::I420BufferInterface> test_buffer =
+    const webrtc::scoped_refptr<webrtc::I420BufferInterface> test_buffer =
         test_video->GetFrame(i);
 
     // Calculate the PSNR and SSIM.
@@ -83,9 +83,9 @@
       "--test_file=test.yuv --results_file=results.txt\n");
   absl::ParseCommandLine(argc, argv);
 
-  rtc::scoped_refptr<webrtc::test::Video> reference_video =
+  webrtc::scoped_refptr<webrtc::test::Video> reference_video =
       webrtc::test::OpenY4mFile(absl::GetFlag(FLAGS_reference_file));
-  rtc::scoped_refptr<webrtc::test::Video> test_video =
+  webrtc::scoped_refptr<webrtc::test::Video> test_video =
       webrtc::test::OpenY4mFile(absl::GetFlag(FLAGS_test_file));
 
   if (!reference_video || !test_video) {
diff --git a/rtc_tools/rtc_event_log_to_text/main.cc b/rtc_tools/rtc_event_log_to_text/main.cc
index 5a81040..d37374a 100644
--- a/rtc_tools/rtc_event_log_to_text/main.cc
+++ b/rtc_tools/rtc_event_log_to_text/main.cc
@@ -45,10 +45,10 @@
   std::vector<char*> args = absl::ParseCommandLine(argc, argv);
 
   // Print RTC_LOG warnings and errors even in release builds.
-  if (rtc::LogMessage::GetLogToDebug() > rtc::LS_WARNING) {
-    rtc::LogMessage::LogToDebug(rtc::LS_WARNING);
+  if (webrtc::LogMessage::GetLogToDebug() > webrtc::LS_WARNING) {
+    webrtc::LogMessage::LogToDebug(webrtc::LS_WARNING);
   }
-  rtc::LogMessage::SetLogToStderr(true);
+  webrtc::LogMessage::SetLogToStderr(true);
 
   webrtc::ParsedRtcEventLog::UnconfiguredHeaderExtensions header_extensions =
       webrtc::ParsedRtcEventLog::UnconfiguredHeaderExtensions::kDontParse;
diff --git a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
index b6c6e08..7a34309 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyze_audio.cc
@@ -258,9 +258,9 @@
 
   std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
 
-  rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
-      rtc::make_ref_counted<ReplacementAudioDecoderFactory>(
-          replacement_file_name, file_sample_rate_hz);
+  scoped_refptr<AudioDecoderFactory> decoder_factory =
+      make_ref_counted<ReplacementAudioDecoderFactory>(replacement_file_name,
+                                                       file_sample_rate_hz);
 
   test::NetEqTest::DecoderMap codecs = {
       {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer.cc b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
index 6f8abce..4aea669 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyzer.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer.cc
@@ -1853,7 +1853,7 @@
             0u,  // Per packet overhead bytes.,
             Timestamp::Micros(rtp_packet.rtp.log_time_us()));
       }
-      rtc::SentPacket sent_packet;
+      SentPacketInfo sent_packet;
       sent_packet.send_time_ms = rtp_packet.rtp.log_time_ms();
       sent_packet.info.included_in_allocation = true;
       sent_packet.info.packet_size_bytes = rtp_packet.rtp.total_length;
diff --git a/rtc_tools/rtc_event_log_visualizer/analyzer_bindings.cc b/rtc_tools/rtc_event_log_visualizer/analyzer_bindings.cc
index 35a6375..254a763 100644
--- a/rtc_tools/rtc_event_log_visualizer/analyzer_bindings.cc
+++ b/rtc_tools/rtc_event_log_visualizer/analyzer_bindings.cc
@@ -96,7 +96,7 @@
   webrtc::analytics::ChartCollection proto_charts;
   collection.ExportProtobuf(&proto_charts);
   std::string serialized_charts = proto_charts.SerializeAsString();
-  if (rtc::checked_cast<uint32_t>(serialized_charts.size()) > *output_size) {
+  if (webrtc::checked_cast<uint32_t>(serialized_charts.size()) > *output_size) {
     std::cerr << "Serialized charts larger than available output buffer: "
               << serialized_charts.size() << " vs " << *output_size
               << std::endl;
@@ -105,5 +105,5 @@
   }
 
   memcpy(output, serialized_charts.data(), serialized_charts.size());
-  *output_size = rtc::checked_cast<uint32_t>(serialized_charts.size());
+  *output_size = webrtc::checked_cast<uint32_t>(serialized_charts.size());
 }
diff --git a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
index 8362b5b..47bdb76 100644
--- a/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
+++ b/rtc_tools/rtc_event_log_visualizer/log_simulation.cc
@@ -108,7 +108,7 @@
     transport_feedback_.AddPacket(send_packet, probe_info, packet.overhead,
                                   packet.log_packet_time);
   }
-  rtc::SentPacket sent_packet;
+  SentPacketInfo sent_packet;
   sent_packet.send_time_ms = packet.log_packet_time.ms();
   sent_packet.info.included_in_allocation = true;
   sent_packet.info.packet_size_bytes = packet.size + packet.overhead;
diff --git a/rtc_tools/rtc_event_log_visualizer/main.cc b/rtc_tools/rtc_event_log_visualizer/main.cc
index ed865b3..466203b 100644
--- a/rtc_tools/rtc_event_log_visualizer/main.cc
+++ b/rtc_tools/rtc_event_log_visualizer/main.cc
@@ -152,10 +152,10 @@
   std::vector<char*> args = absl::ParseCommandLine(argc, argv);
 
   // Print RTC_LOG warnings and errors even in release builds.
-  if (rtc::LogMessage::GetLogToDebug() > rtc::LS_WARNING) {
-    rtc::LogMessage::LogToDebug(rtc::LS_WARNING);
+  if (webrtc::LogMessage::GetLogToDebug() > webrtc::LS_WARNING) {
+    webrtc::LogMessage::LogToDebug(webrtc::LS_WARNING);
   }
-  rtc::LogMessage::SetLogToStderr(true);
+  webrtc::LogMessage::SetLogToStderr(true);
 
   // InitFieldTrialsFromString stores the char*, so the char array must outlive
   // the application.
diff --git a/rtc_tools/rtp_generator/rtp_generator.cc b/rtc_tools/rtp_generator/rtp_generator.cc
index 7161215..cb4a14c 100644
--- a/rtc_tools/rtp_generator/rtp_generator.cc
+++ b/rtc_tools/rtp_generator/rtp_generator.cc
@@ -209,12 +209,12 @@
     if (video_config.rtp.payload_name == kVp8CodecName) {
       VideoCodecVP8 settings = VideoEncoder::GetDefaultVp8Settings();
       encoder_config.encoder_specific_settings =
-          rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+          make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
               settings);
     } else if (video_config.rtp.payload_name == kVp9CodecName) {
       VideoCodecVP9 settings = VideoEncoder::GetDefaultVp9Settings();
       encoder_config.encoder_specific_settings =
-          rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+          make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
               settings);
     } else if (video_config.rtp.payload_name == kH264CodecName) {
       encoder_config.encoder_specific_settings = nullptr;
@@ -280,14 +280,14 @@
                                    webrtc::kNetworkDown);
 }
 
-bool RtpGenerator::SendRtp(rtc::ArrayView<const uint8_t> packet,
+bool RtpGenerator::SendRtp(ArrayView<const uint8_t> packet,
                            const webrtc::PacketOptions& options) {
   test::RtpPacket rtp_packet = DataToRtpPacket(packet.data(), packet.size());
   rtp_dump_writer_->WritePacket(&rtp_packet);
   return true;
 }
 
-bool RtpGenerator::SendRtcp(rtc::ArrayView<const uint8_t> packet) {
+bool RtpGenerator::SendRtcp(ArrayView<const uint8_t> packet) {
   test::RtpPacket rtcp_packet = DataToRtpPacket(packet.data(), packet.size());
   rtp_dump_writer_->WritePacket(&rtcp_packet);
   return true;
diff --git a/rtc_tools/rtp_generator/rtp_generator.h b/rtc_tools/rtp_generator/rtp_generator.h
index 9a37f39..bb5f141 100644
--- a/rtc_tools/rtp_generator/rtp_generator.h
+++ b/rtc_tools/rtp_generator/rtp_generator.h
@@ -91,10 +91,10 @@
   // webrtc::Transport implementation
   // Captured RTP packets are written to the RTPDump file instead of over the
   // network.
-  bool SendRtp(rtc::ArrayView<const uint8_t> packet,
+  bool SendRtp(ArrayView<const uint8_t> packet,
                const webrtc::PacketOptions& options) override;
   // RTCP packets are ignored for now.
-  bool SendRtcp(rtc::ArrayView<const uint8_t> packet) override;
+  bool SendRtcp(ArrayView<const uint8_t> packet) override;
   // Returns the maximum duration
   int GetMaxDuration() const;
   // Waits until all video streams have finished.
diff --git a/rtc_tools/video_encoder/video_encoder.cc b/rtc_tools/video_encoder/video_encoder.cc
index 910b490..1303084 100644
--- a/rtc_tools/video_encoder/video_encoder.cc
+++ b/rtc_tools/video_encoder/video_encoder.cc
@@ -559,12 +559,12 @@
   absl::ParseCommandLine(argc, argv);
 
   if (absl::GetFlag(FLAGS_verbose)) {
-    rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE);
+    webrtc::LogMessage::LogToDebug(webrtc::LS_VERBOSE);
   } else {
-    rtc::LogMessage::LogToDebug(rtc::LS_INFO);
+    webrtc::LogMessage::LogToDebug(webrtc::LS_INFO);
   }
 
-  rtc::LogMessage::SetLogToStderr(true);
+  webrtc::LogMessage::SetLogToStderr(true);
 
   const bool list_formats = absl::GetFlag(FLAGS_list_formats);
   const bool validate_psnr = absl::GetFlag(FLAGS_validate_psnr);
diff --git a/rtc_tools/video_file_reader.cc b/rtc_tools/video_file_reader.cc
index 9e8d88f..70e17ca 100644
--- a/rtc_tools/video_file_reader.cc
+++ b/rtc_tools/video_file_reader.cc
@@ -50,12 +50,12 @@
   int width() const override { return width_; }
   int height() const override { return height_; }
 
-  rtc::scoped_refptr<I420BufferInterface> GetFrame(
+  scoped_refptr<I420BufferInterface> GetFrame(
       size_t frame_index) const override {
     RTC_CHECK_LT(frame_index, frame_positions_.size());
 
     fsetpos(file_, &frame_positions_[frame_index]);
-    rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
+    scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
 
     if (!ReadBytes(buffer->MutableDataY(), width_ * height_, file_) ||
         !ReadBytes(buffer->MutableDataU(),
@@ -78,8 +78,7 @@
 
 }  // namespace
 
-Video::Iterator::Iterator(const rtc::scoped_refptr<const Video>& video,
-                          size_t index)
+Video::Iterator::Iterator(const scoped_refptr<const Video>& video, size_t index)
     : video_(video), index_(index) {}
 
 Video::Iterator::Iterator(const Video::Iterator& other) = default;
@@ -88,7 +87,7 @@
 Video::Iterator& Video::Iterator::operator=(const Video::Iterator&) = default;
 Video::Iterator::~Iterator() = default;
 
-rtc::scoped_refptr<I420BufferInterface> Video::Iterator::operator*() const {
+scoped_refptr<I420BufferInterface> Video::Iterator::operator*() const {
   return video_->GetFrame(index_);
 }
 bool Video::Iterator::operator==(const Video::Iterator& other) const {
@@ -110,14 +109,14 @@
 }
 
 Video::Iterator Video::begin() const {
-  return Iterator(rtc::scoped_refptr<const Video>(this), 0);
+  return Iterator(scoped_refptr<const Video>(this), 0);
 }
 
 Video::Iterator Video::end() const {
-  return Iterator(rtc::scoped_refptr<const Video>(this), number_of_frames());
+  return Iterator(scoped_refptr<const Video>(this), number_of_frames());
 }
 
-rtc::scoped_refptr<Video> OpenY4mFile(const std::string& file_name) {
+scoped_refptr<Video> OpenY4mFile(const std::string& file_name) {
   FILE* file = fopen(file_name.c_str(), "rb");
   if (file == nullptr) {
     RTC_LOG(LS_ERROR) << "Could not open input file for reading: " << file_name;
@@ -224,13 +223,12 @@
   }
   RTC_LOG(LS_INFO) << "Video has " << frame_positions.size() << " frames";
 
-  return rtc::make_ref_counted<VideoFile>(*width, *height, frame_positions,
-                                          file);
+  return make_ref_counted<VideoFile>(*width, *height, frame_positions, file);
 }
 
-rtc::scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
-                                      int width,
-                                      int height) {
+scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
+                                 int width,
+                                 int height) {
   FILE* file = fopen(file_name.c_str(), "rb");
   if (file == nullptr) {
     RTC_LOG(LS_ERROR) << "Could not open input file for reading: " << file_name;
@@ -266,12 +264,12 @@
   }
   RTC_LOG(LS_INFO) << "Video has " << frame_positions.size() << " frames";
 
-  return rtc::make_ref_counted<VideoFile>(width, height, frame_positions, file);
+  return make_ref_counted<VideoFile>(width, height, frame_positions, file);
 }
 
-rtc::scoped_refptr<Video> OpenYuvOrY4mFile(const std::string& file_name,
-                                           int width,
-                                           int height) {
+scoped_refptr<Video> OpenYuvOrY4mFile(const std::string& file_name,
+                                      int width,
+                                      int height) {
   if (absl::EndsWith(file_name, ".yuv"))
     return OpenYuvFile(file_name, width, height);
   if (absl::EndsWith(file_name, ".y4m"))
diff --git a/rtc_tools/video_file_reader.h b/rtc_tools/video_file_reader.h
index 270955b..0ff0af3 100644
--- a/rtc_tools/video_file_reader.h
+++ b/rtc_tools/video_file_reader.h
@@ -35,14 +35,14 @@
     typedef int& reference;
     typedef std::input_iterator_tag iterator_category;
 
-    Iterator(const rtc::scoped_refptr<const Video>& video, size_t index);
+    Iterator(const scoped_refptr<const Video>& video, size_t index);
     Iterator(const Iterator& other);
     Iterator(Iterator&& other);
     Iterator& operator=(Iterator&&);
     Iterator& operator=(const Iterator&);
     ~Iterator();
 
-    rtc::scoped_refptr<I420BufferInterface> operator*() const;
+    scoped_refptr<I420BufferInterface> operator*() const;
     bool operator==(const Iterator& other) const;
     bool operator!=(const Iterator& other) const;
 
@@ -50,7 +50,7 @@
     Iterator& operator++();
 
    private:
-    rtc::scoped_refptr<const Video> video_;
+    scoped_refptr<const Video> video_;
     size_t index_;
   };
 
@@ -60,21 +60,20 @@
   virtual int width() const = 0;
   virtual int height() const = 0;
   virtual size_t number_of_frames() const = 0;
-  virtual rtc::scoped_refptr<I420BufferInterface> GetFrame(
-      size_t index) const = 0;
+  virtual scoped_refptr<I420BufferInterface> GetFrame(size_t index) const = 0;
 };
 
-rtc::scoped_refptr<Video> OpenY4mFile(const std::string& file_name);
+scoped_refptr<Video> OpenY4mFile(const std::string& file_name);
 
-rtc::scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
-                                      int width,
-                                      int height);
+scoped_refptr<Video> OpenYuvFile(const std::string& file_name,
+                                 int width,
+                                 int height);
 
 // This is a helper function for the two functions above. It reads the file
 // extension to determine whether it is a .yuv or a .y4m file.
-rtc::scoped_refptr<Video> OpenYuvOrY4mFile(const std::string& file_name,
-                                           int width,
-                                           int height);
+scoped_refptr<Video> OpenYuvOrY4mFile(const std::string& file_name,
+                                      int width,
+                                      int height);
 
 }  // namespace test
 }  // namespace webrtc
diff --git a/rtc_tools/video_file_reader_unittest.cc b/rtc_tools/video_file_reader_unittest.cc
index fc00c7c..a357699 100644
--- a/rtc_tools/video_file_reader_unittest.cc
+++ b/rtc_tools/video_file_reader_unittest.cc
@@ -49,7 +49,7 @@
     ASSERT_TRUE(video);
   }
 
-  rtc::scoped_refptr<webrtc::test::Video> video;
+  scoped_refptr<webrtc::test::Video> video;
 };
 
 TEST_F(Y4mFileReaderTest, TestParsingFileHeader) {
@@ -63,7 +63,7 @@
 
 TEST_F(Y4mFileReaderTest, TestPixelContent) {
   int cnt = 0;
-  for (const rtc::scoped_refptr<I420BufferInterface> frame : *video) {
+  for (const scoped_refptr<I420BufferInterface> frame : *video) {
     for (int i = 0; i < 6 * 4; ++i, ++cnt)
       EXPECT_EQ(cnt, frame->DataY()[i]);
     for (int i = 0; i < 3 * 2; ++i, ++cnt)
@@ -99,7 +99,7 @@
     ASSERT_TRUE(video);
   }
 
-  rtc::scoped_refptr<webrtc::test::Video> video;
+  scoped_refptr<webrtc::test::Video> video;
 };
 
 TEST_F(YuvFileReaderTest, TestParsingFileHeader) {
@@ -113,7 +113,7 @@
 
 TEST_F(YuvFileReaderTest, TestPixelContent) {
   int cnt = 0;
-  for (const rtc::scoped_refptr<I420BufferInterface> frame : *video) {
+  for (const scoped_refptr<I420BufferInterface> frame : *video) {
     for (int i = 0; i < 6 * 4; ++i, ++cnt)
       EXPECT_EQ(cnt, frame->DataY()[i]);
     for (int i = 0; i < 3 * 2; ++i, ++cnt)
diff --git a/rtc_tools/video_file_writer.cc b/rtc_tools/video_file_writer.cc
index 371560a..57628a2 100644
--- a/rtc_tools/video_file_writer.cc
+++ b/rtc_tools/video_file_writer.cc
@@ -23,7 +23,7 @@
 namespace test {
 namespace {
 
-void WriteVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteVideoToFile(const scoped_refptr<Video>& video,
                       const std::string& file_name,
                       int fps,
                       bool isY4m) {
@@ -43,7 +43,7 @@
       std::string frame = "FRAME\n";
       fwrite(frame.c_str(), 1, 6, output_file);
     }
-    rtc::scoped_refptr<I420BufferInterface> buffer = video->GetFrame(i);
+    scoped_refptr<I420BufferInterface> buffer = video->GetFrame(i);
     RTC_CHECK(buffer) << "Frame: " << i
                       << "\nWhile trying to create: " << file_name;
     const uint8_t* data_y = buffer->DataY();
@@ -70,20 +70,20 @@
 
 }  // Anonymous namespace
 
-void WriteVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteVideoToFile(const scoped_refptr<Video>& video,
                       const std::string& file_name,
                       int fps) {
   WriteVideoToFile(video, file_name, fps,
                    /*isY4m=*/absl::EndsWith(file_name, ".y4m"));
 }
 
-void WriteY4mVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteY4mVideoToFile(const scoped_refptr<Video>& video,
                          const std::string& file_name,
                          int fps) {
   WriteVideoToFile(video, file_name, fps, /*isY4m=*/true);
 }
 
-void WriteYuvVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteYuvVideoToFile(const scoped_refptr<Video>& video,
                          const std::string& file_name,
                          int fps) {
   WriteVideoToFile(video, file_name, fps, /*isY4m=*/false);
diff --git a/rtc_tools/video_file_writer.h b/rtc_tools/video_file_writer.h
index 9fbb284..a101bce 100644
--- a/rtc_tools/video_file_writer.h
+++ b/rtc_tools/video_file_writer.h
@@ -19,17 +19,17 @@
 namespace test {
 
 // Writes video to file, determining YUV or Y4M format from the file extension.
-void WriteVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteVideoToFile(const scoped_refptr<Video>& video,
                       const std::string& file_name,
                       int fps);
 
 // Writes Y4M video to file.
-void WriteY4mVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteY4mVideoToFile(const scoped_refptr<Video>& video,
                          const std::string& file_name,
                          int fps);
 
 // Writes YUV video to file.
-void WriteYuvVideoToFile(const rtc::scoped_refptr<Video>& video,
+void WriteYuvVideoToFile(const scoped_refptr<Video>& video,
                          const std::string& file_name,
                          int fps);
 
diff --git a/rtc_tools/video_file_writer_unittest.cc b/rtc_tools/video_file_writer_unittest.cc
index c4afa8f..bcf10c0 100644
--- a/rtc_tools/video_file_writer_unittest.cc
+++ b/rtc_tools/video_file_writer_unittest.cc
@@ -92,8 +92,8 @@
   const int width = 6;
   const int height = 4;
   const int fps = 60;
-  rtc::scoped_refptr<webrtc::test::Video> video_;
-  rtc::scoped_refptr<webrtc::test::Video> written_video_;
+  scoped_refptr<webrtc::test::Video> video_;
+  scoped_refptr<webrtc::test::Video> written_video_;
   // Each video object must be backed by a file!
   std::string video_filename_;
   std::string written_video_filename_;
@@ -124,7 +124,7 @@
 TEST_F(VideoFileWriterTest, TestPixelContentY4m) {
   WriteVideoY4m();
   int cnt = 0;
-  for (const rtc::scoped_refptr<I420BufferInterface> frame : *written_video_) {
+  for (const scoped_refptr<I420BufferInterface> frame : *written_video_) {
     for (int i = 0; i < width * height; ++i, ++cnt)
       EXPECT_EQ(cnt, frame->DataY()[i]);
     for (int i = 0; i < width / 2 * height / 2; ++i, ++cnt)
@@ -137,7 +137,7 @@
 TEST_F(VideoFileWriterTest, TestPixelContentYuv) {
   WriteVideoYuv();
   int cnt = 0;
-  for (const rtc::scoped_refptr<I420BufferInterface> frame : *written_video_) {
+  for (const scoped_refptr<I420BufferInterface> frame : *written_video_) {
     for (int i = 0; i < width * height; ++i, ++cnt)
       EXPECT_EQ(cnt, frame->DataY()[i]);
     for (int i = 0; i < width / 2 * height / 2; ++i, ++cnt)
diff --git a/rtc_tools/video_replay.cc b/rtc_tools/video_replay.cc
index 08c12ef..4fa2767 100644
--- a/rtc_tools/video_replay.cc
+++ b/rtc_tools/video_replay.cc
@@ -389,7 +389,7 @@
   // them from deallocating.
   std::stringstream window_title;
   window_title << "Playback Video (" << rtp_dump_path << ")";
-  std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> playback_video;
+  std::unique_ptr<VideoSinkInterface<VideoFrame>> playback_video;
   if (absl::GetFlag(FLAGS_disable_preview)) {
     playback_video = std::make_unique<NullRenderer>();
   } else {
@@ -613,7 +613,7 @@
       if (!rtp_reader_->NextPacket(&packet)) {
         break;
       }
-      rtc::CopyOnWriteBuffer packet_buffer(
+      CopyOnWriteBuffer packet_buffer(
           packet.original_length > 0 ? packet.original_length : packet.length);
       memcpy(packet_buffer.MutableData(), packet.data, packet.length);
       if (packet.length < packet.original_length) {
diff --git a/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc b/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc
index eb9f6a4..f92adba 100644
--- a/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc
+++ b/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc
@@ -51,16 +51,16 @@
 
     // rtc_unittest
     // https://code.google.com/p/webrtc/issues/detail?id=3827 for details.
-    "leak:rtc::unstarted_task_test_DoNotDeleteTask2_Test::TestBody\n"
-    "leak:rtc::HttpServer::HandleConnection\n"
-    "leak:rtc::HttpServer::Connection::onHttpHeaderComplete\n"
-    "leak:rtc::HttpResponseData::set_success\n"
-    "leak:rtc::HttpData::changeHeader\n"
+    "leak:webrtc::unstarted_task_test_DoNotDeleteTask2_Test::TestBody\n"
+    "leak:webrtc::HttpServer::HandleConnection\n"
+    "leak:webrtc::HttpServer::Connection::onHttpHeaderComplete\n"
+    "leak:webrtc::HttpResponseData::set_success\n"
+    "leak:webrtc::HttpData::changeHeader\n"
     // https://code.google.com/p/webrtc/issues/detail?id=4149 for details.
     "leak:StartDNSLookup\n"
 
     // rtc_media_unittests
-    "leak:cricket::FakeNetworkInterface::SetOption\n"
+    "leak:webrtc::FakeNetworkInterface::SetOption\n"
     "leak:CodecTest_TestCodecOperators_Test::TestBody\n"
     "leak:VideoEngineTest*::ConstrainNewCodecBody\n"
     "leak:VideoMediaChannelTest*::AddRemoveRecvStreams\n"
@@ -73,7 +73,7 @@
 
     // peerconnection_unittests
     // https://code.google.com/p/webrtc/issues/detail?id=2528
-    "leak:cricket::FakeVideoMediaChannel::~FakeVideoMediaChannel\n"
+    "leak:webrtc::FakeVideoMediaChannel::~FakeVideoMediaChannel\n"
     "leak:DtmfSenderTest_InsertEmptyTonesToCancelPreviousTask_Test::TestBody\n"
     "leak:sigslot::_signal_base2*::~_signal_base2\n"
     "leak:testing::internal::CmpHelperEQ\n"