Use webrtc namespace specifier instead of rtc/cricket in sdk/android

WebRTC has unified all of its namespaces into webrtc::, so the rtc:: and
cricket:: namespace specifiers need to be replaced with webrtc::. This change
was generated using a combination of clang AST rewriting tools and sed.
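
As a hedged illustration (lifted from one of the hunks below rather than
written fresh), the rewrite only swaps the namespace qualifier; the
declarations are otherwise unchanged:

    // Before: rtc:: specifier.
    rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
        JNIEnv* env,
        jobject application_context);

    // After: unified webrtc:: specifier.
    webrtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
        JNIEnv* env,
        jobject application_context);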

This CL was uploaded by git cl split.

Bug: webrtc:42232595
Change-Id: I02dd77af907bb0e1d6cbd68f41adac8dd29166ac
No-Iwyu: LSC
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/386722
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Evan Shrubsole <eshr@webrtc.org>
Auto-Submit: Evan Shrubsole <eshr@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#44389}
diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java
index 1f56425..c70f8e4 100644
--- a/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/sdk/android/api/org/webrtc/PeerConnection.java
@@ -419,7 +419,7 @@
     }
   }
 
-  /** Java version of rtc::KeyType */
+  /** Java version of webrtc::KeyType */
   public enum KeyType { RSA, ECDSA }
 
   /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index 7f360eb..2b2ea15 100644
--- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -439,10 +439,10 @@
 
   /**
    * Create video source with given parameters. If alignTimestamps is false, the caller is
-   * responsible for aligning the frame timestamps to rtc::TimeNanos(). This can be used to achieve
+   * responsible for aligning the frame timestamps to webrtc::TimeNanos(). This can be used to achieve
    * higher accuracy if there is a big delay between frame creation and frames being delivered to
    * the returned video source. If alignTimestamps is true, timestamps will be aligned to
-   * rtc::TimeNanos() when they arrive to the returned video source.
+   * webrtc::TimeNanos() when they arrive to the returned video source.
    */
   public VideoSource createVideoSource(boolean isScreencast, boolean alignTimestamps) {
     checkPeerConnectionFactoryExists();
diff --git a/sdk/android/api/org/webrtc/SurfaceEglRenderer.java b/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
index 348b09a..483a123 100644
--- a/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
+++ b/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
@@ -18,7 +18,7 @@
  * renderFrame() is asynchronous to avoid blocking the calling thread.
  * This class is thread safe and handles access from potentially three different threads:
  * Interaction from the main app in init, release and setMirror.
- * Interaction from C++ rtc::VideoSinkInterface in renderFrame.
+ * Interaction from C++ webrtc::VideoSinkInterface in renderFrame.
  * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
  */
 public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback {
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 3ea2273..8735ed2 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -51,8 +51,8 @@
    * Construct a new SurfaceTextureHelper sharing OpenGL resources with `sharedContext`. A dedicated
    * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
    * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
-   * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
-   * rtc::TimeNanos() there is no need for aligning timestamps again in
+   * timestamps will be aligned to webrtc::TimeNanos(). If frame timestamps are aligned to
+   * webrtc::TimeNanos() there is no need for aligning timestamps again in
    * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
    * closer to actual creation time.
    */
diff --git a/sdk/android/api/org/webrtc/TimestampAligner.java b/sdk/android/api/org/webrtc/TimestampAligner.java
index d96c939..37c92b3 100644
--- a/sdk/android/api/org/webrtc/TimestampAligner.java
+++ b/sdk/android/api/org/webrtc/TimestampAligner.java
@@ -12,15 +12,15 @@
 
 /**
  * The TimestampAligner class helps translating camera timestamps into the same timescale as is
- * used by rtc::TimeNanos(). Some cameras have built in timestamping which is more accurate than
+ * used by webrtc::TimeNanos(). Some cameras have built in timestamping which is more accurate than
  * reading the system clock, but using a different epoch and unknown clock drift. Frame timestamps
- * in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides a filter
- * which lets us use the rtc::TimeNanos timescale, and at the same time take advantage of higher
- * accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner.
+ * in webrtc should use webrtc::TimeNanos (system monotonic time), and this class provides a filter
+ * which lets us use the webrtc::TimeNanos timescale, and at the same time take advantage of higher
+ * accuracy of the camera clock. This class is a wrapper on top of webrtc::TimestampAligner.
  */
 public class TimestampAligner {
   /**
-   * Wrapper around rtc::TimeNanos(). This is normally same as System.nanoTime(), but call this
+   * Wrapper around webrtc::TimeNanos(). This is normally same as System.nanoTime(), but call this
    * function to be safe.
    */
   public static long getRtcTimeNanos() {
@@ -30,7 +30,7 @@
   private volatile long nativeTimestampAligner = nativeCreateTimestampAligner();
 
   /**
-   * Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
+   * Translates camera timestamps to the same timescale as is used by webrtc::TimeNanos().
    * `cameraTimeNs` is assumed to be accurate, but with an unknown epoch and clock drift. Returns
    * the translated timestamp.
    */
diff --git a/sdk/android/api/org/webrtc/VideoSink.java b/sdk/android/api/org/webrtc/VideoSink.java
index 5a0a6c7..e5cd4b4 100644
--- a/sdk/android/api/org/webrtc/VideoSink.java
+++ b/sdk/android/api/org/webrtc/VideoSink.java
@@ -11,7 +11,7 @@
 package org.webrtc;
 
 /**
- * Java version of rtc::VideoSinkInterface.
+ * Java version of webrtc::VideoSinkInterface.
  */
 public interface VideoSink {
   /**
diff --git a/sdk/android/instrumentationtests/video_frame_buffer_test.cc b/sdk/android/instrumentationtests/video_frame_buffer_test.cc
index ede3982..ed2425c 100644
--- a/sdk/android/instrumentationtests/video_frame_buffer_test.cc
+++ b/sdk/android/instrumentationtests/video_frame_buffer_test.cc
@@ -24,7 +24,7 @@
                          jobject video_frame_buffer) {
   const jni_zero::JavaParamRef<jobject> j_video_frame_buffer(
       jni, video_frame_buffer);
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
+  webrtc::scoped_refptr<VideoFrameBuffer> buffer =
       JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
   return static_cast<jint>(buffer->type());
 }
@@ -35,11 +35,12 @@
                          jclass,
                          jobject i420_buffer) {
   const jni_zero::JavaParamRef<jobject> j_i420_buffer(jni, i420_buffer);
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
+  webrtc::scoped_refptr<VideoFrameBuffer> buffer =
       JavaToNativeFrameBuffer(jni, j_i420_buffer);
   const I420BufferInterface* inputBuffer = buffer->GetI420();
   RTC_DCHECK(inputBuffer != nullptr);
-  rtc::scoped_refptr<I420Buffer> outputBuffer = I420Buffer::Copy(*inputBuffer);
+  webrtc::scoped_refptr<I420Buffer> outputBuffer =
+      I420Buffer::Copy(*inputBuffer);
   return WrapI420Buffer(jni, outputBuffer).Release();
 }
 
diff --git a/sdk/android/native_api/audio_device_module/audio_device_android.cc b/sdk/android/native_api/audio_device_module/audio_device_android.cc
index 052781c..2ee8b74 100644
--- a/sdk/android/native_api/audio_device_module/audio_device_android.cc
+++ b/sdk/android/native_api/audio_device_module/audio_device_android.cc
@@ -56,7 +56,7 @@
 }  // namespace
 
 #if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
     JNIEnv* env,
     jobject application_context) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
@@ -74,7 +74,7 @@
       std::make_unique<jni::AAudioPlayer>(output_parameters));
 }
 
-rtc::scoped_refptr<AudioDeviceModule>
+webrtc::scoped_refptr<AudioDeviceModule>
 CreateJavaInputAndAAudioOutputAudioDeviceModule(JNIEnv* env,
                                                 jobject application_context) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
@@ -100,7 +100,7 @@
 }
 #endif
 
-rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
     JNIEnv* env,
     jobject application_context) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
@@ -128,7 +128,7 @@
       std::move(audio_output));
 }
 
-rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
     JNIEnv* env,
     jobject application_context) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
@@ -138,7 +138,7 @@
   GetDefaultAudioParameters(env, application_context, &input_parameters,
                             &output_parameters);
   // Create ADM from OpenSLESRecorder and OpenSLESPlayer.
-  rtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
+  webrtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
       new jni::OpenSLEngineManager());
   auto audio_input =
       std::make_unique<jni::OpenSLESRecorder>(input_parameters, engine_manager);
@@ -151,7 +151,7 @@
       std::move(audio_output));
 }
 
-rtc::scoped_refptr<AudioDeviceModule>
+webrtc::scoped_refptr<AudioDeviceModule>
 CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
                                                   jobject application_context) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
@@ -169,7 +169,7 @@
       jni::AudioRecordJni::CreateJavaWebRtcAudioRecord(env, j_context,
                                                        j_audio_manager));
 
-  rtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
+  webrtc::scoped_refptr<jni::OpenSLEngineManager> engine_manager(
       new jni::OpenSLEngineManager());
   auto audio_output = std::make_unique<jni::OpenSLESPlayer>(
       output_parameters, std::move(engine_manager));
@@ -180,7 +180,7 @@
       std::move(audio_output));
 }
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
     AudioDeviceModule::AudioLayer audio_layer) {
   auto env = AttachCurrentThreadIfNeeded();
   auto j_context = webrtc::GetAppContext(env);
diff --git a/sdk/android/native_api/audio_device_module/audio_device_android.h b/sdk/android/native_api/audio_device_module/audio_device_android.h
index 4b7d311..4bfd7e8 100644
--- a/sdk/android/native_api/audio_device_module/audio_device_android.h
+++ b/sdk/android/native_api/audio_device_module/audio_device_android.h
@@ -18,28 +18,28 @@
 namespace webrtc {
 
 #if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
-rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
     JNIEnv* env,
     jobject application_context);
 #endif
 
-rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
     JNIEnv* env,
     jobject application_context);
 
-rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
     JNIEnv* env,
     jobject application_context);
 
-rtc::scoped_refptr<AudioDeviceModule>
+webrtc::scoped_refptr<AudioDeviceModule>
 CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
                                                   jobject application_context);
 
-rtc::scoped_refptr<AudioDeviceModule>
+webrtc::scoped_refptr<AudioDeviceModule>
 CreateJavaInputAndAAudioOutputAudioDeviceModule(JNIEnv* env,
                                                 jobject application_context);
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
+webrtc::scoped_refptr<AudioDeviceModule> CreateAndroidAudioDeviceModule(
     AudioDeviceModule::AudioLayer audio_layer);
 
 }  // namespace webrtc
diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc
index 2f2f183..3f7558c 100644
--- a/sdk/android/native_api/jni/java_types.cc
+++ b/sdk/android/native_api/jni/java_types.cc
@@ -231,7 +231,7 @@
 
 ScopedJavaLocalRef<jbyteArray> NativeToJavaByteArray(
     JNIEnv* env,
-    rtc::ArrayView<int8_t> container) {
+    ArrayView<int8_t> container) {
   jni_zero::ScopedJavaLocalRef<jbyteArray> jarray(
       env, env->NewByteArray(container.size()));
   int8_t* array_ptr =
@@ -243,7 +243,7 @@
 
 ScopedJavaLocalRef<jintArray> NativeToJavaIntArray(
     JNIEnv* env,
-    rtc::ArrayView<int32_t> container) {
+    ArrayView<int32_t> container) {
   jni_zero::ScopedJavaLocalRef<jintArray> jarray(
       env, env->NewIntArray(container.size()));
   int32_t* array_ptr =
diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h
index 13dc6ae..7e39160 100644
--- a/sdk/android/native_api/jni/java_types.h
+++ b/sdk/android/native_api/jni/java_types.h
@@ -239,10 +239,10 @@
 
 ScopedJavaLocalRef<jbyteArray> NativeToJavaByteArray(
     JNIEnv* env,
-    rtc::ArrayView<int8_t> container);
+    ArrayView<int8_t> container);
 ScopedJavaLocalRef<jintArray> NativeToJavaIntArray(
     JNIEnv* env,
-    rtc::ArrayView<int32_t> container);
+    ArrayView<int32_t> container);
 
 std::vector<int8_t> JavaToNativeByteArray(
     JNIEnv* env,
diff --git a/sdk/android/native_api/peerconnection/peer_connection_factory.cc b/sdk/android/native_api/peerconnection/peer_connection_factory.cc
index 9ed9896..ca87f80 100644
--- a/sdk/android/native_api/peerconnection/peer_connection_factory.cc
+++ b/sdk/android/native_api/peerconnection/peer_connection_factory.cc
@@ -20,7 +20,7 @@
 
 jobject NativeToJavaPeerConnectionFactory(
     JNIEnv* jni,
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
     std::unique_ptr<SocketFactory> socket_factory,
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
diff --git a/sdk/android/native_api/peerconnection/peer_connection_factory.h b/sdk/android/native_api/peerconnection/peer_connection_factory.h
index 6ae468b..e249f0f 100644
--- a/sdk/android/native_api/peerconnection/peer_connection_factory.h
+++ b/sdk/android/native_api/peerconnection/peer_connection_factory.h
@@ -23,7 +23,7 @@
 // Creates java PeerConnectionFactory with specified `pcf`.
 jobject NativeToJavaPeerConnectionFactory(
     JNIEnv* jni,
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
     std::unique_ptr<SocketFactory> socket_factory,
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
diff --git a/sdk/android/native_api/stacktrace/stacktrace.cc b/sdk/android/native_api/stacktrace/stacktrace.cc
index 175391a..4f1a025 100644
--- a/sdk/android/native_api/stacktrace/stacktrace.cc
+++ b/sdk/android/native_api/stacktrace/stacktrace.cc
@@ -46,7 +46,7 @@
 // Note: This class is only meant for use within this file, and for the
 // simplified use case of a single Wait() and a single Signal(), followed by
 // discarding the object (never reused).
-// This is a replacement of rtc::Event that is async-safe and doesn't use
+// This is a replacement of webrtc::Event that is async-safe and doesn't use
 // pthread api. This is necessary since signal handlers cannot allocate memory
 // or use pthread api. This class is ported from Chromium.
 class AsyncSafeWaitableEvent {
diff --git a/sdk/android/native_api/video/video_source.cc b/sdk/android/native_api/video/video_source.cc
index b900ad1..ba2d486 100644
--- a/sdk/android/native_api/video/video_source.cc
+++ b/sdk/android/native_api/video/video_source.cc
@@ -28,11 +28,10 @@
                            bool is_screencast,
                            bool align_timestamps)
       : android_video_track_source_(
-            rtc::make_ref_counted<jni::AndroidVideoTrackSource>(
-                signaling_thread,
-                env,
-                is_screencast,
-                align_timestamps)),
+            make_ref_counted<jni::AndroidVideoTrackSource>(signaling_thread,
+                                                           env,
+                                                           is_screencast,
+                                                           align_timestamps)),
         native_capturer_observer_(jni::CreateJavaNativeCapturerObserver(
             env,
             android_video_track_source_)) {}
@@ -93,22 +92,22 @@
   bool SupportsEncodedOutput() const override { return false; }
   void GenerateKeyFrame() override {}
   void AddEncodedSink(
-      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+      VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
   void RemoveEncodedSink(
-      rtc::VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
+      VideoSinkInterface<webrtc::RecordableEncodedFrame>* sink) override {}
 
-  rtc::scoped_refptr<jni::AndroidVideoTrackSource> android_video_track_source_;
+  scoped_refptr<jni::AndroidVideoTrackSource> android_video_track_source_;
   ScopedJavaGlobalRef<jobject> native_capturer_observer_;
 };
 
 }  // namespace
 
-rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
+scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
     JNIEnv* jni,
     Thread* signaling_thread,
     bool is_screencast,
     bool align_timestamps) {
-  return rtc::make_ref_counted<JavaVideoTrackSourceImpl>(
+  return make_ref_counted<JavaVideoTrackSourceImpl>(
       jni, signaling_thread, is_screencast, align_timestamps);
 }
 
diff --git a/sdk/android/native_api/video/video_source.h b/sdk/android/native_api/video/video_source.h
index 91b4b0d..abf22ae 100644
--- a/sdk/android/native_api/video/video_source.h
+++ b/sdk/android/native_api/video/video_source.h
@@ -30,7 +30,7 @@
 };
 
 // Creates an instance of JavaVideoTrackSourceInterface,
-rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
+scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
     JNIEnv* env,
     Thread* signaling_thread,
     bool is_screencast,
diff --git a/sdk/android/native_api/video/wrapper.h b/sdk/android/native_api/video/wrapper.h
index c10897b..bb1dd8b 100644
--- a/sdk/android/native_api/video/wrapper.h
+++ b/sdk/android/native_api/video/wrapper.h
@@ -21,7 +21,7 @@
 
 namespace webrtc {
 
-// Creates an instance of rtc::VideoSinkInterface<VideoFrame> from Java
+// Creates an instance of webrtc::VideoSinkInterface<VideoFrame> from Java
 // VideoSink.
 std::unique_ptr<VideoSinkInterface<VideoFrame>> JavaToNativeVideoSink(
     JNIEnv* jni,
diff --git a/sdk/android/native_unittests/android_network_monitor_unittest.cc b/sdk/android/native_unittests/android_network_monitor_unittest.cc
index 76a7253..defbf9e 100644
--- a/sdk/android/native_unittests/android_network_monitor_unittest.cc
+++ b/sdk/android/native_unittests/android_network_monitor_unittest.cc
@@ -32,7 +32,7 @@
 jni::NetworkInformation CreateNetworkInformation(
     const std::string& interface_name,
     jni::NetworkHandle network_handle,
-    const rtc::IPAddress& ip_address) {
+    const webrtc::IPAddress& ip_address) {
   jni::NetworkInformation net_info;
   net_info.interface_name = interface_name;
   net_info.handle = network_handle;
@@ -41,9 +41,9 @@
   return net_info;
 }
 
-rtc::IPAddress GetIpAddressFromIpv6String(const std::string& str) {
-  rtc::IPAddress ipv6;
-  RTC_CHECK(rtc::IPFromString(str, &ipv6));
+webrtc::IPAddress GetIpAddressFromIpv6String(const std::string& str) {
+  webrtc::IPAddress ipv6;
+  RTC_CHECK(webrtc::IPFromString(str, &ipv6));
   return ipv6;
 }
 
@@ -72,13 +72,13 @@
 
  protected:
   test::ScopedKeyValueConfig field_trials_;
-  rtc::AutoThread main_thread_;
+  webrtc::AutoThread main_thread_;
   std::unique_ptr<jni::AndroidNetworkMonitor> network_monitor_;
 };
 
 TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIpv4Address) {
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   std::vector<jni::NetworkInformation> net_infos(1, net_info);
@@ -93,8 +93,10 @@
 
 TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingFullIpv6Address) {
   jni::NetworkHandle ipv6_handle = 200;
-  rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
-  rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+  webrtc::IPAddress ipv6_address1 =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address2 =
+      GetIpAddressFromIpv6String(kTestIpv6Address2);
   // Set up an IPv6 network.
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address1);
@@ -119,8 +121,10 @@
   // Start() updates the states introduced by the field trial.
   network_monitor_->Start();
   jni::NetworkHandle ipv6_handle = 200;
-  rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
-  rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+  webrtc::IPAddress ipv6_address1 =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address2 =
+      GetIpAddressFromIpv6String(kTestIpv6Address2);
   // Set up an IPv6 network.
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address1);
@@ -142,7 +146,8 @@
   // Start() updates the states introduced by the field trial.
   network_monitor_->Start();
   jni::NetworkHandle ipv6_handle = 200;
-  rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address1 =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
 
   // Set up an IPv6 network.
   jni::NetworkInformation net_info =
@@ -150,7 +155,7 @@
   std::vector<jni::NetworkInformation> net_infos(1, net_info);
   network_monitor_->OnNetworkConnected_n(net_info);
 
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
 
   // Search using ip address only...
   auto network_handle1 =
@@ -169,14 +174,14 @@
   ScopedKeyValueConfig field_trials(field_trials_,
                                     "WebRTC-BindUsingInterfaceName/Enabled/");
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info.type = jni::NETWORK_VPN;
   net_info.underlying_type_for_vpn = jni::NETWORK_WIFI;
   network_monitor_->OnNetworkConnected_n(net_info);
 
-  EXPECT_EQ(rtc::ADAPTER_TYPE_WIFI,
+  EXPECT_EQ(webrtc::ADAPTER_TYPE_WIFI,
             network_monitor_->GetInterfaceInfo(kTestIfName1V4)
                 .underlying_type_for_vpn);
 }
@@ -186,7 +191,7 @@
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info.type = jni::NETWORK_WIFI;
@@ -198,7 +203,7 @@
           ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4)
           .has_value());
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_WIFI);
+            webrtc::ADAPTER_TYPE_WIFI);
 
   // Check that values are reset on disconnect().
   Disconnect(ipv4_handle);
@@ -208,7 +213,7 @@
           ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4)
           .has_value());
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_UNKNOWN);
+            webrtc::ADAPTER_TYPE_UNKNOWN);
 }
 
 // Verify that Stop() resets all caches.
@@ -216,7 +221,7 @@
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info.type = jni::NETWORK_WIFI;
@@ -228,7 +233,7 @@
           ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4)
           .has_value());
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_WIFI);
+            webrtc::ADAPTER_TYPE_WIFI);
 
   // Check that values are reset on Stop().
   network_monitor_->Stop();
@@ -238,20 +243,21 @@
           ->FindNetworkHandleFromAddressOrName(ipv4_address, kTestIfName1V4)
           .has_value());
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_UNKNOWN);
+            webrtc::ADAPTER_TYPE_UNKNOWN);
 }
 
 TEST_F(AndroidNetworkMonitorTest, DuplicateIfname) {
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info1 =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info1.type = jni::NETWORK_WIFI;
 
   jni::NetworkHandle ipv6_handle = 101;
-  rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
   jni::NetworkInformation net_info2 =
       CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address);
   net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
@@ -262,7 +268,7 @@
   // The last added.
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_CELLULAR);
+            webrtc::ADAPTER_TYPE_CELLULAR);
 
   // But both IP addresses are still searchable.
   EXPECT_EQ(
@@ -277,13 +283,14 @@
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info1 =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info1.type = jni::NETWORK_WIFI;
 
   jni::NetworkHandle ipv6_handle = 101;
-  rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
   jni::NetworkInformation net_info2 =
       CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address);
   net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
@@ -294,27 +301,28 @@
   // The last added.
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_CELLULAR);
+            webrtc::ADAPTER_TYPE_CELLULAR);
 
   Disconnect(ipv6_handle);
 
   // We should now find ipv4_handle.
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_WIFI);
+            webrtc::ADAPTER_TYPE_WIFI);
 }
 
 TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectNonOwner) {
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info1 =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info1.type = jni::NETWORK_WIFI;
 
   jni::NetworkHandle ipv6_handle = 101;
-  rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
   jni::NetworkInformation net_info2 =
       CreateNetworkInformation(kTestIfName1, ipv6_handle, ipv6_address);
   net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
@@ -325,26 +333,27 @@
   // The last added.
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type,
-            rtc::ADAPTER_TYPE_CELLULAR);
+            webrtc::ADAPTER_TYPE_CELLULAR);
 
   Disconnect(ipv4_handle);
 
   // We should still find ipv6 network.
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1V4).adapter_type,
-            rtc::ADAPTER_TYPE_CELLULAR);
+            webrtc::ADAPTER_TYPE_CELLULAR);
 }
 
 TEST_F(AndroidNetworkMonitorTest, ReconnectWithoutDisconnect) {
   network_monitor_->Start();
 
   jni::NetworkHandle ipv4_handle = 100;
-  rtc::IPAddress ipv4_address(kTestIpv4Address);
+  webrtc::IPAddress ipv4_address(kTestIpv4Address);
   jni::NetworkInformation net_info1 =
       CreateNetworkInformation(kTestIfName1, ipv4_handle, ipv4_address);
   net_info1.type = jni::NETWORK_WIFI;
 
-  rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+  webrtc::IPAddress ipv6_address =
+      GetIpAddressFromIpv6String(kTestIpv6Address1);
   jni::NetworkInformation net_info2 =
       CreateNetworkInformation(kTestIfName2, ipv4_handle, ipv6_address);
   net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
@@ -355,21 +364,21 @@
   // Only last one should still be there!
   EXPECT_TRUE(network_monitor_->GetInterfaceInfo(kTestIfName2).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName2).adapter_type,
-            rtc::ADAPTER_TYPE_CELLULAR);
+            webrtc::ADAPTER_TYPE_CELLULAR);
 
   EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type,
-            rtc::ADAPTER_TYPE_UNKNOWN);
+            webrtc::ADAPTER_TYPE_UNKNOWN);
 
   Disconnect(ipv4_handle);
 
   // Should be empty!
   EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName2).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName2).adapter_type,
-            rtc::ADAPTER_TYPE_UNKNOWN);
+            webrtc::ADAPTER_TYPE_UNKNOWN);
   EXPECT_FALSE(network_monitor_->GetInterfaceInfo(kTestIfName1).available);
   EXPECT_EQ(network_monitor_->GetInterfaceInfo(kTestIfName1).adapter_type,
-            rtc::ADAPTER_TYPE_UNKNOWN);
+            webrtc::ADAPTER_TYPE_UNKNOWN);
 }
 
 }  // namespace test
diff --git a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
index f75c596..878f8ae 100644
--- a/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
+++ b/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
@@ -258,7 +258,7 @@
     memset(destination, 0, bytes_per_buffer_);
     if (play_count_ % (kNumCallbacksPerSecond / kImpulseFrequencyInHz) == 0) {
       if (pulse_time_ == 0) {
-        pulse_time_ = rtc::TimeMillis();
+        pulse_time_ = webrtc::TimeMillis();
       }
       PRINT(".");
       const int16_t impulse = std::numeric_limits<int16_t>::max();
@@ -288,7 +288,7 @@
         std::distance(vec.begin(), std::find(vec.begin(), vec.end(), max));
     if (max > kImpulseThreshold) {
       PRINTD("(%d,%d)", max, index_of_max);
-      int64_t now_time = rtc::TimeMillis();
+      int64_t now_time = webrtc::TimeMillis();
       int extra_delay = IndexToMilliseconds(static_cast<double>(index_of_max));
       PRINTD("[%d]", static_cast<int>(now_time - pulse_time_));
       PRINTD("[%d]", extra_delay);
@@ -365,7 +365,7 @@
 
   // Set default actions of the mock object. We are delegating to fake
   // implementations (of AudioStreamInterface) here.
-  void HandleCallbacks(rtc::Event* test_is_done,
+  void HandleCallbacks(webrtc::Event* test_is_done,
                        AudioStreamInterface* audio_stream,
                        int num_callbacks) {
     test_is_done_ = test_is_done;
@@ -448,7 +448,7 @@
   bool rec_mode() const { return type_ & kRecording; }
 
  private:
-  rtc::Event* test_is_done_;
+  webrtc::Event* test_is_done_;
   size_t num_callbacks_;
   int type_;
   size_t play_count_;
@@ -508,7 +508,7 @@
     return input_parameters_.frames_per_10ms_buffer();
   }
 
-  rtc::scoped_refptr<AudioDeviceModule> audio_device() const {
+  webrtc::scoped_refptr<AudioDeviceModule> audio_device() const {
     return audio_device_;
   }
 
@@ -541,7 +541,7 @@
 
   int TestDelayOnAudioLayer(
       const AudioDeviceModule::AudioLayer& layer_to_test) {
-    rtc::scoped_refptr<AudioDeviceModule> audio_device;
+    webrtc::scoped_refptr<AudioDeviceModule> audio_device;
     audio_device = CreateAndroidAudioDeviceModule(layer_to_test);
     EXPECT_NE(audio_device.get(), nullptr);
     uint16_t playout_delay;
@@ -551,7 +551,7 @@
 
   AudioDeviceModule::AudioLayer TestActiveAudioLayer(
       const AudioDeviceModule::AudioLayer& layer_to_test) {
-    rtc::scoped_refptr<AudioDeviceModule> audio_device;
+    webrtc::scoped_refptr<AudioDeviceModule> audio_device;
     audio_device = CreateAndroidAudioDeviceModule(layer_to_test);
     EXPECT_NE(audio_device.get(), nullptr);
     AudioDeviceModule::AudioLayer active;
@@ -668,8 +668,8 @@
 
   JNIEnv* jni_;
   ScopedJavaLocalRef<jobject> context_;
-  rtc::Event test_is_done_;
-  rtc::scoped_refptr<AudioDeviceModule> audio_device_;
+  webrtc::Event test_is_done_;
+  webrtc::scoped_refptr<AudioDeviceModule> audio_device_;
   ScopedJavaLocalRef<jobject> audio_manager_;
   AudioParameters output_parameters_;
   AudioParameters input_parameters_;
@@ -1160,7 +1160,7 @@
   ScopedJavaLocalRef<jobject> context = GetAppContext(jni);
 
   // Create and start the first ADM.
-  rtc::scoped_refptr<AudioDeviceModule> adm_1 =
+  webrtc::scoped_refptr<AudioDeviceModule> adm_1 =
       CreateJavaAudioDeviceModule(jni, context.obj());
   EXPECT_EQ(0, adm_1->Init());
   EXPECT_EQ(0, adm_1->InitRecording());
@@ -1168,7 +1168,7 @@
 
   // Create and start a second ADM. Expect this to fail due to the microphone
   // already being in use.
-  rtc::scoped_refptr<AudioDeviceModule> adm_2 =
+  webrtc::scoped_refptr<AudioDeviceModule> adm_2 =
       CreateJavaAudioDeviceModule(jni, context.obj());
   int32_t err = adm_2->Init();
   err |= adm_2->InitRecording();
diff --git a/sdk/android/native_unittests/codecs/wrapper_unittest.cc b/sdk/android/native_unittests/codecs/wrapper_unittest.cc
index c9fced5..ce9f94f 100644
--- a/sdk/android/native_unittests/codecs/wrapper_unittest.cc
+++ b/sdk/android/native_unittests/codecs/wrapper_unittest.cc
@@ -31,11 +31,10 @@
   const SdpVideoFormat video_format =
       JavaToNativeVideoCodecInfo(env, j_video_codec_info.obj());
 
-  EXPECT_EQ(cricket::kH264CodecName, video_format.name);
-  const auto it =
-      video_format.parameters.find(cricket::kH264FmtpProfileLevelId);
+  EXPECT_EQ(webrtc::kH264CodecName, video_format.name);
+  const auto it = video_format.parameters.find(webrtc::kH264FmtpProfileLevelId);
   ASSERT_NE(it, video_format.parameters.end());
-  EXPECT_EQ(cricket::kH264ProfileLevelConstrainedBaseline, it->second);
+  EXPECT_EQ(webrtc::kH264ProfileLevelConstrainedBaseline, it->second);
 }
 
 TEST(JavaCodecsWrapperTest, JavaToNativeResolutionBitrateLimits) {
diff --git a/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc b/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
index 1ed52be..110fbb1 100644
--- a/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
+++ b/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
@@ -33,17 +33,17 @@
 namespace {
 
 // Create native peer connection factory, that will be wrapped by java one
-rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
+webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
     JNIEnv* jni,
-    rtc::Thread* network_thread,
-    rtc::Thread* worker_thread,
-    rtc::Thread* signaling_thread) {
+    webrtc::Thread* network_thread,
+    webrtc::Thread* worker_thread,
+    webrtc::Thread* signaling_thread) {
   // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
   // ThreadManager only WrapCurrentThread()s the thread where it is first
   // created.  Since the semantics around when auto-wrapping happens in
   // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
   // think about ramifications of auto-wrapping there.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
   PeerConnectionFactoryDependencies pcf_deps;
   pcf_deps.network_thread = network_thread;
@@ -75,22 +75,22 @@
       jni);
   RTC_LOG(LS_INFO) << "Java peer connection factory initialized.";
 
-  auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+  auto socket_server = std::make_unique<webrtc::PhysicalSocketServer>();
 
   // Create threads.
-  auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+  auto network_thread = std::make_unique<webrtc::Thread>(socket_server.get());
   network_thread->SetName("network_thread", nullptr);
   RTC_CHECK(network_thread->Start()) << "Failed to start thread";
 
-  std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+  std::unique_ptr<webrtc::Thread> worker_thread = webrtc::Thread::Create();
   worker_thread->SetName("worker_thread", nullptr);
   RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
 
-  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+  std::unique_ptr<webrtc::Thread> signaling_thread = webrtc::Thread::Create();
   signaling_thread->SetName("signaling_thread", NULL);
   RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
 
-  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
+  webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
       CreateTestPCF(jni, network_thread.get(), worker_thread.get(),
                     signaling_thread.get());
 
diff --git a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
index cc0e740..0d81cd6 100644
--- a/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
+++ b/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
@@ -105,15 +105,15 @@
   volatile uint32_t deadlock_region_start_address;
   volatile uint32_t deadlock_region_end_address;
   // Signaled when the deadlock is done.
-  rtc::Event deadlock_done_event;
+  webrtc::Event deadlock_done_event;
 };
 
 class RtcEventDeadlock : public DeadlockInterface {
  private:
-  void Deadlock() override { event.Wait(rtc::Event::kForever); }
+  void Deadlock() override { event.Wait(webrtc::Event::kForever); }
   void Release() override { event.Set(); }
 
-  rtc::Event event;
+  webrtc::Event event;
 };
 
 class RtcCriticalSectionDeadlock : public DeadlockInterface {
@@ -160,7 +160,7 @@
   params.deadlock_impl = deadlock_impl.get();
 
   // Spawn thread.
-  auto thread = rtc::PlatformThread::SpawnJoinable(
+  auto thread = webrtc::PlatformThread::SpawnJoinable(
       [&params] {
         params.tid = gettid();
         params.deadlock_region_start_address =
@@ -185,19 +185,19 @@
   deadlock_impl->Release();
 
   // Wait until the thread has left the deadlock.
-  params.deadlock_done_event.Wait(rtc::Event::kForever);
+  params.deadlock_done_event.Wait(webrtc::Event::kForever);
 
   // Assert that the stack trace contains the deadlock region.
   EXPECT_TRUE(StackTraceContainsRange(stack_trace,
                                       params.deadlock_region_start_address,
                                       params.deadlock_region_end_address))
       << "Deadlock region: ["
-      << rtc::ToHex(params.deadlock_region_start_address) << ", "
-      << rtc::ToHex(params.deadlock_region_end_address)
+      << webrtc::ToHex(params.deadlock_region_start_address) << ", "
+      << webrtc::ToHex(params.deadlock_region_end_address)
       << "] not contained in: " << StackTraceToString(stack_trace);
 }
 
-class LookoutLogSink final : public rtc::LogSink {
+class LookoutLogSink final : public webrtc::LogSink {
  public:
   explicit LookoutLogSink(std::string look_for)
       : look_for_(std::move(look_for)) {}
@@ -209,11 +209,11 @@
       when_found_.Set();
     }
   }
-  rtc::Event& WhenFound() { return when_found_; }
+  webrtc::Event& WhenFound() { return when_found_; }
 
  private:
   const std::string look_for_;
-  rtc::Event when_found_;
+  webrtc::Event when_found_;
 };
 
 }  // namespace
@@ -223,8 +223,8 @@
   const std::vector<StackTraceElement> stack_trace = GetStackTrace();
   const uint32_t end_addr = GetCurrentRelativeExecutionAddress();
   EXPECT_TRUE(StackTraceContainsRange(stack_trace, start_addr, end_addr))
-      << "Caller region: [" << rtc::ToHex(start_addr) << ", "
-      << rtc::ToHex(end_addr)
+      << "Caller region: [" << webrtc::ToHex(start_addr) << ", "
+      << webrtc::ToHex(end_addr)
       << "] not contained in: " << StackTraceToString(stack_trace);
 }
 
@@ -253,12 +253,12 @@
 TEST(Stacktrace, TestRtcEventDeadlockDetection) {
   // Start looking for the expected log output.
   LookoutLogSink sink(/*look_for=*/"Probable deadlock");
-  rtc::LogMessage::AddLogToStream(&sink, rtc::LS_WARNING);
+  webrtc::LogMessage::AddLogToStream(&sink, webrtc::LS_WARNING);
 
   // Start a thread that waits for an event.
-  rtc::Event ev;
-  auto thread = rtc::PlatformThread::SpawnJoinable(
-      [&ev] { ev.Wait(rtc::Event::kForever); },
+  webrtc::Event ev;
+  auto thread = webrtc::PlatformThread::SpawnJoinable(
+      [&ev] { ev.Wait(webrtc::Event::kForever); },
       "TestRtcEventDeadlockDetection");
 
   // The message should appear after 3 sec. We'll wait up to 10 sec in an
@@ -268,7 +268,7 @@
   // Unblock the thread and shut it down.
   ev.Set();
   thread.Finalize();
-  rtc::LogMessage::RemoveLogToStream(&sink);
+  webrtc::LogMessage::RemoveLogToStream(&sink);
 }
 
 }  // namespace test
diff --git a/sdk/android/native_unittests/video/video_source_unittest.cc b/sdk/android/native_unittests/video/video_source_unittest.cc
index 36dd5c4..71568a5 100644
--- a/sdk/android/native_unittests/video/video_source_unittest.cc
+++ b/sdk/android/native_unittests/video/video_source_unittest.cc
@@ -20,7 +20,7 @@
 namespace test {
 
 namespace {
-class TestVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
+class TestVideoSink : public webrtc::VideoSinkInterface<VideoFrame> {
  public:
   void OnFrame(const VideoFrame& frame) { frames_.push_back(frame); }
 
@@ -38,11 +38,11 @@
 TEST(JavaVideoSourceTest, CreateJavaVideoSource) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, true /* align_timestamps */);
 
   ASSERT_NE(nullptr, video_track_source);
@@ -55,13 +55,14 @@
 
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, true /* align_timestamps */);
-  video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+  video_track_source->AddOrUpdateSink(&test_video_sink,
+                                      webrtc::VideoSinkWants());
 
   jni::Java_JavaVideoSourceTestHelper_startCapture(
       env, video_track_source->GetJavaVideoCapturerObserver(env),
@@ -88,13 +89,14 @@
 
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, false /* align_timestamps */);
-  video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+  video_track_source->AddOrUpdateSink(&test_video_sink,
+                                      webrtc::VideoSinkWants());
 
   jni::Java_JavaVideoSourceTestHelper_startCapture(
       env, video_track_source->GetJavaVideoCapturerObserver(env),
@@ -119,11 +121,11 @@
 TEST(JavaVideoSourceTest, CapturerStartedSuccessStateBecomesLive) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, true /* align_timestamps */);
 
   jni::Java_JavaVideoSourceTestHelper_startCapture(
@@ -137,11 +139,11 @@
 TEST(JavaVideoSourceTest, CapturerStartedFailureStateBecomesEnded) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, true /* align_timestamps */);
 
   jni::Java_JavaVideoSourceTestHelper_startCapture(
@@ -155,11 +157,11 @@
 TEST(JavaVideoSourceTest, CapturerStoppedStateBecomesEnded) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   // Wrap test thread so it can be used as the signaling thread.
-  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::ThreadManager::Instance()->WrapCurrentThread();
 
-  rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+  webrtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
       CreateJavaVideoSource(
-          env, rtc::ThreadManager::Instance()->CurrentThread(),
+          env, webrtc::ThreadManager::Instance()->CurrentThread(),
           false /* is_screencast */, true /* align_timestamps */);
 
   jni::Java_JavaVideoSourceTestHelper_startCapture(
diff --git a/sdk/android/src/jni/android_network_monitor.cc b/sdk/android/src/jni/android_network_monitor.cc
index 82762a0..f2f14b3 100644
--- a/sdk/android/src/jni/android_network_monitor.cc
+++ b/sdk/android/src/jni/android_network_monitor.cc
@@ -433,7 +433,7 @@
   }
 
   network_info_by_handle_[network_info.handle] = network_info;
-  for (const rtc::IPAddress& address : network_info.ip_addresses) {
+  for (const IPAddress& address : network_info.ip_addresses) {
     network_handle_by_address_[address] = network_info.handle;
   }
   network_handle_by_if_name_[network_info.interface_name] = network_info.handle;
@@ -451,7 +451,7 @@
     for (auto const& iter : network_info_by_handle_) {
       const std::vector<IPAddress>& addresses = iter.second.ip_addresses;
       auto address_it = std::find_if(addresses.begin(), addresses.end(),
-                                     [ip_address](rtc::IPAddress address) {
+                                     [ip_address](IPAddress address) {
                                        return AddressMatch(ip_address, address);
                                      });
       if (address_it != addresses.end()) {
@@ -499,7 +499,7 @@
   }
 
   const auto& network_info = iter->second;
-  for (const rtc::IPAddress& address : network_info.ip_addresses) {
+  for (const IPAddress& address : network_info.ip_addresses) {
     network_handle_by_address_.erase(address);
   }
 
diff --git a/sdk/android/src/jni/android_network_monitor.h b/sdk/android/src/jni/android_network_monitor.h
index 749a4c7..8b2d3973 100644
--- a/sdk/android/src/jni/android_network_monitor.h
+++ b/sdk/android/src/jni/android_network_monitor.h
@@ -135,7 +135,7 @@
   ScopedJavaGlobalRef<jobject> j_network_monitor_;
   Thread* const network_thread_;
   bool started_ RTC_GUARDED_BY(network_thread_) = false;
-  std::map<std::string, NetworkHandle, rtc::AbslStringViewCmp>
+  std::map<std::string, NetworkHandle, AbslStringViewCmp>
       network_handle_by_if_name_ RTC_GUARDED_BY(network_thread_);
   std::map<IPAddress, NetworkHandle> network_handle_by_address_
       RTC_GUARDED_BY(network_thread_);
@@ -159,7 +159,7 @@
   // of IsAdapterAvailable().
   bool disable_is_adapter_available_ RTC_GUARDED_BY(network_thread_) = false;
 
-  rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_
+  scoped_refptr<PendingTaskSafetyFlag> safety_flag_
       RTC_PT_GUARDED_BY(network_thread_) = nullptr;
 
   const FieldTrialsView& field_trials_;
diff --git a/sdk/android/src/jni/android_video_track_source.cc b/sdk/android/src/jni/android_video_track_source.cc
index ac0733e..dc5fd3d 100644
--- a/sdk/android/src/jni/android_video_track_source.cc
+++ b/sdk/android/src/jni/android_video_track_source.cc
@@ -128,7 +128,7 @@
     jint j_rotation,
     jlong j_timestamp_ns,
     const JavaRef<jobject>& j_video_frame_buffer) {
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
+  scoped_refptr<VideoFrameBuffer> buffer =
       JavaToNativeFrameBuffer(env, j_video_frame_buffer);
   const VideoRotation rotation = jintToVideoRotation(j_rotation);
 
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.cc b/sdk/android/src/jni/audio_device/aaudio_player.cc
index c7bae74..2bf9946 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.cc
+++ b/sdk/android/src/jni/audio_device/aaudio_player.cc
@@ -207,8 +207,8 @@
     memset(audio_data, 0, num_bytes);
   } else {
     fine_audio_buffer_->GetPlayoutData(
-        rtc::MakeArrayView(static_cast<int16_t*>(audio_data),
-                           aaudio_.samples_per_frame() * num_frames),
+        webrtc::MakeArrayView(static_cast<int16_t*>(audio_data),
+                              aaudio_.samples_per_frame() * num_frames),
         static_cast<int>(latency_millis_ + 0.5));
   }
 
diff --git a/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/sdk/android/src/jni/audio_device/aaudio_recorder.cc
index 39130cd..8867cf8 100644
--- a/sdk/android/src/jni/audio_device/aaudio_recorder.cc
+++ b/sdk/android/src/jni/audio_device/aaudio_recorder.cc
@@ -190,8 +190,8 @@
   // Copy recorded audio in `audio_data` to the WebRTC sink using the
   // FineAudioBuffer object.
   fine_audio_buffer_->DeliverRecordedData(
-      rtc::MakeArrayView(static_cast<const int16_t*>(audio_data),
-                         aaudio_.samples_per_frame() * num_frames),
+      webrtc::MakeArrayView(static_cast<const int16_t*>(audio_data),
+                            aaudio_.samples_per_frame() * num_frames),
       static_cast<int>(latency_millis_ + 0.5));
 
   return AAUDIO_CALLBACK_RESULT_CONTINUE;
diff --git a/sdk/android/src/jni/audio_device/aaudio_wrapper.cc b/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
index 6c20703..264bb6e 100644
--- a/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
+++ b/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
@@ -206,7 +206,7 @@
     // For input streams. Best guess we can do is to use the current burst size
     // as delay estimate.
     latency_millis = static_cast<double>(frames_per_burst()) / sample_rate() *
-                     rtc::kNumMillisecsPerSec;
+                     webrtc::kNumMillisecsPerSec;
   } else {
     int64_t existing_frame_index;
     int64_t existing_frame_presentation_time;
@@ -221,17 +221,17 @@
       // Number of frames between next frame and the existing frame.
       int64_t frame_index_delta = next_frame_index - existing_frame_index;
       // Assume the next frame will be written now.
-      int64_t next_frame_write_time = rtc::TimeNanos();
+      int64_t next_frame_write_time = webrtc::TimeNanos();
       // Calculate time when next frame will be presented to the hardware taking
       // sample rate into account.
       int64_t frame_time_delta =
-          (frame_index_delta * rtc::kNumNanosecsPerSec) / sample_rate();
+          (frame_index_delta * webrtc::kNumNanosecsPerSec) / sample_rate();
       int64_t next_frame_presentation_time =
           existing_frame_presentation_time + frame_time_delta;
       // Derive a latency estimate given results above.
       latency_millis = static_cast<double>(next_frame_presentation_time -
                                            next_frame_write_time) /
-                       rtc::kNumNanosecsPerMillisec;
+                       webrtc::kNumNanosecsPerMillisec;
     }
   }
   return latency_millis;
@@ -409,7 +409,7 @@
 void AAudioWrapper::LogStreamConfiguration() {
   RTC_DCHECK(stream_);
   char ss_buf[1024];
-  rtc::SimpleStringBuilder ss(ss_buf);
+  webrtc::SimpleStringBuilder ss(ss_buf);
   ss << "Stream Configuration: ";
   ss << "sample rate=" << sample_rate() << ", channels=" << channel_count();
   ss << ", samples per frame=" << samples_per_frame();
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc
index 2c57118..4d64e5f 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.cc
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -647,7 +647,7 @@
   return Java_WebRtcAudioManager_isLowLatencyOutputSupported(env, j_context);
 }
 
-rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
     AudioDeviceModule::AudioLayer audio_layer,
     bool is_stereo_playout_supported,
     bool is_stereo_record_supported,
@@ -655,7 +655,7 @@
     std::unique_ptr<AudioInput> audio_input,
     std::unique_ptr<AudioOutput> audio_output) {
   RTC_DLOG(LS_INFO) << __FUNCTION__;
-  return rtc::make_ref_counted<AndroidAudioDeviceModule>(
+  return make_ref_counted<AndroidAudioDeviceModule>(
       audio_layer, is_stereo_playout_supported, is_stereo_record_supported,
       playout_delay_ms, std::move(audio_input), std::move(audio_output));
 }
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h
index a812d06..cbf71ed 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.h
+++ b/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -95,7 +95,7 @@
                                  const JavaRef<jobject>& j_context);
 
 // Glue together an audio input and audio output to get an AudioDeviceModule.
-rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
     AudioDeviceModule::AudioLayer audio_layer,
     bool is_stereo_playout_supported,
     bool is_stereo_record_supported,
diff --git a/sdk/android/src/jni/audio_device/opensles_common.h b/sdk/android/src/jni/audio_device/opensles_common.h
index 9dd1e0f..feface9 100644
--- a/sdk/android/src/jni/audio_device/opensles_common.h
+++ b/sdk/android/src/jni/audio_device/opensles_common.h
@@ -69,7 +69,7 @@
 // Note: This class must be used single threaded and this is enforced by a
 // thread checker.
 class OpenSLEngineManager
-    : public rtc::RefCountedNonVirtual<OpenSLEngineManager> {
+    : public webrtc::RefCountedNonVirtual<OpenSLEngineManager> {
  public:
   OpenSLEngineManager();
   ~OpenSLEngineManager() = default;
diff --git a/sdk/android/src/jni/audio_device/opensles_player.cc b/sdk/android/src/jni/audio_device/opensles_player.cc
index fb952b7..efb6766 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.cc
+++ b/sdk/android/src/jni/audio_device/opensles_player.cc
@@ -44,7 +44,7 @@
 
 OpenSLESPlayer::OpenSLESPlayer(
     const AudioParameters& audio_parameters,
-    rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+    webrtc::scoped_refptr<OpenSLEngineManager> engine_manager)
     : audio_parameters_(audio_parameters),
       audio_device_buffer_(nullptr),
       initialized_(false),
@@ -56,7 +56,7 @@
       simple_buffer_queue_(nullptr),
       volume_(nullptr),
       last_play_time_(0) {
-  ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("ctor[tid=%d]", webrtc::CurrentThreadId());
   // Use native audio output parameters provided by the audio manager and
   // define the PCM format structure.
   pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
@@ -68,7 +68,7 @@
 }
 
 OpenSLESPlayer::~OpenSLESPlayer() {
-  ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("dtor[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   Terminate();
   DestroyAudioPlayer();
@@ -82,7 +82,7 @@
 }
 
 int OpenSLESPlayer::Init() {
-  ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("Init[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   if (audio_parameters_.channels() == 2) {
     ALOGW("Stereo mode is enabled");
@@ -91,14 +91,14 @@
 }
 
 int OpenSLESPlayer::Terminate() {
-  ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("Terminate[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   StopPlayout();
   return 0;
 }
 
 int OpenSLESPlayer::InitPlayout() {
-  ALOGD("InitPlayout[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("InitPlayout[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   RTC_DCHECK(!initialized_);
   RTC_DCHECK(!playing_);
@@ -117,7 +117,7 @@
 }
 
 int OpenSLESPlayer::StartPlayout() {
-  ALOGD("StartPlayout[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("StartPlayout[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   RTC_DCHECK(initialized_);
   RTC_DCHECK(!playing_);
@@ -145,7 +145,7 @@
 }
 
 int OpenSLESPlayer::StopPlayout() {
-  ALOGD("StopPlayout[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("StopPlayout[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   if (!initialized_ || !playing_) {
     return 0;
@@ -416,9 +416,9 @@
     // OpenSL ES. Use hardcoded delay estimate since OpenSL ES does not support
     // delay estimation.
     fine_audio_buffer_->GetPlayoutData(
-        rtc::ArrayView<int16_t>(audio_buffers_[buffer_index_].get(),
-                                audio_parameters_.frames_per_buffer() *
-                                    audio_parameters_.channels()),
+        webrtc::ArrayView<int16_t>(audio_buffers_[buffer_index_].get(),
+                                   audio_parameters_.frames_per_buffer() *
+                                       audio_parameters_.channels()),
         25);
   }
   // Enqueue the decoded audio buffer for playback.
diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h
index 8d6c2ce..840c593 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.h
+++ b/sdk/android/src/jni/audio_device/opensles_player.h
@@ -62,7 +62,7 @@
   static const int kNumOfOpenSLESBuffers = 2;
 
   OpenSLESPlayer(const AudioParameters& audio_parameters,
-                 rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+                 webrtc::scoped_refptr<OpenSLEngineManager> engine_manager);
   ~OpenSLESPlayer() override;
 
   int Init() override;
@@ -163,7 +163,7 @@
   // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
   int buffer_index_;
 
-  const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+  const webrtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
   // This interface exposes creation methods for all the OpenSL ES object types.
   // It is the OpenSL ES API entry point.
   SLEngineItf engine_;
diff --git a/sdk/android/src/jni/audio_device/opensles_recorder.cc b/sdk/android/src/jni/audio_device/opensles_recorder.cc
index 73736b8..8e9bfaa 100644
--- a/sdk/android/src/jni/audio_device/opensles_recorder.cc
+++ b/sdk/android/src/jni/audio_device/opensles_recorder.cc
@@ -45,7 +45,7 @@
 
 OpenSLESRecorder::OpenSLESRecorder(
     const AudioParameters& audio_parameters,
-    rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+    webrtc::scoped_refptr<OpenSLEngineManager> engine_manager)
     : audio_parameters_(audio_parameters),
       audio_device_buffer_(nullptr),
       initialized_(false),
@@ -56,7 +56,7 @@
       simple_buffer_queue_(nullptr),
       buffer_index_(0),
       last_rec_time_(0) {
-  ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("ctor[tid=%d]", webrtc::CurrentThreadId());
   // Detach from this thread since we want to use the checker to verify calls
   // from the internal audio thread.
   thread_checker_opensles_.Detach();
@@ -68,7 +68,7 @@
 }
 
 OpenSLESRecorder::~OpenSLESRecorder() {
-  ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("dtor[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   Terminate();
   DestroyAudioRecorder();
@@ -79,7 +79,7 @@
 }
 
 int OpenSLESRecorder::Init() {
-  ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("Init[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   if (audio_parameters_.channels() == 2) {
     ALOGD("Stereo mode is enabled");
@@ -88,14 +88,14 @@
 }
 
 int OpenSLESRecorder::Terminate() {
-  ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("Terminate[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   StopRecording();
   return 0;
 }
 
 int OpenSLESRecorder::InitRecording() {
-  ALOGD("InitRecording[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("InitRecording[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   RTC_DCHECK(!initialized_);
   RTC_DCHECK(!recording_);
@@ -114,7 +114,7 @@
 }
 
 int OpenSLESRecorder::StartRecording() {
-  ALOGD("StartRecording[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("StartRecording[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   RTC_DCHECK(initialized_);
   RTC_DCHECK(!recording_);
@@ -151,7 +151,7 @@
 }
 
 int OpenSLESRecorder::StopRecording() {
-  ALOGD("StopRecording[tid=%d]", rtc::CurrentThreadId());
+  ALOGD("StopRecording[tid=%d]", webrtc::CurrentThreadId());
   RTC_DCHECK(thread_checker_.IsCurrent());
   if (!initialized_ || !recording_) {
     return 0;
@@ -383,7 +383,7 @@
   // OpenSL ES anyhow. Hence, as is, the WebRTC based AEC (which would use
   // these estimates) will never be active.
   fine_audio_buffer_->DeliverRecordedData(
-      rtc::ArrayView<const int16_t>(
+      webrtc::ArrayView<const int16_t>(
           audio_buffers_[buffer_index_].get(),
           audio_parameters_.frames_per_buffer() * audio_parameters_.channels()),
       25);
diff --git a/sdk/android/src/jni/audio_device/opensles_recorder.h b/sdk/android/src/jni/audio_device/opensles_recorder.h
index bffe086..67e23a4 100644
--- a/sdk/android/src/jni/audio_device/opensles_recorder.h
+++ b/sdk/android/src/jni/audio_device/opensles_recorder.h
@@ -64,7 +64,7 @@
   static const int kNumOfOpenSLESBuffers = 2;
 
   OpenSLESRecorder(const AudioParameters& audio_parameters,
-                   rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+                   webrtc::scoped_refptr<OpenSLEngineManager> engine_manager);
   ~OpenSLESRecorder() override;
 
   int Init() override;
@@ -149,7 +149,7 @@
   bool initialized_;
   bool recording_;
 
-  const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+  const webrtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
   // This interface exposes creation methods for all the OpenSL ES object types.
   // It is the OpenSL ES API entry point.
   SLEngineItf engine_;
diff --git a/sdk/android/src/jni/encoded_image.cc b/sdk/android/src/jni/encoded_image.cc
index 8cd2445..204fdc7 100644
--- a/sdk/android/src/jni/encoded_image.cc
+++ b/sdk/android/src/jni/encoded_image.cc
@@ -89,7 +89,7 @@
   const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj());
 
   EncodedImage frame;
-  frame.SetEncodedData(rtc::make_ref_counted<JavaEncodedImageBuffer>(
+  frame.SetEncodedData(make_ref_counted<JavaEncodedImageBuffer>(
       env, j_encoded_image, buffer, buffer_size));
 
   frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image);
diff --git a/sdk/android/src/jni/logging/log_sink.cc b/sdk/android/src/jni/logging/log_sink.cc
index 84394d8..3430da6 100644
--- a/sdk/android/src/jni/logging/log_sink.cc
+++ b/sdk/android/src/jni/logging/log_sink.cc
@@ -24,13 +24,13 @@
 }
 
 void JNILogSink::OnLogMessage(const std::string& msg,
-                              rtc::LoggingSeverity severity,
+                              LoggingSeverity severity,
                               const char* tag) {
   OnLogMessage(absl::string_view{msg}, severity, tag);
 }
 
 void JNILogSink::OnLogMessage(absl::string_view msg,
-                              rtc::LoggingSeverity severity,
+                              LoggingSeverity severity,
                               const char* tag) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   Java_JNILogging_logToInjectable(
diff --git a/sdk/android/src/jni/logging/log_sink.h b/sdk/android/src/jni/logging/log_sink.h
index 8e681ac..1c4d433 100644
--- a/sdk/android/src/jni/logging/log_sink.h
+++ b/sdk/android/src/jni/logging/log_sink.h
@@ -20,17 +20,17 @@
 namespace webrtc {
 namespace jni {
 
-class JNILogSink : public rtc::LogSink {
+class JNILogSink : public LogSink {
  public:
   JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging);
   ~JNILogSink() override;
 
   void OnLogMessage(const std::string& msg) override;
   void OnLogMessage(const std::string& msg,
-                    rtc::LoggingSeverity severity,
+                    LoggingSeverity severity,
                     const char* tag) override;
   void OnLogMessage(absl::string_view msg,
-                    rtc::LoggingSeverity severity,
+                    LoggingSeverity severity,
                     const char* tag) override;
 
  private:
diff --git a/sdk/android/src/jni/native_capturer_observer.cc b/sdk/android/src/jni/native_capturer_observer.cc
index f8eb484..cb2b56a 100644
--- a/sdk/android/src/jni/native_capturer_observer.cc
+++ b/sdk/android/src/jni/native_capturer_observer.cc
@@ -20,7 +20,7 @@
 
 ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
     JNIEnv* env,
-    rtc::scoped_refptr<AndroidVideoTrackSource> native_source) {
+    scoped_refptr<AndroidVideoTrackSource> native_source) {
   return Java_NativeCapturerObserver_Constructor(
       env, NativeToJavaPointer(native_source.release()));
 }
diff --git a/sdk/android/src/jni/native_capturer_observer.h b/sdk/android/src/jni/native_capturer_observer.h
index 51acf41..ac4c221 100644
--- a/sdk/android/src/jni/native_capturer_observer.h
+++ b/sdk/android/src/jni/native_capturer_observer.h
@@ -21,7 +21,7 @@
 
 ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
     JNIEnv* env,
-    rtc::scoped_refptr<AndroidVideoTrackSource> native_source);
+    scoped_refptr<AndroidVideoTrackSource> native_source);
 
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/sdk/android/src/jni/pc/add_ice_candidate_observer.h
index 1128385..ba47bef 100644
--- a/sdk/android/src/jni/pc/add_ice_candidate_observer.h
+++ b/sdk/android/src/jni/pc/add_ice_candidate_observer.h
@@ -21,7 +21,7 @@
 namespace jni {
 
 class AddIceCandidateObserverJni final
-    : public rtc::RefCountedNonVirtual<AddIceCandidateObserverJni> {
+    : public RefCountedNonVirtual<AddIceCandidateObserverJni> {
  public:
   AddIceCandidateObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
   ~AddIceCandidateObserverJni() = default;
diff --git a/sdk/android/src/jni/pc/audio_track.cc b/sdk/android/src/jni/pc/audio_track.cc
index 36ed43f..a204862 100644
--- a/sdk/android/src/jni/pc/audio_track.cc
+++ b/sdk/android/src/jni/pc/audio_track.cc
@@ -15,7 +15,7 @@
 namespace jni {
 
 static void JNI_AudioTrack_SetVolume(JNIEnv*, jlong j_p, jdouble volume) {
-  rtc::scoped_refptr<AudioSourceInterface> source(
+  scoped_refptr<AudioSourceInterface> source(
       reinterpret_cast<AudioTrackInterface*>(j_p)->GetSource());
   source->SetVolume(volume);
 }
diff --git a/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc b/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
index 9ba4943..4acee15 100644
--- a/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
+++ b/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
@@ -26,14 +26,13 @@
   CallSessionFileRotatingLogSink* sink =
       new CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
   if (!sink->Init()) {
-    RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+    RTC_LOG_V(LoggingSeverity::LS_WARNING)
         << "Failed to init CallSessionFileRotatingLogSink for path "
         << dir_path;
     delete sink;
     return 0;
   }
-  rtc::LogMessage::AddLogToStream(
-      sink, static_cast<rtc::LoggingSeverity>(j_severity));
+  LogMessage::AddLogToStream(sink, static_cast<LoggingSeverity>(j_severity));
   return jlongFromPointer(sink);
 }
 
@@ -41,7 +40,7 @@
                                                           jlong j_sink) {
   CallSessionFileRotatingLogSink* sink =
       reinterpret_cast<CallSessionFileRotatingLogSink*>(j_sink);
-  rtc::LogMessage::RemoveLogToStream(sink);
+  LogMessage::RemoveLogToStream(sink);
   delete sink;
 }
 
@@ -53,7 +52,7 @@
   CallSessionFileRotatingStreamReader file_reader(dir_path);
   size_t log_size = file_reader.GetSize();
   if (log_size == 0) {
-    RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+    RTC_LOG_V(LoggingSeverity::LS_WARNING)
         << "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
     return jni_zero::ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(0));
   }
diff --git a/sdk/android/src/jni/pc/data_channel.cc b/sdk/android/src/jni/pc/data_channel.cc
index e2c66bb..baf003e 100644
--- a/sdk/android/src/jni/pc/data_channel.cc
+++ b/sdk/android/src/jni/pc/data_channel.cc
@@ -89,7 +89,7 @@
 
 ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
     JNIEnv* env,
-    rtc::scoped_refptr<DataChannelInterface> channel) {
+    scoped_refptr<DataChannelInterface> channel) {
   if (!channel)
     return nullptr;
   // Channel is now owned by Java object, and will be freed from there.
@@ -154,7 +154,7 @@
     jboolean binary) {
   std::vector<int8_t> buffer = JavaToNativeByteArray(jni, data);
   bool ret = ExtractNativeDC(jni, j_dc)->Send(
-      DataBuffer(rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()), binary));
+      DataBuffer(CopyOnWriteBuffer(buffer.data(), buffer.size()), binary));
   return ret;
 }
 
diff --git a/sdk/android/src/jni/pc/data_channel.h b/sdk/android/src/jni/pc/data_channel.h
index c034f2d..8ed3b6b 100644
--- a/sdk/android/src/jni/pc/data_channel.h
+++ b/sdk/android/src/jni/pc/data_channel.h
@@ -22,7 +22,7 @@
 
 ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
     JNIEnv* env,
-    rtc::scoped_refptr<DataChannelInterface> channel);
+    scoped_refptr<DataChannelInterface> channel);
 
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/logging.cc b/sdk/android/src/jni/pc/logging.cc
index 618ad00..6cd78b2 100644
--- a/sdk/android/src/jni/pc/logging.cc
+++ b/sdk/android/src/jni/pc/logging.cc
@@ -24,9 +24,8 @@
                          JNIEnv* jni,
                          jclass,
                          jint nativeSeverity) {
-  if (nativeSeverity >= rtc::LS_VERBOSE && nativeSeverity <= rtc::LS_NONE) {
-    rtc::LogMessage::LogToDebug(
-        static_cast<rtc::LoggingSeverity>(nativeSeverity));
+  if (nativeSeverity >= LS_VERBOSE && nativeSeverity <= LS_NONE) {
+    LogMessage::LogToDebug(static_cast<LoggingSeverity>(nativeSeverity));
   }
 }
 
@@ -34,14 +33,14 @@
                          Logging_nativeEnableLogThreads,
                          JNIEnv* jni,
                          jclass) {
-  rtc::LogMessage::LogThreads(true);
+  LogMessage::LogThreads(true);
 }
 
 JNI_FUNCTION_DECLARATION(void,
                          Logging_nativeEnableLogTimeStamps,
                          JNIEnv* jni,
                          jclass) {
-  rtc::LogMessage::LogTimestamps(true);
+  LogMessage::LogTimestamps(true);
 }
 
 JNI_FUNCTION_DECLARATION(void,
@@ -55,8 +54,7 @@
       JavaToStdString(jni, jni_zero::JavaParamRef<jstring>(jni, j_message));
   std::string tag =
       JavaToStdString(jni, jni_zero::JavaParamRef<jstring>(jni, j_tag));
-  RTC_LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag.c_str())
-      << message;
+  RTC_LOG_TAG(static_cast<LoggingSeverity>(j_severity), tag.c_str()) << message;
 }
 
 }  // namespace jni
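
These JNI entry points forward directly to the static LogMessage controls. A
hedged sketch of the equivalent native-side configuration, assuming the
post-migration webrtc:: spellings from rtc_base/logging.h; the severity and
tag values are placeholders:

#include "rtc_base/logging.h"

void ConfigureNativeLogging() {
  // Route verbose-and-above messages to the platform debug log
  // (what Logging_nativeEnableLogToDebugOutput does for a valid severity).
  webrtc::LogMessage::LogToDebug(webrtc::LS_VERBOSE);
  // Prefix messages with the emitting thread id and a timestamp.
  webrtc::LogMessage::LogThreads(true);
  webrtc::LogMessage::LogTimestamps(true);
  // Tagged logging, as used by Logging_nativeLog above.
  RTC_LOG_TAG(webrtc::LS_INFO, "ExampleTag") << "native logging configured";
}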
diff --git a/sdk/android/src/jni/pc/media_stream.cc b/sdk/android/src/jni/pc/media_stream.cc
index 20d59a6..b23b614 100644
--- a/sdk/android/src/jni/pc/media_stream.cc
+++ b/sdk/android/src/jni/pc/media_stream.cc
@@ -21,7 +21,7 @@
 
 JavaMediaStream::JavaMediaStream(
     JNIEnv* env,
-    rtc::scoped_refptr<MediaStreamInterface> media_stream)
+    scoped_refptr<MediaStreamInterface> media_stream)
     : j_media_stream_(
           env,
           Java_MediaStream_Constructor(env,
@@ -46,12 +46,12 @@
              MediaStreamInterface* media_stream) {
         OnVideoTrackRemovedFromStream(video_track, media_stream);
       }));
-  for (rtc::scoped_refptr<AudioTrackInterface> track :
+  for (scoped_refptr<AudioTrackInterface> track :
        media_stream->GetAudioTracks()) {
     Java_MediaStream_addNativeAudioTrack(env, j_media_stream_,
                                          jlongFromPointer(track.release()));
   }
-  for (rtc::scoped_refptr<VideoTrackInterface> track :
+  for (scoped_refptr<VideoTrackInterface> track :
        media_stream->GetVideoTracks()) {
     Java_MediaStream_addNativeVideoTrack(env, j_media_stream_,
                                          jlongFromPointer(track.release()));
@@ -113,7 +113,7 @@
     jlong pointer,
     jlong j_audio_track_pointer) {
   return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
-      rtc::scoped_refptr<AudioTrackInterface>(
+      scoped_refptr<AudioTrackInterface>(
           reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
 }
 
@@ -122,7 +122,7 @@
     jlong pointer,
     jlong j_video_track_pointer) {
   return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
-      rtc::scoped_refptr<VideoTrackInterface>(
+      scoped_refptr<VideoTrackInterface>(
           reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
 }
 
@@ -130,7 +130,7 @@
                                                  jlong pointer,
                                                  jlong j_audio_track_pointer) {
   return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
-      rtc::scoped_refptr<AudioTrackInterface>(
+      scoped_refptr<AudioTrackInterface>(
           reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
 }
 
@@ -138,7 +138,7 @@
                                                  jlong pointer,
                                                  jlong j_video_track_pointer) {
   return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
-      rtc::scoped_refptr<VideoTrackInterface>(
+      scoped_refptr<VideoTrackInterface>(
           reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
 }
 
diff --git a/sdk/android/src/jni/pc/media_stream.h b/sdk/android/src/jni/pc/media_stream.h
index a477764..e27da82 100644
--- a/sdk/android/src/jni/pc/media_stream.h
+++ b/sdk/android/src/jni/pc/media_stream.h
@@ -24,9 +24,8 @@
 
 class JavaMediaStream {
  public:
-  explicit JavaMediaStream(
-      JNIEnv* env,
-      rtc::scoped_refptr<MediaStreamInterface> media_stream);
+  explicit JavaMediaStream(JNIEnv* env,
+                           scoped_refptr<MediaStreamInterface> media_stream);
   ~JavaMediaStream();
 
   const ScopedJavaGlobalRef<jobject>& j_media_stream() {
diff --git a/sdk/android/src/jni/pc/owned_factory_and_threads.cc b/sdk/android/src/jni/pc/owned_factory_and_threads.cc
index 9c945e0..7c83de6 100644
--- a/sdk/android/src/jni/pc/owned_factory_and_threads.cc
+++ b/sdk/android/src/jni/pc/owned_factory_and_threads.cc
@@ -20,7 +20,7 @@
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
     std::unique_ptr<Thread> signaling_thread,
-    const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory)
+    const scoped_refptr<PeerConnectionFactoryInterface>& factory)
     : socket_factory_(std::move(socket_factory)),
       network_thread_(std::move(network_thread)),
       worker_thread_(std::move(worker_thread)),
diff --git a/sdk/android/src/jni/pc/owned_factory_and_threads.h b/sdk/android/src/jni/pc/owned_factory_and_threads.h
index 7b44971..11c431b 100644
--- a/sdk/android/src/jni/pc/owned_factory_and_threads.h
+++ b/sdk/android/src/jni/pc/owned_factory_and_threads.h
@@ -35,7 +35,7 @@
       std::unique_ptr<Thread> network_thread,
       std::unique_ptr<Thread> worker_thread,
       std::unique_ptr<Thread> signaling_thread,
-      const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory);
+      const scoped_refptr<PeerConnectionFactoryInterface>& factory);
 
   ~OwnedFactoryAndThreads() = default;
 
@@ -52,7 +52,7 @@
   const std::unique_ptr<Thread> network_thread_;
   const std::unique_ptr<Thread> worker_thread_;
   const std::unique_ptr<Thread> signaling_thread_;
-  const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+  const scoped_refptr<PeerConnectionFactoryInterface> factory_;
 };
 
 }  // namespace jni
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 49ebb8c..51dd8d7 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -181,7 +181,7 @@
   rtc_config->rtcp_mux_policy =
       JavaToNativeRtcpMuxPolicy(jni, j_rtcp_mux_policy);
   if (!j_rtc_certificate.is_null()) {
-    rtc::scoped_refptr<RTCCertificate> certificate = RTCCertificate::FromPEM(
+    scoped_refptr<RTCCertificate> certificate = RTCCertificate::FromPEM(
         JavaToNativeRTCCertificatePEM(jni, j_rtc_certificate));
     RTC_CHECK(certificate != nullptr) << "supplied certificate is malformed.";
     rtc_config->certificates.push_back(certificate);
@@ -387,7 +387,7 @@
 }
 
 void PeerConnectionObserverJni::OnAddStream(
-    rtc::scoped_refptr<MediaStreamInterface> stream) {
+    scoped_refptr<MediaStreamInterface> stream) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   Java_Observer_onAddStream(
       env, j_observer_global_,
@@ -395,7 +395,7 @@
 }
 
 void PeerConnectionObserverJni::OnRemoveStream(
-    rtc::scoped_refptr<MediaStreamInterface> stream) {
+    scoped_refptr<MediaStreamInterface> stream) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
   RTC_CHECK(it != remote_streams_.end())
@@ -406,7 +406,7 @@
 }
 
 void PeerConnectionObserverJni::OnDataChannel(
-    rtc::scoped_refptr<DataChannelInterface> channel) {
+    scoped_refptr<DataChannelInterface> channel) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   Java_Observer_onDataChannel(env, j_observer_global_,
                               WrapNativeDataChannel(env, channel));
@@ -418,8 +418,8 @@
 }
 
 void PeerConnectionObserverJni::OnAddTrack(
-    rtc::scoped_refptr<RtpReceiverInterface> receiver,
-    const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+    scoped_refptr<RtpReceiverInterface> receiver,
+    const std::vector<scoped_refptr<MediaStreamInterface>>& streams) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   jni_zero::ScopedJavaLocalRef<jobject> j_rtp_receiver =
       NativeToJavaRtpReceiver(env, receiver);
@@ -430,7 +430,7 @@
 }
 
 void PeerConnectionObserverJni::OnRemoveTrack(
-    rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+    scoped_refptr<RtpReceiverInterface> receiver) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   jni_zero::ScopedJavaLocalRef<jobject> j_rtp_receiver =
       NativeToJavaRtpReceiver(env, receiver);
@@ -440,7 +440,7 @@
 }
 
 void PeerConnectionObserverJni::OnTrack(
-    rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+    scoped_refptr<RtpTransceiverInterface> transceiver) {
   JNIEnv* env = AttachCurrentThreadIfNeeded();
   jni_zero::ScopedJavaLocalRef<jobject> j_rtp_transceiver =
       NativeToJavaRtpTransceiver(env, transceiver);
@@ -453,7 +453,7 @@
 // Otherwise, create a new Java MediaStream.
 JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream(
     JNIEnv* env,
-    const rtc::scoped_refptr<MediaStreamInterface>& stream) {
+    const scoped_refptr<MediaStreamInterface>& stream) {
   NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
   if (it == remote_streams_.end()) {
     it = remote_streams_
@@ -468,24 +468,24 @@
 ScopedJavaLocalRef<jobjectArray>
 PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
     JNIEnv* jni,
-    const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+    const std::vector<scoped_refptr<MediaStreamInterface>>& streams) {
   return NativeToJavaObjectArray(
       jni, streams, GetMediaStreamClass(jni),
-      [this](JNIEnv* env, rtc::scoped_refptr<MediaStreamInterface> stream)
+      [this](JNIEnv* env, scoped_refptr<MediaStreamInterface> stream)
           -> const jni_zero::ScopedJavaGlobalRef<jobject>& {
         return GetOrCreateJavaStream(env, stream).j_media_stream();
       });
 }
 
 OwnedPeerConnection::OwnedPeerConnection(
-    rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+    scoped_refptr<PeerConnectionInterface> peer_connection,
     std::unique_ptr<PeerConnectionObserver> observer)
     : OwnedPeerConnection(peer_connection,
                           std::move(observer),
                           nullptr /* constraints */) {}
 
 OwnedPeerConnection::OwnedPeerConnection(
-    rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+    scoped_refptr<PeerConnectionInterface> peer_connection,
     std::unique_ptr<PeerConnectionObserver> observer,
     std::unique_ptr<MediaConstraints> constraints)
     : peer_connection_(peer_connection),
@@ -558,7 +558,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc) {
   const PeerConnectionInterface::RTCConfiguration rtc_config =
       ExtractNativePC(jni, j_pc)->GetConfiguration();
-  rtc::scoped_refptr<RTCCertificate> certificate = rtc_config.certificates[0];
+  scoped_refptr<RTCCertificate> certificate = rtc_config.certificates[0];
   return NativeToJavaRTCCertificatePEM(jni, certificate->ToPEM());
 }
 
@@ -584,7 +584,7 @@
     const jni_zero::JavaParamRef<jobject>& j_constraints) {
   std::unique_ptr<MediaConstraints> constraints =
       JavaToNativeMediaConstraints(jni, j_constraints);
-  auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+  auto observer = make_ref_counted<CreateSdpObserverJni>(
       jni, j_observer, std::move(constraints));
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
@@ -598,7 +598,7 @@
     const jni_zero::JavaParamRef<jobject>& j_constraints) {
   std::unique_ptr<MediaConstraints> constraints =
       JavaToNativeMediaConstraints(jni, j_constraints);
-  auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+  auto observer = make_ref_counted<CreateSdpObserverJni>(
       jni, j_observer, std::move(constraints));
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
@@ -609,8 +609,7 @@
     JNIEnv* jni,
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_observer) {
-  auto observer =
-      rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+  auto observer = make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
   ExtractNativePC(jni, j_pc)->SetLocalDescription(observer);
 }
 
@@ -619,8 +618,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_observer,
     const jni_zero::JavaParamRef<jobject>& j_sdp) {
-  auto observer =
-      rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+  auto observer = make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
   ExtractNativePC(jni, j_pc)->SetLocalDescription(
       JavaToNativeSessionDescription(jni, j_sdp), observer);
 }
@@ -630,8 +628,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_observer,
     const jni_zero::JavaParamRef<jobject>& j_sdp) {
-  auto observer =
-      rtc::make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
+  auto observer = make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
   ExtractNativePC(jni, j_pc)->SetRemoteDescription(
       JavaToNativeSessionDescription(jni, j_sdp), observer);
 }
@@ -698,7 +695,7 @@
   std::unique_ptr<IceCandidateInterface> candidate(
       CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
 
-  rtc::scoped_refptr<AddIceCandidateObserverJni> observer(
+  scoped_refptr<AddIceCandidateObserverJni> observer(
       new AddIceCandidateObserverJni(jni, j_observer));
   ExtractNativePC(jni, j_pc)->AddIceCandidate(
       std::move(candidate),
@@ -737,7 +734,7 @@
     const jni_zero::JavaParamRef<jstring>& j_stream_id) {
   std::string kind = JavaToNativeString(jni, j_kind);
   std::string stream_id = JavaToNativeString(jni, j_stream_id);
-  rtc::scoped_refptr<RtpSenderInterface> sender =
+  scoped_refptr<RtpSenderInterface> sender =
       ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
   return NativeToJavaRtpSender(jni, sender);
 }
@@ -768,9 +765,9 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jlong native_track,
     const jni_zero::JavaParamRef<jobject>& j_stream_labels) {
-  RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
+  RTCErrorOr<scoped_refptr<RtpSenderInterface>> result =
       ExtractNativePC(jni, j_pc)->AddTrack(
-          rtc::scoped_refptr<MediaStreamTrackInterface>(
+          scoped_refptr<MediaStreamTrackInterface>(
               reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
           JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels,
                                                        &JavaToNativeString));
@@ -787,7 +784,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     jlong native_sender) {
   return ExtractNativePC(jni, j_pc)
-      ->RemoveTrackOrError(rtc::scoped_refptr<RtpSenderInterface>(
+      ->RemoveTrackOrError(scoped_refptr<RtpSenderInterface>(
           reinterpret_cast<RtpSenderInterface*>(native_sender)))
       .ok();
 }
@@ -798,9 +795,9 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     jlong native_track,
     const jni_zero::JavaParamRef<jobject>& j_init) {
-  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+  RTCErrorOr<scoped_refptr<RtpTransceiverInterface>> result =
       ExtractNativePC(jni, j_pc)->AddTransceiver(
-          rtc::scoped_refptr<MediaStreamTrackInterface>(
+          scoped_refptr<MediaStreamTrackInterface>(
               reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
           JavaToNativeRtpTransceiverInit(jni, j_init));
   if (!result.ok()) {
@@ -818,7 +815,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_media_type,
     const jni_zero::JavaParamRef<jobject>& j_init) {
-  RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+  RTCErrorOr<scoped_refptr<RtpTransceiverInterface>> result =
       ExtractNativePC(jni, j_pc)->AddTransceiver(
           JavaToNativeMediaType(jni, j_media_type),
           JavaToNativeRtpTransceiverInit(jni, j_init));
@@ -836,7 +833,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_observer,
     jlong native_track) {
-  auto observer = rtc::make_ref_counted<StatsObserverJni>(jni, j_observer);
+  auto observer = make_ref_counted<StatsObserverJni>(jni, j_observer);
   return ExtractNativePC(jni, j_pc)->GetStats(
       observer.get(),
       reinterpret_cast<MediaStreamTrackInterface*>(native_track),
@@ -848,7 +845,7 @@
     const jni_zero::JavaParamRef<jobject>& j_pc,
     const jni_zero::JavaParamRef<jobject>& j_callback) {
   auto callback =
-      rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
+      make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
   ExtractNativePC(jni, j_pc)->GetStats(callback.get());
 }
 
@@ -858,11 +855,11 @@
     jlong native_sender,
     const jni_zero::JavaParamRef<jobject>& j_callback) {
   auto callback =
-      rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
+      make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
   ExtractNativePC(jni, j_pc)->GetStats(
-      rtc::scoped_refptr<RtpSenderInterface>(
+      scoped_refptr<RtpSenderInterface>(
           reinterpret_cast<RtpSenderInterface*>(native_sender)),
-      rtc::scoped_refptr<RTCStatsCollectorCallbackWrapper>(callback.get()));
+      scoped_refptr<RTCStatsCollectorCallbackWrapper>(callback.get()));
 }
 
 static void JNI_PeerConnection_NewGetStatsReceiver(
@@ -871,11 +868,11 @@
     jlong native_receiver,
     const jni_zero::JavaParamRef<jobject>& j_callback) {
   auto callback =
-      rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
+      make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
   ExtractNativePC(jni, j_pc)->GetStats(
-      rtc::scoped_refptr<RtpReceiverInterface>(
+      scoped_refptr<RtpReceiverInterface>(
           reinterpret_cast<RtpReceiverInterface*>(native_receiver)),
-      rtc::scoped_refptr<RTCStatsCollectorCallbackWrapper>(callback.get()));
+      scoped_refptr<RTCStatsCollectorCallbackWrapper>(callback.get()));
 }
 
 static jboolean JNI_PeerConnection_SetBitrate(
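
GetOrCreateJavaStream() above is a plain get-or-create cache keyed on the
native stream pointer, so every native stream maps to exactly one Java
wrapper for the lifetime of the observer. A self-contained sketch of the
pattern; JavaStreamHandle and GetOrCreate are illustrative stand-ins, not
WebRTC types:

#include <map>

// Illustrative stand-in for JavaMediaStream.
struct JavaStreamHandle {
  explicit JavaStreamHandle(void* native) : native_stream(native) {}
  void* native_stream;
};

JavaStreamHandle& GetOrCreate(std::map<void*, JavaStreamHandle>& cache,
                              void* native_stream) {
  auto it = cache.find(native_stream);
  if (it == cache.end()) {
    // Construct the wrapper in place so the map owns it; later lookups for
    // the same native stream return the same wrapper instance.
    it = cache.emplace(native_stream, JavaStreamHandle(native_stream)).first;
  }
  return it->second;
}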
diff --git a/sdk/android/src/jni/pc/peer_connection.h b/sdk/android/src/jni/pc/peer_connection.h
index cda325f..61e9c46 100644
--- a/sdk/android/src/jni/pc/peer_connection.h
+++ b/sdk/android/src/jni/pc/peer_connection.h
@@ -68,17 +68,15 @@
       PeerConnectionInterface::IceGatheringState new_state) override;
   void OnIceSelectedCandidatePairChanged(
       const CandidatePairChangeEvent& event) override;
-  void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
-  void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
-  void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override;
+  void OnAddStream(scoped_refptr<MediaStreamInterface> stream) override;
+  void OnRemoveStream(scoped_refptr<MediaStreamInterface> stream) override;
+  void OnDataChannel(scoped_refptr<DataChannelInterface> channel) override;
   void OnRenegotiationNeeded() override;
-  void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
-                  const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
-                      streams) override;
-  void OnTrack(
-      rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
-  void OnRemoveTrack(
-      rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+  void OnAddTrack(
+      scoped_refptr<RtpReceiverInterface> receiver,
+      const std::vector<scoped_refptr<MediaStreamInterface>>& streams) override;
+  void OnTrack(scoped_refptr<RtpTransceiverInterface> transceiver) override;
+  void OnRemoveTrack(scoped_refptr<RtpReceiverInterface> receiver) override;
 
  private:
   typedef std::map<MediaStreamInterface*, JavaMediaStream>
@@ -90,12 +88,12 @@
   // Otherwise, create a new Java MediaStream. Returns a global jobject.
   JavaMediaStream& GetOrCreateJavaStream(
       JNIEnv* env,
-      const rtc::scoped_refptr<MediaStreamInterface>& stream);
+      const scoped_refptr<MediaStreamInterface>& stream);
 
   // Converts array of streams, creating or re-using Java streams as necessary.
   ScopedJavaLocalRef<jobjectArray> NativeToJavaMediaStreamArray(
       JNIEnv* jni,
-      const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
+      const std::vector<scoped_refptr<MediaStreamInterface>>& streams);
 
   const ScopedJavaGlobalRef<jobject> j_observer_global_;
 
@@ -115,21 +113,19 @@
 // Also stores reference to the deprecated PeerConnection constraints for now.
 class OwnedPeerConnection {
  public:
-  OwnedPeerConnection(
-      rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
-      std::unique_ptr<PeerConnectionObserver> observer);
+  OwnedPeerConnection(scoped_refptr<PeerConnectionInterface> peer_connection,
+                      std::unique_ptr<PeerConnectionObserver> observer);
   // Deprecated. PC constraints are deprecated.
-  OwnedPeerConnection(
-      rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
-      std::unique_ptr<PeerConnectionObserver> observer,
-      std::unique_ptr<MediaConstraints> constraints);
+  OwnedPeerConnection(scoped_refptr<PeerConnectionInterface> peer_connection,
+                      std::unique_ptr<PeerConnectionObserver> observer,
+                      std::unique_ptr<MediaConstraints> constraints);
   ~OwnedPeerConnection();
 
   PeerConnectionInterface* pc() const { return peer_connection_.get(); }
   const MediaConstraints* constraints() const { return constraints_.get(); }
 
  private:
-  rtc::scoped_refptr<PeerConnectionInterface> peer_connection_;
+  scoped_refptr<PeerConnectionInterface> peer_connection_;
   std::unique_ptr<PeerConnectionObserver> observer_;
   std::unique_ptr<MediaConstraints> constraints_;
 };
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc
index 8ad0dd9..7404a95 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -47,11 +47,12 @@
 
 namespace {
 
-// Take ownership of the jlong reference and cast it into an rtc::scoped_refptr.
+// Take ownership of the jlong reference and cast it into a
+// webrtc::scoped_refptr.
 template <typename T>
-rtc::scoped_refptr<T> TakeOwnershipOfRefPtr(jlong j_pointer) {
+scoped_refptr<T> TakeOwnershipOfRefPtr(jlong j_pointer) {
   T* ptr = reinterpret_cast<T*>(j_pointer);
-  rtc::scoped_refptr<T> refptr;
+  scoped_refptr<T> refptr;
   refptr.swap(&ptr);
   return refptr;
 }
@@ -113,7 +114,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToScopedJavaPeerConnectionFactory(
     JNIEnv* env,
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
     std::unique_ptr<SocketFactory> socket_factory,
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
@@ -153,7 +154,7 @@
 
 jobject NativeToJavaPeerConnectionFactory(
     JNIEnv* jni,
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
     std::unique_ptr<SocketFactory> socket_factory,
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
@@ -189,7 +190,7 @@
 }
 
 static void JNI_PeerConnectionFactory_InitializeInternalTracer(JNIEnv* jni) {
-  rtc::tracing::SetupInternalTracer();
+  tracing::SetupInternalTracer();
 }
 
 static jni_zero::ScopedJavaLocalRef<jstring>
@@ -209,17 +210,17 @@
   const char* init_string =
       jni->GetStringUTFChars(j_event_tracing_filename.obj(), NULL);
   RTC_LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
-  bool ret = rtc::tracing::StartInternalCapture(init_string);
+  bool ret = tracing::StartInternalCapture(init_string);
   jni->ReleaseStringUTFChars(j_event_tracing_filename.obj(), init_string);
   return ret;
 }
 
 static void JNI_PeerConnectionFactory_StopInternalTracingCapture(JNIEnv* jni) {
-  rtc::tracing::StopInternalCapture();
+  tracing::StopInternalCapture();
 }
 
 static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
-  rtc::tracing::ShutdownInternalTracer();
+  tracing::ShutdownInternalTracer();
 }
 
 // Following parameters are optional:
@@ -230,12 +231,12 @@
     JNIEnv* jni,
     const jni_zero::JavaParamRef<jobject>& jcontext,
     const jni_zero::JavaParamRef<jobject>& joptions,
-    rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
-    rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
-    rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
+    scoped_refptr<AudioDeviceModule> audio_device_module,
+    scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
+    scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
     const jni_zero::JavaParamRef<jobject>& jencoder_factory,
     const jni_zero::JavaParamRef<jobject>& jdecoder_factory,
-    rtc::scoped_refptr<AudioProcessing> audio_processor,
+    scoped_refptr<AudioProcessing> audio_processor,
     std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
     std::unique_ptr<NetworkControllerFactoryInterface>
         network_controller_factory,
@@ -302,7 +303,7 @@
       absl::WrapUnique(CreateVideoDecoderFactory(jni, jdecoder_factory));
   EnableMedia(dependencies);
 
-  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory =
+  scoped_refptr<PeerConnectionFactoryInterface> factory =
       CreateModularPeerConnectionFactory(std::move(dependencies));
 
   RTC_CHECK(factory) << "Failed to create the peer connection factory; "
@@ -332,11 +333,11 @@
     jlong native_network_controller_factory,
     jlong native_network_state_predictor_factory,
     jlong native_neteq_factory) {
-  rtc::scoped_refptr<AudioProcessing> audio_processor(
+  scoped_refptr<AudioProcessing> audio_processor(
       reinterpret_cast<AudioProcessing*>(native_audio_processor));
   return CreatePeerConnectionFactoryForJava(
       jni, jcontext, joptions,
-      rtc::scoped_refptr<AudioDeviceModule>(
+      scoped_refptr<AudioDeviceModule>(
           reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)),
       TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
       TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
@@ -360,7 +361,7 @@
     JNIEnv* jni,
     jlong native_factory,
     const jni_zero::JavaParamRef<jstring>& label) {
-  rtc::scoped_refptr<MediaStreamInterface> stream(
+  scoped_refptr<MediaStreamInterface> stream(
       PeerConnectionFactoryFromJava(native_factory)
           ->CreateLocalMediaStream(JavaToStdString(jni, label)));
   return jlongFromPointer(stream.release());
@@ -374,7 +375,7 @@
       JavaToNativeMediaConstraints(jni, j_constraints);
   AudioOptions options;
   CopyConstraintsIntoAudioOptions(constraints.get(), &options);
-  rtc::scoped_refptr<AudioSourceInterface> source(
+  scoped_refptr<AudioSourceInterface> source(
       PeerConnectionFactoryFromJava(native_factory)
           ->CreateAudioSource(options));
   return jlongFromPointer(source.release());
@@ -385,7 +386,7 @@
     jlong native_factory,
     const jni_zero::JavaParamRef<jstring>& id,
     jlong native_source) {
-  rtc::scoped_refptr<AudioTrackInterface> track(
+  scoped_refptr<AudioTrackInterface> track(
       PeerConnectionFactoryFromJava(native_factory)
           ->CreateAudioTrack(
               JavaToStdString(jni, id),
@@ -452,7 +453,7 @@
     // Generate non-default certificate.
     KeyType key_type = GetRtcConfigKeyType(jni, j_rtc_config);
     if (key_type != KT_DEFAULT) {
-      rtc::scoped_refptr<RTCCertificate> certificate =
+      scoped_refptr<RTCCertificate> certificate =
           RTCCertificateGenerator::GenerateCertificate(KeyParams(key_type),
                                                        std::nullopt);
       if (!certificate) {
@@ -504,10 +505,10 @@
     jlong native_factory,
     const jni_zero::JavaParamRef<jstring>& id,
     jlong native_source) {
-  rtc::scoped_refptr<VideoTrackInterface> track =
+  scoped_refptr<VideoTrackInterface> track =
       PeerConnectionFactoryFromJava(native_factory)
           ->CreateVideoTrack(
-              rtc::scoped_refptr<VideoTrackSourceInterface>(
+              scoped_refptr<VideoTrackSourceInterface>(
                   reinterpret_cast<VideoTrackSourceInterface*>(native_source)),
               JavaToStdString(jni, id));
   return jlongFromPointer(track.release());
@@ -527,19 +528,19 @@
 
   // If there is already a LogSink, remove it from LogMessage.
   if (jni_log_sink) {
-    rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+    LogMessage::RemoveLogToStream(jni_log_sink.get());
   }
   jni_log_sink = std::make_unique<JNILogSink>(jni, j_logging);
-  rtc::LogMessage::AddLogToStream(
-      jni_log_sink.get(), static_cast<rtc::LoggingSeverity>(nativeSeverity));
-  rtc::LogMessage::LogToDebug(rtc::LS_NONE);
+  LogMessage::AddLogToStream(jni_log_sink.get(),
+                             static_cast<LoggingSeverity>(nativeSeverity));
+  LogMessage::LogToDebug(LS_NONE);
 }
 
 static void JNI_PeerConnectionFactory_DeleteLoggable(JNIEnv* jni) {
   std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink;
 
   if (jni_log_sink) {
-    rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+    LogMessage::RemoveLogToStream(jni_log_sink.get());
     jni_log_sink.reset();
   }
 }
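
TakeOwnershipOfRefPtr() above adopts a pointer that already carries one
reference (it was passed across JNI as a jlong produced by release()), so it
must not AddRef() again; swapping the raw pointer into a default-constructed
scoped_refptr transfers that reference, whereas the pointer constructor would
AddRef() and leak it. A hedged sketch of the transfer, assuming
api/scoped_refptr.h, api/make_ref_counted.h and api/ref_count.h provide the
webrtc:: spellings used elsewhere in this CL; ExampleRefCounted, AdoptRef and
Example are illustrative names:

#include <cstdint>

#include "api/make_ref_counted.h"
#include "api/ref_count.h"
#include "api/scoped_refptr.h"

// Illustrative ref-counted type; make_ref_counted() supplies AddRef/Release.
class ExampleRefCounted : public webrtc::RefCountInterface {};

template <typename T>
webrtc::scoped_refptr<T> AdoptRef(int64_t j_pointer) {
  T* ptr = reinterpret_cast<T*>(j_pointer);
  webrtc::scoped_refptr<T> refptr;
  refptr.swap(&ptr);  // Adopt the existing reference without AddRef().
  return refptr;
}

void Example() {
  // Simulate the Java side holding a referenced raw pointer as a jlong.
  webrtc::scoped_refptr<ExampleRefCounted> original =
      webrtc::make_ref_counted<ExampleRefCounted>();
  const int64_t j_pointer =
      reinterpret_cast<int64_t>(original.release());  // Ref count stays at 1.

  // The native side adopts that reference back; no extra AddRef(), no leak.
  webrtc::scoped_refptr<ExampleRefCounted> adopted =
      AdoptRef<ExampleRefCounted>(j_pointer);
}  // `adopted` drops the single remaining reference here.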
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.h b/sdk/android/src/jni/pc/peer_connection_factory.h
index 7f7a6c7..22da885 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.h
+++ b/sdk/android/src/jni/pc/peer_connection_factory.h
@@ -22,7 +22,7 @@
 // Creates java PeerConnectionFactory with specified `pcf`.
 jobject NativeToJavaPeerConnectionFactory(
     JNIEnv* jni,
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+    scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
     std::unique_ptr<SocketFactory> socket_factory,
     std::unique_ptr<Thread> network_thread,
     std::unique_ptr<Thread> worker_thread,
diff --git a/sdk/android/src/jni/pc/rtc_certificate.cc b/sdk/android/src/jni/pc/rtc_certificate.cc
index b32baaf..86e892c 100644
--- a/sdk/android/src/jni/pc/rtc_certificate.cc
+++ b/sdk/android/src/jni/pc/rtc_certificate.cc
@@ -46,10 +46,10 @@
     JNIEnv* jni,
     const jni_zero::JavaParamRef<jobject>& j_key_type,
     jlong j_expires) {
-  rtc::KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
+  KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
   uint64_t expires = (uint64_t)j_expires;
-  rtc::scoped_refptr<RTCCertificate> certificate =
-      RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(key_type),
+  scoped_refptr<RTCCertificate> certificate =
+      RTCCertificateGenerator::GenerateCertificate(KeyParams(key_type),
                                                    expires);
   RTCCertificatePEM pem = certificate->ToPEM();
   return Java_RtcCertificatePem_Constructor(
diff --git a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
index 96fb89a..b96968b 100644
--- a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
+++ b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
@@ -105,7 +105,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtcStatsReport(
     JNIEnv* env,
-    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+    const scoped_refptr<const RTCStatsReport>& report) {
   ScopedJavaLocalRef<jobject> j_stats_map =
       NativeToJavaMap(env, *report, [](JNIEnv* env, const RTCStats& stats) {
         return std::make_pair(NativeToJavaString(env, stats.id()),
@@ -124,7 +124,7 @@
 RTCStatsCollectorCallbackWrapper::~RTCStatsCollectorCallbackWrapper() = default;
 
 void RTCStatsCollectorCallbackWrapper::OnStatsDelivered(
-    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+    const scoped_refptr<const RTCStatsReport>& report) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   Java_RTCStatsCollectorCallback_onStatsDelivered(
       jni, j_callback_global_, NativeToJavaRtcStatsReport(jni, report));
diff --git a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
index 50fad18..98d08ce 100644
--- a/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
+++ b/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
@@ -29,7 +29,7 @@
   ~RTCStatsCollectorCallbackWrapper() override;
 
   void OnStatsDelivered(
-      const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+      const scoped_refptr<const RTCStatsReport>& report) override;
 
  private:
   const ScopedJavaGlobalRef<jobject> j_callback_global_;
diff --git a/sdk/android/src/jni/pc/rtp_receiver.cc b/sdk/android/src/jni/pc/rtp_receiver.cc
index 998fae7..56dc78d 100644
--- a/sdk/android/src/jni/pc/rtp_receiver.cc
+++ b/sdk/android/src/jni/pc/rtp_receiver.cc
@@ -47,7 +47,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+    scoped_refptr<RtpReceiverInterface> receiver) {
   // Receiver is now owned by Java object, and will be freed from there.
   return Java_RtpReceiver_Constructor(env,
                                       jlongFromPointer(receiver.release()));
@@ -120,7 +120,7 @@
                                               jlong j_rtp_sender_pointer,
                                               jlong j_frame_decryptor_pointer) {
   reinterpret_cast<RtpReceiverInterface*>(j_rtp_sender_pointer)
-      ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+      ->SetFrameDecryptor(scoped_refptr<FrameDecryptorInterface>(
           reinterpret_cast<FrameDecryptorInterface*>(
               j_frame_decryptor_pointer)));
 }
diff --git a/sdk/android/src/jni/pc/rtp_receiver.h b/sdk/android/src/jni/pc/rtp_receiver.h
index ccef44b..20bc05d 100644
--- a/sdk/android/src/jni/pc/rtp_receiver.h
+++ b/sdk/android/src/jni/pc/rtp_receiver.h
@@ -21,7 +21,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpReceiverInterface> receiver);
+    scoped_refptr<RtpReceiverInterface> receiver);
 
 // Takes ownership of the passed `j_receiver` and stores it as a global
 // reference. Will call dispose() in the dtor.
diff --git a/sdk/android/src/jni/pc/rtp_sender.cc b/sdk/android/src/jni/pc/rtp_sender.cc
index 31cec9d..e63d0cc 100644
--- a/sdk/android/src/jni/pc/rtp_sender.cc
+++ b/sdk/android/src/jni/pc/rtp_sender.cc
@@ -21,7 +21,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpSenderInterface> sender) {
+    scoped_refptr<RtpSenderInterface> sender) {
   if (!sender)
     return nullptr;
   // Sender is now owned by the Java object, and will be freed from
@@ -104,7 +104,7 @@
                                             jlong j_rtp_sender_pointer,
                                             jlong j_frame_encryptor_pointer) {
   reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
-      ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>(
+      ->SetFrameEncryptor(scoped_refptr<FrameEncryptorInterface>(
           reinterpret_cast<FrameEncryptorInterface*>(
               j_frame_encryptor_pointer)));
 }
diff --git a/sdk/android/src/jni/pc/rtp_sender.h b/sdk/android/src/jni/pc/rtp_sender.h
index d782ca9..5d8a40c 100644
--- a/sdk/android/src/jni/pc/rtp_sender.h
+++ b/sdk/android/src/jni/pc/rtp_sender.h
@@ -21,7 +21,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpSenderInterface> sender);
+    scoped_refptr<RtpSenderInterface> sender);
 
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/rtp_transceiver.cc b/sdk/android/src/jni/pc/rtp_transceiver.cc
index c2b4920..d7425b9 100644
--- a/sdk/android/src/jni/pc/rtp_transceiver.cc
+++ b/sdk/android/src/jni/pc/rtp_transceiver.cc
@@ -62,7 +62,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+    scoped_refptr<RtpTransceiverInterface> transceiver) {
   if (!transceiver) {
     return nullptr;
   }
diff --git a/sdk/android/src/jni/pc/rtp_transceiver.h b/sdk/android/src/jni/pc/rtp_transceiver.h
index 5b2d012..090c5f2 100644
--- a/sdk/android/src/jni/pc/rtp_transceiver.h
+++ b/sdk/android/src/jni/pc/rtp_transceiver.h
@@ -25,7 +25,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
     JNIEnv* env,
-    rtc::scoped_refptr<RtpTransceiverInterface> transceiver);
+    scoped_refptr<RtpTransceiverInterface> transceiver);
 
 // This takes ownership of the `j_transceiver` and stores it as a global
 // reference. This calls the Java Transceiver's dispose() method with the dtor.
diff --git a/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc b/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
index c5ea440..9bd7042 100644
--- a/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
+++ b/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
@@ -28,7 +28,7 @@
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
 
   // Serialize the der encoding of the cert into a jbyteArray
-  rtc::Buffer cert_der_buffer;
+  Buffer cert_der_buffer;
   certificate.ToDER(&cert_der_buffer);
   ScopedJavaLocalRef<jbyteArray> jni_buffer(
       jni, jni->NewByteArray(cert_der_buffer.size()));
diff --git a/sdk/android/src/jni/pc/stats_observer.cc b/sdk/android/src/jni/pc/stats_observer.cc
index 6d4a31d..7511100 100644
--- a/sdk/android/src/jni/pc/stats_observer.cc
+++ b/sdk/android/src/jni/pc/stats_observer.cc
@@ -24,7 +24,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaStatsReportValue(
     JNIEnv* env,
-    const rtc::scoped_refptr<StatsReport::Value>& value_ptr) {
+    const scoped_refptr<StatsReport::Value>& value_ptr) {
   // Should we use the '.name' enum value here instead of converting the
   // name to a string?
   return Java_Value_Constructor(
diff --git a/sdk/android/src/jni/pc/video.cc b/sdk/android/src/jni/pc/video.cc
index 25447fb..1aa7c3f 100644
--- a/sdk/android/src/jni/pc/video.cc
+++ b/sdk/android/src/jni/pc/video.cc
@@ -46,7 +46,7 @@
                         Thread* worker_thread,
                         jboolean is_screencast,
                         jboolean align_timestamps) {
-  auto source = rtc::make_ref_counted<AndroidVideoTrackSource>(
+  auto source = make_ref_counted<AndroidVideoTrackSource>(
       signaling_thread, env, is_screencast, align_timestamps);
   return source.release();
 }
diff --git a/sdk/android/src/jni/video_decoder_wrapper.cc b/sdk/android/src/jni/video_decoder_wrapper.cc
index 1edd1cb..60e2d64 100644
--- a/sdk/android/src/jni/video_decoder_wrapper.cc
+++ b/sdk/android/src/jni/video_decoder_wrapper.cc
@@ -38,7 +38,7 @@
 
 template <typename Dst, typename Src>
 inline std::optional<Dst> cast_optional(const std::optional<Src>& value) {
-  return value ? std::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+  return value ? std::optional<Dst>(dchecked_cast<Dst, Src>(*value))
                : std::nullopt;
 }
 }  // namespace
diff --git a/sdk/android/src/jni/video_encoder_wrapper.cc b/sdk/android/src/jni/video_encoder_wrapper.cc
index 3c11f7e..6f70b74 100644
--- a/sdk/android/src/jni/video_encoder_wrapper.cc
+++ b/sdk/android/src/jni/video_encoder_wrapper.cc
@@ -343,7 +343,7 @@
   return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
 
-int VideoEncoderWrapper::ParseQp(rtc::ArrayView<const uint8_t> buffer) {
+int VideoEncoderWrapper::ParseQp(ArrayView<const uint8_t> buffer) {
   int qp;
   bool success;
   switch (codec_settings_.codecType) {
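
`webrtc::ArrayView<const uint8_t>` (api/array_view.h, formerly `rtc::ArrayView`) is a non-owning pointer-plus-size view, so `ParseQp()` can accept any contiguous byte range without copying. A minimal sketch; the consumer function and sample bytes are placeholders, not the real QP parser:

    #include <cstdint>
    #include <vector>

    #include "api/array_view.h"  // webrtc::ArrayView

    // Placeholder consumer standing in for VideoEncoderWrapper::ParseQp().
    int CountZeroBytes(webrtc::ArrayView<const uint8_t> buffer) {
      int zeros = 0;
      for (uint8_t byte : buffer) {
        if (byte == 0) ++zeros;
      }
      return zeros;
    }

    void ArrayViewSketch() {
      std::vector<uint8_t> encoded = {0x00, 0x00, 0x01, 0x65};
      // ArrayView converts implicitly from containers exposing data() and size().
      int zeros = CountZeroBytes(encoded);
      (void)zeros;
    }
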
diff --git a/sdk/android/src/jni/video_encoder_wrapper.h b/sdk/android/src/jni/video_encoder_wrapper.h
index 651f162..4998441 100644
--- a/sdk/android/src/jni/video_encoder_wrapper.h
+++ b/sdk/android/src/jni/video_encoder_wrapper.h
@@ -71,7 +71,7 @@
                            const JavaRef<jobject>& j_value,
                            const char* method_name);
 
-  int ParseQp(rtc::ArrayView<const uint8_t> buffer);
+  int ParseQp(ArrayView<const uint8_t> buffer);
 
   CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame);
 
diff --git a/sdk/android/src/jni/video_frame.cc b/sdk/android/src/jni/video_frame.cc
index 2c248df..910778c 100644
--- a/sdk/android/src/jni/video_frame.cc
+++ b/sdk/android/src/jni/video_frame.cc
@@ -25,14 +25,14 @@
 class AndroidVideoBuffer : public VideoFrameBuffer {
  public:
   // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
-  static rtc::scoped_refptr<AndroidVideoBuffer> Create(
+  static scoped_refptr<AndroidVideoBuffer> Create(
       JNIEnv* jni,
       const JavaRef<jobject>& j_video_frame_buffer);
 
   // Similar to the Create() above, but adopts and takes ownership of the Java
   // VideoFrame.Buffer. I.e. retain() will not be called, but release() will be
   // called when the returned AndroidVideoBuffer is destroyed.
-  static rtc::scoped_refptr<AndroidVideoBuffer> Adopt(
+  static scoped_refptr<AndroidVideoBuffer> Adopt(
       JNIEnv* jni,
       const JavaRef<jobject>& j_video_frame_buffer);
 
@@ -42,12 +42,12 @@
 
   // Crops a region defined by `crop_x`, `crop_y`, `crop_width` and
   // `crop_height`. Scales it to size `scale_width` x `scale_height`.
-  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int crop_x,
-                                                    int crop_y,
-                                                    int crop_width,
-                                                    int crop_height,
-                                                    int scale_width,
-                                                    int scale_height) override;
+  scoped_refptr<VideoFrameBuffer> CropAndScale(int crop_x,
+                                               int crop_y,
+                                               int crop_width,
+                                               int crop_height,
+                                               int scale_width,
+                                               int scale_height) override;
 
  protected:
   // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use
@@ -59,7 +59,7 @@
   int width() const override;
   int height() const override;
 
-  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+  scoped_refptr<I420BufferInterface> ToI420() override;
 
   const int width_;
   const int height_;
@@ -70,7 +70,7 @@
 class AndroidVideoI420Buffer : public I420BufferInterface {
  public:
   // Creates a native VideoFrameBuffer from a Java VideoFrame.I420Buffer.
-  static rtc::scoped_refptr<AndroidVideoI420Buffer> Create(
+  static scoped_refptr<AndroidVideoI420Buffer> Create(
       JNIEnv* jni,
       int width,
       int height,
@@ -79,7 +79,7 @@
   // Adopts and takes ownership of the Java VideoFrame.Buffer. I.e. retain()
   // will not be called, but release() will be called when the returned
   // AndroidVideoI420Buffer is destroyed.
-  static rtc::scoped_refptr<AndroidVideoI420Buffer> Adopt(
+  static scoped_refptr<AndroidVideoI420Buffer> Adopt(
       JNIEnv* jni,
       int width,
       int height,
@@ -119,7 +119,7 @@
   int stride_v_;
 };
 
-rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Create(
+scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Create(
     JNIEnv* jni,
     int width,
     int height,
@@ -129,7 +129,7 @@
                                        j_video_frame_buffer);
 }
 
-rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
+scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
     JNIEnv* jni,
     int width,
     int height,
@@ -137,8 +137,8 @@
   RTC_DCHECK_EQ(
       static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
       Type::kI420);
-  return rtc::make_ref_counted<AndroidVideoI420Buffer>(jni, width, height,
-                                                       j_video_frame_buffer);
+  return make_ref_counted<AndroidVideoI420Buffer>(jni, width, height,
+                                                  j_video_frame_buffer);
 }
 
 AndroidVideoI420Buffer::AndroidVideoI420Buffer(
@@ -180,16 +180,16 @@
   return Java_VideoFrame_getTimestampNs(jni, j_video_frame);
 }
 
-rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
+scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
     JNIEnv* jni,
     const JavaRef<jobject>& j_video_frame_buffer) {
   RTC_DCHECK_EQ(
       static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
       Type::kNative);
-  return rtc::make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer);
+  return make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer);
 }
 
-rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
+scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
     JNIEnv* jni,
     const JavaRef<jobject>& j_video_frame_buffer) {
   Java_Buffer_retain(jni, j_video_frame_buffer);
@@ -213,7 +213,7 @@
   return j_video_frame_buffer_;
 }
 
-rtc::scoped_refptr<VideoFrameBuffer> AndroidVideoBuffer::CropAndScale(
+scoped_refptr<VideoFrameBuffer> AndroidVideoBuffer::CropAndScale(
     int crop_x,
     int crop_y,
     int crop_width,
@@ -238,7 +238,7 @@
   return height_;
 }
 
-rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
+scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedJavaLocalRef<jobject> j_i420_buffer =
       Java_Buffer_toI420(jni, j_video_frame_buffer_);
@@ -252,7 +252,7 @@
   return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
 }
 
-rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
     JNIEnv* jni,
     const JavaRef<jobject>& j_video_frame_buffer) {
   VideoFrameBuffer::Type type = static_cast<VideoFrameBuffer::Type>(
@@ -278,7 +278,7 @@
       Java_VideoFrame_getBuffer(jni, j_video_frame);
   int rotation = Java_VideoFrame_getRotation(jni, j_video_frame);
   int64_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
-  rtc::scoped_refptr<VideoFrameBuffer> buffer =
+  scoped_refptr<VideoFrameBuffer> buffer =
       JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
   return VideoFrame::Builder()
       .set_video_frame_buffer(buffer)
@@ -290,7 +290,7 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
                                                    const VideoFrame& frame) {
-  rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
+  scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
 
   if (buffer->type() == VideoFrameBuffer::Type::kNative) {
     AndroidVideoBuffer* android_buffer =
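
The accessors above all return bare `scoped_refptr<...>` now that they resolve inside `namespace webrtc`. A short consumer-side sketch of the same types, assuming a frame delivered by some video sink; the null check is defensive rather than taken from the code above:

    #include <cstdint>

    #include "api/scoped_refptr.h"
    #include "api/video/video_frame.h"
    #include "api/video/video_frame_buffer.h"

    // Sketch of a consumer reading pixel data from an incoming frame.
    void InspectFrame(const webrtc::VideoFrame& frame) {
      webrtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
          frame.video_frame_buffer();
      // ToI420() converts native buffers (such as AndroidVideoBuffer above)
      // into a CPU-accessible I420 representation.
      webrtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buffer->ToI420();
      if (!i420) {
        return;  // conversion may fail for some native buffers
      }
      const uint8_t* y_plane = i420->DataY();
      int y_stride = i420->StrideY();
      (void)y_plane;
      (void)y_stride;
    }
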
diff --git a/sdk/android/src/jni/video_frame.h b/sdk/android/src/jni/video_frame.h
index 9b916de..6335bec 100644
--- a/sdk/android/src/jni/video_frame.h
+++ b/sdk/android/src/jni/video_frame.h
@@ -20,7 +20,7 @@
 namespace webrtc {
 namespace jni {
 
-rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
     JNIEnv* jni,
     const JavaRef<jobject>& j_video_frame_buffer);
 
diff --git a/sdk/android/src/jni/wrapped_native_i420_buffer.cc b/sdk/android/src/jni/wrapped_native_i420_buffer.cc
index f2c543e..c28336a 100644
--- a/sdk/android/src/jni/wrapped_native_i420_buffer.cc
+++ b/sdk/android/src/jni/wrapped_native_i420_buffer.cc
@@ -19,7 +19,7 @@
 // TODO(magjed): Write a test for this function.
 ScopedJavaLocalRef<jobject> WrapI420Buffer(
     JNIEnv* jni,
-    const rtc::scoped_refptr<I420BufferInterface>& i420_buffer) {
+    const scoped_refptr<I420BufferInterface>& i420_buffer) {
   ScopedJavaLocalRef<jobject> y_buffer =
       NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataY()),
                           i420_buffer->StrideY() * i420_buffer->height());
diff --git a/sdk/android/src/jni/wrapped_native_i420_buffer.h b/sdk/android/src/jni/wrapped_native_i420_buffer.h
index 70ad062..12750af 100644
--- a/sdk/android/src/jni/wrapped_native_i420_buffer.h
+++ b/sdk/android/src/jni/wrapped_native_i420_buffer.h
@@ -23,7 +23,7 @@
 // VideoFrame.I420Buffer as a jobject.
 ScopedJavaLocalRef<jobject> WrapI420Buffer(
     JNIEnv* jni,
-    const rtc::scoped_refptr<I420BufferInterface>& i420_buffer);
+    const scoped_refptr<I420BufferInterface>& i420_buffer);
 
 }  // namespace jni
 }  // namespace webrtc
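
On the native side, the `I420BufferInterface` handed to `WrapI420Buffer()` is typically backed by `webrtc::I420Buffer` (api/video/i420_buffer.h). A minimal sketch of producing such a buffer, assuming the `Create()` and `SetBlack()` helpers from that header; the function name is illustrative:

    #include "api/scoped_refptr.h"
    #include "api/video/i420_buffer.h"

    // Allocates an owned I420 buffer and fills it with black pixels.
    webrtc::scoped_refptr<webrtc::I420BufferInterface> MakeBlackBuffer(
        int width, int height) {
      webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
          webrtc::I420Buffer::Create(width, height);
      webrtc::I420Buffer::SetBlack(buffer.get());
      return buffer;
    }
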
diff --git a/sdk/media_constraints.cc b/sdk/media_constraints.cc
index d867994..0623071 100644
--- a/sdk/media_constraints.cc
+++ b/sdk/media_constraints.cc
@@ -36,7 +36,7 @@
   if (!FindConstraint(constraints, key, &string_value, mandatory_constraints)) {
     return false;
   }
-  return rtc::FromString(string_value, value);
+  return FromString(string_value, value);
 }
 
 // Specialization for std::string, since a string doesn't need conversion.