Replace `new rtc::RefCountedObject` with `rtc::make_ref_counted` in a few files

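rtc::make_ref_counted<T>(...) creates the object (wrapping it in
rtc::RefCountedObject<T> when needed) and returns an rtc::scoped_refptr<T>
directly, so call sites no longer spell out the wrapper type or rely on
the implicit raw-pointer-to-scoped_refptr conversion. A minimal sketch of
the before/after pattern (Foo is a hypothetical class, not one touched by
this CL):

    // Before: explicit wrapper type, implicit conversion to scoped_refptr.
    rtc::scoped_refptr<Foo> foo(new rtc::RefCountedObject<Foo>(arg));

    // After: the factory function returns rtc::scoped_refptr<Foo> directly.
    auto foo = rtc::make_ref_counted<Foo>(arg);
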
Bug: webrtc:12701
Change-Id: Ie50225374f811424faf20caf4cf454b2fd1c4dc9
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/215930
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33818}
diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc
index 4c3d9e6..04215b0 100644
--- a/api/audio/echo_detector_creator.cc
+++ b/api/audio/echo_detector_creator.cc
@@ -15,7 +15,7 @@
 namespace webrtc {
 
 rtc::scoped_refptr<EchoDetector> CreateEchoDetector() {
-  return new rtc::RefCountedObject<ResidualEchoDetector>();
+  return rtc::make_ref_counted<ResidualEchoDetector>();
 }
 
 }  // namespace webrtc
diff --git a/api/audio_codecs/audio_decoder_factory_template.h b/api/audio_codecs/audio_decoder_factory_template.h
index e628cb6..388668d 100644
--- a/api/audio_codecs/audio_decoder_factory_template.h
+++ b/api/audio_codecs/audio_decoder_factory_template.h
@@ -123,9 +123,8 @@
   static_assert(sizeof...(Ts) >= 1,
                 "Caller must give at least one template parameter");
 
-  return rtc::scoped_refptr<AudioDecoderFactory>(
-      new rtc::RefCountedObject<
-          audio_decoder_factory_template_impl::AudioDecoderFactoryT<Ts...>>());
+  return rtc::make_ref_counted<
+      audio_decoder_factory_template_impl::AudioDecoderFactoryT<Ts...>>();
 }
 
 }  // namespace webrtc
diff --git a/api/audio_codecs/audio_encoder_factory_template.h b/api/audio_codecs/audio_encoder_factory_template.h
index 74cb053..cdc7def 100644
--- a/api/audio_codecs/audio_encoder_factory_template.h
+++ b/api/audio_codecs/audio_encoder_factory_template.h
@@ -142,9 +142,8 @@
   static_assert(sizeof...(Ts) >= 1,
                 "Caller must give at least one template parameter");
 
-  return rtc::scoped_refptr<AudioEncoderFactory>(
-      new rtc::RefCountedObject<
-          audio_encoder_factory_template_impl::AudioEncoderFactoryT<Ts...>>());
+  return rtc::make_ref_counted<
+      audio_encoder_factory_template_impl::AudioEncoderFactoryT<Ts...>>();
 }
 
 }  // namespace webrtc
diff --git a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
index 0e2e8c2..464ecfd 100644
--- a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
+++ b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc
@@ -78,7 +78,7 @@
 
 TEST(AudioDecoderFactoryTemplateTest, NoDecoderTypes) {
   rtc::scoped_refptr<AudioDecoderFactory> factory(
-      new rtc::RefCountedObject<
+      rtc::make_ref_counted<
           audio_decoder_factory_template_impl::AudioDecoderFactoryT<>>());
   EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::IsEmpty());
   EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1}));
diff --git a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
index 95ea855..110f993 100644
--- a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
+++ b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc
@@ -78,7 +78,7 @@
 
 TEST(AudioEncoderFactoryTemplateTest, NoEncoderTypes) {
   rtc::scoped_refptr<AudioEncoderFactory> factory(
-      new rtc::RefCountedObject<
+      rtc::make_ref_counted<
           audio_encoder_factory_template_impl::AudioEncoderFactoryT<>>());
   EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::IsEmpty());
   EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1}));
diff --git a/api/ice_transport_factory.cc b/api/ice_transport_factory.cc
index d507812..26ef88b 100644
--- a/api/ice_transport_factory.cc
+++ b/api/ice_transport_factory.cc
@@ -60,12 +60,12 @@
     IceTransportInit init) {
   if (init.async_resolver_factory()) {
     // Backwards compatibility mode
-    return new rtc::RefCountedObject<IceTransportWithTransportChannel>(
+    return rtc::make_ref_counted<IceTransportWithTransportChannel>(
         std::make_unique<cricket::P2PTransportChannel>(
             "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(),
             init.async_resolver_factory(), init.event_log()));
   } else {
-    return new rtc::RefCountedObject<IceTransportWithTransportChannel>(
+    return rtc::make_ref_counted<IceTransportWithTransportChannel>(
         cricket::P2PTransportChannel::Create(
             "", cricket::ICE_CANDIDATE_COMPONENT_RTP, init.port_allocator(),
             init.async_dns_resolver_factory(), init.event_log()));
diff --git a/api/proxy.h b/api/proxy.h
index 3be9f93..d14e0b2 100644
--- a/api/proxy.h
+++ b/api/proxy.h
@@ -253,26 +253,26 @@
   void DestroyInternal() { delete c_; }                \
   INTERNAL_CLASS* c_;
 
-#define BEGIN_PRIMARY_PROXY_MAP(c)                                             \
-  PROXY_MAP_BOILERPLATE(c)                                                     \
-  PRIMARY_PROXY_MAP_BOILERPLATE(c)                                             \
-  REFCOUNTED_PROXY_MAP_BOILERPLATE(c)                                          \
- public:                                                                       \
-  static rtc::scoped_refptr<c##ProxyWithInternal> Create(                      \
-      rtc::Thread* primary_thread, INTERNAL_CLASS* c) {                        \
-    return new rtc::RefCountedObject<c##ProxyWithInternal>(primary_thread, c); \
+#define BEGIN_PRIMARY_PROXY_MAP(c)                                         \
+  PROXY_MAP_BOILERPLATE(c)                                                 \
+  PRIMARY_PROXY_MAP_BOILERPLATE(c)                                         \
+  REFCOUNTED_PROXY_MAP_BOILERPLATE(c)                                      \
+ public:                                                                   \
+  static rtc::scoped_refptr<c##ProxyWithInternal> Create(                  \
+      rtc::Thread* primary_thread, INTERNAL_CLASS* c) {                    \
+    return rtc::make_ref_counted<c##ProxyWithInternal>(primary_thread, c); \
   }
 
-#define BEGIN_PROXY_MAP(c)                                        \
-  PROXY_MAP_BOILERPLATE(c)                                        \
-  SECONDARY_PROXY_MAP_BOILERPLATE(c)                              \
-  REFCOUNTED_PROXY_MAP_BOILERPLATE(c)                             \
- public:                                                          \
-  static rtc::scoped_refptr<c##ProxyWithInternal> Create(         \
-      rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \
-      INTERNAL_CLASS* c) {                                        \
-    return new rtc::RefCountedObject<c##ProxyWithInternal>(       \
-        primary_thread, secondary_thread, c);                     \
+#define BEGIN_PROXY_MAP(c)                                                   \
+  PROXY_MAP_BOILERPLATE(c)                                                   \
+  SECONDARY_PROXY_MAP_BOILERPLATE(c)                                         \
+  REFCOUNTED_PROXY_MAP_BOILERPLATE(c)                                        \
+ public:                                                                     \
+  static rtc::scoped_refptr<c##ProxyWithInternal> Create(                    \
+      rtc::Thread* primary_thread, rtc::Thread* secondary_thread,            \
+      INTERNAL_CLASS* c) {                                                   \
+    return rtc::make_ref_counted<c##ProxyWithInternal>(primary_thread,       \
+                                                       secondary_thread, c); \
   }
 
 #define BEGIN_OWNED_PROXY_MAP(c)                                   \
diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc
index 61d921c..fc77b94 100644
--- a/api/video/encoded_image.cc
+++ b/api/video/encoded_image.cc
@@ -32,13 +32,13 @@
 
 // static
 rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
-  return new rtc::RefCountedObject<EncodedImageBuffer>(size);
+  return rtc::make_ref_counted<EncodedImageBuffer>(size);
 }
 // static
 rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
     const uint8_t* data,
     size_t size) {
-  return new rtc::RefCountedObject<EncodedImageBuffer>(data, size);
+  return rtc::make_ref_counted<EncodedImageBuffer>(data, size);
 }
 
 const uint8_t* EncodedImageBuffer::data() const {
diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc
index 7286676..74d37d1 100644
--- a/api/video/i010_buffer.cc
+++ b/api/video/i010_buffer.cc
@@ -56,8 +56,8 @@
 
 // static
 rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
-  return new rtc::RefCountedObject<I010Buffer>(
-      width, height, width, (width + 1) / 2, (width + 1) / 2);
+  return rtc::make_ref_counted<I010Buffer>(width, height, width,
+                                           (width + 1) / 2, (width + 1) / 2);
 }
 
 // static
diff --git a/api/video/i420_buffer.cc b/api/video/i420_buffer.cc
index 2a52217..8783a4a 100644
--- a/api/video/i420_buffer.cc
+++ b/api/video/i420_buffer.cc
@@ -60,7 +60,7 @@
 
 // static
 rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
-  return new rtc::RefCountedObject<I420Buffer>(width, height);
+  return rtc::make_ref_counted<I420Buffer>(width, height);
 }
 
 // static
@@ -69,8 +69,8 @@
                                                   int stride_y,
                                                   int stride_u,
                                                   int stride_v) {
-  return new rtc::RefCountedObject<I420Buffer>(width, height, stride_y,
-                                               stride_u, stride_v);
+  return rtc::make_ref_counted<I420Buffer>(width, height, stride_y, stride_u,
+                                           stride_v);
 }
 
 // static
diff --git a/api/video/nv12_buffer.cc b/api/video/nv12_buffer.cc
index cfa85ac..974620b 100644
--- a/api/video/nv12_buffer.cc
+++ b/api/video/nv12_buffer.cc
@@ -49,7 +49,7 @@
 
 // static
 rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width, int height) {
-  return new rtc::RefCountedObject<NV12Buffer>(width, height);
+  return rtc::make_ref_counted<NV12Buffer>(width, height);
 }
 
 // static
@@ -57,8 +57,7 @@
                                                   int height,
                                                   int stride_y,
                                                   int stride_uv) {
-  return new rtc::RefCountedObject<NV12Buffer>(width, height, stride_y,
-                                               stride_uv);
+  return rtc::make_ref_counted<NV12Buffer>(width, height, stride_y, stride_uv);
 }
 
 // static
diff --git a/api/voip/test/voip_engine_factory_unittest.cc b/api/voip/test/voip_engine_factory_unittest.cc
index 84b474f..f967a0b 100644
--- a/api/voip/test/voip_engine_factory_unittest.cc
+++ b/api/voip/test/voip_engine_factory_unittest.cc
@@ -24,11 +24,11 @@
 // Create voip engine with mock modules as normal use case.
 TEST(VoipEngineFactoryTest, CreateEngineWithMockModules) {
   VoipEngineConfig config;
-  config.encoder_factory = new rtc::RefCountedObject<MockAudioEncoderFactory>();
-  config.decoder_factory = new rtc::RefCountedObject<MockAudioDecoderFactory>();
+  config.encoder_factory = rtc::make_ref_counted<MockAudioEncoderFactory>();
+  config.decoder_factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
   config.task_queue_factory = CreateDefaultTaskQueueFactory();
   config.audio_processing =
-      new rtc::RefCountedObject<testing::NiceMock<test::MockAudioProcessing>>();
+      rtc::make_ref_counted<testing::NiceMock<test::MockAudioProcessing>>();
   config.audio_device_module = test::MockAudioDeviceModule::CreateNice();
 
   auto voip_engine = CreateVoipEngine(std::move(config));
@@ -38,8 +38,8 @@
 // Create voip engine without setting audio processing as optional component.
 TEST(VoipEngineFactoryTest, UseNoAudioProcessing) {
   VoipEngineConfig config;
-  config.encoder_factory = new rtc::RefCountedObject<MockAudioEncoderFactory>();
-  config.decoder_factory = new rtc::RefCountedObject<MockAudioDecoderFactory>();
+  config.encoder_factory = rtc::make_ref_counted<MockAudioEncoderFactory>();
+  config.decoder_factory = rtc::make_ref_counted<MockAudioDecoderFactory>();
   config.task_queue_factory = CreateDefaultTaskQueueFactory();
   config.audio_device_module = test::MockAudioDeviceModule::CreateNice();
 
diff --git a/audio/audio_receive_stream_unittest.cc b/audio/audio_receive_stream_unittest.cc
index 99e3a56..72244dd 100644
--- a/audio/audio_receive_stream_unittest.cc
+++ b/audio/audio_receive_stream_unittest.cc
@@ -74,7 +74,7 @@
 
 struct ConfigHelper {
   explicit ConfigHelper(bool use_null_audio_processing)
-      : ConfigHelper(new rtc::RefCountedObject<MockAudioMixer>(),
+      : ConfigHelper(rtc::make_ref_counted<MockAudioMixer>(),
                      use_null_audio_processing) {}
 
   ConfigHelper(rtc::scoped_refptr<MockAudioMixer> audio_mixer,
@@ -87,9 +87,9 @@
     config.audio_processing =
         use_null_audio_processing
             ? nullptr
-            : new rtc::RefCountedObject<NiceMock<MockAudioProcessing>>();
+            : rtc::make_ref_counted<NiceMock<MockAudioProcessing>>();
     config.audio_device_module =
-        new rtc::RefCountedObject<testing::NiceMock<MockAudioDeviceModule>>();
+        rtc::make_ref_counted<testing::NiceMock<MockAudioDeviceModule>>();
     audio_state_ = AudioState::Create(config);
 
     channel_receive_ = new ::testing::StrictMock<MockChannelReceive>();
@@ -117,7 +117,7 @@
         RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId));
     stream_config_.rtcp_send_transport = &rtcp_send_transport_;
     stream_config_.decoder_factory =
-        new rtc::RefCountedObject<MockAudioDecoderFactory>;
+        rtc::make_ref_counted<MockAudioDecoderFactory>();
   }
 
   std::unique_ptr<internal::AudioReceiveStream> CreateAudioReceiveStream() {
@@ -358,14 +358,14 @@
 
     auto new_config_0 = helper.config();
     rtc::scoped_refptr<FrameDecryptorInterface> mock_frame_decryptor_0(
-        new rtc::RefCountedObject<MockFrameDecryptor>());
+        rtc::make_ref_counted<MockFrameDecryptor>());
     new_config_0.frame_decryptor = mock_frame_decryptor_0;
 
     recv_stream->Reconfigure(new_config_0);
 
     auto new_config_1 = helper.config();
     rtc::scoped_refptr<FrameDecryptorInterface> mock_frame_decryptor_1(
-        new rtc::RefCountedObject<MockFrameDecryptor>());
+        rtc::make_ref_counted<MockFrameDecryptor>());
     new_config_1.frame_decryptor = mock_frame_decryptor_1;
     new_config_1.crypto_options.sframe.require_frame_encryption = true;
     recv_stream->Reconfigure(new_config_1);
diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc
index f76a8fa..357e080 100644
--- a/audio/audio_send_stream_unittest.cc
+++ b/audio/audio_send_stream_unittest.cc
@@ -121,7 +121,7 @@
 
 rtc::scoped_refptr<MockAudioEncoderFactory> SetupEncoderFactoryMock() {
   rtc::scoped_refptr<MockAudioEncoderFactory> factory =
-      new rtc::RefCountedObject<MockAudioEncoderFactory>();
+      rtc::make_ref_counted<MockAudioEncoderFactory>();
   ON_CALL(*factory.get(), GetSupportedEncoders())
       .WillByDefault(Return(std::vector<AudioCodecSpec>(
           std::begin(kCodecSpecs), std::end(kCodecSpecs))));
@@ -154,7 +154,7 @@
         audio_processing_(
             use_null_audio_processing
                 ? nullptr
-                : new rtc::RefCountedObject<NiceMock<MockAudioProcessing>>()),
+                : rtc::make_ref_counted<NiceMock<MockAudioProcessing>>()),
         bitrate_allocator_(&limit_observer_),
         worker_queue_(task_queue_factory_->CreateTaskQueue(
             "ConfigHelper_worker_queue",
@@ -165,8 +165,7 @@
     AudioState::Config config;
     config.audio_mixer = AudioMixerImpl::Create();
     config.audio_processing = audio_processing_;
-    config.audio_device_module =
-        new rtc::RefCountedObject<MockAudioDeviceModule>();
+    config.audio_device_module = rtc::make_ref_counted<MockAudioDeviceModule>();
     audio_state_ = AudioState::Create(config);
 
     SetupDefaultChannelSend(audio_bwe_enabled);
@@ -923,7 +922,7 @@
     auto new_config = helper.config();
 
     rtc::scoped_refptr<FrameEncryptorInterface> mock_frame_encryptor_0(
-        new rtc::RefCountedObject<MockFrameEncryptor>());
+        rtc::make_ref_counted<MockFrameEncryptor>());
     new_config.frame_encryptor = mock_frame_encryptor_0;
     EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr)))
         .Times(1);
@@ -936,7 +935,7 @@
     // Updating frame encryptor to a new object should force a call to the
     // proxy.
     rtc::scoped_refptr<FrameEncryptorInterface> mock_frame_encryptor_1(
-        new rtc::RefCountedObject<MockFrameEncryptor>());
+        rtc::make_ref_counted<MockFrameEncryptor>());
     new_config.frame_encryptor = mock_frame_encryptor_1;
     new_config.crypto_options.sframe.require_frame_encryption = true;
     EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr)))
diff --git a/audio/audio_state.cc b/audio/audio_state.cc
index 566bae1..0e60f03 100644
--- a/audio/audio_state.cc
+++ b/audio/audio_state.cc
@@ -187,6 +187,6 @@
 
 rtc::scoped_refptr<AudioState> AudioState::Create(
     const AudioState::Config& config) {
-  return new rtc::RefCountedObject<internal::AudioState>(config);
+  return rtc::make_ref_counted<internal::AudioState>(config);
 }
 }  // namespace webrtc
diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc
index 02fc04e..5f07a7b3 100644
--- a/audio/audio_state_unittest.cc
+++ b/audio/audio_state_unittest.cc
@@ -90,7 +90,7 @@
   FakeTaskQueueFactory task_queue_factory_;
 
   rtc::scoped_refptr<AsyncAudioProcessing::Factory> CreateFactory() {
-    return new rtc::RefCountedObject<AsyncAudioProcessing::Factory>(
+    return rtc::make_ref_counted<AsyncAudioProcessing::Factory>(
         audio_frame_processor_, task_queue_factory_);
   }
 };
@@ -107,10 +107,9 @@
     audio_state_config.audio_processing =
         params.use_null_audio_processing
             ? nullptr
-            : new rtc::RefCountedObject<
-                  testing::NiceMock<MockAudioProcessing>>();
+            : rtc::make_ref_counted<testing::NiceMock<MockAudioProcessing>>();
     audio_state_config.audio_device_module =
-        new rtc::RefCountedObject<NiceMock<MockAudioDeviceModule>>();
+        rtc::make_ref_counted<NiceMock<MockAudioDeviceModule>>();
     if (params.use_async_audio_processing) {
       audio_state_config.async_audio_processing_factory =
           async_audio_processing_helper_.CreateFactory();
@@ -183,7 +182,7 @@
 TEST_P(AudioStateTest, ConstructDestruct) {
   ConfigHelper helper(GetParam());
   rtc::scoped_refptr<internal::AudioState> audio_state(
-      new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+      rtc::make_ref_counted<internal::AudioState>(helper.config()));
 }
 
 TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) {
@@ -196,7 +195,7 @@
   }
 
   rtc::scoped_refptr<internal::AudioState> audio_state(
-      new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+      rtc::make_ref_counted<internal::AudioState>(helper.config()));
 
   MockAudioSendStream stream;
   audio_state->AddSendingStream(&stream, 8000, 2);
@@ -245,7 +244,7 @@
   }
 
   rtc::scoped_refptr<internal::AudioState> audio_state(
-      new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+      rtc::make_ref_counted<internal::AudioState>(helper.config()));
 
   MockAudioSendStream stream_1;
   MockAudioSendStream stream_2;
@@ -308,7 +307,7 @@
   }
 
   rtc::scoped_refptr<internal::AudioState> audio_state(
-      new rtc::RefCountedObject<internal::AudioState>(helper.config()));
+      rtc::make_ref_counted<internal::AudioState>(helper.config()));
 
   audio_state->SetStereoChannelSwapping(true);
 
diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc
index 44a647b..7089d21 100644
--- a/audio/channel_receive.cc
+++ b/audio/channel_receive.cc
@@ -337,7 +337,7 @@
         OnReceivedPayloadData(packet, header);
       };
   frame_transformer_delegate_ =
-      new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
           std::move(receive_audio_callback), std::move(frame_transformer),
           rtc::Thread::Current());
   frame_transformer_delegate_->Init();
diff --git a/audio/channel_receive_frame_transformer_delegate_unittest.cc b/audio/channel_receive_frame_transformer_delegate_unittest.cc
index e7f5a45..01aac45 100644
--- a/audio/channel_receive_frame_transformer_delegate_unittest.cc
+++ b/audio/channel_receive_frame_transformer_delegate_unittest.cc
@@ -41,9 +41,9 @@
 TEST(ChannelReceiveFrameTransformerDelegateTest,
      RegisterTransformedFrameCallbackOnInit) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<MockFrameTransformer>();
+      rtc::make_ref_counted<MockFrameTransformer>();
   rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
           ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(),
           mock_frame_transformer, nullptr);
   EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback);
@@ -55,9 +55,9 @@
 TEST(ChannelReceiveFrameTransformerDelegateTest,
      UnregisterTransformedFrameCallbackOnReset) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<MockFrameTransformer>();
+      rtc::make_ref_counted<MockFrameTransformer>();
   rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
           ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(),
           mock_frame_transformer, nullptr);
   EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback);
@@ -69,10 +69,10 @@
 TEST(ChannelReceiveFrameTransformerDelegateTest,
      TransformRunsChannelReceiveCallback) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+      rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
   MockChannelReceive mock_channel;
   rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
           mock_channel.callback(), mock_frame_transformer,
           rtc::Thread::Current());
   rtc::scoped_refptr<TransformedFrameCallback> callback;
@@ -100,10 +100,10 @@
 TEST(ChannelReceiveFrameTransformerDelegateTest,
      OnTransformedDoesNotRunChannelReceiveCallbackAfterReset) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+      rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
   MockChannelReceive mock_channel;
   rtc::scoped_refptr<ChannelReceiveFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelReceiveFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelReceiveFrameTransformerDelegate>(
           mock_channel.callback(), mock_frame_transformer,
           rtc::Thread::Current());
 
diff --git a/audio/channel_send.cc b/audio/channel_send.cc
index 0434e48..47afc79 100644
--- a/audio/channel_send.cc
+++ b/audio/channel_send.cc
@@ -919,7 +919,7 @@
                             absolute_capture_timestamp_ms);
       };
   frame_transformer_delegate_ =
-      new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
           std::move(send_audio_callback), std::move(frame_transformer),
           &encoder_queue_);
   frame_transformer_delegate_->Init();
diff --git a/audio/channel_send_frame_transformer_delegate_unittest.cc b/audio/channel_send_frame_transformer_delegate_unittest.cc
index e2f3647..2ec78f8 100644
--- a/audio/channel_send_frame_transformer_delegate_unittest.cc
+++ b/audio/channel_send_frame_transformer_delegate_unittest.cc
@@ -53,9 +53,9 @@
 TEST(ChannelSendFrameTransformerDelegateTest,
      RegisterTransformedFrameCallbackOnInit) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<MockFrameTransformer>();
+      rtc::make_ref_counted<MockFrameTransformer>();
   rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
           ChannelSendFrameTransformerDelegate::SendFrameCallback(),
           mock_frame_transformer, nullptr);
   EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback);
@@ -67,9 +67,9 @@
 TEST(ChannelSendFrameTransformerDelegateTest,
      UnregisterTransformedFrameCallbackOnReset) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<MockFrameTransformer>();
+      rtc::make_ref_counted<MockFrameTransformer>();
   rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
           ChannelSendFrameTransformerDelegate::SendFrameCallback(),
           mock_frame_transformer, nullptr);
   EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback);
@@ -82,10 +82,10 @@
      TransformRunsChannelSendCallback) {
   TaskQueueForTest channel_queue("channel_queue");
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
+      rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
   MockChannelSend mock_channel;
   rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
           mock_channel.callback(), mock_frame_transformer, &channel_queue);
   rtc::scoped_refptr<TransformedFrameCallback> callback;
   EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback)
@@ -112,10 +112,10 @@
      OnTransformedDoesNotRunChannelSendCallbackAfterReset) {
   TaskQueueForTest channel_queue("channel_queue");
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+      rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
   MockChannelSend mock_channel;
   rtc::scoped_refptr<ChannelSendFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<ChannelSendFrameTransformerDelegate>(
+      rtc::make_ref_counted<ChannelSendFrameTransformerDelegate>(
           mock_channel.callback(), mock_frame_transformer, &channel_queue);
 
   delegate->Reset();
diff --git a/audio/voip/test/audio_channel_unittest.cc b/audio/voip/test/audio_channel_unittest.cc
index e0244c7..f99d163 100644
--- a/audio/voip/test/audio_channel_unittest.cc
+++ b/audio/voip/test/audio_channel_unittest.cc
@@ -65,7 +65,7 @@
     // Also this uses the same transport object for different audio channel to
     // simplify network routing logic.
     rtc::scoped_refptr<AudioChannel> audio_channel =
-        new rtc::RefCountedObject<AudioChannel>(
+        rtc::make_ref_counted<AudioChannel>(
             &transport_, ssrc, task_queue_factory_.get(), process_thread_.get(),
             audio_mixer_.get(), decoder_factory_);
     audio_channel->SetEncoder(kPcmuPayload, kPcmuFormat,
diff --git a/audio/voip/test/voip_core_unittest.cc b/audio/voip/test/voip_core_unittest.cc
index d290bd6..0d40760 100644
--- a/audio/voip/test/voip_core_unittest.cc
+++ b/audio/voip/test/voip_core_unittest.cc
@@ -39,7 +39,7 @@
     auto encoder_factory = CreateBuiltinAudioEncoderFactory();
     auto decoder_factory = CreateBuiltinAudioDecoderFactory();
     rtc::scoped_refptr<AudioProcessing> audio_processing =
-        new rtc::RefCountedObject<NiceMock<test::MockAudioProcessing>>();
+        rtc::make_ref_counted<NiceMock<test::MockAudioProcessing>>();
 
     auto process_thread = std::make_unique<NiceMock<MockProcessThread>>();
     // Hold the pointer to use for testing.
diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc
index 33dadbc..67ae4c6 100644
--- a/audio/voip/voip_core.cc
+++ b/audio/voip/voip_core.cc
@@ -138,7 +138,7 @@
   }
 
   rtc::scoped_refptr<AudioChannel> channel =
-      new rtc::RefCountedObject<AudioChannel>(
+      rtc::make_ref_counted<AudioChannel>(
           transport, local_ssrc.value(), task_queue_factory_.get(),
           process_thread_.get(), audio_mixer_.get(), decoder_factory_);
 
diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc
index 59bd1e0..876d4c0 100644
--- a/call/adaptation/broadcast_resource_listener.cc
+++ b/call/adaptation/broadcast_resource_listener.cc
@@ -83,8 +83,8 @@
   MutexLock lock(&lock_);
   RTC_DCHECK(is_listening_);
   rtc::scoped_refptr<AdapterResource> adapter =
-      new rtc::RefCountedObject<AdapterResource>(source_resource_->Name() +
-                                                 "Adapter");
+      rtc::make_ref_counted<AdapterResource>(source_resource_->Name() +
+                                             "Adapter");
   adapters_.push_back(adapter);
   return adapter;
 }
diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc
index 4925b64..741575a 100644
--- a/call/adaptation/resource_adaptation_processor.cc
+++ b/call/adaptation/resource_adaptation_processor.cc
@@ -72,7 +72,7 @@
     VideoStreamAdapter* stream_adapter)
     : task_queue_(nullptr),
       resource_listener_delegate_(
-          new rtc::RefCountedObject<ResourceListenerDelegate>(this)),
+          rtc::make_ref_counted<ResourceListenerDelegate>(this)),
       resources_(),
       stream_adapter_(stream_adapter),
       last_reported_source_restrictions_(),
diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc
index fa69e88..d125468 100644
--- a/call/adaptation/test/fake_resource.cc
+++ b/call/adaptation/test/fake_resource.cc
@@ -19,7 +19,7 @@
 
 // static
 rtc::scoped_refptr<FakeResource> FakeResource::Create(std::string name) {
-  return new rtc::RefCountedObject<FakeResource>(name);
+  return rtc::make_ref_counted<FakeResource>(name);
 }
 
 FakeResource::FakeResource(std::string name)
diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc
index 4cb9766..47d6e90 100644
--- a/call/call_perf_tests.cc
+++ b/call/call_perf_tests.cc
@@ -834,7 +834,7 @@
           bitrate_allocator_factory_.get();
       encoder_config->max_bitrate_bps = 2 * kReconfigureThresholdKbps * 1000;
       encoder_config->video_stream_factory =
-          new rtc::RefCountedObject<VideoStreamFactory>();
+          rtc::make_ref_counted<VideoStreamFactory>();
 
       encoder_config_ = encoder_config->Copy();
     }
diff --git a/call/call_unittest.cc b/call/call_unittest.cc
index d836362..b06af1e 100644
--- a/call/call_unittest.cc
+++ b/call/call_unittest.cc
@@ -50,14 +50,14 @@
     task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory();
     webrtc::AudioState::Config audio_state_config;
     audio_state_config.audio_mixer =
-        new rtc::RefCountedObject<webrtc::test::MockAudioMixer>();
+        rtc::make_ref_counted<webrtc::test::MockAudioMixer>();
     audio_state_config.audio_processing =
         use_null_audio_processing
             ? nullptr
-            : new rtc::RefCountedObject<
+            : rtc::make_ref_counted<
                   NiceMock<webrtc::test::MockAudioProcessing>>();
     audio_state_config.audio_device_module =
-        new rtc::RefCountedObject<webrtc::test::MockAudioDeviceModule>();
+        rtc::make_ref_counted<webrtc::test::MockAudioDeviceModule>();
     webrtc::Call::Config config(&event_log_);
     config.audio_state = webrtc::AudioState::Create(audio_state_config);
     config.task_queue_factory = task_queue_factory_.get();
@@ -118,7 +118,7 @@
     config.rtp.remote_ssrc = 42;
     config.rtcp_send_transport = &rtcp_send_transport;
     config.decoder_factory =
-        new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
     AudioReceiveStream* stream = call->CreateAudioReceiveStream(config);
     EXPECT_NE(stream, nullptr);
     call->DestroyAudioReceiveStream(stream);
@@ -157,7 +157,7 @@
     MockTransport rtcp_send_transport;
     config.rtcp_send_transport = &rtcp_send_transport;
     config.decoder_factory =
-        new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
     std::list<AudioReceiveStream*> streams;
     for (int i = 0; i < 2; ++i) {
       for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) {
@@ -187,7 +187,7 @@
     recv_config.rtp.local_ssrc = 777;
     recv_config.rtcp_send_transport = &rtcp_send_transport;
     recv_config.decoder_factory =
-        new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
     AudioReceiveStream* recv_stream =
         call->CreateAudioReceiveStream(recv_config);
     EXPECT_NE(recv_stream, nullptr);
@@ -226,7 +226,7 @@
     recv_config.rtp.local_ssrc = 777;
     recv_config.rtcp_send_transport = &rtcp_send_transport;
     recv_config.decoder_factory =
-        new rtc::RefCountedObject<webrtc::MockAudioDecoderFactory>();
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
     AudioReceiveStream* recv_stream =
         call->CreateAudioReceiveStream(recv_config);
     EXPECT_NE(recv_stream, nullptr);
diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc
index 379f9dc..e2ea55b 100644
--- a/call/rampup_tests.cc
+++ b/call/rampup_tests.cc
@@ -160,7 +160,7 @@
   encoder_config->number_of_streams = num_video_streams_;
   encoder_config->max_bitrate_bps = 2000000;
   encoder_config->video_stream_factory =
-      new rtc::RefCountedObject<RampUpTester::VideoStreamFactory>();
+      rtc::make_ref_counted<RampUpTester::VideoStreamFactory>();
   if (num_video_streams_ == 1) {
     // For single stream rampup until 1mbps
     expected_bitrate_bps_ = kSingleStreamTargetBps;
diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc
index e8689e7..fd26f1c 100644
--- a/call/rtp_video_sender_unittest.cc
+++ b/call/rtp_video_sender_unittest.cc
@@ -891,7 +891,7 @@
 
 TEST(RtpVideoSenderTest, SimulcastSenderRegistersFrameTransformers) {
   rtc::scoped_refptr<MockFrameTransformer> transformer =
-      new rtc::RefCountedObject<MockFrameTransformer>();
+      rtc::make_ref_counted<MockFrameTransformer>();
 
   EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc1));
   EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc2));
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index 4358db5..78a1264 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -208,7 +208,7 @@
     int v_stride,
     std::function<void()> no_longer_used) {
   return rtc::scoped_refptr<I420BufferInterface>(
-      new rtc::RefCountedObject<WrappedYuvBuffer<I420BufferInterface>>(
+      rtc::make_ref_counted<WrappedYuvBuffer<I420BufferInterface>>(
           width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
           v_stride, no_longer_used));
 }
@@ -226,7 +226,7 @@
     int a_stride,
     std::function<void()> no_longer_used) {
   return rtc::scoped_refptr<I420ABufferInterface>(
-      new rtc::RefCountedObject<WrappedYuvaBuffer<I420ABufferInterface>>(
+      rtc::make_ref_counted<WrappedYuvaBuffer<I420ABufferInterface>>(
           width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
           v_stride, a_plane, a_stride, no_longer_used));
 }
@@ -242,7 +242,7 @@
     int v_stride,
     std::function<void()> no_longer_used) {
   return rtc::scoped_refptr<I444BufferInterface>(
-      new rtc::RefCountedObject<WrappedYuvBuffer<I444BufferBase>>(
+      rtc::make_ref_counted<WrappedYuvBuffer<I444BufferBase>>(
           width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
           v_stride, no_longer_used));
 }
@@ -281,7 +281,7 @@
     int v_stride,
     std::function<void()> no_longer_used) {
   return rtc::scoped_refptr<I010BufferInterface>(
-      new rtc::RefCountedObject<WrappedYuv16BBuffer<I010BufferBase>>(
+      rtc::make_ref_counted<WrappedYuv16BBuffer<I010BufferBase>>(
           width, height, y_plane, y_stride, u_plane, u_stride, v_plane,
           v_stride, no_longer_used));
 }
diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc
index 6df240d..d225370 100644
--- a/common_video/video_frame_buffer_pool.cc
+++ b/common_video/video_frame_buffer_pool.cc
@@ -107,7 +107,7 @@
     return nullptr;
   // Allocate new buffer.
   rtc::scoped_refptr<I420Buffer> buffer =
-      new rtc::RefCountedObject<I420Buffer>(width, height);
+      rtc::make_ref_counted<I420Buffer>(width, height);
 
   if (zero_initialize_)
     buffer->InitializeData();
@@ -138,7 +138,7 @@
     return nullptr;
   // Allocate new buffer.
   rtc::scoped_refptr<NV12Buffer> buffer =
-      new rtc::RefCountedObject<NV12Buffer>(width, height);
+      rtc::make_ref_counted<NV12Buffer>(width, height);
 
   if (zero_initialize_)
     buffer->InitializeData();
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index b90f2fc..a74a2c3 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -1006,8 +1006,8 @@
   EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle);
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<FakeNativeBufferI420>(1280, 720,
-                                                      /*allow_to_i420=*/false));
+      rtc::make_ref_counted<FakeNativeBufferI420>(1280, 720,
+                                                  /*allow_to_i420=*/false));
   VideoFrame input_frame = VideoFrame::Builder()
                                .set_video_frame_buffer(buffer)
                                .set_timestamp_rtp(100)
@@ -1043,8 +1043,8 @@
   EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle);
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<FakeNativeBufferI420>(1280, 720,
-                                                      /*allow_to_i420=*/true));
+      rtc::make_ref_counted<FakeNativeBufferI420>(1280, 720,
+                                                  /*allow_to_i420=*/true));
   VideoFrame input_frame = VideoFrame::Builder()
                                .set_video_frame_buffer(buffer)
                                .set_timestamp_rtp(100)
diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc
index 710e4fd..b04b3db 100644
--- a/media/engine/webrtc_video_engine.cc
+++ b/media/engine/webrtc_video_engine.cc
@@ -502,7 +502,7 @@
     webrtc::VideoCodecH264 h264_settings =
         webrtc::VideoEncoder::GetDefaultH264Settings();
     h264_settings.frameDroppingOn = frame_dropping;
-    return new rtc::RefCountedObject<
+    return rtc::make_ref_counted<
         webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
   }
   if (absl::EqualsIgnoreCase(codec.name, kVp8CodecName)) {
@@ -512,7 +512,7 @@
     // VP8 denoising is enabled by default.
     vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
     vp8_settings.frameDroppingOn = frame_dropping;
-    return new rtc::RefCountedObject<
+    return rtc::make_ref_counted<
         webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
   }
   if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) {
@@ -562,7 +562,7 @@
       vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1;
       vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOn;
     }
-    return new rtc::RefCountedObject<
+    return rtc::make_ref_counted<
         webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
   }
   return nullptr;
@@ -2546,7 +2546,7 @@
   int max_qp = kDefaultQpMax;
   codec.GetParam(kCodecParamMaxQuantization, &max_qp);
   encoder_config.video_stream_factory =
-      new rtc::RefCountedObject<EncoderStreamFactory>(
+      rtc::make_ref_counted<EncoderStreamFactory>(
           codec.name, max_qp, is_screencast, parameters_.conference_mode);
 
   return encoder_config;
diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc
index e952394..717fb89 100644
--- a/media/engine/webrtc_voice_engine.cc
+++ b/media/engine/webrtc_voice_engine.cc
@@ -330,7 +330,7 @@
     config.audio_device_module = adm_;
     if (audio_frame_processor_)
       config.async_audio_processing_factory =
-          new rtc::RefCountedObject<webrtc::AsyncAudioProcessing::Factory>(
+          rtc::make_ref_counted<webrtc::AsyncAudioProcessing::Factory>(
               *audio_frame_processor_, *task_queue_factory_);
     audio_state_ = webrtc::AudioState::Create(config);
   }
diff --git a/p2p/base/default_ice_transport_factory.cc b/p2p/base/default_ice_transport_factory.cc
index 7d2fdb8..0a7175c 100644
--- a/p2p/base/default_ice_transport_factory.cc
+++ b/p2p/base/default_ice_transport_factory.cc
@@ -44,7 +44,7 @@
     int component,
     IceTransportInit init) {
   BasicIceControllerFactory factory;
-  return new rtc::RefCountedObject<DefaultIceTransport>(
+  return rtc::make_ref_counted<DefaultIceTransport>(
       cricket::P2PTransportChannel::Create(
           transport_name, component, init.port_allocator(),
           init.async_dns_resolver_factory(), init.event_log(), &factory));
diff --git a/video/adaptation/encode_usage_resource.cc b/video/adaptation/encode_usage_resource.cc
index 8fe7450..c42c63f 100644
--- a/video/adaptation/encode_usage_resource.cc
+++ b/video/adaptation/encode_usage_resource.cc
@@ -21,7 +21,7 @@
 // static
 rtc::scoped_refptr<EncodeUsageResource> EncodeUsageResource::Create(
     std::unique_ptr<OveruseFrameDetector> overuse_detector) {
-  return new rtc::RefCountedObject<EncodeUsageResource>(
+  return rtc::make_ref_counted<EncodeUsageResource>(
       std::move(overuse_detector));
 }
 
diff --git a/video/adaptation/pixel_limit_resource.cc b/video/adaptation/pixel_limit_resource.cc
index e1df141..789dac2 100644
--- a/video/adaptation/pixel_limit_resource.cc
+++ b/video/adaptation/pixel_limit_resource.cc
@@ -28,8 +28,8 @@
 rtc::scoped_refptr<PixelLimitResource> PixelLimitResource::Create(
     TaskQueueBase* task_queue,
     VideoStreamInputStateProvider* input_state_provider) {
-  return new rtc::RefCountedObject<PixelLimitResource>(task_queue,
-                                                       input_state_provider);
+  return rtc::make_ref_counted<PixelLimitResource>(task_queue,
+                                                   input_state_provider);
 }
 
 PixelLimitResource::PixelLimitResource(
diff --git a/video/adaptation/quality_scaler_resource.cc b/video/adaptation/quality_scaler_resource.cc
index c438488..c455252d 100644
--- a/video/adaptation/quality_scaler_resource.cc
+++ b/video/adaptation/quality_scaler_resource.cc
@@ -22,7 +22,7 @@
 
 // static
 rtc::scoped_refptr<QualityScalerResource> QualityScalerResource::Create() {
-  return new rtc::RefCountedObject<QualityScalerResource>();
+  return rtc::make_ref_counted<QualityScalerResource>();
 }
 
 QualityScalerResource::QualityScalerResource()
diff --git a/video/buffered_frame_decryptor_unittest.cc b/video/buffered_frame_decryptor_unittest.cc
index 5ede4fe..2f8a183 100644
--- a/video/buffered_frame_decryptor_unittest.cc
+++ b/video/buffered_frame_decryptor_unittest.cc
@@ -86,7 +86,7 @@
     decrypted_frame_call_count_ = 0;
     decryption_status_change_count_ = 0;
     seq_num_ = 0;
-    mock_frame_decryptor_ = new rtc::RefCountedObject<MockFrameDecryptor>();
+    mock_frame_decryptor_ = rtc::make_ref_counted<MockFrameDecryptor>();
     buffered_frame_decryptor_ =
         std::make_unique<BufferedFrameDecryptor>(this, this);
     buffered_frame_decryptor_->SetFrameDecryptor(mock_frame_decryptor_.get());
diff --git a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
index 16eee8c..d46c40c 100644
--- a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
+++ b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc
@@ -27,8 +27,9 @@
   if (type == kVideoCodecVP9) {
     VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
     vp9.numberOfSpatialLayers = num_spatial_layers;
-    encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
-        VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9);
+    encoder_config->encoder_specific_settings =
+        rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+            vp9);
   }
 }
 
@@ -119,7 +120,7 @@
     const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_);
     encoder_config->codec_type = codec_type;
     encoder_config->video_stream_factory =
-        new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
             payload_name_, /*max qp*/ 0, /*screencast*/ false,
             /*screenshare enabled*/ false);
     encoder_config->max_bitrate_bps = -1;
diff --git a/video/end_to_end_tests/rtp_rtcp_tests.cc b/video/end_to_end_tests/rtp_rtcp_tests.cc
index 7601802..d76a7f0 100644
--- a/video/end_to_end_tests/rtp_rtcp_tests.cc
+++ b/video/end_to_end_tests/rtp_rtcp_tests.cc
@@ -316,7 +316,7 @@
         }
 
         GetVideoEncoderConfig()->video_stream_factory =
-            new rtc::RefCountedObject<VideoStreamFactory>();
+            rtc::make_ref_counted<VideoStreamFactory>();
         // Use the same total bitrates when sending a single stream to avoid
         // lowering the bitrate estimate and requiring a subsequent rampup.
         one_stream = GetVideoEncoderConfig()->Copy();
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index 0e604cd..8a0f3b3 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -217,7 +217,7 @@
           buffer, encoded_image->ColorSpace());
 
   encoded_image->SetEncodedData(
-      new rtc::RefCountedObject<EncodedImageBufferWrapper>(
+      rtc::make_ref_counted<EncodedImageBufferWrapper>(
           std::move(modified_buffer)));
 }
 
diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc
index 0da9b69..50d3182 100644
--- a/video/quality_scaling_tests.cc
+++ b/video/quality_scaling_tests.cc
@@ -35,14 +35,16 @@
   if (type == kVideoCodecVP8) {
     VideoCodecVP8 vp8 = VideoEncoder::GetDefaultVp8Settings();
     vp8.automaticResizeOn = automatic_resize;
-    encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
-        VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8);
+    encoder_config->encoder_specific_settings =
+        rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+            vp8);
   } else if (type == kVideoCodecVP9) {
     VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
     vp9.automaticResizeOn = automatic_resize;
     vp9.numberOfSpatialLayers = num_spatial_layers;
-    encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
-        VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9);
+    encoder_config->encoder_specific_settings =
+        rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+            vp9);
   }
 }
 }  // namespace
diff --git a/video/rtp_video_stream_receiver.cc b/video/rtp_video_stream_receiver.cc
index bdca039..7e8fe99 100644
--- a/video/rtp_video_stream_receiver.cc
+++ b/video/rtp_video_stream_receiver.cc
@@ -333,10 +333,10 @@
   }
 
   if (frame_transformer) {
-    frame_transformer_delegate_ = new rtc::RefCountedObject<
-        RtpVideoStreamReceiverFrameTransformerDelegate>(
-        this, std::move(frame_transformer), rtc::Thread::Current(),
-        config_.rtp.remote_ssrc);
+    frame_transformer_delegate_ =
+        rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
+            this, std::move(frame_transformer), rtc::Thread::Current(),
+            config_.rtp.remote_ssrc);
     frame_transformer_delegate_->Init();
   }
 }
@@ -919,7 +919,7 @@
     rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
   RTC_DCHECK_RUN_ON(&network_tc_);
   frame_transformer_delegate_ =
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           this, std::move(frame_transformer), rtc::Thread::Current(),
           config_.rtp.remote_ssrc);
   frame_transformer_delegate_->Init();
diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc
index c96dbed..fdbd5b6 100644
--- a/video/rtp_video_stream_receiver2.cc
+++ b/video/rtp_video_stream_receiver2.cc
@@ -307,10 +307,10 @@
   }
 
   if (frame_transformer) {
-    frame_transformer_delegate_ = new rtc::RefCountedObject<
-        RtpVideoStreamReceiverFrameTransformerDelegate>(
-        this, std::move(frame_transformer), rtc::Thread::Current(),
-        config_.rtp.remote_ssrc);
+    frame_transformer_delegate_ =
+        rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
+            this, std::move(frame_transformer), rtc::Thread::Current(),
+            config_.rtp.remote_ssrc);
     frame_transformer_delegate_->Init();
   }
 }
@@ -882,7 +882,7 @@
     rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
   RTC_DCHECK_RUN_ON(&worker_task_checker_);
   frame_transformer_delegate_ =
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           this, std::move(frame_transformer), rtc::Thread::Current(),
           config_.rtp.remote_ssrc);
   frame_transformer_delegate_->Init();
diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc
index 41e9ed8..9ade57d 100644
--- a/video/rtp_video_stream_receiver2_unittest.cc
+++ b/video/rtp_video_stream_receiver2_unittest.cc
@@ -1120,7 +1120,7 @@
 
 TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) {
   rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+      rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
   EXPECT_CALL(*mock_frame_transformer,
               RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
   auto receiver = std::make_unique<RtpVideoStreamReceiver2>(
diff --git a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
index f3306f0..0d85cc0 100644
--- a/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
+++ b/video/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
@@ -61,10 +61,9 @@
 TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
      RegisterTransformedFrameCallbackSinkOnInit) {
   TestRtpVideoFrameReceiver receiver;
-  rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
-      new rtc::RefCountedObject<MockFrameTransformer>());
-  rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+  auto frame_transformer(rtc::make_ref_counted<MockFrameTransformer>());
+  auto delegate(
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           &receiver, frame_transformer, rtc::Thread::Current(),
           /*remote_ssrc*/ 1111));
   EXPECT_CALL(*frame_transformer,
@@ -75,10 +74,9 @@
 TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
      UnregisterTransformedFrameSinkCallbackOnReset) {
   TestRtpVideoFrameReceiver receiver;
-  rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
-      new rtc::RefCountedObject<MockFrameTransformer>());
-  rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+  auto frame_transformer(rtc::make_ref_counted<MockFrameTransformer>());
+  auto delegate(
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           &receiver, frame_transformer, rtc::Thread::Current(),
           /*remote_ssrc*/ 1111));
   EXPECT_CALL(*frame_transformer, UnregisterTransformedFrameSinkCallback(1111));
@@ -87,10 +85,10 @@
 
 TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) {
   TestRtpVideoFrameReceiver receiver;
-  rtc::scoped_refptr<MockFrameTransformer> frame_transformer(
-      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>());
-  rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+  auto frame_transformer(
+      rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>());
+  auto delegate(
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           &receiver, frame_transformer, rtc::Thread::Current(),
           /*remote_ssrc*/ 1111));
   auto frame = CreateRtpFrameObject();
@@ -101,10 +99,10 @@
 TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
      ManageFrameOnTransformedFrame) {
   TestRtpVideoFrameReceiver receiver;
-  rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer(
-      new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>());
-  rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+  auto mock_frame_transformer(
+      rtc::make_ref_counted<NiceMock<MockFrameTransformer>>());
+  auto delegate =
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           &receiver, mock_frame_transformer, rtc::Thread::Current(),
           /*remote_ssrc*/ 1111);
 
@@ -127,10 +125,10 @@
 TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
      TransformableFrameMetadataHasCorrectValue) {
   TestRtpVideoFrameReceiver receiver;
-  rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
-  rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate =
-      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
+  auto mock_frame_transformer =
+      rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
+  auto delegate =
+      rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
           &receiver, mock_frame_transformer, rtc::Thread::Current(), 1111);
   delegate->Init();
   RTPVideoHeader video_header;
diff --git a/video/rtp_video_stream_receiver_unittest.cc b/video/rtp_video_stream_receiver_unittest.cc
index acdc2b7..5a79b2a 100644
--- a/video/rtp_video_stream_receiver_unittest.cc
+++ b/video/rtp_video_stream_receiver_unittest.cc
@@ -1165,8 +1165,8 @@
 #endif
 
 TEST_F(RtpVideoStreamReceiverTest, TransformFrame) {
-  rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
-      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
+  auto mock_frame_transformer =
+      rtc::make_ref_counted<testing::NiceMock<MockFrameTransformer>>();
   EXPECT_CALL(*mock_frame_transformer,
               RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
   auto receiver = std::make_unique<RtpVideoStreamReceiver>(
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index a58aa1f..b87957f 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -626,7 +626,7 @@
     encoder_config.spatial_layers = params->ss[video_idx].spatial_layers;
     encoder_config.simulcast_layers = std::vector<VideoStream>(num_streams);
     encoder_config.video_stream_factory =
-        new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
             params->video[video_idx].codec, kDefaultMaxQp,
             params->screenshare[video_idx].enabled, true);
     params->ss[video_idx].streams =
@@ -800,7 +800,7 @@
           params_.ss[video_idx].streams;
     }
     video_encoder_configs_[video_idx].video_stream_factory =
-        new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
             params_.video[video_idx].codec,
             params_.ss[video_idx].streams[0].max_qp,
             params_.screenshare[video_idx].enabled, true);
@@ -829,7 +829,7 @@
         vp8_settings.numberOfTemporalLayers = static_cast<unsigned char>(
             params_.video[video_idx].num_temporal_layers);
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
       } else if (params_.video[video_idx].codec == "VP9") {
         VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
@@ -846,7 +846,7 @@
           vp9_settings.flexibleMode = true;
         }
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
       }
     } else if (params_.ss[video_idx].num_spatial_layers > 1) {
@@ -860,8 +860,8 @@
       vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
       vp9_settings.automaticResizeOn = false;
       video_encoder_configs_[video_idx].encoder_specific_settings =
-          new rtc::RefCountedObject<
-              VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+          rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+              vp9_settings);
       RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(),
                     1);
       // Min bitrate will be enforced by spatial layer config instead.
@@ -871,7 +871,7 @@
         VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
         vp8_settings.automaticResizeOn = true;
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
       } else if (params_.video[video_idx].codec == "VP9") {
         VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
@@ -879,7 +879,7 @@
         vp9_settings.automaticResizeOn =
             params_.ss[video_idx].num_spatial_layers == 1;
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
       } else if (params_.video[video_idx].codec == "H264") {
         // Quality scaling is always on for H.264.
@@ -898,18 +898,18 @@
         VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
         vp8_settings.automaticResizeOn = false;
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
       } else if (params_.video[video_idx].codec == "VP9") {
         VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
         vp9_settings.automaticResizeOn = false;
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
       } else if (params_.video[video_idx].codec == "H264") {
         VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings();
         video_encoder_configs_[video_idx].encoder_specific_settings =
-            new rtc::RefCountedObject<
+            rtc::make_ref_counted<
                 VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
       }
     }
@@ -986,7 +986,7 @@
     thumbnail_encoder_config.max_bitrate_bps = 50000;
     std::vector<VideoStream> streams{params_.ss[0].streams[0]};
     thumbnail_encoder_config.video_stream_factory =
-        new rtc::RefCountedObject<VideoStreamFactory>(streams);
+        rtc::make_ref_counted<VideoStreamFactory>(streams);
     thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;
 
     thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy());
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index 3ee97a9..ba24441 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -2531,8 +2531,8 @@
 template <>
 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
 VideoCodecConfigObserver<VideoCodecH264>::GetEncoderSpecificSettings() const {
-  return new rtc::RefCountedObject<
-      VideoEncoderConfig::H264EncoderSpecificSettings>(encoder_settings_);
+  return rtc::make_ref_counted<VideoEncoderConfig::H264EncoderSpecificSettings>(
+      encoder_settings_);
 }
 
 template <>
@@ -2565,8 +2565,8 @@
 template <>
 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
 VideoCodecConfigObserver<VideoCodecVP8>::GetEncoderSpecificSettings() const {
-  return new rtc::RefCountedObject<
-      VideoEncoderConfig::Vp8EncoderSpecificSettings>(encoder_settings_);
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+      encoder_settings_);
 }
 
 template <>
@@ -2599,8 +2599,8 @@
 template <>
 rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
 VideoCodecConfigObserver<VideoCodecVP9>::GetEncoderSpecificSettings() const {
-  return new rtc::RefCountedObject<
-      VideoEncoderConfig::Vp9EncoderSpecificSettings>(encoder_settings_);
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      encoder_settings_);
 }
 
 TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
@@ -2726,7 +2726,7 @@
       send_config->encoder_settings.encoder_factory = &encoder_factory_;
       EXPECT_EQ(1u, encoder_config->number_of_streams);
       encoder_config->video_stream_factory =
-          new rtc::RefCountedObject<VideoStreamFactory>();
+          rtc::make_ref_counted<VideoStreamFactory>();
       EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
       encoder_config->simulcast_layers[0].num_temporal_layers = 2;
       encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen;
@@ -3064,8 +3064,9 @@
     send_config->rtp.payload_name = "VP9";
     send_config->rtp.payload_type = kVp9PayloadType;
     ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
-    encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
-        VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
+    encoder_config->encoder_specific_settings =
+        rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+            vp9_settings_);
     EXPECT_EQ(1u, encoder_config->number_of_streams);
     EXPECT_EQ(1u, encoder_config->simulcast_layers.size());
     encoder_config->simulcast_layers[0].num_temporal_layers =
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index 7ff3d4d..d63e1bc 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -144,8 +144,8 @@
       int crop_height,
       int scaled_width,
       int scaled_height) override {
-    return new rtc::RefCountedObject<FakeNativeBuffer>(nullptr, scaled_width,
-                                                       scaled_height);
+    return rtc::make_ref_counted<FakeNativeBuffer>(nullptr, scaled_width,
+                                                   scaled_height);
   }
 
  private:
@@ -517,7 +517,7 @@
               &cropped_height, &out_width, &out_height)) {
         VideoFrame adapted_frame =
             VideoFrame::Builder()
-                .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+                .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
                     nullptr, out_width, out_height))
                 .set_ntp_time_ms(video_frame.ntp_time_ms())
                 .set_timestamp_ms(99)
@@ -715,8 +715,8 @@
       vp9_settings.numberOfSpatialLayers = num_spatial_layers;
       vp9_settings.automaticResizeOn = num_spatial_layers <= 1;
       video_encoder_config.encoder_specific_settings =
-          new rtc::RefCountedObject<
-              VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+          rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+              vp9_settings);
     }
     ConfigureEncoder(std::move(video_encoder_config), allocation_callback_type);
   }
@@ -724,7 +724,7 @@
   VideoFrame CreateFrame(int64_t ntp_time_ms,
                          rtc::Event* destruction_event) const {
     return VideoFrame::Builder()
-        .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+        .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
             destruction_event, codec_width_, codec_height_))
         .set_ntp_time_ms(ntp_time_ms)
         .set_timestamp_ms(99)
@@ -736,7 +736,7 @@
                                          rtc::Event* destruction_event,
                                          int offset_x) const {
     return VideoFrame::Builder()
-        .set_video_frame_buffer(new rtc::RefCountedObject<TestBuffer>(
+        .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
             destruction_event, codec_width_, codec_height_))
         .set_ntp_time_ms(ntp_time_ms)
         .set_timestamp_ms(99)
@@ -748,7 +748,7 @@
   VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const {
     return VideoFrame::Builder()
         .set_video_frame_buffer(
-            new rtc::RefCountedObject<TestBuffer>(nullptr, width, height))
+            rtc::make_ref_counted<TestBuffer>(nullptr, width, height))
         .set_ntp_time_ms(ntp_time_ms)
         .set_timestamp_ms(ntp_time_ms)
         .set_rotation(kVideoRotation_0)
@@ -769,7 +769,7 @@
                                    int width,
                                    int height) const {
     return VideoFrame::Builder()
-        .set_video_frame_buffer(new rtc::RefCountedObject<FakeNativeBuffer>(
+        .set_video_frame_buffer(rtc::make_ref_counted<FakeNativeBuffer>(
             destruction_event, width, height))
         .set_ntp_time_ms(ntp_time_ms)
         .set_timestamp_ms(99)
@@ -782,7 +782,7 @@
                                        int width,
                                        int height) const {
     return VideoFrame::Builder()
-        .set_video_frame_buffer(new rtc::RefCountedObject<FakeNV12NativeBuffer>(
+        .set_video_frame_buffer(rtc::make_ref_counted<FakeNV12NativeBuffer>(
             destruction_event, width, height))
         .set_ntp_time_ms(ntp_time_ms)
         .set_timestamp_ms(99)
@@ -1587,7 +1587,7 @@
        NativeFrameWithoutI420SupportGetsCroppedIfNecessary) {
   // Use the cropping factory.
   video_encoder_config_.video_stream_factory =
-      new rtc::RefCountedObject<CroppingVideoStreamFactory>();
+      rtc::make_ref_counted<CroppingVideoStreamFactory>();
   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config_),
                                           kMaxPayloadLength);
   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
@@ -2071,7 +2071,7 @@
   config.simulcast_layers[0].active = false;
   config.simulcast_layers[1].active = true;
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2135,7 +2135,7 @@
   config.simulcast_layers[0].active = false;
   config.simulcast_layers[1].active = true;
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2208,7 +2208,7 @@
   config.simulcast_layers[1].active = true;
   config.simulcast_layers[2].active = false;
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2252,7 +2252,7 @@
   config.simulcast_layers[1].active = false;
   config.simulcast_layers[2].active = false;
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2288,7 +2288,7 @@
   config.simulcast_layers[1].active = true;
   config.simulcast_layers[1].max_bitrate_bps = kMaxBitrateBps;
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength);
@@ -2376,7 +2376,7 @@
     config.simulcast_layers[i].scale_resolution_down_by = scale_factors_[i];
   }
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength);
@@ -4406,7 +4406,7 @@
   video_encoder_config.content_type =
       VideoEncoderConfig::ContentType::kRealtimeVideo;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
           VideoEncoder::GetDefaultVp8Settings());
   for (auto& layer : video_encoder_config.simulcast_layers) {
     layer.num_temporal_layers = 2;
@@ -4451,7 +4451,7 @@
   video_encoder_config.content_type =
       VideoEncoderConfig::ContentType::kRealtimeVideo;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
           VideoEncoder::GetDefaultVp8Settings());
   for (auto& layer : video_encoder_config.simulcast_layers) {
     layer.num_temporal_layers = 2;
@@ -4500,7 +4500,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   ConfigureEncoder(std::move(video_encoder_config),
                    VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4555,7 +4555,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   ConfigureEncoder(std::move(video_encoder_config),
                    VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4603,7 +4603,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOnKeyPic;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   ConfigureEncoder(std::move(video_encoder_config),
                    VideoStreamEncoder::BitrateAllocationCallbackType::
@@ -4651,7 +4651,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   // Simulcast layers are used for enabling/disabling streams.
   video_encoder_config.simulcast_layers.resize(3);
@@ -4710,7 +4710,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   // Simulcast layers are used for enabling/disabling streams.
   video_encoder_config.simulcast_layers.resize(3);
@@ -4762,7 +4762,7 @@
   vp9_settings.interLayerPred = InterLayerPredMode::kOn;
   vp9_settings.automaticResizeOn = false;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   // Simulcast layers are used for enabling/disabling streams.
   video_encoder_config.simulcast_layers.resize(3);
@@ -5362,7 +5362,7 @@
   test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 3,
                                  &video_encoder_config);
   video_encoder_config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   for (auto& layer : video_encoder_config.simulcast_layers) {
@@ -5429,7 +5429,7 @@
   // Since only one layer is active - automatic resize should be enabled.
   vp9_settings.automaticResizeOn = true;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
   video_encoder_config.content_type =
@@ -5488,7 +5488,7 @@
   // Since only one layer is active - automatic resize should be enabled.
   vp9_settings.automaticResizeOn = true;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
   video_encoder_config.content_type =
@@ -5546,7 +5546,7 @@
   // Since only one layer is active - automatic resize should be enabled.
   vp9_settings.automaticResizeOn = true;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
   video_encoder_config.content_type =
@@ -5611,7 +5611,7 @@
   // Since only one layer is active - automatic resize should be enabled.
   vp9_settings.automaticResizeOn = true;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
   video_encoder_config.content_type =
@@ -5687,7 +5687,7 @@
   // Since only one layer is active - automatic resize should be enabled.
   vp9_settings.automaticResizeOn = true;
   video_encoder_config.encoder_specific_settings =
-      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
           vp9_settings);
   video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
   video_encoder_config.content_type =
@@ -6685,7 +6685,7 @@
   video_encoder_config.simulcast_layers[0].max_framerate = kFramerate;
   video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
   video_encoder_config.video_stream_factory =
-      new rtc::RefCountedObject<CroppingVideoStreamFactory>();
+      rtc::make_ref_counted<CroppingVideoStreamFactory>();
   video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config),
                                           kMaxPayloadLength);
   video_stream_encoder_->WaitUntilTaskQueueIsIdle();
@@ -7650,7 +7650,7 @@
     config.simulcast_layers[i].active = true;
   }
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
@@ -7778,7 +7778,7 @@
     config.simulcast_layers[i].active = true;
   }
   config.video_stream_factory =
-      new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
+      rtc::make_ref_counted<cricket::EncoderStreamFactory>(
           "VP8", /*max qp*/ 56, /*screencast*/ false,
           /*screenshare enabled*/ false);
   video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(