Prepare for deleting the implicit conversion from raw pointer to scoped_refptr.
Updates all WebRTC code so that a small follow-up CL can simply add the
"explicit" keyword. Patchset #24 passed all WebRTC tests with explicit.
Bug: webrtc:13464
Change-Id: I39863d3752f73209b531120f66916dc9177bf63a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/242363
Reviewed-by: Tomas Gunnarsson <tommi@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35718}
diff --git a/api/scoped_refptr.h b/api/scoped_refptr.h
index 5b3a085..164fefe 100644
--- a/api/scoped_refptr.h
+++ b/api/scoped_refptr.h
@@ -75,6 +75,8 @@
scoped_refptr() : ptr_(nullptr) {}
+ // TODO(bugs.webrtc.org/13464): Implicit construction is deprecated. Mark
+ // explicit, and add a new implicit constructor accepting a nullptr_t.
scoped_refptr(T* p) : ptr_(p) { // NOLINT(runtime/explicit)
if (ptr_)
ptr_->AddRef();
diff --git a/api/video/i444_buffer.cc b/api/video/i444_buffer.cc
index 92f1662..8bf9f76 100644
--- a/api/video/i444_buffer.cc
+++ b/api/video/i444_buffer.cc
@@ -61,7 +61,7 @@
// static
rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width, int height) {
- return new rtc::RefCountedObject<I444Buffer>(width, height);
+ return rtc::make_ref_counted<I444Buffer>(width, height);
}
// static
@@ -70,8 +70,8 @@
int stride_y,
int stride_u,
int stride_v) {
- return new rtc::RefCountedObject<I444Buffer>(width, height, stride_y,
- stride_u, stride_v);
+ return rtc::make_ref_counted<I444Buffer>(width, height, stride_y, stride_u,
+ stride_v);
}
// static
diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc
index e95eac3..267cab1 100644
--- a/common_video/video_frame_buffer_pool.cc
+++ b/common_video/video_frame_buffer_pool.cc
@@ -143,7 +143,7 @@
return nullptr;
// Allocate new buffer.
rtc::scoped_refptr<I444Buffer> buffer =
- new rtc::RefCountedObject<I444Buffer>(width, height);
+ rtc::make_ref_counted<I444Buffer>(width, height);
if (zero_initialize_)
buffer->InitializeData();
diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc
index 8178273..3c7c86d 100644
--- a/examples/androidnativeapi/jni/android_call_client.cc
+++ b/examples/androidnativeapi/jni/android_call_client.cc
@@ -265,7 +265,7 @@
webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
pc_->SetRemoteDescription(
std::move(answer),
- new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
+ rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
}
void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm
index 09d3d4a..c1d84f7 100644
--- a/examples/objcnativeapi/objc/objc_call_client.mm
+++ b/examples/objcnativeapi/objc/objc_call_client.mm
@@ -220,7 +220,7 @@
std::unique_ptr<webrtc::SessionDescriptionInterface> answer(
webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp));
pc_->SetRemoteDescription(std::move(answer),
- new rtc::RefCountedObject<SetRemoteSessionDescriptionObserver>());
+ rtc::make_ref_counted<SetRemoteSessionDescriptionObserver>());
}
void CreateOfferObserver::OnFailure(webrtc::RTCError error) {
diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc
index d9adffb..65958a4 100644
--- a/examples/peerconnection/client/conductor.cc
+++ b/examples/peerconnection/client/conductor.cc
@@ -85,8 +85,7 @@
capturer = absl::WrapUnique(
webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i));
if (capturer) {
- return new rtc::RefCountedObject<CapturerTrackSource>(
- std::move(capturer));
+ return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
}
}
diff --git a/examples/unityplugin/simple_peer_connection.cc b/examples/unityplugin/simple_peer_connection.cc
index 8a82718..34abbe2 100644
--- a/examples/unityplugin/simple_peer_connection.cc
+++ b/examples/unityplugin/simple_peer_connection.cc
@@ -61,7 +61,7 @@
if (!capturer) {
return nullptr;
}
- return new rtc::RefCountedObject<CapturerTrackSource>(std::move(capturer));
+ return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
}
protected:
diff --git a/modules/desktop_capture/desktop_capture_options.cc b/modules/desktop_capture/desktop_capture_options.cc
index c89896d..fc0340a 100644
--- a/modules/desktop_capture/desktop_capture_options.cc
+++ b/modules/desktop_capture/desktop_capture_options.cc
@@ -15,6 +15,8 @@
#include "modules/desktop_capture/win/full_screen_win_application_handler.h"
#endif
+#include "rtc_base/ref_counted_object.h"
+
namespace webrtc {
DesktopCaptureOptions::DesktopCaptureOptions() {}
@@ -36,12 +38,15 @@
result.set_x_display(SharedXDisplay::CreateDefault());
#endif
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
- result.set_configuration_monitor(new DesktopConfigurationMonitor());
+ result.set_configuration_monitor(
+ rtc::make_ref_counted<DesktopConfigurationMonitor>());
result.set_full_screen_window_detector(
- new FullScreenWindowDetector(CreateFullScreenMacApplicationHandler));
+ rtc::make_ref_counted<FullScreenWindowDetector>(
+ CreateFullScreenMacApplicationHandler));
#elif defined(WEBRTC_WIN)
result.set_full_screen_window_detector(
- new FullScreenWindowDetector(CreateFullScreenWinApplicationHandler));
+ rtc::make_ref_counted<FullScreenWindowDetector>(
+ CreateFullScreenWinApplicationHandler));
#endif
return result;
}
diff --git a/modules/desktop_capture/linux/x11/shared_x_display.cc b/modules/desktop_capture/linux/x11/shared_x_display.cc
index ca084d4..ad2e043 100644
--- a/modules/desktop_capture/linux/x11/shared_x_display.cc
+++ b/modules/desktop_capture/linux/x11/shared_x_display.cc
@@ -36,9 +36,9 @@
XOpenDisplay(display_name.empty() ? NULL : display_name.c_str());
if (!display) {
RTC_LOG(LS_ERROR) << "Unable to open display";
- return NULL;
+ return nullptr;
}
- return new SharedXDisplay(display);
+ return rtc::scoped_refptr<SharedXDisplay>(new SharedXDisplay(display));
}
// static
diff --git a/modules/desktop_capture/shared_desktop_frame.cc b/modules/desktop_capture/shared_desktop_frame.cc
index 2ded145..e374038 100644
--- a/modules/desktop_capture/shared_desktop_frame.cc
+++ b/modules/desktop_capture/shared_desktop_frame.cc
@@ -21,8 +21,8 @@
// static
std::unique_ptr<SharedDesktopFrame> SharedDesktopFrame::Wrap(
std::unique_ptr<DesktopFrame> desktop_frame) {
- return std::unique_ptr<SharedDesktopFrame>(
- new SharedDesktopFrame(new Core(std::move(desktop_frame))));
+ return std::unique_ptr<SharedDesktopFrame>(new SharedDesktopFrame(
+ rtc::scoped_refptr<Core>(new Core(std::move(desktop_frame)))));
}
SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) {
diff --git a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc
index 098d60d..4cf3d59 100644
--- a/modules/video_capture/test/video_capture_unittest.cc
+++ b/modules/video_capture/test/video_capture_unittest.cc
@@ -168,7 +168,7 @@
rtc::scoped_refptr<VideoCaptureModule> module(
VideoCaptureFactory::Create(unique_name));
if (module.get() == NULL)
- return NULL;
+ return nullptr;
EXPECT_FALSE(module->CaptureStarted());
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
index 5d0f380..669dc55 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
@@ -275,8 +275,8 @@
// This buffer contains all of `img`'s image data, a reference counted
// Vp9FrameBuffer. (libvpx is done with the buffers after a few
// vpx_codec_decode calls or vpx_codec_destroy).
- rtc::scoped_refptr<Vp9FrameBufferPool::Vp9FrameBuffer> img_buffer =
- static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
+ rtc::scoped_refptr<Vp9FrameBufferPool::Vp9FrameBuffer> img_buffer(
+ static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv));
// The buffer can be used directly by the VideoFrame (without copy) by
// using a Wrapped*Buffer.
diff --git a/pc/media_stream.cc b/pc/media_stream.cc
index 6cf84ac..6fe3088 100644
--- a/pc/media_stream.cc
+++ b/pc/media_stream.cc
@@ -59,7 +59,7 @@
const std::string& track_id) {
AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
if (it == audio_tracks_.end())
- return NULL;
+ return nullptr;
return *it;
}
@@ -67,7 +67,7 @@
const std::string& track_id) {
VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
if (it == video_tracks_.end())
- return NULL;
+ return nullptr;
return *it;
}
diff --git a/pc/media_stream_unittest.cc b/pc/media_stream_unittest.cc
index 5522699..6ce8de9 100644
--- a/pc/media_stream_unittest.cc
+++ b/pc/media_stream_unittest.cc
@@ -63,7 +63,7 @@
ASSERT_TRUE(video_track_.get() != NULL);
EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
- audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+ audio_track_ = AudioTrack::Create(kAudioTrackId, nullptr);
ASSERT_TRUE(audio_track_.get() != NULL);
EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 6bcaef4..12de839 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -764,7 +764,8 @@
const JavaParamRef<jobject>& j_stream_labels) {
RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
ExtractNativePC(jni, j_pc)->AddTrack(
- reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels,
&JavaToNativeString));
if (!result.ok()) {
@@ -792,7 +793,8 @@
const JavaParamRef<jobject>& j_init) {
RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
ExtractNativePC(jni, j_pc)->AddTransceiver(
- reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
JavaToNativeRtpTransceiverInit(jni, j_init));
if (!result.ok()) {
RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc
index 5330cbd..08af07a 100644
--- a/sdk/android/src/jni/pc/peer_connection_factory.cc
+++ b/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -351,11 +351,12 @@
jlong native_network_controller_factory,
jlong native_network_state_predictor_factory,
jlong native_neteq_factory) {
- rtc::scoped_refptr<AudioProcessing> audio_processor =
- reinterpret_cast<AudioProcessing*>(native_audio_processor);
+ rtc::scoped_refptr<AudioProcessing> audio_processor(
+ reinterpret_cast<AudioProcessing*>(native_audio_processor));
return CreatePeerConnectionFactoryForJava(
jni, jcontext, joptions,
- reinterpret_cast<AudioDeviceModule*>(native_audio_device_module),
+ rtc::scoped_refptr<AudioDeviceModule>(
+ reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)),
TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
jencoder_factory, jdecoder_factory,
diff --git a/sdk/android/src/jni/pc/rtp_receiver.cc b/sdk/android/src/jni/pc/rtp_receiver.cc
index 4d7e954..7a3600b 100644
--- a/sdk/android/src/jni/pc/rtp_receiver.cc
+++ b/sdk/android/src/jni/pc/rtp_receiver.cc
@@ -118,8 +118,9 @@
jlong j_rtp_sender_pointer,
jlong j_frame_decryptor_pointer) {
reinterpret_cast<RtpReceiverInterface*>(j_rtp_sender_pointer)
- ->SetFrameDecryptor(reinterpret_cast<FrameDecryptorInterface*>(
- j_frame_decryptor_pointer));
+ ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+ reinterpret_cast<FrameDecryptorInterface*>(
+ j_frame_decryptor_pointer)));
}
} // namespace jni
diff --git a/sdk/android/src/jni/pc/rtp_sender.cc b/sdk/android/src/jni/pc/rtp_sender.cc
index 411e5dc..233a353 100644
--- a/sdk/android/src/jni/pc/rtp_sender.cc
+++ b/sdk/android/src/jni/pc/rtp_sender.cc
@@ -105,8 +105,9 @@
jlong j_rtp_sender_pointer,
jlong j_frame_encryptor_pointer) {
reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
- ->SetFrameEncryptor(reinterpret_cast<FrameEncryptorInterface*>(
- j_frame_encryptor_pointer));
+ ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>(
+ reinterpret_cast<FrameEncryptorInterface*>(
+ j_frame_encryptor_pointer)));
}
} // namespace jni
diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
index 6a97f46..73ec98f 100644
--- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
@@ -48,11 +48,10 @@
- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
if (!_source) {
- rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
- self.nativeAudioTrack->GetSource();
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source(self.nativeAudioTrack->GetSource());
if (source) {
_source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
- nativeAudioSource:source.get()];
+ nativeAudioSource:source];
}
}
return _source;
@@ -61,7 +60,8 @@
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
- return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
+ return rtc::scoped_refptr<webrtc::AudioTrackInterface>(
+ static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get()));
}
@end
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index b3e0a7b..56177b4 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -20,7 +20,7 @@
class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface {
public:
static rtc::scoped_refptr<ObjCEncodedImageBuffer> Create(NSData *data) {
- return new rtc::RefCountedObject<ObjCEncodedImageBuffer>(data);
+ return rtc::make_ref_counted<ObjCEncodedImageBuffer>(data);
}
const uint8_t *data() const override { return static_cast<const uint8_t *>(data_.bytes); }
// TODO(bugs.webrtc.org/9378): delete this non-const data method.
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
index 8ded552..7f8b123 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -69,21 +69,21 @@
- (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
: (RTCStatisticsCompletionHandler)completionHandler {
- rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
- new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
}
- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
- rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
- new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(receiver.nativeRtpReceiver, collector);
}
- (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler {
- rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
- new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
+ rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector =
+ rtc::make_ref_counted<webrtc::StatsCollectorCallbackAdapter>(completionHandler);
self.nativePeerConnection->GetStats(collector);
}
@@ -91,9 +91,8 @@
statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
completionHandler:
(void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
- rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
- new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
- (completionHandler));
+ rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::StatsObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
[[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
self.nativePeerConnection->GetStats(
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index 4a31a54..7db986c 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -572,9 +572,8 @@
- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
- rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
- observer(new rtc::RefCountedObject
- <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -584,9 +583,8 @@
- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
completionHandler:(RTCCreateSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
- rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
- observer(new rtc::RefCountedObject
- <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
+ rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter> observer =
+ rtc::make_ref_counted<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler);
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
@@ -596,24 +594,24 @@
- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
- rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
- new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetLocalDescription(sdp.nativeDescription, observer);
}
- (void)setLocalDescriptionWithCompletionHandler:
(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
- rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer(
- new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+ rtc::scoped_refptr<webrtc::SetLocalDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetLocalDescription(observer);
}
- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
completionHandler:(RTCSetSessionDescriptionCompletionHandler)completionHandler {
RTC_DCHECK(completionHandler != nil);
- rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer(
- new rtc::RefCountedObject<::SetSessionDescriptionObserver>(completionHandler));
+ rtc::scoped_refptr<webrtc::SetRemoteDescriptionObserverInterface> observer =
+ rtc::make_ref_counted<::SetSessionDescriptionObserver>(completionHandler);
_peerConnection->SetRemoteDescription(sdp.nativeDescription, observer);
}
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm
index 3211f18..486ca93 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -61,8 +61,8 @@
signalingThread:(rtc::Thread *)signalingThread
workerThread:(rtc::Thread *)workerThread
isScreenCast:(BOOL)isScreenCast {
- rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
- new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(isScreenCast));
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(isScreenCast);
return [self initWithFactory:factory
nativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
index 3f38dd5..d9eddde 100644
--- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
@@ -59,11 +59,11 @@
- (RTC_OBJC_TYPE(RTCVideoSource) *)source {
if (!_source) {
- rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
- self.nativeVideoTrack->GetSource();
+ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source(
+ self.nativeVideoTrack->GetSource());
if (source) {
_source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
- nativeVideoSource:source.get()];
+ nativeVideoSource:source];
}
}
return _source;
@@ -107,7 +107,8 @@
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
- return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
+ return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
+ static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get()));
}
@end
diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm
index 3c2790e..55ea7e3 100644
--- a/sdk/objc/native/api/audio_device_module.mm
+++ b/sdk/objc/native/api/audio_device_module.mm
@@ -20,7 +20,7 @@
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_IOS)
- return new rtc::RefCountedObject<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
+ return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing);
#else
RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
return nullptr;
diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm
index cae7a50..0101c79 100644
--- a/sdk/objc/native/api/video_capturer.mm
+++ b/sdk/objc/native/api/video_capturer.mm
@@ -22,8 +22,8 @@
rtc::Thread *signaling_thread,
rtc::Thread *worker_thread) {
RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
- rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source(
- new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>(adapter));
+ rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objc_video_track_source =
+ rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>(adapter);
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> video_source =
webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread, objc_video_track_source);
diff --git a/sdk/objc/native/api/video_frame_buffer.mm b/sdk/objc/native/api/video_frame_buffer.mm
index 6dc9975..e1d8aad 100644
--- a/sdk/objc/native/api/video_frame_buffer.mm
+++ b/sdk/objc/native/api/video_frame_buffer.mm
@@ -16,7 +16,7 @@
rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
- return new rtc::RefCountedObject<ObjCFrameBuffer>(objc_video_frame_buffer);
+ return rtc::make_ref_counted<ObjCFrameBuffer>(objc_video_frame_buffer);
}
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm
index deb38a7..34d5d4e 100644
--- a/sdk/objc/native/src/objc_frame_buffer.mm
+++ b/sdk/objc/native/src/objc_frame_buffer.mm
@@ -67,7 +67,7 @@
rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
rtc::scoped_refptr<I420BufferInterface> buffer =
- new rtc::RefCountedObject<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
+ rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
return buffer;
}
diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm
index d005d0e..e144fda 100644
--- a/sdk/objc/native/src/objc_video_decoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -57,7 +57,7 @@
int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
[decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
const rtc::scoped_refptr<VideoFrameBuffer> buffer =
- new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+ rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
VideoFrame videoFrame =
VideoFrame::Builder()
.set_video_frame_buffer(buffer)
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm
index a973237..7937e90 100644
--- a/sdk/objc/native/src/objc_video_track_source.mm
+++ b/sdk/objc/native/src/objc_video_track_source.mm
@@ -91,12 +91,12 @@
rtc::scoped_refptr<VideoFrameBuffer> buffer;
if (adapted_width == frame.width && adapted_height == frame.height) {
// No adaption - optimized path.
- buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
} else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
// Adapted CVPixelBuffer frame.
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
- buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
@@ -108,7 +108,7 @@
// Adapted I420 frame.
// TODO(magjed): Optimize this I420 path.
rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
- buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
+ buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
buffer = i420_buffer;
}
diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
index ca3d672..8bd1f1a 100644
--- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
+++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
@@ -51,7 +51,7 @@
}
- (void)setUp {
- _video_source = new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>();
+ _video_source = rtc::make_ref_counted<webrtc::ObjCVideoTrackSource>();
}
- (void)tearDown {
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 7c1594a..cd97c81 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -83,7 +83,7 @@
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
- new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
@@ -101,7 +101,7 @@
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
- new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
diff --git a/test/mac_capturer.mm b/test/mac_capturer.mm
index 1f84c1b..da8e9b7 100644
--- a/test/mac_capturer.mm
+++ b/test/mac_capturer.mm
@@ -26,7 +26,7 @@
didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
- new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(frame.buffer);
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(frame.buffer);
_capturer->OnFrame(webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0)