Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h and not via a compiler flag, to ensure
  it has a unique value.

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. An API type declared
  without this macro is not included in the set of types affected by the
  configurable RTC_OBJC_TYPE_PREFIX (see the sketch below).
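
  As an illustration only (not necessarily the literal contents of
  RTCMacros.h), the two macros could fit together roughly as follows; the
  RTC_OBJC_SYMBOL_CONCAT helper names and the MyApp prefix are made up for
  the example:

    // Hypothetical sketch of the prefixing mechanism in RTCMacros.h.
    #define RTC_OBJC_TYPE_PREFIX  // patched locally by clients, e.g. to MyApp
    #define RTC_OBJC_SYMBOL_CONCAT_HELPER(a, b) a##b
    #define RTC_OBJC_SYMBOL_CONCAT(a, b) RTC_OBJC_SYMBOL_CONCAT_HELPER(a, b)
    #define RTC_OBJC_TYPE(type_name) \
      RTC_OBJC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

    // Declaring and referencing an API type through the macro, as done in
    // this CL:
    RTC_OBJC_EXPORT
    @interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource)
    @end

    // With an empty prefix this expands to RTCAudioSource; with
    // RTC_OBJC_TYPE_PREFIX defined as MyApp it expands to MyAppRTCAudioSource.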

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 were produced with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
index d983ae6..9b123d2 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h
+++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
@@ -23,7 +23,7 @@
  * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to
  * this video renderer.
  */
-@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoRenderer)> videoRenderer;
 
 /**
  * The native VideoSinkInterface surface exposed by this adapter. Calls made
@@ -33,7 +33,7 @@
 @property(nonatomic, readonly) rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
 
 /** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer
     NS_DESIGNATED_INITIALIZER;
 
 @end
diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm
index 27dd6c2..ef02f72 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter.mm
+++ b/sdk/objc/api/RTCVideoRendererAdapter.mm
@@ -26,7 +26,7 @@
   }
 
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
-    RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
+    RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
 
     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
@@ -51,7 +51,7 @@
 
 @synthesize videoRenderer = _videoRenderer;
 
-- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoRenderer {
   NSParameterAssert(videoRenderer);
   if (self = [super init]) {
     _videoRenderer = videoRenderer;
diff --git a/sdk/objc/api/logging/RTCCallbackLogger.h b/sdk/objc/api/logging/RTCCallbackLogger.h
index 2bce03f..c1aeb82 100644
--- a/sdk/objc/api/logging/RTCCallbackLogger.h
+++ b/sdk/objc/api/logging/RTCCallbackLogger.h
@@ -22,7 +22,7 @@
 // This class intercepts WebRTC logs and forwards them to a registered block.
 // This class is not threadsafe.
 RTC_OBJC_EXPORT
-@interface RTCCallbackLogger : NSObject
+@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject
 
 // The severity level to capture. The default is kRTCLoggingSeverityInfo.
 @property(nonatomic, assign) RTCLoggingSeverity severity;
diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm
index e58b03b..443fee1 100644
--- a/sdk/objc/api/logging/RTCCallbackLogger.mm
+++ b/sdk/objc/api/logging/RTCCallbackLogger.mm
@@ -64,7 +64,7 @@
   RTCCallbackLoggerMessageAndSeverityHandler callback_handler_;
 };
 
-@implementation RTCCallbackLogger {
+@implementation RTC_OBJC_TYPE (RTCCallbackLogger) {
   BOOL _hasStarted;
   std::unique_ptr<rtc::LogSink> _logSink;
 }
diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
index bf1ea62..2c333f9 100644
--- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
+++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h
@@ -12,20 +12,22 @@
 
 #import "RTCMediaSource+Private.h"
 
-@interface RTCAudioSource ()
+@interface RTC_OBJC_TYPE (RTCAudioSource)
+()
 
-/**
- * The AudioSourceInterface object passed to this RTCAudioSource during
- * construction.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
+    /**
+     * The AudioSourceInterface object passed to this RTCAudioSource during
+     * construction.
+     */
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
 
 /** Initialize an RTCAudioSource from a native AudioSourceInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
     NS_DESIGNATED_INITIALIZER;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.h b/sdk/objc/api/peerconnection/RTCAudioSource.h
index d1030e3..9f78dcd 100644
--- a/sdk/objc/api/peerconnection/RTCAudioSource.h
+++ b/sdk/objc/api/peerconnection/RTCAudioSource.h
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCAudioSource : RTCMediaSource
+@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource)
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm
index a6822f6..b56c6e9 100644
--- a/sdk/objc/api/peerconnection/RTCAudioSource.mm
+++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm
@@ -12,13 +12,13 @@
 
 #include "rtc_base/checks.h"
 
-@implementation RTCAudioSource {
+@implementation RTC_OBJC_TYPE (RTCAudioSource) {
 }
 
 @synthesize volume = _volume;
 @synthesize nativeAudioSource = _nativeAudioSource;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeAudioSource:
                   (rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
   RTC_DCHECK(factory);
@@ -32,7 +32,7 @@
   return self;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type {
   RTC_NOTREACHED();
@@ -41,7 +41,7 @@
 
 - (NSString *)description {
   NSString *stateString = [[self class] stringForState:self.state];
-  return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString];
 }
 
 - (void)setVolume:(double)volume {
diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
index 88dd971..6495500 100644
--- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
+++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h
@@ -14,15 +14,16 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
-@interface RTCAudioTrack ()
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@interface RTC_OBJC_TYPE (RTCAudioTrack)
+()
 
-/** AudioTrackInterface created or passed in at construction. */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
+    /** AudioTrackInterface created or passed in at construction. */
+    @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
 
 /** Initialize an RTCAudioTrack with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                         source:(RTCAudioSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                         source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
                         trackId:(NSString *)trackId;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h
index 501ef92..95eb5d3 100644
--- a/sdk/objc/api/peerconnection/RTCAudioTrack.h
+++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h
@@ -13,15 +13,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCAudioSource;
+@class RTC_OBJC_TYPE(RTCAudioSource);
 
 RTC_OBJC_EXPORT
-@interface RTCAudioTrack : RTCMediaStreamTrack
+@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
 
 - (instancetype)init NS_UNAVAILABLE;
 
 /** The audio source for this audio track. */
-@property(nonatomic, readonly) RTCAudioSource *source;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
index 3389b76..6a97f46 100644
--- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm
@@ -17,12 +17,12 @@
 
 #include "rtc_base/checks.h"
 
-@implementation RTCAudioTrack
+@implementation RTC_OBJC_TYPE (RTCAudioTrack)
 
 @synthesize source = _source;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                         source:(RTCAudioSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                         source:(RTC_OBJC_TYPE(RTCAudioSource) *)source
                         trackId:(NSString *)trackId {
   RTC_DCHECK(factory);
   RTC_DCHECK(source);
@@ -37,7 +37,7 @@
   return self;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                            type:(RTCMediaStreamTrackType)type {
   NSParameterAssert(factory);
@@ -46,14 +46,13 @@
   return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
 }
 
-
-- (RTCAudioSource *)source {
+- (RTC_OBJC_TYPE(RTCAudioSource) *)source {
   if (!_source) {
     rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
         self.nativeAudioTrack->GetSource();
     if (source) {
-      _source =
-          [[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()];
+      _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory
+                                                     nativeAudioSource:source.get()];
     }
   }
   return _source;
diff --git a/sdk/objc/api/peerconnection/RTCCertificate.h b/sdk/objc/api/peerconnection/RTCCertificate.h
index 50c1ca5..5ac8984 100644
--- a/sdk/objc/api/peerconnection/RTCCertificate.h
+++ b/sdk/objc/api/peerconnection/RTCCertificate.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCCertificate : NSObject <NSCopying>
+@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject <NSCopying>
 
 /** Private key in PEM. */
 @property(nonatomic, readonly, copy) NSString *private_key;
@@ -37,7 +37,7 @@
  *  provided.
  *  - name: "ECDSA" or "RSASSA-PKCS1-v1_5"
  */
-+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params;
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCCertificate.mm b/sdk/objc/api/peerconnection/RTCCertificate.mm
index 250cfc4..e5c33e4 100644
--- a/sdk/objc/api/peerconnection/RTCCertificate.mm
+++ b/sdk/objc/api/peerconnection/RTCCertificate.mm
@@ -16,7 +16,7 @@
 #include "rtc_base/rtc_certificate_generator.h"
 #include "rtc_base/ssl_identity.h"
 
-@implementation RTCCertificate
+@implementation RTC_OBJC_TYPE (RTCCertificate)
 
 @synthesize private_key = _private_key;
 @synthesize certificate = _certificate;
@@ -35,7 +35,7 @@
   return self;
 }
 
-+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params {
++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params {
   rtc::KeyType keyType = rtc::KT_ECDSA;
   NSString *keyTypeString = [params valueForKey:@"name"];
   if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
@@ -63,8 +63,9 @@
   RTC_LOG(LS_INFO) << "CERT PEM ";
   RTC_LOG(LS_INFO) << pem_certificate;
 
-  RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str())
-                                                        certificate:@(pem_certificate.c_str())];
+  RTC_OBJC_TYPE(RTCCertificate) *cert =
+      [[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str())
+                                                    certificate:@(pem_certificate.c_str())];
   return cert;
 }
 
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
index 54783f0..07c0da6 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
+++ b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h
@@ -14,14 +14,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCConfiguration ()
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
 
-/** Optional TurnCustomizer.
- *  With this class one can modify outgoing TURN messages.
- *  The object passed in must remain valid until PeerConnection::Close() is
- * called.
- */
-@property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
+    /** Optional TurnCustomizer.
+     *  With this class one can modify outgoing TURN messages.
+     *  The object passed in must remain valid until PeerConnection::Close() is
+     * called.
+     */
+    @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
index 845f779..70a6532 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
+++ b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCConfiguration ()
+@interface RTC_OBJC_TYPE (RTCConfiguration)
+()
 
-+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy:
-        (RTCIceTransportPolicy)policy;
+    + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy
+    : (RTCIceTransportPolicy)policy;
 
 + (RTCIceTransportPolicy)transportPolicyForTransportsType:
         (webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
@@ -65,8 +66,8 @@
 + (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
 
 /**
- * RTCConfiguration struct representation of this RTCConfiguration. This is
- * needed to pass to the underlying C++ APIs.
+ * RTCConfiguration struct representation of this RTCConfiguration.
+ * This is needed to pass to the underlying C++ APIs.
  */
 - (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
 
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h
index 44d09228..4e9c674 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration.h
+++ b/sdk/objc/api/peerconnection/RTCConfiguration.h
@@ -14,7 +14,7 @@
 #import "RTCCryptoOptions.h"
 #import "RTCMacros.h"
 
-@class RTCIceServer;
+@class RTC_OBJC_TYPE(RTCIceServer);
 
 /**
  * Represents the ice transport policy. This exposes the same states in C++,
@@ -70,7 +70,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCConfiguration : NSObject
+@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject
 
 /** If true, allows DSCP codes to be set on outgoing packets, configured using
  *  networkPriority field of RTCRtpEncodingParameters. Defaults to false.
@@ -78,10 +78,10 @@
 @property(nonatomic, assign) BOOL enableDscp;
 
 /** An array of Ice Servers available to be used by ICE. */
-@property(nonatomic, copy) NSArray<RTCIceServer *> *iceServers;
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCIceServer) *> *iceServers;
 
 /** An RTCCertificate for 're' use. */
-@property(nonatomic, nullable) RTCCertificate *certificate;
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate;
 
 /** Which candidates the ICE agent is allowed to use. The W3C calls it
  * |iceTransportPolicy|, while in C++ it is called |type|. */
@@ -173,9 +173,9 @@
  *
  *  UnifiedPlan will cause RTCPeerConnection to create offers and answers with
  *  multiple m= sections where each m= section maps to one RTCRtpSender and one
- *  RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both video. This
- *  will also cause RTCPeerConnection to ignore all but the first a=ssrc lines
- *  that form a Plan B stream.
+ *  RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both
+ *  video. This will also cause RTCPeerConnection to ignore all but the first a=ssrc
+ *  lines that form a Plan B stream.
  *
  *  For users who wish to send multiple audio/video streams and need to stay
  *  interoperable with legacy WebRTC implementations or use legacy APIs,
@@ -214,7 +214,7 @@
  * frame encryption for native WebRTC. Setting this will overwrite any
  * options set through the PeerConnectionFactory (which is deprecated).
  */
-@property(nonatomic, nullable) RTCCryptoOptions *cryptoOptions;
+@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions;
 
 /**
  * Time interval between audio RTCP reports.
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm
index eeb9493..52c1450 100644
--- a/sdk/objc/api/peerconnection/RTCConfiguration.mm
+++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm
@@ -20,7 +20,7 @@
 #include "rtc_base/rtc_certificate_generator.h"
 #include "rtc_base/ssl_identity.h"
 
-@implementation RTCConfiguration
+@implementation RTC_OBJC_TYPE (RTCConfiguration)
 
 @synthesize enableDscp = _enableDscp;
 @synthesize iceServers = _iceServers;
@@ -70,7 +70,8 @@
     _enableDscp = config.dscp();
     NSMutableArray *iceServers = [NSMutableArray array];
     for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
-      RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
+      RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+          [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server];
       [iceServers addObject:iceServer];
     }
     _iceServers = iceServers;
@@ -78,9 +79,9 @@
       rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
       native_cert = config.certificates[0];
       rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
-      _certificate =
-          [[RTCCertificate alloc] initWithPrivateKey:@(native_pem.private_key().c_str())
-                                         certificate:@(native_pem.certificate().c_str())];
+      _certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc]
+          initWithPrivateKey:@(native_pem.private_key().c_str())
+                 certificate:@(native_pem.certificate().c_str())];
     }
     _iceTransportPolicy =
         [[self class] transportPolicyForTransportsType:config.type];
@@ -122,7 +123,7 @@
     _turnCustomizer = config.turn_customizer;
     _activeResetSrtpParams = config.active_reset_srtp_params;
     if (config.crypto_options) {
-      _cryptoOptions = [[RTCCryptoOptions alloc]
+      _cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
                initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp
                                                      .enable_gcm_crypto_suites
              srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp
@@ -140,7 +141,7 @@
 }
 
 - (NSString *)description {
-  static NSString *formatString = @"RTCConfiguration: "
+  static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): "
                                   @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n"
                                   @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n%d\n}\n";
 
@@ -181,7 +182,7 @@
           webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
 
   nativeConfig->set_dscp(_enableDscp);
-  for (RTCIceServer *iceServer in _iceServers) {
+  for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) {
     nativeConfig->servers.push_back(iceServer.nativeServer);
   }
   nativeConfig->type =
diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.h b/sdk/objc/api/peerconnection/RTCCryptoOptions.h
index b465bb5..759a45e 100644
--- a/sdk/objc/api/peerconnection/RTCCryptoOptions.h
+++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.h
@@ -19,7 +19,7 @@
  * as Objective-C doesn't support nested structures.
  */
 RTC_OBJC_EXPORT
-@interface RTCCryptoOptions : NSObject
+@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject
 
 /**
  * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
index a059f75..fbaa1de 100644
--- a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
+++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm
@@ -10,7 +10,7 @@
 
 #import "RTCCryptoOptions.h"
 
-@implementation RTCCryptoOptions
+@implementation RTC_OBJC_TYPE (RTCCryptoOptions)
 
 @synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites;
 @synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher;
diff --git a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
index e327fb4..2cdbdab 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
+++ b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h
@@ -15,27 +15,29 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-@interface RTCDataBuffer ()
+@interface RTC_OBJC_TYPE (RTCDataBuffer)
+()
 
-/**
- * The native DataBuffer representation of this RTCDatabuffer object. This is
- * needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
+    /**
+     * The native DataBuffer representation of this RTCDatabuffer object. This is
+     * needed to pass to the underlying C++ APIs.
+     */
+    @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
 
 /** Initialize an RTCDataBuffer from a native DataBuffer. */
 - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;
 
 @end
 
-@interface RTCDataChannel ()
+@interface RTC_OBJC_TYPE (RTCDataChannel)
+()
 
-/** Initialize an RTCDataChannel from a native DataChannelInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-              nativeDataChannel:(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
-    NS_DESIGNATED_INITIALIZER;
+    /** Initialize an RTCDataChannel from a native DataChannelInterface. */
+    - (instancetype)initWithFactory
+    : (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel
+    : (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel NS_DESIGNATED_INITIALIZER;
 
 + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
         (RTCDataChannelState)state;
diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.h b/sdk/objc/api/peerconnection/RTCDataChannel.h
index 0cc2de8..2d0661f 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannel.h
+++ b/sdk/objc/api/peerconnection/RTCDataChannel.h
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCDataBuffer : NSObject
+@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject
 
 /** NSData representation of the underlying buffer. */
 @property(nonatomic, readonly) NSData *data;
@@ -34,20 +34,22 @@
 
 @end
 
-@class RTCDataChannel;
+@class RTC_OBJC_TYPE(RTCDataChannel);
 RTC_OBJC_EXPORT
-@protocol RTCDataChannelDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCDataChannelDelegate)<NSObject>
 
-/** The data channel state changed. */
-- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel;
+    /** The data channel state changed. */
+    - (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
 
 /** The data channel successfully received a data buffer. */
-- (void)dataChannel:(RTCDataChannel *)dataChannel
-    didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer;
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+    didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer;
 
 @optional
 /** The data channel's |bufferedAmount| changed. */
-- (void)dataChannel:(RTCDataChannel *)dataChannel didChangeBufferedAmount:(uint64_t)amount;
+- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel
+    didChangeBufferedAmount:(uint64_t)amount;
 
 @end
 
@@ -60,7 +62,7 @@
 };
 
 RTC_OBJC_EXPORT
-@interface RTCDataChannel : NSObject
+@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject
 
 /**
  * A label that can be used to distinguish this data channel from other data
@@ -115,7 +117,7 @@
 @property(nonatomic, readonly) uint64_t bufferedAmount;
 
 /** The delegate for this data channel. */
-@property(nonatomic, weak) id<RTCDataChannelDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate;
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -123,7 +125,7 @@
 - (void)close;
 
 /** Attempt to send |data| on this data channel's underlying data transport. */
-- (BOOL)sendData:(RTCDataBuffer *)data;
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.mm b/sdk/objc/api/peerconnection/RTCDataChannel.mm
index 35c009e..4a79cef 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannel.mm
+++ b/sdk/objc/api/peerconnection/RTCDataChannel.mm
@@ -18,21 +18,21 @@
 
 class DataChannelDelegateAdapter : public DataChannelObserver {
  public:
-  DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
+  DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; }
 
   void OnStateChange() override {
     [channel_.delegate dataChannelDidChangeState:channel_];
   }
 
   void OnMessage(const DataBuffer& buffer) override {
-    RTCDataBuffer *data_buffer =
-        [[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
+    RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer =
+        [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer];
     [channel_.delegate dataChannel:channel_
        didReceiveMessageWithBuffer:data_buffer];
   }
 
   void OnBufferedAmountChange(uint64_t previousAmount) override {
-    id<RTCDataChannelDelegate> delegate = channel_.delegate;
+    id<RTC_OBJC_TYPE(RTCDataChannelDelegate)> delegate = channel_.delegate;
     SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
     if ([delegate respondsToSelector:sel]) {
       [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
@@ -40,12 +40,11 @@
   }
 
  private:
-  __weak RTCDataChannel *channel_;
+  __weak RTC_OBJC_TYPE(RTCDataChannel) * channel_;
 };
 }
 
-
-@implementation RTCDataBuffer {
+@implementation RTC_OBJC_TYPE (RTCDataBuffer) {
   std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
 }
 
@@ -83,9 +82,8 @@
 
 @end
 
-
-@implementation RTCDataChannel {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCDataChannel) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
   std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
   BOOL _isObserverRegistered;
@@ -152,21 +150,20 @@
   _nativeDataChannel->Close();
 }
 
-- (BOOL)sendData:(RTCDataBuffer *)data {
+- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data {
   return _nativeDataChannel->Send(*data.nativeDataBuffer);
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@",
                                     (long)self.channelId,
                                     self.label,
-                                    [[self class]
-                                        stringForState:self.readyState]];
+                                    [[self class] stringForState:self.readyState]];
 }
 
 #pragma mark - Private
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeDataChannel:
                   (rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
   NSParameterAssert(nativeDataChannel);
diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
index 244f742..5aef10f 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
+++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h
@@ -14,9 +14,10 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCDataChannelConfiguration ()
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration)
+()
 
-@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
+    @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
index 96d33f4..9459ae0 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
+++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h
@@ -16,7 +16,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCDataChannelConfiguration : NSObject
+@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject
 
 /** Set to YES if ordered delivery is required. */
 @property(nonatomic, assign) BOOL isOrdered;
diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
index 198bfbb..bf775b1 100644
--- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
+++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCDataChannelConfiguration
+@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration)
 
 @synthesize nativeDataChannelInit = _nativeDataChannelInit;
 
diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
index ec05481..49a6216 100644
--- a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
+++ b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h
@@ -14,7 +14,7 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCDtmfSender : NSObject <RTCDtmfSender>
+@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject <RTC_OBJC_TYPE(RTCDtmfSender)>
 
 @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
 
diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.h b/sdk/objc/api/peerconnection/RTCDtmfSender.h
index 5d86d01..0f1b6ba 100644
--- a/sdk/objc/api/peerconnection/RTCDtmfSender.h
+++ b/sdk/objc/api/peerconnection/RTCDtmfSender.h
@@ -15,14 +15,15 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@protocol RTCDtmfSender <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCDtmfSender)<NSObject>
 
-/**
- * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise
- * returns false. To be able to send DTMF, the associated RTCRtpSender must be
- * able to send packets, and a "telephone-event" codec must be negotiated.
- */
-@property(nonatomic, readonly) BOOL canInsertDtmf;
+    /**
+     * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise
+     * returns false. To be able to send DTMF, the associated RTCRtpSender must be
+     * able to send packets, and a "telephone-event" codec must be negotiated.
+     */
+    @property(nonatomic, readonly) BOOL canInsertDtmf;
 
 /**
  * Queues a task that sends the DTMF tones. The tones parameter is treated
diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.mm b/sdk/objc/api/peerconnection/RTCDtmfSender.mm
index 77d0678..ee3b79c 100644
--- a/sdk/objc/api/peerconnection/RTCDtmfSender.mm
+++ b/sdk/objc/api/peerconnection/RTCDtmfSender.mm
@@ -15,7 +15,7 @@
 
 #include "rtc_base/time_utils.h"
 
-@implementation RTCDtmfSender {
+@implementation RTC_OBJC_TYPE (RTCDtmfSender) {
   rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
 }
 
@@ -48,12 +48,11 @@
 }
 
 - (NSString *)description {
-  return [NSString
-      stringWithFormat:
-          @"RTCDtmfSender {\n  remainingTones: %@\n  duration: %f sec\n  interToneGap: %f sec\n}",
-          [self remainingTones],
-          [self duration],
-          [self interToneGap]];
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n  remainingTones: %@\n  "
+                                    @"duration: %f sec\n  interToneGap: %f sec\n}",
+                                    [self remainingTones],
+                                    [self duration],
+                                    [self interToneGap]];
 }
 
 #pragma mark - Private
@@ -67,7 +66,8 @@
   NSParameterAssert(nativeDtmfSender);
   if (self = [super init]) {
     _nativeDtmfSender = nativeDtmfSender;
-    RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description);
+    RTCLogInfo(
+        @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description);
   }
   return self;
 }
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
index e96ce7b..a078b0a 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h
@@ -15,9 +15,10 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /* Interfaces for converting to/from internal C++ formats. */
-@interface RTCEncodedImage (Private)
+@interface RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
 
-- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage;
+    - (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage;
 - (webrtc::EncodedImage)nativeEncodedImage;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index 36d4d5a..f9e4346 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -56,9 +56,10 @@
 }
 @end
 
-@implementation RTCEncodedImage (Private)
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
+(Private)
 
-- (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
+    - (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
   RTCWrappedEncodedImageBuffer *wrappedBuffer =
       objc_getAssociatedObject(self, @selector(encodedData));
   return wrappedBuffer.buffer;
diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.h b/sdk/objc/api/peerconnection/RTCFileLogger.h
index cd5c1c4..853e673 100644
--- a/sdk/objc/api/peerconnection/RTCFileLogger.h
+++ b/sdk/objc/api/peerconnection/RTCFileLogger.h
@@ -34,7 +34,7 @@
 // For kRTCFileLoggerTypeApp, the oldest log is overwritten.
 // This class is not threadsafe.
 RTC_OBJC_EXPORT
-@interface RTCFileLogger : NSObject
+@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject
 
 // The severity level to capture. The default is kRTCFileLoggerSeverityInfo.
 @property(nonatomic, assign) RTCFileLoggerSeverity severity;
diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm
index 2532fcf..9562245 100644
--- a/sdk/objc/api/peerconnection/RTCFileLogger.mm
+++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm
@@ -21,7 +21,7 @@
 NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
 const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
 
-@implementation RTCFileLogger {
+@implementation RTC_OBJC_TYPE (RTCFileLogger) {
   BOOL _hasStarted;
   NSString *_dirPath;
   NSUInteger _maxFileSize;
diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
index 8c9156c..409e16b 100644
--- a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
+++ b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h
@@ -16,13 +16,14 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCIceCandidate ()
+@interface RTC_OBJC_TYPE (RTCIceCandidate)
+()
 
-/**
- * The native IceCandidateInterface representation of this RTCIceCandidate
- * object. This is needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+    /**
+     * The native IceCandidateInterface representation of this RTCIceCandidate
+     * object. This is needed to pass to the underlying C++ APIs.
+     */
+    @property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
 
 /**
  * Initialize an RTCIceCandidate from a native IceCandidateInterface. No
diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.h b/sdk/objc/api/peerconnection/RTCIceCandidate.h
index 3e305cc..f84843a 100644
--- a/sdk/objc/api/peerconnection/RTCIceCandidate.h
+++ b/sdk/objc/api/peerconnection/RTCIceCandidate.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCIceCandidate : NSObject
+@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject
 
 /**
  * If present, the identifier of the "media stream identification" for the media
diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.mm b/sdk/objc/api/peerconnection/RTCIceCandidate.mm
index cbae3f3..48385ef 100644
--- a/sdk/objc/api/peerconnection/RTCIceCandidate.mm
+++ b/sdk/objc/api/peerconnection/RTCIceCandidate.mm
@@ -15,7 +15,7 @@
 #import "base/RTCLogging.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCIceCandidate
+@implementation RTC_OBJC_TYPE (RTCIceCandidate)
 
 @synthesize sdpMid = _sdpMid;
 @synthesize sdpMLineIndex = _sdpMLineIndex;
@@ -35,7 +35,7 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@",
                                     _sdpMid,
                                     _sdpMLineIndex,
                                     _sdp,
@@ -50,7 +50,7 @@
   std::string sdp;
   candidate->ToString(&sdp);
 
-  RTCIceCandidate *rtcCandidate =
+  RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate =
       [self initWithSdp:[NSString stringForStdString:sdp]
           sdpMLineIndex:candidate->sdp_mline_index()
                  sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
diff --git a/sdk/objc/api/peerconnection/RTCIceServer+Private.h b/sdk/objc/api/peerconnection/RTCIceServer+Private.h
index 53fbb45..3eee8199 100644
--- a/sdk/objc/api/peerconnection/RTCIceServer+Private.h
+++ b/sdk/objc/api/peerconnection/RTCIceServer+Private.h
@@ -14,13 +14,14 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCIceServer ()
+@interface RTC_OBJC_TYPE (RTCIceServer)
+()
 
-/**
- * IceServer struct representation of this RTCIceServer object's data.
- * This is needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
+    /**
+     * IceServer struct representation of this RTCIceServer object's data.
+     * This is needed to pass to the underlying C++ APIs.
+     */
+    @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
 
 /** Initialize an RTCIceServer from a native IceServer. */
 - (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
diff --git a/sdk/objc/api/peerconnection/RTCIceServer.h b/sdk/objc/api/peerconnection/RTCIceServer.h
index ab5fc4a..dd66c61 100644
--- a/sdk/objc/api/peerconnection/RTCIceServer.h
+++ b/sdk/objc/api/peerconnection/RTCIceServer.h
@@ -20,7 +20,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCIceServer : NSObject
+@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject
 
 /** URI(s) for this server represented as NSStrings. */
 @property(nonatomic, readonly) NSArray<NSString *> *urlStrings;
diff --git a/sdk/objc/api/peerconnection/RTCIceServer.mm b/sdk/objc/api/peerconnection/RTCIceServer.mm
index 2138e4c..19a0a7e 100644
--- a/sdk/objc/api/peerconnection/RTCIceServer.mm
+++ b/sdk/objc/api/peerconnection/RTCIceServer.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCIceServer
+@implementation RTC_OBJC_TYPE (RTCIceServer)
 
 @synthesize urlStrings = _urlStrings;
 @synthesize username = _username;
@@ -97,7 +97,7 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
                                     _urlStrings,
                                     _username,
                                     _credential,
diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
index d87659d..faa7962 100644
--- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
+++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCLegacyStatsReport ()
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport)
+()
 
-/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
-- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
+    /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
+    - (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
index 85f2b8f..b3bd12c 100644
--- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
+++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h
@@ -16,7 +16,7 @@
 
 /** This does not currently conform to the spec. */
 RTC_OBJC_EXPORT
-@interface RTCLegacyStatsReport : NSObject
+@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject
 
 /** Time since 1970-01-01T00:00:00Z in milliseconds. */
 @property(nonatomic, readonly) CFTimeInterval timestamp;
diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
index 89e1b85..bd7a1ad 100644
--- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
+++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm
@@ -15,7 +15,7 @@
 
 #include "rtc_base/checks.h"
 
-@implementation RTCLegacyStatsReport
+@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport)
 
 @synthesize timestamp = _timestamp;
 @synthesize type = _type;
@@ -23,7 +23,7 @@
 @synthesize values = _values;
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@",
                                     _reportId,
                                     _type,
                                     _timestamp,
diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
index b3e1b10..97eee83 100644
--- a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h
@@ -16,13 +16,14 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCMediaConstraints ()
+@interface RTC_OBJC_TYPE (RTCMediaConstraints)
+()
 
-/**
- * A MediaConstraints representation of this RTCMediaConstraints object. This is
- * needed to pass to the underlying C++ APIs.
- */
-- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
+    /**
+     * A MediaConstraints representation of this RTCMediaConstraints object. This is
+     * needed to pass to the underlying C++ APIs.
+     */
+    - (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
 
 /** Return a native Constraints object representing these constraints */
 + (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints:
diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/sdk/objc/api/peerconnection/RTCMediaConstraints.h
index 5c1a12e..c5baf20 100644
--- a/sdk/objc/api/peerconnection/RTCMediaConstraints.h
+++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.h
@@ -31,7 +31,7 @@
 RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse;
 
 RTC_OBJC_EXPORT
-@interface RTCMediaConstraints : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
index bfdbdde..0f46e4b 100644
--- a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
+++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
@@ -28,7 +28,7 @@
 NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue);
 NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse);
 
-@implementation RTCMediaConstraints {
+@implementation RTC_OBJC_TYPE (RTCMediaConstraints) {
   NSDictionary<NSString *, NSString *> *_mandatory;
   NSDictionary<NSString *, NSString *> *_optional;
 }
@@ -47,9 +47,8 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
-                                    _mandatory,
-                                    _optional];
+  return [NSString
+      stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional];
 }
 
 #pragma mark - Private
diff --git a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
index 7d69aaa..edda892 100644
--- a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
@@ -14,18 +14,20 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
 typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
   RTCMediaSourceTypeAudio,
   RTCMediaSourceTypeVideo,
 };
 
-@interface RTCMediaSource ()
+@interface RTC_OBJC_TYPE (RTCMediaSource)
+()
 
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
 
diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.h b/sdk/objc/api/peerconnection/RTCMediaSource.h
index 838c783..ba19c2a 100644
--- a/sdk/objc/api/peerconnection/RTCMediaSource.h
+++ b/sdk/objc/api/peerconnection/RTCMediaSource.h
@@ -22,7 +22,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCMediaSource : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject
 
 /** The current state of the RTCMediaSource. */
 @property(nonatomic, readonly) RTCSourceState state;
diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.mm b/sdk/objc/api/peerconnection/RTCMediaSource.mm
index 6ec41c3..61472a7 100644
--- a/sdk/objc/api/peerconnection/RTCMediaSource.mm
+++ b/sdk/objc/api/peerconnection/RTCMediaSource.mm
@@ -12,14 +12,14 @@
 
 #include "rtc_base/checks.h"
 
-@implementation RTCMediaSource {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaSource) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   RTCMediaSourceType _type;
 }
 
 @synthesize nativeMediaSource = _nativeMediaSource;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type {
   RTC_DCHECK(factory);
diff --git a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
index 23149ce..6c8a602 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h
@@ -14,19 +14,22 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCMediaStream ()
+@interface RTC_OBJC_TYPE (RTCMediaStream)
+()
 
-/**
- * MediaStreamInterface representation of this RTCMediaStream object. This is
- * needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
+    /**
+     * MediaStreamInterface representation of this RTCMediaStream object. This is
+     * needed to pass to the underlying C++ APIs.
+     */
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
 
 /** Initialize an RTCMediaStream with an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId;
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                       streamId:(NSString *)streamId;
 
 /** Initialize an RTCMediaStream from a native MediaStreamInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.h b/sdk/objc/api/peerconnection/RTCMediaStream.h
index bb9bec6..2d56f15 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStream.h
+++ b/sdk/objc/api/peerconnection/RTCMediaStream.h
@@ -14,18 +14,18 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCAudioTrack;
-@class RTCPeerConnectionFactory;
-@class RTCVideoTrack;
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
 
 RTC_OBJC_EXPORT
-@interface RTCMediaStream : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject
 
 /** The audio tracks in this stream. */
-@property(nonatomic, strong, readonly) NSArray<RTCAudioTrack *> *audioTracks;
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *audioTracks;
 
 /** The video tracks in this stream. */
-@property(nonatomic, strong, readonly) NSArray<RTCVideoTrack *> *videoTracks;
+@property(nonatomic, strong, readonly) NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *videoTracks;
 
 /** An identifier for this media stream. */
 @property(nonatomic, readonly) NSString *streamId;
@@ -33,16 +33,16 @@
 - (instancetype)init NS_UNAVAILABLE;
 
 /** Adds the given audio track to this media stream. */
-- (void)addAudioTrack:(RTCAudioTrack *)audioTrack;
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
 
 /** Adds the given video track to this media stream. */
-- (void)addVideoTrack:(RTCVideoTrack *)videoTrack;
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
 
 /** Removes the given audio track to this media stream. */
-- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack;
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack;
 
 /** Removes the given video track to this media stream. */
-- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack;
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.mm b/sdk/objc/api/peerconnection/RTCMediaStream.mm
index c1a402a..a6292b5 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStream.mm
+++ b/sdk/objc/api/peerconnection/RTCMediaStream.mm
@@ -18,14 +18,14 @@
 #import "RTCVideoTrack+Private.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCMediaStream {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaStream) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   NSMutableArray *_audioTracks;
   NSMutableArray *_videoTracks;
   rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                        streamId:(NSString *)streamId {
   NSParameterAssert(factory);
   NSParameterAssert(streamId.length);
@@ -35,11 +35,11 @@
   return [self initWithFactory:factory nativeMediaStream:stream];
 }
 
-- (NSArray<RTCAudioTrack *> *)audioTracks {
+- (NSArray<RTC_OBJC_TYPE(RTCAudioTrack) *> *)audioTracks {
   return [_audioTracks copy];
 }
 
-- (NSArray<RTCVideoTrack *> *)videoTracks {
+- (NSArray<RTC_OBJC_TYPE(RTCVideoTrack) *> *)videoTracks {
   return [_videoTracks copy];
 }
 
@@ -47,32 +47,32 @@
   return [NSString stringForStdString:_nativeMediaStream->id()];
 }
 
-- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
+- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
   if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks addObject:audioTrack];
   }
 }
 
-- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
+- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
   if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks addObject:videoTrack];
   }
 }
 
-- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
+- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack {
   NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
   NSAssert(index != NSNotFound,
-           @"|removeAudioTrack| called on unexpected RTCAudioTrack");
+           @"|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)");
   if (index != NSNotFound &&
       _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
     [_audioTracks removeObjectAtIndex:index];
   }
 }
 
-- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
+- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack {
   NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
   NSAssert(index != NSNotFound,
-           @"|removeVideoTrack| called on unexpected RTCVideoTrack");
+           @"|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)");
   if (index != NSNotFound &&
       _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
     [_videoTracks removeObjectAtIndex:index];
@@ -80,7 +80,7 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu",
                                     self.streamId,
                                     (unsigned long)self.audioTracks.count,
                                     (unsigned long)self.videoTracks.count];
@@ -92,7 +92,7 @@
   return _nativeMediaStream;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaStream:
                   (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
   NSParameterAssert(nativeMediaStream);
@@ -108,15 +108,19 @@
 
     for (auto &track : audioTracks) {
       RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
-      RTCAudioTrack *audioTrack =
-          [[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
+      RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack =
+          [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory
+                                                    nativeTrack:track
+                                                           type:type];
       [_audioTracks addObject:audioTrack];
     }
 
     for (auto &track : videoTracks) {
       RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
-      RTCVideoTrack *videoTrack =
-          [[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
+      RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack =
+          [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory
+                                                    nativeTrack:track
+                                                           type:type];
       [_videoTracks addObject:videoTrack];
     }
   }
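
A minimal, hypothetical sketch of how a configurable type prefix can be spliced in at compile time with token pasting; the concatenation helpers below are invented names for illustration and are not the definitions shipped in the real RTCMacros.h. With an empty prefix the wrapper is a no-op, so RTC_OBJC_TYPE(RTCAudioTrack) still names RTCAudioTrack.

// Hypothetical sketch (not the shipped RTCMacros.h): splice a compile-time
// prefix into every exported Objective-C type name via token pasting.
#define RTC_OBJC_TYPE_CONCAT_HELPER(a, b) a##b                  // hypothetical helper
#define RTC_OBJC_TYPE_CONCAT(a, b) RTC_OBJC_TYPE_CONCAT_HELPER(a, b)
#define RTC_OBJC_TYPE_PREFIX                                     // empty: names unchanged
#define RTC_OBJC_TYPE(type_name) RTC_OBJC_TYPE_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

// RTC_OBJC_TYPE(RTCAudioTrack) -> RTCAudioTrack            (empty prefix)
// with "#define RTC_OBJC_TYPE_PREFIX MyApp":
// RTC_OBJC_TYPE(RTCAudioTrack) -> MyAppRTCAudioTrack
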
diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
index 176bb73..ee51e27 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h
@@ -19,11 +19,13 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-@interface RTCMediaStreamTrack ()
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack)
+()
 
-@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
+        @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+    factory;
 
 /**
  * The native MediaStreamTrackInterface passed in or created during
@@ -34,14 +36,14 @@
 /**
  * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
  */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                            type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
 
-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
 
 + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
         (RTCMediaStreamTrackState)state;
@@ -51,9 +53,9 @@
 
 + (NSString *)stringForState:(RTCMediaStreamTrackState)state;
 
-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
-                             (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
-                                          factory:(RTCPeerConnectionFactory *)factory;
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+    mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                     factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory;
 
 @end
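
A hedged internal-usage sketch of the factory method declared above; nativeTrack and factory are placeholders for values a caller would already hold, and the concrete subclass returned depends on the native track's kind.

// Sketch: nativeTrack and factory are assumed to exist at the call site.
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track =
    [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
                                                          factory:factory];
// An audio native track yields an audio track wrapper, a video native track
// yields a video track wrapper, anything else falls back to the base class.
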
 
diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
index d1ea0f2..2200122 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
+++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h
@@ -26,7 +26,7 @@
 RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo;
 
 RTC_OBJC_EXPORT
-@interface RTCMediaStreamTrack : NSObject
+@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject
 
 /**
  * The kind of track. For example, "audio" if this track represents an audio
diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
index 07992a0..f1e128c 100644
--- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm
@@ -19,8 +19,8 @@
 NSString * const kRTCMediaStreamTrackKindVideo =
     @(webrtc::MediaStreamTrackInterface::kVideoKind);
 
-@implementation RTCMediaStreamTrack {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
   RTCMediaStreamTrackType _type;
 }
@@ -47,7 +47,7 @@
 
 - (NSString *)description {
   NSString *readyState = [[self class] stringForState:self.readyState];
-  return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@",
                                     self.kind,
                                     self.trackId,
                                     self.isEnabled ? @"enabled" : @"disabled",
@@ -61,7 +61,7 @@
   if (![object isMemberOfClass:[self class]]) {
     return NO;
   }
-  return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
+  return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object];
 }
 
 - (NSUInteger)hash {
@@ -76,7 +76,7 @@
 
 @synthesize factory = _factory;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
                            type:(RTCMediaStreamTrackType)type {
   NSParameterAssert(nativeTrack);
@@ -89,7 +89,7 @@
   return self;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
   NSParameterAssert(nativeTrack);
   if (nativeTrack->kind() ==
@@ -103,7 +103,7 @@
   return nil;
 }
 
-- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
+- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
   if (!track) {
     return NO;
   }
@@ -139,21 +139,22 @@
   }
 }
 
-+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
-                             (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
-                                          factory:(RTCPeerConnectionFactory *)factory {
++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)
+    mediaTrackForNativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
+                     factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory {
   NSParameterAssert(nativeTrack);
   NSParameterAssert(factory);
   if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
-    return [[RTCAudioTrack alloc] initWithFactory:factory
-                                      nativeTrack:nativeTrack
-                                             type:RTCMediaStreamTrackTypeAudio];
+    return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory
+                                                     nativeTrack:nativeTrack
+                                                            type:RTCMediaStreamTrackTypeAudio];
   } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
-    return [[RTCVideoTrack alloc] initWithFactory:factory
-                                      nativeTrack:nativeTrack
-                                             type:RTCMediaStreamTrackTypeVideo];
+    return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory
+                                                     nativeTrack:nativeTrack
+                                                            type:RTCMediaStreamTrackTypeVideo];
   } else {
-    return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack];
+    return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory
+                                                           nativeTrack:nativeTrack];
   }
 }
 
diff --git a/sdk/objc/api/peerconnection/RTCMetrics.h b/sdk/objc/api/peerconnection/RTCMetrics.h
index 6629fda..fddbb27 100644
--- a/sdk/objc/api/peerconnection/RTCMetrics.h
+++ b/sdk/objc/api/peerconnection/RTCMetrics.h
@@ -20,4 +20,4 @@
 RTC_EXTERN void RTCEnableMetrics(void);
 
 /** Gets and clears native histograms. */
-RTC_EXTERN NSArray<RTCMetricsSampleInfo*>* RTCGetAndResetMetrics(void);
+RTC_EXTERN NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *>* RTCGetAndResetMetrics(void);
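
As a usage sketch of the two functions above, metrics are enabled before the calls being measured and the returned array now carries the macro-wrapped element type.

// Sketch: enable histogram collection, exercise the API, then drain the data.
RTCEnableMetrics();
// ... run the WebRTC calls to be measured ...
NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *samples = RTCGetAndResetMetrics();
for (RTC_OBJC_TYPE(RTCMetricsSampleInfo) *info in samples) {
  NSLog(@"histogram: %@", info.name);  // |name| is a property of the sample info
}
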
diff --git a/sdk/objc/api/peerconnection/RTCMetrics.mm b/sdk/objc/api/peerconnection/RTCMetrics.mm
index 8ca9d96..b3ad352 100644
--- a/sdk/objc/api/peerconnection/RTCMetrics.mm
+++ b/sdk/objc/api/peerconnection/RTCMetrics.mm
@@ -16,7 +16,7 @@
   webrtc::metrics::Enable();
 }
 
-NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics(void) {
+NSArray<RTC_OBJC_TYPE(RTCMetricsSampleInfo) *> *RTCGetAndResetMetrics(void) {
   std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
       histograms;
   webrtc::metrics::GetAndReset(&histograms);
@@ -24,8 +24,8 @@
   NSMutableArray *metrics =
       [NSMutableArray arrayWithCapacity:histograms.size()];
   for (auto const &histogram : histograms) {
-    RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
-        initWithNativeSampleInfo:*histogram.second];
+    RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric =
+        [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second];
     [metrics addObject:metric];
   }
   return metrics;
diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
index c465b1c..e4aa41f 100644
--- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCMetricsSampleInfo ()
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo)
+()
 
-/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
-- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info;
+    /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
+    - (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
index cd38ab9..47a877b 100644
--- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
+++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCMetricsSampleInfo : NSObject
+@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject
 
 /**
  * Example of RTCMetricsSampleInfo:
diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
index a4937fb..e4be94e 100644
--- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
+++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCMetricsSampleInfo
+@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo)
 
 @synthesize name = _name;
 @synthesize min = _min;
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
index 6c84fa3..1ded45d 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm
@@ -14,10 +14,12 @@
 #import "RTCDataChannelConfiguration+Private.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCPeerConnection (DataChannel)
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
 
-- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
-                                   configuration:(RTCDataChannelConfiguration *)configuration {
+    - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+    : (NSString *)label configuration
+    : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration {
   std::string labelString = [NSString stdStringForString:label];
   const webrtc::DataChannelInit nativeInit =
       configuration.nativeDataChannelInit;
@@ -27,7 +29,8 @@
   if (!dataChannel) {
     return nil;
   }
-  return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel];
+  return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory
+                                              nativeDataChannel:dataChannel];
 }
 
 @end
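
A short caller-side sketch of the category above; peerConnection stands in for an already-created connection and the label is arbitrary.

// Sketch: the method returns nil when the native data channel cannot be created.
RTC_OBJC_TYPE(RTCDataChannelConfiguration) *config =
    [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCDataChannel) *channel =
    [peerConnection dataChannelForLabel:@"chat" configuration:config];
if (!channel) {
  // Creation failed; there is no channel to keep.
}
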
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
index 93b4ec7..7358810 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h
@@ -22,7 +22,7 @@
  */
 class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
  public:
-  PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
+  PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection);
   ~PeerConnectionDelegateAdapter() override;
 
   void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;
@@ -58,15 +58,17 @@
   void OnRemoveTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
 
  private:
-  __weak RTCPeerConnection *peer_connection_;
+  __weak RTC_OBJC_TYPE(RTCPeerConnection) * peer_connection_;
 };
 
 }  // namespace webrtc
 
-@interface RTCPeerConnection ()
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+()
 
-/** The factory used to create this RTCPeerConnection */
-@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
+    /** The factory used to create this RTCPeerConnection */
+    @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) *
+    factory;
 
 /** The native PeerConnectionInterface created during construction. */
 @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::PeerConnectionInterface>
@@ -75,20 +77,20 @@
 /** Initialize an RTCPeerConnection with a configuration, constraints, and
  *  delegate.
  */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                  configuration:(RTCConfiguration *)configuration
-                    constraints:(RTCMediaConstraints *)constraints
-                       delegate:(nullable id<RTCPeerConnectionDelegate>)delegate;
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                  configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                    constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+                       delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
 
 /** Initialize an RTCPeerConnection with a configuration, constraints,
  *  delegate and PeerConnectionDependencies.
  */
-- (instancetype)initWithDependencies:(RTCPeerConnectionFactory *)factory
-                       configuration:(RTCConfiguration *)configuration
-                         constraints:(RTCMediaConstraints *)constraints
+- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                       configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                         constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
                         dependencies:
                             (std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
-                            delegate:(nullable id<RTCPeerConnectionDelegate>)delegate
+                            delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate
     NS_DESIGNATED_INITIALIZER;
 
 + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
index e2965eb..46a6e3c 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm
@@ -39,8 +39,8 @@
 
 class StatsObserverAdapter : public StatsObserver {
  public:
-  StatsObserverAdapter(void (^completionHandler)
-      (NSArray<RTCLegacyStatsReport *> *stats)) {
+  StatsObserverAdapter(
+      void (^completionHandler)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats)) {
     completion_handler_ = completionHandler;
   }
 
@@ -50,8 +50,8 @@
     RTC_DCHECK(completion_handler_);
     NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
     for (const auto* report : reports) {
-      RTCLegacyStatsReport *statsReport =
-          [[RTCLegacyStatsReport alloc] initWithNativeReport:*report];
+      RTC_OBJC_TYPE(RTCLegacyStatsReport) *statsReport =
+          [[RTC_OBJC_TYPE(RTCLegacyStatsReport) alloc] initWithNativeReport:*report];
       [stats addObject:statsReport];
     }
     completion_handler_(stats);
@@ -59,20 +59,21 @@
   }
 
  private:
-  void (^completion_handler_)(NSArray<RTCLegacyStatsReport *> *stats);
+  void (^completion_handler_)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats);
 };
 }  // namespace webrtc
 
-@implementation RTCPeerConnection (Stats)
+@implementation RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
 
-- (void)statisticsForSender:(RTCRtpSender *)sender
-          completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
+    - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler
+    : (RTCStatisticsCompletionHandler)completionHandler {
   rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
       new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
   self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector);
 }
 
-- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
             completionHandler:(RTCStatisticsCompletionHandler)completionHandler {
   rtc::scoped_refptr<webrtc::StatsCollectorCallbackAdapter> collector(
       new rtc::RefCountedObject<webrtc::StatsCollectorCallbackAdapter>(completionHandler));
@@ -85,10 +86,10 @@
   self.nativePeerConnection->GetStats(collector);
 }
 
-- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
+- (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack
      statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
     completionHandler:
-    (void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler {
+        (void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler {
   rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
       new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
           (completionHandler));
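
A hedged sketch of the legacy stats path touched above; passing nil for the track gathers reports for all tracks, and the completion block now takes the wrapped report type.

// Sketch: peerConnection is assumed to be an existing RTC_OBJC_TYPE(RTCPeerConnection).
[peerConnection statsForTrack:nil
             statsOutputLevel:RTCStatsOutputLevelDebug
            completionHandler:^(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats) {
              NSLog(@"received %lu legacy reports", (unsigned long)stats.count);
            }];
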
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h
index 012295c..cfc0a3d 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h
@@ -12,21 +12,21 @@
 
 #import "RTCMacros.h"
 
-@class RTCConfiguration;
-@class RTCDataChannel;
-@class RTCDataChannelConfiguration;
-@class RTCIceCandidate;
-@class RTCMediaConstraints;
-@class RTCMediaStream;
-@class RTCMediaStreamTrack;
-@class RTCPeerConnectionFactory;
-@class RTCRtpReceiver;
-@class RTCRtpSender;
-@class RTCRtpTransceiver;
-@class RTCRtpTransceiverInit;
-@class RTCSessionDescription;
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCDataChannel);
+@class RTC_OBJC_TYPE(RTCDataChannelConfiguration);
+@class RTC_OBJC_TYPE(RTCIceCandidate);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCMediaStreamTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
+@class RTC_OBJC_TYPE(RTCRtpSender);
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
+@class RTC_OBJC_TYPE(RTCRtpTransceiverInit);
+@class RTC_OBJC_TYPE(RTCSessionDescription);
 @class RTCStatisticsReport;
-@class RTCLegacyStatsReport;
+@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
 
 typedef NS_ENUM(NSInteger, RTCRtpMediaType);
 
@@ -81,45 +81,49 @@
   RTCStatsOutputLevelDebug,
 };
 
-@class RTCPeerConnection;
+@class RTC_OBJC_TYPE(RTCPeerConnection);
 
 RTC_OBJC_EXPORT
-@protocol RTCPeerConnectionDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate)<NSObject>
 
-/** Called when the SignalingState changed. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didChangeSignalingState:(RTCSignalingState)stateChanged;
+    /** Called when the SignalingState changed. */
+    - (void)peerConnection
+    : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState
+    : (RTCSignalingState)stateChanged;
 
 /** Called when media is received on a new stream from remote peer. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+          didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
 
 /** Called when a remote peer closes a stream.
  *  This is not called when RTCSdpSemanticsUnifiedPlan is specified.
  */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+       didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
 
 /** Called when negotiation is needed, for example ICE has restarted. */
-- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection;
+- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection;
 
 /** Called any time the IceConnectionState changes. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeIceConnectionState:(RTCIceConnectionState)newState;
 
 /** Called any time the IceGatheringState changes. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeIceGatheringState:(RTCIceGatheringState)newState;
 
 /** New ice candidate has been found. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didGenerateIceCandidate:(RTCIceCandidate *)candidate;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
 
 /** Called when a group of local Ice candidates have been removed. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
 
 /** New data channel has been opened. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didOpenDataChannel:(RTCDataChannel *)dataChannel;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel;
 
 /** Called when signaling indicates a transceiver will be receiving media from
  *  the remote endpoint.
@@ -128,72 +132,72 @@
 @optional
 /** Called any time the IceConnectionState changes following standardized
  * transition. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState;
 
 /** Called any time the PeerConnectionState changes. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
     didChangeConnectionState:(RTCPeerConnectionState)newState;
 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver;
 
 /** Called when a receiver and its track are created. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-        didAddReceiver:(RTCRtpReceiver *)rtpReceiver
-               streams:(NSArray<RTCMediaStream *> *)mediaStreams;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+        didAddReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver
+               streams:(NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)mediaStreams;
 
 /** Called when the receiver and its track are removed. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-     didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver;
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+     didRemoveReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver;
 
 /** Called when the selected ICE candidate pair is changed. */
-- (void)peerConnection:(RTCPeerConnection *)peerConnection
-    didChangeLocalCandidate:(RTCIceCandidate *)local
-            remoteCandidate:(RTCIceCandidate *)remote
+- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
+    didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
+            remoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
              lastReceivedMs:(int)lastDataReceivedMs
                changeReason:(NSString *)reason;
 
 @end
 
 RTC_OBJC_EXPORT
-@interface RTCPeerConnection : NSObject
+@interface RTC_OBJC_TYPE (RTCPeerConnection) : NSObject
 
 /** The object that will be notified about events such as state changes and
  *  streams being added or removed.
  */
-@property(nonatomic, weak, nullable) id<RTCPeerConnectionDelegate> delegate;
+@property(nonatomic, weak, nullable) id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)> delegate;
 /** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use
  *  |senders| instead.
  */
-@property(nonatomic, readonly) NSArray<RTCMediaStream *> *localStreams;
-@property(nonatomic, readonly, nullable) RTCSessionDescription *localDescription;
-@property(nonatomic, readonly, nullable) RTCSessionDescription *remoteDescription;
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *localStreams;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription;
 @property(nonatomic, readonly) RTCSignalingState signalingState;
 @property(nonatomic, readonly) RTCIceConnectionState iceConnectionState;
 @property(nonatomic, readonly) RTCPeerConnectionState connectionState;
 @property(nonatomic, readonly) RTCIceGatheringState iceGatheringState;
-@property(nonatomic, readonly, copy) RTCConfiguration *configuration;
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration;
 
 /** Gets all RTCRtpSenders associated with this peer connection.
  *  Note: reading this property returns different instances of RTCRtpSender.
  *  Use isEqual: instead of == to compare RTCRtpSender instances.
  */
-@property(nonatomic, readonly) NSArray<RTCRtpSender *> *senders;
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *senders;
 
 /** Gets all RTCRtpReceivers associated with this peer connection.
  *  Note: reading this property returns different instances of RTCRtpReceiver.
  *  Use isEqual: instead of == to compare RTCRtpReceiver instances.
  */
-@property(nonatomic, readonly) NSArray<RTCRtpReceiver *> *receivers;
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers;
 
 /** Gets all RTCRtpTransceivers associated with this peer connection.
  *  Note: reading this property returns different instances of
- *  RTCRtpTransceiver. Use isEqual: instead of == to compare RTCRtpTransceiver
- *  instances.
- *  This is only available with RTCSdpSemanticsUnifiedPlan specified.
+ *  RTCRtpTransceiver. Use isEqual: instead of == to compare
+ *  RTCRtpTransceiver instances. This is only available with
+ * RTCSdpSemanticsUnifiedPlan specified.
  */
-@property(nonatomic, readonly) NSArray<RTCRtpTransceiver *> *transceivers;
+@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *transceivers;
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -203,38 +207,39 @@
  *  new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
  *  cannot be changed with this method.
  */
-- (BOOL)setConfiguration:(RTCConfiguration *)configuration;
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration;
 
 /** Terminate all media and close the transport. */
 - (void)close;
 
 /** Provide a remote candidate to the ICE Agent. */
-- (void)addIceCandidate:(RTCIceCandidate *)candidate;
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
 
 /** Remove a group of remote candidates from the ICE Agent. */
-- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)candidates;
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
 
 /** Add a new media stream to be sent on this peer connection.
  *  This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
  *  addTrack instead.
  */
-- (void)addStream:(RTCMediaStream *)stream;
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
 
 /** Remove the given media stream from this peer connection.
  *  This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
  *  removeTrack instead.
  */
-- (void)removeStream:(RTCMediaStream *)stream;
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream;
 
 /** Add a new media stream track to be sent on this peer connection, and return
- *  the newly created RTCRtpSender. The RTCRtpSender will be associated with
- *  the streams specified in the |streamIds| list.
+ *  the newly created RTCRtpSender. The RTCRtpSender will be
+ * associated with the streams specified in the |streamIds| list.
  *
  *  Errors: If an error occurs, returns nil. An error can occur if:
  *  - A sender already exists for the track.
  *  - The peer connection is closed.
  */
-- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray<NSString *> *)streamIds;
+- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+                                streamIds:(NSArray<NSString *> *)streamIds;
 
 /** With PlanB semantics, removes an RTCRtpSender from this peer connection.
  *
@@ -243,7 +248,7 @@
  *
  *  Returns YES on success.
  */
-- (BOOL)removeTrack:(RTCRtpSender *)sender;
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender;
 
 /** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of
  *  transceivers. Adding a transceiver will cause future calls to CreateOffer
@@ -266,33 +271,37 @@
  *  of the transceiver (and sender/receiver) will be derived from the kind of
  *  the track.
  */
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track;
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track
-                                          init:(RTCRtpTransceiverInit *)init;
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+    (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track;
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+    addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+                       init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init;
 
 /** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio
  *  or RTCRtpMediaTypeVideo.
  */
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType;
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType
-                                       init:(RTCRtpTransceiverInit *)init;
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType;
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType
+                                                      init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)
+                                                               init;
 
 /** Generate an SDP offer. */
-- (void)offerForConstraints:(RTCMediaConstraints *)constraints
-          completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp,
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+          completionHandler:(nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp,
                                                NSError *_Nullable error))completionHandler;
 
 /** Generate an SDP answer. */
-- (void)answerForConstraints:(RTCMediaConstraints *)constraints
-           completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp,
-                                                NSError *_Nullable error))completionHandler;
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+           completionHandler:
+               (nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp,
+                                  NSError *_Nullable error))completionHandler;
 
 /** Apply the supplied RTCSessionDescription as the local description. */
-- (void)setLocalDescription:(RTCSessionDescription *)sdp
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
           completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler;
 
 /** Apply the supplied RTCSessionDescription as the remote description. */
-- (void)setRemoteDescription:(RTCSessionDescription *)sdp
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
            completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler;
 
 /** Limits the bandwidth allocated for all RTP streams sent by this
@@ -310,35 +319,40 @@
 
 @end
 
-@interface RTCPeerConnection (Media)
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Media)
 
-/** Create an RTCRtpSender with the specified kind and media stream ID.
- *  See RTCMediaStreamTrack.h for available kinds.
- *  This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
- *  addTransceiver instead.
- */
-- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId;
+    /** Create an RTCRtpSender with the specified kind and media stream ID.
+     *  See RTCMediaStreamTrack.h for available kinds.
+     *  This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use
+     *  addTransceiver instead.
+     */
+    - (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind : (NSString *)kind streamId
+    : (NSString *)streamId;
 
 @end
 
-@interface RTCPeerConnection (DataChannel)
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(DataChannel)
 
-/** Create a new data channel with the given label and configuration. */
-- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
-                                   configuration:(RTCDataChannelConfiguration *)configuration;
+    /** Create a new data channel with the given label and configuration. */
+    - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel
+    : (NSString *)label configuration : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration;
 
 @end
 
 typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *);
 
-@interface RTCPeerConnection (Stats)
+@interface RTC_OBJC_TYPE (RTCPeerConnection)
+(Stats)
 
-/** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil
- *  statistics are gathered for all tracks.
- */
-- (void)statsForTrack:(nullable RTCMediaStreamTrack *)mediaStreamTrack
-     statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
-    completionHandler:(nullable void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler;
+    /** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil
+     *  statistics are gathered for all tracks.
+     */
+    - (void)statsForTrack
+    : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel
+    : (RTCStatsOutputLevel)statsOutputLevel completionHandler
+    : (nullable void (^)(NSArray<RTC_OBJC_TYPE(RTCLegacyStatsReport) *> *stats))completionHandler;
 
 /** Gather statistics through the v2 statistics API. */
 - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler;
@@ -346,13 +360,13 @@
 /** Spec-compliant getStats() performing the stats selection algorithm with the
  *  sender.
  */
-- (void)statisticsForSender:(RTCRtpSender *)sender
+- (void)statisticsForSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender
           completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
 
 /** Spec-compliant getStats() performing the stats selection algorithm with the
  *  receiver.
  */
-- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver
+- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver
             completionHandler:(RTCStatisticsCompletionHandler)completionHandler;
 
 @end
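
Pulling the header's pieces together, a hedged end-to-end sketch with the new spellings; peerConnection, audioTrack and constraints are assumed to exist already.

// Sketch: add a local track, then generate and apply an offer.
RTC_OBJC_TYPE(RTCRtpSender) *sender = [peerConnection addTrack:audioTrack
                                                     streamIds:@[ @"stream0" ]];
if (!sender) {
  // nil means a sender already existed for the track or the connection is closed.
}
[peerConnection offerForConstraints:constraints
                  completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
                                      NSError *_Nullable error) {
                    if (sdp) {
                      [peerConnection setLocalDescription:sdp
                                         completionHandler:^(NSError *_Nullable err){}];
                    }
                  }];
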
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
index ebdd120..fa68d08 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm
@@ -33,8 +33,7 @@
 #include "rtc_base/checks.h"
 #include "rtc_base/numerics/safe_conversions.h"
 
-NSString * const kRTCPeerConnectionErrorDomain =
-    @"org.webrtc.RTCPeerConnection";
+NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)";
 int const kRTCPeerConnnectionSessionDescriptionError = -1;
 
 namespace webrtc {
@@ -42,9 +41,8 @@
 class CreateSessionDescriptionObserverAdapter
     : public CreateSessionDescriptionObserver {
  public:
-  CreateSessionDescriptionObserverAdapter(
-      void (^completionHandler)(RTCSessionDescription *sessionDescription,
-                                NSError *error)) {
+  CreateSessionDescriptionObserverAdapter(void (^completionHandler)(
+      RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, NSError *error)) {
     completion_handler_ = completionHandler;
   }
 
@@ -54,9 +52,8 @@
     RTC_DCHECK(completion_handler_);
     std::unique_ptr<webrtc::SessionDescriptionInterface> description =
         std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
-    RTCSessionDescription* session =
-        [[RTCSessionDescription alloc] initWithNativeDescription:
-            description.get()];
+    RTC_OBJC_TYPE(RTCSessionDescription) *session =
+        [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description.get()];
     completion_handler_(session, nil);
     completion_handler_ = nil;
   }
@@ -74,8 +71,8 @@
   }
 
  private:
-  void (^completion_handler_)
-      (RTCSessionDescription *sessionDescription, NSError *error);
+  void (^completion_handler_)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription,
+                              NSError *error);
 };
 
 class SetSessionDescriptionObserverAdapter :
@@ -110,8 +107,8 @@
   void (^completion_handler_)(NSError *error);
 };
 
-PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
-    RTCPeerConnection *peerConnection) {
+PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) *
+                                                             peerConnection) {
   peer_connection_ = peerConnection;
 }
 
@@ -122,26 +119,28 @@
 void PeerConnectionDelegateAdapter::OnSignalingChange(
     PeerConnectionInterface::SignalingState new_state) {
   RTCSignalingState state =
-      [[RTCPeerConnection class] signalingStateForNativeState:new_state];
-  RTCPeerConnection *peer_connection = peer_connection_;
+      [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   [peer_connection.delegate peerConnection:peer_connection
                    didChangeSignalingState:state];
 }
 
 void PeerConnectionDelegateAdapter::OnAddStream(
     rtc::scoped_refptr<MediaStreamInterface> stream) {
-  RTCPeerConnection *peer_connection = peer_connection_;
-  RTCMediaStream *mediaStream =
-      [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+      [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+                                           nativeMediaStream:stream];
   [peer_connection.delegate peerConnection:peer_connection
                               didAddStream:mediaStream];
 }
 
 void PeerConnectionDelegateAdapter::OnRemoveStream(
     rtc::scoped_refptr<MediaStreamInterface> stream) {
-  RTCPeerConnection *peer_connection = peer_connection_;
-  RTCMediaStream *mediaStream =
-      [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+      [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+                                           nativeMediaStream:stream];
 
   [peer_connection.delegate peerConnection:peer_connection
                            didRemoveStream:mediaStream];
@@ -149,10 +148,10 @@
 
 void PeerConnectionDelegateAdapter::OnTrack(
     rtc::scoped_refptr<RtpTransceiverInterface> nativeTransceiver) {
-  RTCPeerConnection *peer_connection = peer_connection_;
-  RTCRtpTransceiver *transceiver =
-      [[RTCRtpTransceiver alloc] initWithFactory:peer_connection.factory
-                            nativeRtpTransceiver:nativeTransceiver];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+      [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:peer_connection.factory
+                                           nativeRtpTransceiver:nativeTransceiver];
   if ([peer_connection.delegate
           respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) {
     [peer_connection.delegate peerConnection:peer_connection
@@ -162,21 +161,23 @@
 
 void PeerConnectionDelegateAdapter::OnDataChannel(
     rtc::scoped_refptr<DataChannelInterface> data_channel) {
-  RTCPeerConnection *peer_connection = peer_connection_;
-  RTCDataChannel *dataChannel = [[RTCDataChannel alloc] initWithFactory:peer_connection.factory
-                                                      nativeDataChannel:data_channel];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCDataChannel) *dataChannel =
+      [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:peer_connection.factory
+                                           nativeDataChannel:data_channel];
   [peer_connection.delegate peerConnection:peer_connection
                         didOpenDataChannel:dataChannel];
 }
 
 void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
-  RTCPeerConnection *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
 }
 
 void PeerConnectionDelegateAdapter::OnIceConnectionChange(
     PeerConnectionInterface::IceConnectionState new_state) {
-  RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state];
+  RTCIceConnectionState state =
+      [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
   [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state];
 }
 
@@ -184,7 +185,8 @@
     PeerConnectionInterface::IceConnectionState new_state) {
   if ([peer_connection_.delegate
           respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) {
-    RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state];
+    RTCIceConnectionState state =
+        [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state];
     [peer_connection_.delegate peerConnection:peer_connection_
         didChangeStandardizedIceConnectionState:state];
   }
@@ -194,7 +196,8 @@
     PeerConnectionInterface::PeerConnectionState new_state) {
   if ([peer_connection_.delegate
           respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) {
-    RTCPeerConnectionState state = [RTCPeerConnection connectionStateForNativeState:new_state];
+    RTCPeerConnectionState state =
+        [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state];
     [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state];
   }
 }
@@ -202,17 +205,17 @@
 void PeerConnectionDelegateAdapter::OnIceGatheringChange(
     PeerConnectionInterface::IceGatheringState new_state) {
   RTCIceGatheringState state =
-      [[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
-  RTCPeerConnection *peer_connection = peer_connection_;
+      [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   [peer_connection.delegate peerConnection:peer_connection
                 didChangeIceGatheringState:state];
 }
 
 void PeerConnectionDelegateAdapter::OnIceCandidate(
     const IceCandidateInterface *candidate) {
-  RTCIceCandidate *iceCandidate =
-      [[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
-  RTCPeerConnection *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+      [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   [peer_connection.delegate peerConnection:peer_connection
                    didGenerateIceCandidate:iceCandidate];
 }
@@ -224,11 +227,11 @@
   for (const auto& candidate : candidates) {
     std::unique_ptr<JsepIceCandidate> candidate_wrapper(
         new JsepIceCandidate(candidate.transport_name(), -1, candidate));
-    RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc]
-        initWithNativeCandidate:candidate_wrapper.get()];
+    RTC_OBJC_TYPE(RTCIceCandidate) *ice_candidate =
+        [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate_wrapper.get()];
     [ice_candidates addObject:ice_candidate];
   }
-  RTCPeerConnection* peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   [peer_connection.delegate peerConnection:peer_connection
                     didRemoveIceCandidates:ice_candidates];
 }
@@ -238,13 +241,13 @@
   const auto &selected_pair = event.selected_candidate_pair;
   auto local_candidate_wrapper = std::make_unique<JsepIceCandidate>(
       selected_pair.local_candidate().transport_name(), -1, selected_pair.local_candidate());
-  RTCIceCandidate *local_candidate =
-      [[RTCIceCandidate alloc] initWithNativeCandidate:local_candidate_wrapper.release()];
+  RTC_OBJC_TYPE(RTCIceCandidate) *local_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+      initWithNativeCandidate:local_candidate_wrapper.release()];
   auto remote_candidate_wrapper = std::make_unique<JsepIceCandidate>(
       selected_pair.remote_candidate().transport_name(), -1, selected_pair.remote_candidate());
-  RTCIceCandidate *remote_candidate =
-      [[RTCIceCandidate alloc] initWithNativeCandidate:remote_candidate_wrapper.release()];
-  RTCPeerConnection *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCIceCandidate) *remote_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
+      initWithNativeCandidate:remote_candidate_wrapper.release()];
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   NSString *nsstr_reason = [NSString stringForStdString:event.reason];
   if ([peer_connection.delegate
           respondsToSelector:@selector
@@ -260,17 +263,19 @@
 void PeerConnectionDelegateAdapter::OnAddTrack(
     rtc::scoped_refptr<RtpReceiverInterface> receiver,
     const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) {
-  RTCPeerConnection *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:
                                                              didAddReceiver:streams:)]) {
     NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()];
     for (const auto &nativeStream : streams) {
-      RTCMediaStream *mediaStream = [[RTCMediaStream alloc] initWithFactory:peer_connection.factory
-                                                          nativeMediaStream:nativeStream];
+      RTC_OBJC_TYPE(RTCMediaStream) *mediaStream =
+          [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory
+                                               nativeMediaStream:nativeStream];
       [mediaStreams addObject:mediaStream];
     }
-    RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory
-                                                        nativeRtpReceiver:receiver];
+    RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+        [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+                                             nativeRtpReceiver:receiver];
 
     [peer_connection.delegate peerConnection:peer_connection
                               didAddReceiver:rtpReceiver
@@ -280,19 +285,20 @@
 
 void PeerConnectionDelegateAdapter::OnRemoveTrack(
     rtc::scoped_refptr<RtpReceiverInterface> receiver) {
-  RTCPeerConnection *peer_connection = peer_connection_;
+  RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_;
   if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) {
-    RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory
-                                                        nativeRtpReceiver:receiver];
+    RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver =
+        [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory
+                                             nativeRtpReceiver:receiver];
     [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver];
   }
 }
 
 }  // namespace webrtc
 
-@implementation RTCPeerConnection {
-  RTCPeerConnectionFactory *_factory;
-  NSMutableArray<RTCMediaStream *> *_localStreams;
+@implementation RTC_OBJC_TYPE (RTCPeerConnection) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
+  NSMutableArray<RTC_OBJC_TYPE(RTCMediaStream) *> *_localStreams;
   std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
   rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
   std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
@@ -302,10 +308,10 @@
 @synthesize delegate = _delegate;
 @synthesize factory = _factory;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                  configuration:(RTCConfiguration *)configuration
-                    constraints:(RTCMediaConstraints *)constraints
-                       delegate:(id<RTCPeerConnectionDelegate>)delegate {
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                  configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                    constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+                       delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
   NSParameterAssert(factory);
   std::unique_ptr<webrtc::PeerConnectionDependencies> dependencies =
       std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
@@ -316,12 +322,12 @@
                            delegate:delegate];
 }
 
-- (instancetype)initWithDependencies:(RTCPeerConnectionFactory *)factory
-                       configuration:(RTCConfiguration *)configuration
-                         constraints:(RTCMediaConstraints *)constraints
+- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                       configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                         constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
                         dependencies:
                             (std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
-                            delegate:(id<RTCPeerConnectionDelegate>)delegate {
+                            delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
   NSParameterAssert(factory);
   NSParameterAssert(dependencies.get());
   std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
@@ -348,24 +354,24 @@
   return self;
 }
 
-- (NSArray<RTCMediaStream *> *)localStreams {
+- (NSArray<RTC_OBJC_TYPE(RTCMediaStream) *> *)localStreams {
   return [_localStreams copy];
 }
 
-- (RTCSessionDescription *)localDescription {
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)localDescription {
   const webrtc::SessionDescriptionInterface *description =
       _peerConnection->local_description();
   return description ?
-      [[RTCSessionDescription alloc] initWithNativeDescription:description]
-          : nil;
+      [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+      nil;
 }
 
-- (RTCSessionDescription *)remoteDescription {
+- (RTC_OBJC_TYPE(RTCSessionDescription) *)remoteDescription {
   const webrtc::SessionDescriptionInterface *description =
       _peerConnection->remote_description();
   return description ?
-      [[RTCSessionDescription alloc] initWithNativeDescription:description]
-          : nil;
+      [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] :
+      nil;
 }
 
 - (RTCSignalingState)signalingState {
@@ -387,7 +393,7 @@
       _peerConnection->ice_gathering_state()];
 }
 
-- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
   std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
       [configuration createNativeConfiguration]);
   if (!config) {
@@ -398,25 +404,25 @@
   return _peerConnection->SetConfiguration(*config).ok();
 }
 
-- (RTCConfiguration *)configuration {
+- (RTC_OBJC_TYPE(RTCConfiguration) *)configuration {
   webrtc::PeerConnectionInterface::RTCConfiguration config =
     _peerConnection->GetConfiguration();
-  return [[RTCConfiguration alloc] initWithNativeConfiguration:config];
+  return [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:config];
 }
 
 - (void)close {
   _peerConnection->Close();
 }
 
-- (void)addIceCandidate:(RTCIceCandidate *)candidate {
+- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
   std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
       candidate.nativeCandidate);
   _peerConnection->AddIceCandidate(iceCandidate.get());
 }
 
-- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)iceCandidates {
+- (void)removeIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)iceCandidates {
   std::vector<cricket::Candidate> candidates;
-  for (RTCIceCandidate *iceCandidate in iceCandidates) {
+  for (RTC_OBJC_TYPE(RTCIceCandidate) * iceCandidate in iceCandidates) {
     std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
         iceCandidate.nativeCandidate);
     if (candidate) {
@@ -430,7 +436,7 @@
   }
 }
 
-- (void)addStream:(RTCMediaStream *)stream {
+- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
   if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
     RTCLogError(@"Failed to add stream: %@", stream);
     return;
@@ -438,12 +444,13 @@
   [_localStreams addObject:stream];
 }
 
-- (void)removeStream:(RTCMediaStream *)stream {
+- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
   _peerConnection->RemoveStream(stream.nativeMediaStream);
   [_localStreams removeObject:stream];
 }
 
-- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray<NSString *> *)streamIds {
+- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+                                streamIds:(NSArray<NSString *> *)streamIds {
   std::vector<std::string> nativeStreamIds;
   for (NSString *streamId in streamIds) {
     nativeStreamIds.push_back([streamId UTF8String]);
@@ -454,11 +461,11 @@
     RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message());
     return nil;
   }
-  return [[RTCRtpSender alloc] initWithFactory:self.factory
-                               nativeRtpSender:nativeSenderOrError.MoveValue()];
+  return [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+                                              nativeRtpSender:nativeSenderOrError.MoveValue()];
 }
 
-- (BOOL)removeTrack:(RTCRtpSender *)sender {
+- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
   bool result = _peerConnection->RemoveTrack(sender.nativeRtpSender);
   if (!result) {
     RTCLogError(@"Failed to remote track %@", sender);
@@ -466,12 +473,15 @@
   return result;
 }
 
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track {
-  return [self addTransceiverWithTrack:track init:[[RTCRtpTransceiverInit alloc] init]];
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack:
+    (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
+  return [self addTransceiverWithTrack:track
+                                  init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
 }
 
-- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track
-                                          init:(RTCRtpTransceiverInit *)init {
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)
+    addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track
+                       init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init {
   webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
       _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit);
   if (!nativeTransceiverOrError.ok()) {
@@ -479,33 +489,36 @@
         @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message());
     return nil;
   }
-  return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
-                               nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+  return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+           initWithFactory:self.factory
+      nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
 }
 
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
-  return [self addTransceiverOfType:mediaType init:[[RTCRtpTransceiverInit alloc] init]];
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
+  return [self addTransceiverOfType:mediaType
+                               init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]];
 }
 
-- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType
-                                       init:(RTCRtpTransceiverInit *)init {
+- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType
+                                                      init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)
+                                                               init {
   webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
-      _peerConnection->AddTransceiver([RTCRtpReceiver nativeMediaTypeForMediaType:mediaType],
-                                      init.nativeInit);
+      _peerConnection->AddTransceiver(
+          [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType], init.nativeInit);
   if (!nativeTransceiverOrError.ok()) {
     RTCLogError(@"Failed to add transceiver %@: %s",
-                [RTCRtpReceiver stringForMediaType:mediaType],
+                [RTC_OBJC_TYPE(RTCRtpReceiver) stringForMediaType:mediaType],
                 nativeTransceiverOrError.error().message());
     return nil;
   }
-  return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
-                               nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
+  return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc]
+           initWithFactory:self.factory
+      nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
 }
 
-- (void)offerForConstraints:(RTCMediaConstraints *)constraints
-          completionHandler:
-    (void (^)(RTCSessionDescription *sessionDescription,
-              NSError *error))completionHandler {
+- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+          completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription,
+                                      NSError *error))completionHandler {
   rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
       observer(new rtc::RefCountedObject
           <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
@@ -515,10 +528,9 @@
   _peerConnection->CreateOffer(observer, options);
 }
 
-- (void)answerForConstraints:(RTCMediaConstraints *)constraints
-           completionHandler:
-    (void (^)(RTCSessionDescription *sessionDescription,
-              NSError *error))completionHandler {
+- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+           completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription,
+                                       NSError *error))completionHandler {
   rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
       observer(new rtc::RefCountedObject
           <webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
@@ -528,7 +540,7 @@
   _peerConnection->CreateAnswer(observer, options);
 }
 
-- (void)setLocalDescription:(RTCSessionDescription *)sdp
+- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
           completionHandler:(void (^)(NSError *error))completionHandler {
   rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
       new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
@@ -536,7 +548,7 @@
   _peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
 }
 
-- (void)setRemoteDescription:(RTCSessionDescription *)sdp
+- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
            completionHandler:(void (^)(NSError *error))completionHandler {
   rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
       new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
@@ -588,48 +600,50 @@
   _hasStartedRtcEventLog = NO;
 }
 
-- (RTCRtpSender *)senderWithKind:(NSString *)kind
-                        streamId:(NSString *)streamId {
+- (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId {
   std::string nativeKind = [NSString stdStringForString:kind];
   std::string nativeStreamId = [NSString stdStringForString:streamId];
   rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
       _peerConnection->CreateSender(nativeKind, nativeStreamId));
-  return nativeSender ?
-      [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender] :
-      nil;
+  return nativeSender ? [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+                                                             nativeRtpSender:nativeSender] :
+                        nil;
 }
 
-- (NSArray<RTCRtpSender *> *)senders {
+- (NSArray<RTC_OBJC_TYPE(RTCRtpSender) *> *)senders {
   std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
       _peerConnection->GetSenders());
   NSMutableArray *senders = [[NSMutableArray alloc] init];
   for (const auto &nativeSender : nativeSenders) {
-    RTCRtpSender *sender =
-        [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender];
+    RTC_OBJC_TYPE(RTCRtpSender) *sender =
+        [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory
+                                             nativeRtpSender:nativeSender];
     [senders addObject:sender];
   }
   return senders;
 }
 
-- (NSArray<RTCRtpReceiver *> *)receivers {
+- (NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *)receivers {
   std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
       _peerConnection->GetReceivers());
   NSMutableArray *receivers = [[NSMutableArray alloc] init];
   for (const auto &nativeReceiver : nativeReceivers) {
-    RTCRtpReceiver *receiver =
-        [[RTCRtpReceiver alloc] initWithFactory:self.factory nativeRtpReceiver:nativeReceiver];
+    RTC_OBJC_TYPE(RTCRtpReceiver) *receiver =
+        [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:self.factory
+                                             nativeRtpReceiver:nativeReceiver];
     [receivers addObject:receiver];
   }
   return receivers;
 }
 
-- (NSArray<RTCRtpTransceiver *> *)transceivers {
+- (NSArray<RTC_OBJC_TYPE(RTCRtpTransceiver) *> *)transceivers {
   std::vector<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceivers(
       _peerConnection->GetTransceivers());
   NSMutableArray *transceivers = [[NSMutableArray alloc] init];
   for (const auto &nativeTransceiver : nativeTransceivers) {
-    RTCRtpTransceiver *transceiver = [[RTCRtpTransceiver alloc] initWithFactory:self.factory
-                                                           nativeRtpTransceiver:nativeTransceiver];
+    RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
+        [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:self.factory
+                                             nativeRtpTransceiver:nativeTransceiver];
     [transceivers addObject:transceiver];
   }
   return transceivers;
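
All of the hunks above lean on the same preprocessor mechanism. A rough sketch of how RTC_OBJC_TYPE is expected to expand (the authoritative definitions live in sdk/objc/base/RTCMacros.h; the concat helper names below are illustrative only, not the real macro names):

  #define RTC_OBJC_TYPE_PREFIX            // empty by default
  #define RTC_CONCAT_HELPER(a, b) a##b    // two-level paste so the prefix macro expands first
  #define RTC_CONCAT(a, b) RTC_CONCAT_HELPER(a, b)
  #define RTC_OBJC_TYPE(type_name) RTC_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

With the default (empty) prefix, a wrapped declaration such as
  @interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject
compiles to the familiar
  @interface RTCPeerConnectionFactory : NSObject
whereas a build that redefines RTC_OBJC_TYPE_PREFIX to, say, MyApp yields
  @interface MyAppRTCPeerConnectionFactory : NSObject
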
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
index 7922c91..c2aab0b 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h
@@ -31,9 +31,10 @@
 /**
  * This class extension exposes methods that work directly with injectable C++ components.
  */
-@interface RTCPeerConnectionFactory ()
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
 
-- (instancetype)initNative NS_DESIGNATED_INITIALIZER;
+    - (instancetype)initNative NS_DESIGNATED_INITIALIZER;
 
 /* Initializer used when WebRTC is compiled with no media support */
 - (instancetype)initWithNoMedia;
@@ -84,19 +85,19 @@
                 mediaTransportFactory:
                     (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
 
-- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
-                        decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory
-                 mediaTransportFactory:
-                     (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
+- (instancetype)
+    initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+            decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
+     mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory;
 
 /** Initialize an RTCPeerConnection with a configuration, constraints, and
  *  dependencies.
  */
-- (RTCPeerConnection *)
-    peerConnectionWithDependencies:(RTCConfiguration *)configuration
-                       constraints:(RTCMediaConstraints *)constraints
+- (RTC_OBJC_TYPE(RTCPeerConnection) *)
+    peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                       constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
                       dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
-                          delegate:(nullable id<RTCPeerConnectionDelegate>)delegate;
+                          delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
index db7829c..ef61c2e 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h
@@ -15,16 +15,16 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCPeerConnectionFactory ()
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory)
+()
 
-/**
- * PeerConnectionFactoryInterface created and held by this
- * RTCPeerConnectionFactory object. This is needed to pass to the underlying
- * C++ APIs.
- */
-@property(nonatomic, readonly)
-    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
-        nativeFactory;
+    /**
+     * PeerConnectionFactoryInterface created and held by this
+     * RTCPeerConnectionFactory object. This is needed to pass to the underlying
+     * C++ APIs.
+     */
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
index c808218..3dcd3b6 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h
@@ -14,61 +14,69 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCAudioSource;
-@class RTCAudioTrack;
-@class RTCConfiguration;
-@class RTCMediaConstraints;
-@class RTCMediaStream;
-@class RTCPeerConnection;
-@class RTCVideoSource;
-@class RTCVideoTrack;
-@class RTCPeerConnectionFactoryOptions;
-@protocol RTCPeerConnectionDelegate;
-@protocol RTCVideoDecoderFactory;
-@protocol RTCVideoEncoderFactory;
+@class RTC_OBJC_TYPE(RTCAudioSource);
+@class RTC_OBJC_TYPE(RTCAudioTrack);
+@class RTC_OBJC_TYPE(RTCConfiguration);
+@class RTC_OBJC_TYPE(RTCMediaConstraints);
+@class RTC_OBJC_TYPE(RTCMediaStream);
+@class RTC_OBJC_TYPE(RTCPeerConnection);
+@class RTC_OBJC_TYPE(RTCVideoSource);
+@class RTC_OBJC_TYPE(RTCVideoTrack);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions);
+@protocol RTC_OBJC_TYPE
+(RTCPeerConnectionDelegate);
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
 
 RTC_OBJC_EXPORT
-@interface RTCPeerConnectionFactory : NSObject
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject
 
 /* Initialize object with default H264 video encoder/decoder factories */
 - (instancetype)init;
 
 /* Initialize object with injectable video encoder/decoder factories */
-- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
-                        decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory;
+- (instancetype)
+    initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+            decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory;
 
 /** Initialize an RTCAudioSource with constraints. */
-- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints;
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+    (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints;
 
-/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source with no
- *  constraints.
+/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source
+ * with no constraints.
  */
-- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId;
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId;
 
 /** Initialize an RTCAudioTrack with a source and an id. */
-- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source trackId:(NSString *)trackId;
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+                                               trackId:(NSString *)trackId;
 
-/** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer
- *  implementation, e.g. RTCCameraVideoCapturer, in order to produce frames.
+/** Initialize a generic RTCVideoSource. The RTCVideoSource should be
+ * passed to a RTCVideoCapturer implementation, e.g.
+ * RTCCameraVideoCapturer, in order to produce frames.
  */
-- (RTCVideoSource *)videoSource;
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource;
 
 /** Initialize an RTCVideoTrack with a source and an id. */
-- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source trackId:(NSString *)trackId;
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+                                               trackId:(NSString *)trackId;
 
 /** Initialize an RTCMediaStream with an id. */
-- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId;
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId;
 
 /** Initialize an RTCPeerConnection with a configuration, constraints, and
  *  delegate.
  */
-- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration
-                                           constraints:(RTCMediaConstraints *)constraints
-                                              delegate:
-                                                  (nullable id<RTCPeerConnectionDelegate>)delegate;
+- (RTC_OBJC_TYPE(RTCPeerConnection) *)
+    peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                        constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+                           delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate;
 
 /** Set the options to be used for subsequently created RTCPeerConnections */
-- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options;
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options;
 
 /** Start an AecDump recording. This API call will likely change in the future. */
 - (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes;
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
index b2e12d3..2e34b05fe 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm
@@ -55,7 +55,7 @@
 #include "api/transport/media/media_transport_interface.h"
 #include "media/engine/webrtc_media_engine.h"  // nogncheck
 
-@implementation RTCPeerConnectionFactory {
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) {
   std::unique_ptr<rtc::Thread> _networkThread;
   std::unique_ptr<rtc::Thread> _workerThread;
   std::unique_ptr<rtc::Thread> _signalingThread;
@@ -76,22 +76,23 @@
 #ifdef HAVE_NO_MEDIA
   return [self initWithNoMedia];
 #else
-  return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
-                       nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
-                       nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory(
-                                                     [[RTCVideoEncoderFactoryH264 alloc] init])
-                       nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory(
-                                                     [[RTCVideoDecoderFactoryH264 alloc] init])
-                               audioDeviceModule:[self audioDeviceModule]
-                           audioProcessingModule:nullptr
-                           mediaTransportFactory:nullptr];
+  return [self
+      initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
+              nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
+              nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(
+                                            RTCVideoEncoderFactoryH264) alloc] init])
+              nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(
+                                            RTCVideoDecoderFactoryH264) alloc] init])
+                      audioDeviceModule:[self audioDeviceModule]
+                  audioProcessingModule:nullptr
+                  mediaTransportFactory:nullptr];
 #endif
 }
 
-- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
-                        decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory
-                 mediaTransportFactory:
-                     (std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory {
+- (instancetype)
+    initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+            decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
+     mediaTransportFactory:(std::unique_ptr<webrtc::MediaTransportFactory>)mediaTransportFactory {
 #ifdef HAVE_NO_MEDIA
   return [self initWithNoMedia];
 #else
@@ -112,8 +113,9 @@
                            mediaTransportFactory:std::move(mediaTransportFactory)];
 #endif
 }
-- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
-                        decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory {
+- (instancetype)
+    initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
+            decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory {
   return [self initWithEncoderFactory:encoderFactory
                        decoderFactory:decoderFactory
                 mediaTransportFactory:nullptr];
@@ -241,7 +243,8 @@
   return self;
 }
 
-- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints {
+- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints:
+    (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints {
   std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
   if (constraints) {
     nativeConstraints = constraints.nativeConstraints;
@@ -251,64 +254,58 @@
 
   rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
       _nativeFactory->CreateAudioSource(options);
-  return [[RTCAudioSource alloc] initWithFactory:self nativeAudioSource:source];
+  return [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source];
 }
 
-- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
-  RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil];
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId {
+  RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil];
   return [self audioTrackWithSource:audioSource trackId:trackId];
 }
 
-- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
-                                trackId:(NSString *)trackId {
-  return [[RTCAudioTrack alloc] initWithFactory:self
-                                         source:source
-                                        trackId:trackId];
+- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source
+                                               trackId:(NSString *)trackId {
+  return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId];
 }
 
-- (RTCVideoSource *)videoSource {
-  return [[RTCVideoSource alloc] initWithFactory:self
-                                 signalingThread:_signalingThread.get()
-                                    workerThread:_workerThread.get()];
+- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource {
+  return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self
+                                                signalingThread:_signalingThread.get()
+                                                   workerThread:_workerThread.get()];
 }
 
-- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
-                                trackId:(NSString *)trackId {
-  return [[RTCVideoTrack alloc] initWithFactory:self
-                                         source:source
-                                        trackId:trackId];
+- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source
+                                               trackId:(NSString *)trackId {
+  return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId];
 }
 
-- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
-  return [[RTCMediaStream alloc] initWithFactory:self
-                                        streamId:streamId];
+- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId {
+  return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId];
 }
 
-- (RTCPeerConnection *)peerConnectionWithConfiguration:
-    (RTCConfiguration *)configuration
-                                           constraints:
-    (RTCMediaConstraints *)constraints
-                                              delegate:
-    (nullable id<RTCPeerConnectionDelegate>)delegate {
-  return [[RTCPeerConnection alloc] initWithFactory:self
-                                      configuration:configuration
-                                        constraints:constraints
-                                           delegate:delegate];
+- (RTC_OBJC_TYPE(RTCPeerConnection) *)
+    peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                        constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
+                           delegate:
+                               (nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+  return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self
+                                                     configuration:configuration
+                                                       constraints:constraints
+                                                          delegate:delegate];
 }
 
-- (RTCPeerConnection *)
-    peerConnectionWithDependencies:(RTCConfiguration *)configuration
-                       constraints:(RTCMediaConstraints *)constraints
+- (RTC_OBJC_TYPE(RTCPeerConnection) *)
+    peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration
+                       constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints
                       dependencies:(std::unique_ptr<webrtc::PeerConnectionDependencies>)dependencies
-                          delegate:(id<RTCPeerConnectionDelegate>)delegate {
-  return [[RTCPeerConnection alloc] initWithDependencies:self
-                                           configuration:configuration
-                                             constraints:constraints
-                                            dependencies:std::move(dependencies)
-                                                delegate:delegate];
+                          delegate:(id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate {
+  return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self
+                                                          configuration:configuration
+                                                            constraints:constraints
+                                                           dependencies:std::move(dependencies)
+                                                               delegate:delegate];
 }
 
-- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options {
+- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options {
   RTC_DCHECK(options != nil);
   _nativeFactory->SetOptions(options.nativeOptions);
 }
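
For reference, a minimal caller-side sketch of the factory API touched above, assuming the default (empty) prefix and the usual WebRTC umbrella header; the RTCMediaConstraints initializer is recalled from the public API and may differ:

  #import <WebRTC/WebRTC.h>

  RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
      [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
  RTC_OBJC_TYPE(RTCConfiguration) *config =
      [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
  // Assumed initializer; check RTCMediaConstraints.h for the exact signature.
  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
                                                           optionalConstraints:nil];
  RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
      [factory peerConnectionWithConfiguration:config
                                   constraints:constraints
                                      delegate:nil];
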
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
index 3bb75ee..522e520 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm
@@ -32,12 +32,12 @@
   auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory();
   [builder setAudioDecoderFactory:audioDecoderFactory];
 
-  auto videoEncoderFactory =
-      webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
+  auto videoEncoderFactory = webrtc::ObjCToNativeVideoEncoderFactory(
+      [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]);
   [builder setVideoEncoderFactory:std::move(videoEncoderFactory)];
 
-  auto videoDecoderFactory =
-      webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
+  auto videoDecoderFactory = webrtc::ObjCToNativeVideoDecoderFactory(
+      [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]);
   [builder setVideoDecoderFactory:std::move(videoDecoderFactory)];
 
 #if defined(WEBRTC_IOS)
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
index 189eb736..f0b0de1 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h
@@ -29,7 +29,7 @@
 
 + (RTCPeerConnectionFactoryBuilder *)builder;
 
-- (RTCPeerConnectionFactory *)createPeerConnectionFactory;
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory;
 
 - (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory;
 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
index af3d259..8f52bea 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm
@@ -33,8 +33,9 @@
   return [[RTCPeerConnectionFactoryBuilder alloc] init];
 }
 
-- (RTCPeerConnectionFactory *)createPeerConnectionFactory {
-  RTCPeerConnectionFactory *factory = [RTCPeerConnectionFactory alloc];
+- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+      [RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc];
   return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory
                           nativeAudioDecoderFactory:_audioDecoderFactory
                           nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
index 986b0e6..8832b23 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h
@@ -14,12 +14,12 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCPeerConnectionFactoryOptions ()
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
+()
 
-/** Returns the equivalent native PeerConnectionFactoryInterface::Options
- * structure. */
-@property(nonatomic, readonly)
-    webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
+    /** Returns the equivalent native PeerConnectionFactoryInterface::Options
+     * structure. */
+    @property(nonatomic, readonly) webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
index 4bec869..bfc54a5 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCPeerConnectionFactoryOptions : NSObject
+@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) : NSObject
 
 @property(nonatomic, assign) BOOL disableEncryption;
 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
index f0cc6a6..5467bd5 100644
--- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
+++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm
@@ -25,7 +25,7 @@
 }
 }  // namespace
 
-@implementation RTCPeerConnectionFactoryOptions
+@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions)
 
 @synthesize disableEncryption = _disableEncryption;
 @synthesize disableNetworkMonitor = _disableNetworkMonitor;
diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
index 5471bf4..94c1f92 100644
--- a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCRtcpParameters ()
+@interface RTC_OBJC_TYPE (RTCRtcpParameters)
+()
 
-/** Returns the equivalent native RtcpParameters structure. */
-@property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
+    /** Returns the equivalent native RtcpParameters structure. */
+    @property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
 
 /** Initialize the object with a native RtcpParameters structure. */
 - (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters;
diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.h b/sdk/objc/api/peerconnection/RTCRtcpParameters.h
index 5c26580..1bbaedc 100644
--- a/sdk/objc/api/peerconnection/RTCRtcpParameters.h
+++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCRtcpParameters : NSObject
+@interface RTC_OBJC_TYPE (RTCRtcpParameters) : NSObject
 
 /** The Canonical Name used by RTCP. */
 @property(nonatomic, readonly, copy) NSString *cname;
diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
index 0c33dda..4d6084b 100644
--- a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
+++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCRtcpParameters
+@implementation RTC_OBJC_TYPE (RTCRtcpParameters)
 
 @synthesize cname = _cname;
 @synthesize isReducedSize = _isReducedSize;
diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
index 1b297ed..7833068 100644
--- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCRtpCodecParameters ()
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters)
+()
 
-/** Returns the equivalent native RtpCodecParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
+    /** Returns the equivalent native RtpCodecParameters structure. */
+    @property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
 
 /** Initialize the object with a native RtpCodecParameters structure. */
 - (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters;
diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
index 5d3cac5..a68d9eb 100644
--- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
+++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h
@@ -31,9 +31,9 @@
 RTC_EXTERN const NSString *const kRTCVp9CodecName;
 RTC_EXTERN const NSString *const kRTCH264CodecName;
 
-/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */
+/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTC_OBJC_TYPE(RTCRtpCodecParameters) */
 RTC_OBJC_EXPORT
-@interface RTCRtpCodecParameters : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject
 
 /** The RTP payload type. */
 @property(nonatomic, assign) int payloadType;
diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
index f25679e..f61b93c 100644
--- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm
@@ -34,7 +34,7 @@
 const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
 const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
 
-@implementation RTCRtpCodecParameters
+@implementation RTC_OBJC_TYPE (RTCRtpCodecParameters)
 
 @synthesize payloadType = _payloadType;
 @synthesize name = _name;
diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
index e3684d3..074c9b1 100644
--- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCRtpEncodingParameters ()
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters)
+()
 
-/** Returns the equivalent native RtpEncodingParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
+    /** Returns the equivalent native RtpEncodingParameters structure. */
+    @property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
 
 /** Initialize the object with a native RtpEncodingParameters structure. */
 - (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters;
diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
index 1bbb88d..facd7e5 100644
--- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
+++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h
@@ -23,7 +23,7 @@
 };
 
 RTC_OBJC_EXPORT
-@interface RTCRtpEncodingParameters : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) : NSObject
 
 /** The identifier for the encoding layer. This is used in simulcast. */
 @property(nonatomic, copy, nullable) NSString *rid;
diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
index 4468fb3..eec6ce4 100644
--- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCRtpEncodingParameters
+@implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters)
 
 @synthesize rid = _rid;
 @synthesize isActive = _isActive;
@@ -58,8 +58,8 @@
       _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
     }
     _bitratePriority = nativeParameters.bitrate_priority;
-    _networkPriority =
-        [RTCRtpEncodingParameters priorityFromNativePriority:nativeParameters.network_priority];
+    _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters)
+        priorityFromNativePriority:nativeParameters.network_priority];
   }
   return self;
 }
@@ -91,7 +91,7 @@
   }
   parameters.bitrate_priority = _bitratePriority;
   parameters.network_priority =
-      [RTCRtpEncodingParameters nativePriorityFromPriority:_networkPriority];
+      [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority];
   return parameters;
 }
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h
index cfb7fb1..0b0bce5 100644
--- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h
@@ -15,10 +15,11 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /* Interfaces for converting to/from internal C++ formats. */
-@interface RTCRtpFragmentationHeader (Private)
+@interface RTC_OBJC_TYPE (RTCRtpFragmentationHeader)
+(Private)
 
-- (instancetype)initWithNativeFragmentationHeader:
-        (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader;
+    - (instancetype)initWithNativeFragmentationHeader
+    : (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader;
 - (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm
index 3a4415a..e514cf6 100644
--- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm
@@ -12,10 +12,11 @@
 
 #include "modules/include/module_common_types.h"
 
-@implementation RTCRtpFragmentationHeader (Private)
+@implementation RTC_OBJC_TYPE (RTCRtpFragmentationHeader)
+(Private)
 
-- (instancetype)initWithNativeFragmentationHeader:
-        (const webrtc::RTPFragmentationHeader *)fragmentationHeader {
+    - (instancetype)initWithNativeFragmentationHeader
+    : (const webrtc::RTPFragmentationHeader *)fragmentationHeader {
   if (self = [super init]) {
     if (fragmentationHeader) {
       int count = fragmentationHeader->fragmentationVectorSize;
diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
index 8a2a231..6255847 100644
--- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCRtpHeaderExtension ()
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension)
+()
 
-/** Returns the equivalent native RtpExtension structure. */
-@property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
+    /** Returns the equivalent native RtpExtension structure. */
+    @property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
 
 /** Initialize the object with a native RtpExtension structure. */
 - (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters;
diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
index 3211449..15be5af 100644
--- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
+++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h
@@ -15,7 +15,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCRtpHeaderExtension : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) : NSObject
 
 /** The URI of the RTP header extension, as defined in RFC5285. */
 @property(nonatomic, readonly, copy) NSString *uri;
diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
index afc4786..a19228e 100644
--- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCRtpHeaderExtension
+@implementation RTC_OBJC_TYPE (RTCRtpHeaderExtension)
 
 @synthesize uri = _uri;
 @synthesize id = _id;
diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
index a88ccfa..369475a 100644
--- a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h
@@ -14,10 +14,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCRtpParameters ()
+@interface RTC_OBJC_TYPE (RTCRtpParameters)
+()
 
-/** Returns the equivalent native RtpParameters structure. */
-@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
+    /** Returns the equivalent native RtpParameters structure. */
+    @property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
 
 /** Initialize the object with a native RtpParameters structure. */
 - (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters;
diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.h b/sdk/objc/api/peerconnection/RTCRtpParameters.h
index 8ee8d71..fff6a85 100644
--- a/sdk/objc/api/peerconnection/RTCRtpParameters.h
+++ b/sdk/objc/api/peerconnection/RTCRtpParameters.h
@@ -27,22 +27,23 @@
 };
 
 RTC_OBJC_EXPORT
-@interface RTCRtpParameters : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpParameters) : NSObject
 
 /** A unique identifier for the last set of parameters applied. */
 @property(nonatomic, copy) NSString *transactionId;
 
 /** Parameters used for RTCP. */
-@property(nonatomic, readonly, copy) RTCRtcpParameters *rtcp;
+@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCRtcpParameters) * rtcp;
 
 /** An array containing parameters for RTP header extensions. */
-@property(nonatomic, readonly, copy) NSArray<RTCRtpHeaderExtension *> *headerExtensions;
+@property(nonatomic, readonly, copy)
+    NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtension) *> *headerExtensions;
 
 /** The currently active encodings in the order of preference. */
-@property(nonatomic, copy) NSArray<RTCRtpEncodingParameters *> *encodings;
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *encodings;
 
 /** The negotiated set of send codecs in order of preference. */
-@property(nonatomic, copy) NSArray<RTCRtpCodecParameters *> *codecs;
+@property(nonatomic, copy) NSArray<RTC_OBJC_TYPE(RTCRtpCodecParameters) *> *codecs;
 
 /**
  * Degradation preference in case of CPU adaptation or constrained bandwidth.
diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/sdk/objc/api/peerconnection/RTCRtpParameters.mm
index cbb4576..2236b9a 100644
--- a/sdk/objc/api/peerconnection/RTCRtpParameters.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpParameters.mm
@@ -16,7 +16,7 @@
 #import "RTCRtpHeaderExtension+Private.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCRtpParameters
+@implementation RTC_OBJC_TYPE (RTCRtpParameters)
 
 @synthesize transactionId = _transactionId;
 @synthesize rtcp = _rtcp;
@@ -33,30 +33,31 @@
     (const webrtc::RtpParameters &)nativeParameters {
   if (self = [self init]) {
     _transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
-    _rtcp = [[RTCRtcpParameters alloc] initWithNativeParameters:nativeParameters.rtcp];
+    _rtcp =
+        [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp];
 
     NSMutableArray *headerExtensions = [[NSMutableArray alloc] init];
     for (const auto &headerExtension : nativeParameters.header_extensions) {
-      [headerExtensions
-          addObject:[[RTCRtpHeaderExtension alloc] initWithNativeParameters:headerExtension]];
+      [headerExtensions addObject:[[RTC_OBJC_TYPE(RTCRtpHeaderExtension) alloc]
+                                      initWithNativeParameters:headerExtension]];
     }
     _headerExtensions = headerExtensions;
 
     NSMutableArray *encodings = [[NSMutableArray alloc] init];
     for (const auto &encoding : nativeParameters.encodings) {
-      [encodings addObject:[[RTCRtpEncodingParameters alloc]
+      [encodings addObject:[[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc]
                                initWithNativeParameters:encoding]];
     }
     _encodings = encodings;
 
     NSMutableArray *codecs = [[NSMutableArray alloc] init];
     for (const auto &codec : nativeParameters.codecs) {
-      [codecs addObject:[[RTCRtpCodecParameters alloc]
-                            initWithNativeParameters:codec]];
+      [codecs
+          addObject:[[RTC_OBJC_TYPE(RTCRtpCodecParameters) alloc] initWithNativeParameters:codec]];
     }
     _codecs = codecs;
 
-    _degradationPreference = [RTCRtpParameters
+    _degradationPreference = [RTC_OBJC_TYPE(RTCRtpParameters)
         degradationPreferenceFromNativeDegradationPreference:nativeParameters
                                                                  .degradation_preference];
   }
@@ -67,17 +68,17 @@
   webrtc::RtpParameters parameters;
   parameters.transaction_id = [NSString stdStringForString:_transactionId];
   parameters.rtcp = [_rtcp nativeParameters];
-  for (RTCRtpHeaderExtension *headerExtension in _headerExtensions) {
+  for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) * headerExtension in _headerExtensions) {
     parameters.header_extensions.push_back(headerExtension.nativeParameters);
   }
-  for (RTCRtpEncodingParameters *encoding in _encodings) {
+  for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in _encodings) {
     parameters.encodings.push_back(encoding.nativeParameters);
   }
-  for (RTCRtpCodecParameters *codec in _codecs) {
+  for (RTC_OBJC_TYPE(RTCRtpCodecParameters) * codec in _codecs) {
     parameters.codecs.push_back(codec.nativeParameters);
   }
   if (_degradationPreference) {
-    parameters.degradation_preference = [RTCRtpParameters
+    parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters)
         nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference)
                                                                  _degradationPreference.intValue];
   }
diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
index e085529..c15ce70 100644
--- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
+++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h
@@ -18,13 +18,14 @@
 /**
  * This class extension exposes methods that work directly with injectable C++ components.
  */
-@interface RTCRtpReceiver ()
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
 
-/** Sets a user defined frame decryptor that will decrypt the entire frame.
- * This will decrypt the entire frame using the user provided decryption
- * mechanism regardless of whether SRTP is enabled or not.
- */
-- (void)setFrameDecryptor:(rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor;
+    /** Sets a user defined frame decryptor that will decrypt the entire frame.
+     * This will decrypt the entire frame using the user provided decryption
+     * mechanism regardless of whether SRTP is enabled or not.
+     */
+    - (void)setFrameDecryptor : (rtc::scoped_refptr<webrtc::FrameDecryptorInterface>)frameDecryptor;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
index 6f56739..6aed0b4 100644
--- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h
@@ -14,28 +14,30 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
 namespace webrtc {
 
 class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
  public:
-  RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver);
+  RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver);
 
   void OnFirstPacketReceived(cricket::MediaType media_type) override;
 
  private:
-  __weak RTCRtpReceiver* receiver_;
+  __weak RTC_OBJC_TYPE(RTCRtpReceiver) * receiver_;
 };
 
 }  // namespace webrtc
 
-@interface RTCRtpReceiver ()
+@interface RTC_OBJC_TYPE (RTCRtpReceiver)
+()
 
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
 
 /** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeRtpReceiver:(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
     NS_DESIGNATED_INITIALIZER;
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/sdk/objc/api/peerconnection/RTCRtpReceiver.h
index 7a7dace..7ab2cfa 100644
--- a/sdk/objc/api/peerconnection/RTCRtpReceiver.h
+++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.h
@@ -23,33 +23,36 @@
   RTCRtpMediaTypeData,
 };
 
-@class RTCRtpReceiver;
+@class RTC_OBJC_TYPE(RTCRtpReceiver);
 
 RTC_OBJC_EXPORT
-@protocol RTCRtpReceiverDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiverDelegate)<NSObject>
 
-/** Called when the first RTP packet is received.
- *
- *  Note: Currently if there are multiple RtpReceivers of the same media type,
- *  they will all call OnFirstPacketReceived at once.
- *
- *  For example, if we create three audio receivers, A/B/C, they will listen to
- *  the same signal from the underneath network layer. Whenever the first audio packet
- *  is received, the underneath signal will be fired. All the receivers A/B/C will be
- *  notified and the callback of the receiver's delegate will be called.
- *
- *  The process is the same for video receivers.
- */
-- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver
-    didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType;
+    /** Called when the first RTP packet is received.
+     *
+     *  Note: Currently if there are multiple RtpReceivers of the same media type,
+     *  they will all call OnFirstPacketReceived at once.
+     *
+     *  For example, if we create three audio receivers, A/B/C, they will listen to
+     *  the same signal from the underneath network layer. Whenever the first audio packet
+     *  is received, the underneath signal will be fired. All the receivers A/B/C will be
+     *  notified and the callback of the receiver's delegate will be called.
+     *
+     *  The process is the same for video receivers.
+     */
+    - (void)rtpReceiver
+    : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType
+    : (RTCRtpMediaType)mediaType;
 
 @end
 
 RTC_OBJC_EXPORT
-@protocol RTCRtpReceiver <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCRtpReceiver)<NSObject>
 
-/** A unique identifier for this receiver. */
-@property(nonatomic, readonly) NSString *receiverId;
+    /** A unique identifier for this receiver. */
+    @property(nonatomic, readonly) NSString *receiverId;
 
 /** The currently active RTCRtpParameters, as defined in
  *  https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
@@ -58,22 +61,22 @@
  *  but this API also applies them to receivers, similar to ORTC:
  *  http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
  */
-@property(nonatomic, readonly) RTCRtpParameters *parameters;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
 
 /** The RTCMediaStreamTrack associated with the receiver.
  *  Note: reading this property returns a new instance of
  *  RTCMediaStreamTrack. Use isEqual: instead of == to compare
  *  RTCMediaStreamTrack instances.
  */
-@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track;
+@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
 
 /** The delegate for this RtpReceiver. */
-@property(nonatomic, weak) id<RTCRtpReceiverDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCRtpReceiverDelegate)> delegate;
 
 @end
 
 RTC_OBJC_EXPORT
-@interface RTCRtpReceiver : NSObject <RTCRtpReceiver>
+@interface RTC_OBJC_TYPE (RTCRtpReceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpReceiver)>
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
index deeb4cb..3e00935 100644
--- a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm
@@ -20,8 +20,7 @@
 
 namespace webrtc {
 
-RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(
-    RTCRtpReceiver *receiver) {
+RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver) {
   RTC_CHECK(receiver);
   receiver_ = receiver;
 }
@@ -29,15 +28,15 @@
 void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
     cricket::MediaType media_type) {
   RTCRtpMediaType packet_media_type =
-      [RTCRtpReceiver mediaTypeForNativeMediaType:media_type];
-  RTCRtpReceiver *receiver = receiver_;
+      [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type];
+  RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_;
   [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
 }
 
 }  // namespace webrtc
 
-@implementation RTCRtpReceiver {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCRtpReceiver) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
   std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
 }
@@ -48,23 +47,24 @@
   return [NSString stringForStdString:_nativeRtpReceiver->id()];
 }
 
-- (RTCRtpParameters *)parameters {
-  return [[RTCRtpParameters alloc]
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+  return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
       initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
 }
 
-- (nullable RTCMediaStreamTrack *)track {
+- (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
   rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
     _nativeRtpReceiver->track());
   if (nativeTrack) {
-    return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
+    return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+                                                                factory:_factory];
   }
   return nil;
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCRtpReceiver {\n  receiverId: %@\n}",
-      self.receiverId];
+  return [NSString
+      stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpReceiver) {\n  receiverId: %@\n}", self.receiverId];
 }
 
 - (void)dealloc {
@@ -83,7 +83,7 @@
   if (![object isMemberOfClass:[self class]]) {
     return NO;
   }
-  RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
+  RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = (RTC_OBJC_TYPE(RTCRtpReceiver) *)object;
   return _nativeRtpReceiver == receiver.nativeRtpReceiver;
 }
 
@@ -103,14 +103,13 @@
   return _nativeRtpReceiver;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeRtpReceiver:
                   (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
   if (self = [super init]) {
     _factory = factory;
     _nativeRtpReceiver = nativeRtpReceiver;
-    RTCLogInfo(
-        @"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
+    RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description);
     _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
     _nativeRtpReceiver->SetObserver(_observer.get());
   }
diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
index 89a691c..249d5c5 100644
--- a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
+++ b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h
@@ -18,14 +18,15 @@
 /**
  * This class extension exposes methods that work directly with injectable C++ components.
  */
-@interface RTCRtpSender ()
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
 
-/** Sets a defined frame encryptor that will encrypt the entire frame
- * before it is sent across the network. This will encrypt the entire frame
- * using the user provided encryption mechanism regardless of whether SRTP is
- * enabled or not.
- */
-- (void)setFrameEncryptor:(rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor;
+    /** Sets a defined frame encryptor that will encrypt the entire frame
+     * before it is sent across the network. This will encrypt the entire frame
+     * using the user provided encryption mechanism regardless of whether SRTP is
+     * enabled or not.
+     */
+    - (void)setFrameEncryptor : (rtc::scoped_refptr<webrtc::FrameEncryptorInterface>)frameEncryptor;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
index 389b833..6fdb42b 100644
--- a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h
@@ -14,14 +14,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-@interface RTCRtpSender ()
+@interface RTC_OBJC_TYPE (RTCRtpSender)
+()
 
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
+    @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
 
 /** Initialize an RTCRtpSender with a native RtpSenderInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                 nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
     NS_DESIGNATED_INITIALIZER;
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.h b/sdk/objc/api/peerconnection/RTCRtpSender.h
index c03b4cc..41bb083 100644
--- a/sdk/objc/api/peerconnection/RTCRtpSender.h
+++ b/sdk/objc/api/peerconnection/RTCRtpSender.h
@@ -18,33 +18,34 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@protocol RTCRtpSender <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCRtpSender)<NSObject>
 
-/** A unique identifier for this sender. */
-@property(nonatomic, readonly) NSString *senderId;
+    /** A unique identifier for this sender. */
+    @property(nonatomic, readonly) NSString *senderId;
 
 /** The currently active RTCRtpParameters, as defined in
  *  https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
  */
-@property(nonatomic, copy) RTCRtpParameters *parameters;
+@property(nonatomic, copy) RTC_OBJC_TYPE(RTCRtpParameters) * parameters;
 
 /** The RTCMediaStreamTrack associated with the sender.
  *  Note: reading this property returns a new instance of
  *  RTCMediaStreamTrack. Use isEqual: instead of == to compare
  *  RTCMediaStreamTrack instances.
  */
-@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track;
+@property(nonatomic, copy, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track;
 
 /** IDs of streams associated with the RTP sender */
 @property(nonatomic, copy) NSArray<NSString *> *streamIds;
 
 /** The RTCDtmfSender associated with the RTP sender. */
-@property(nonatomic, readonly, nullable) id<RTCDtmfSender> dtmfSender;
+@property(nonatomic, readonly, nullable) id<RTC_OBJC_TYPE(RTCDtmfSender)> dtmfSender;
 
 @end
 
 RTC_OBJC_EXPORT
-@interface RTCRtpSender : NSObject <RTCRtpSender>
+@interface RTC_OBJC_TYPE (RTCRtpSender) : NSObject <RTC_OBJC_TYPE(RTCRtpSender)>
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.mm b/sdk/objc/api/peerconnection/RTCRtpSender.mm
index d292651..1ca9360 100644
--- a/sdk/objc/api/peerconnection/RTCRtpSender.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpSender.mm
@@ -19,8 +19,8 @@
 
 #include "api/media_stream_interface.h"
 
-@implementation RTCRtpSender {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCRtpSender) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
 }
 
@@ -30,30 +30,30 @@
   return [NSString stringForStdString:_nativeRtpSender->id()];
 }
 
-- (RTCRtpParameters *)parameters {
-  return [[RTCRtpParameters alloc]
+- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
+  return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc]
       initWithNativeParameters:_nativeRtpSender->GetParameters()];
 }
 
-- (void)setParameters:(RTCRtpParameters *)parameters {
+- (void)setParameters:(RTC_OBJC_TYPE(RTCRtpParameters) *)parameters {
   if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) {
-    RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
-        parameters);
+    RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set parameters: %@", self, parameters);
   }
 }
 
-- (RTCMediaStreamTrack *)track {
+- (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
   rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
     _nativeRtpSender->track());
   if (nativeTrack) {
-    return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
+    return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack
+                                                                factory:_factory];
   }
   return nil;
 }
 
-- (void)setTrack:(RTCMediaStreamTrack *)track {
+- (void)setTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track {
   if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
-    RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
+    RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set track %@", self, track);
   }
 }
 
@@ -75,8 +75,8 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCRtpSender {\n  senderId: %@\n}",
-      self.senderId];
+  return [NSString
+      stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpSender) {\n  senderId: %@\n}", self.senderId];
 }
 
 - (BOOL)isEqual:(id)object {
@@ -89,7 +89,7 @@
   if (![object isMemberOfClass:[self class]]) {
     return NO;
   }
-  RTCRtpSender *sender = (RTCRtpSender *)object;
+  RTC_OBJC_TYPE(RTCRtpSender) *sender = (RTC_OBJC_TYPE(RTCRtpSender) *)object;
   return _nativeRtpSender == sender.nativeRtpSender;
 }
 
@@ -109,7 +109,7 @@
   return _nativeRtpSender;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                 nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
   NSParameterAssert(factory);
   NSParameterAssert(nativeRtpSender);
@@ -119,9 +119,10 @@
     rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender(
         _nativeRtpSender->GetDtmfSender());
     if (nativeDtmfSender) {
-      _dtmfSender = [[RTCDtmfSender alloc] initWithNativeDtmfSender:nativeDtmfSender];
+      _dtmfSender =
+          [[RTC_OBJC_TYPE(RTCDtmfSender) alloc] initWithNativeDtmfSender:nativeDtmfSender];
     }
-    RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
+    RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): created sender: %@", self, self.description);
   }
   return self;
 }
diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
index d7f6b58..65d45fb 100644
--- a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
+++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h
@@ -14,21 +14,23 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCPeerConnectionFactory;
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-@interface RTCRtpTransceiverInit ()
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit)
+()
 
-@property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit;
+    @property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit;
 
 @end
 
-@interface RTCRtpTransceiver ()
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver)
+()
 
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpTransceiverInterface>
-    nativeRtpTransceiver;
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::RtpTransceiverInterface> nativeRtpTransceiver;
 
 /** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
            nativeRtpTransceiver:
                (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver
     NS_DESIGNATED_INITIALIZER;
diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
index 968dba3..f8996cc 100644
--- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
+++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h
@@ -30,7 +30,7 @@
  *  https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit
  */
 RTC_OBJC_EXPORT
-@interface RTCRtpTransceiverInit : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject
 
 /** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */
 @property(nonatomic) RTCRtpTransceiverDirection direction;
@@ -39,14 +39,14 @@
 @property(nonatomic) NSArray<NSString *> *streamIds;
 
 /** TODO(bugs.webrtc.org/7600): Not implemented. */
-@property(nonatomic) NSArray<RTCRtpEncodingParameters *> *sendEncodings;
+@property(nonatomic) NSArray<RTC_OBJC_TYPE(RTCRtpEncodingParameters) *> *sendEncodings;
 
 @end
 
-@class RTCRtpTransceiver;
+@class RTC_OBJC_TYPE(RTCRtpTransceiver);
 
-/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC
- *  specification. A transceiver represents a combination of an RTCRtpSender
+/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the
+ *  WebRTC specification. A transceiver represents a combination of an RTCRtpSender
  *  and an RTCRtpReceiver that share a common mid. As defined in JSEP, an
  *  RTCRtpTransceiver is said to be associated with a media description if its
  *  mid property is non-nil; otherwise, it is said to be disassociated.
@@ -59,12 +59,13 @@
  *  https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver
  */
 RTC_OBJC_EXPORT
-@protocol RTCRtpTransceiver <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCRtpTransceiver)<NSObject>
 
-/** Media type of the transceiver. The sender and receiver will also have this
- *  type.
- */
-@property(nonatomic, readonly) RTCRtpMediaType mediaType;
+    /** Media type of the transceiver. The sender and receiver will also have this
+     *  type.
+     */
+    @property(nonatomic, readonly) RTCRtpMediaType mediaType;
 
 /** The mid attribute is the mid negotiated and present in the local and
  *  remote descriptions. Before negotiation is complete, the mid value may be
@@ -78,14 +79,14 @@
  *  present, regardless of the direction of media.
  *  https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender
  */
-@property(nonatomic, readonly) RTCRtpSender *sender;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpSender) * sender;
 
 /** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP
  *  media that may be received with the transceiver's mid. The receiver is
  *  always present, regardless of the direction of media.
  *  https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver
  */
-@property(nonatomic, readonly) RTCRtpReceiver *receiver;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpReceiver) * receiver;
 
 /** The isStopped attribute indicates that the sender of this transceiver will
  *  no longer send, and that the receiver will no longer receive. It is true if
@@ -121,7 +122,7 @@
 @end
 
 RTC_OBJC_EXPORT
-@interface RTCRtpTransceiver : NSObject <RTCRtpTransceiver>
+@interface RTC_OBJC_TYPE (RTCRtpTransceiver) : NSObject <RTC_OBJC_TYPE(RTCRtpTransceiver)>
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
index 74ea456..2995e5f 100644
--- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
+++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm
@@ -17,7 +17,7 @@
 #import "base/RTCLogging.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCRtpTransceiverInit
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit)
 
 @synthesize direction = _direction;
 @synthesize streamIds = _streamIds;
@@ -32,11 +32,12 @@
 
 - (webrtc::RtpTransceiverInit)nativeInit {
   webrtc::RtpTransceiverInit init;
-  init.direction = [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:_direction];
+  init.direction =
+      [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:_direction];
   for (NSString *streamId in _streamIds) {
     init.stream_ids.push_back([streamId UTF8String]);
   }
-  for (RTCRtpEncodingParameters *sendEncoding in _sendEncodings) {
+  for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * sendEncoding in _sendEncodings) {
     init.send_encodings.push_back(sendEncoding.nativeParameters);
   }
   return init;
@@ -44,13 +45,14 @@
 
 @end
 
-@implementation RTCRtpTransceiver {
-  RTCPeerConnectionFactory *_factory;
+@implementation RTC_OBJC_TYPE (RTCRtpTransceiver) {
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory;
   rtc::scoped_refptr<webrtc::RtpTransceiverInterface> _nativeRtpTransceiver;
 }
 
 - (RTCRtpMediaType)mediaType {
-  return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()];
+  return [RTC_OBJC_TYPE(RTCRtpReceiver)
+      mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()];
 }
 
 - (NSString *)mid {
@@ -69,18 +71,18 @@
 }
 
 - (RTCRtpTransceiverDirection)direction {
-  return [RTCRtpTransceiver
+  return [RTC_OBJC_TYPE(RTCRtpTransceiver)
       rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()];
 }
 
 - (void)setDirection:(RTCRtpTransceiverDirection)direction {
   _nativeRtpTransceiver->SetDirection(
-      [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:direction]);
+      [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]);
 }
 
 - (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut {
   if (_nativeRtpTransceiver->current_direction()) {
-    *currentDirectionOut = [RTCRtpTransceiver
+    *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver)
         rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()];
     return YES;
   } else {
@@ -94,7 +96,9 @@
 
 - (NSString *)description {
   return [NSString
-      stringWithFormat:@"RTCRtpTransceiver {\n  sender: %@\n  receiver: %@\n}", _sender, _receiver];
+      stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpTransceiver) {\n  sender: %@\n  receiver: %@\n}",
+                       _sender,
+                       _receiver];
 }
 
 - (BOOL)isEqual:(id)object {
@@ -107,7 +111,7 @@
   if (![object isMemberOfClass:[self class]]) {
     return NO;
   }
-  RTCRtpTransceiver *transceiver = (RTCRtpTransceiver *)object;
+  RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = (RTC_OBJC_TYPE(RTCRtpTransceiver) *)object;
   return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver;
 }
 
@@ -121,7 +125,7 @@
   return _nativeRtpTransceiver;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
            nativeRtpTransceiver:
                (rtc::scoped_refptr<webrtc::RtpTransceiverInterface>)nativeRtpTransceiver {
   NSParameterAssert(factory);
@@ -129,11 +133,13 @@
   if (self = [super init]) {
     _factory = factory;
     _nativeRtpTransceiver = nativeRtpTransceiver;
-    _sender = [[RTCRtpSender alloc] initWithFactory:_factory
-                                    nativeRtpSender:nativeRtpTransceiver->sender()];
-    _receiver = [[RTCRtpReceiver alloc] initWithFactory:_factory
-                                      nativeRtpReceiver:nativeRtpTransceiver->receiver()];
-    RTCLogInfo(@"RTCRtpTransceiver(%p): created transceiver: %@", self, self.description);
+    _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory
+                                                   nativeRtpSender:nativeRtpTransceiver->sender()];
+    _receiver =
+        [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:_factory
+                                             nativeRtpReceiver:nativeRtpTransceiver->receiver()];
+    RTCLogInfo(
+        @"RTC_OBJC_TYPE(RTCRtpTransceiver)(%p): created transceiver: %@", self, self.description);
   }
   return self;
 }
diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
index cc255cd..0f0a06a 100644
--- a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
+++ b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h
@@ -14,14 +14,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCSessionDescription ()
+@interface RTC_OBJC_TYPE (RTCSessionDescription)
+()
 
-/**
- * The native SessionDescriptionInterface representation of this
- * RTCSessionDescription object. This is needed to pass to the underlying C++
- * APIs.
- */
-@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
+    /**
+     * The native SessionDescriptionInterface representation of this
+     * RTCSessionDescription object. This is needed to pass to the underlying C++
+     * APIs.
+     */
+    @property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription;
 
 /**
  * Initialize an RTCSessionDescription from a native
diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.h b/sdk/objc/api/peerconnection/RTCSessionDescription.h
index b9bcab1..6bd118d 100644
--- a/sdk/objc/api/peerconnection/RTCSessionDescription.h
+++ b/sdk/objc/api/peerconnection/RTCSessionDescription.h
@@ -25,7 +25,7 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-@interface RTCSessionDescription : NSObject
+@interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject
 
 /** The type of session description. */
 @property(nonatomic, readonly) RTCSdpType type;
diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm
index 21e5e42..a62870e 100644
--- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm
+++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm
@@ -15,7 +15,7 @@
 
 #include "rtc_base/checks.h"
 
-@implementation RTCSessionDescription
+@implementation RTC_OBJC_TYPE (RTCSessionDescription)
 
 @synthesize type = _type;
 @synthesize sdp = _sdp;
@@ -40,7 +40,7 @@
 }
 
 - (NSString *)description {
-  return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@",
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCSessionDescription):\n%@\n%@",
                                     [[self class] stringForType:_type],
                                     _sdp];
 }
diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
index 9c2178f..5eff996 100644
--- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
+++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h
@@ -15,9 +15,10 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /* Interface for converting to/from internal C++ formats. */
-@interface RTCVideoCodecInfo (Private)
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
 
-- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format;
+    - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format;
 - (webrtc::SdpVideoFormat)nativeSdpVideoFormat;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
index 21aacf6..2eb8d36 100644
--- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm
@@ -12,9 +12,10 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCVideoCodecInfo (Private)
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
+(Private)
 
-- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format {
+    - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format {
   NSMutableDictionary *params = [NSMutableDictionary dictionary];
   for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) {
     [params setObject:[NSString stringForStdString:it->second]
diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
index 5b06245..8323b18 100644
--- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
+++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h
@@ -15,9 +15,10 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /* Interfaces for converting to/from internal C++ formats. */
-@interface RTCVideoEncoderSettings (Private)
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
 
-- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *__nullable)videoCodec;
+    - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *__nullable)videoCodec;
 - (webrtc::VideoCodec)nativeVideoCodec;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
index fe7e690..dec3a61 100644
--- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm
@@ -12,9 +12,10 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCVideoEncoderSettings (Private)
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
+(Private)
 
-- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *)videoCodec {
+    - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec {
   if (self = [super init]) {
     if (videoCodec) {
       const char *codecName = CodecTypeToPayloadString(videoCodec->codecType);
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
index 1827e6b..0390846 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
+++ b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h
@@ -17,26 +17,27 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCVideoSource ()
+@interface RTC_OBJC_TYPE (RTCVideoSource)
+()
 
-/**
- * The VideoTrackSourceInterface object passed to this RTCVideoSource during
- * construction.
- */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
-    nativeVideoSource;
+    /**
+     * The VideoTrackSourceInterface object passed to this RTCVideoSource during
+     * construction.
+     */
+    @property(nonatomic,
+              readonly) rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeVideoSource;
 
 /** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeVideoSource:
                   (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
     NS_DESIGNATED_INITIALIZER;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type NS_UNAVAILABLE;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                 signalingThread:(rtc::Thread *)signalingThread
                    workerThread:(rtc::Thread *)workerThread;
 
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.h b/sdk/objc/api/peerconnection/RTCVideoSource.h
index ec8a45c..cdef8b8 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource.h
+++ b/sdk/objc/api/peerconnection/RTCVideoSource.h
@@ -18,7 +18,7 @@
 
 RTC_OBJC_EXPORT
 
-@interface RTCVideoSource : RTCMediaSource <RTCVideoCapturerDelegate>
+@interface RTC_OBJC_TYPE (RTCVideoSource) : RTC_OBJC_TYPE(RTCMediaSource) <RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm
index 789c843..15b0d6f 100644
--- a/sdk/objc/api/peerconnection/RTCVideoSource.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm
@@ -24,11 +24,11 @@
 // TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
 // RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
 // info.
-@implementation RTCVideoSource {
+@implementation RTC_OBJC_TYPE (RTCVideoSource) {
   rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeVideoSource:
                   (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
   RTC_DCHECK(factory);
@@ -41,14 +41,14 @@
   return self;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
               nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                            type:(RTCMediaSourceType)type {
   RTC_NOTREACHED();
   return nil;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                 signalingThread:(rtc::Thread *)signalingThread
                    workerThread:(rtc::Thread *)workerThread {
   rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
@@ -61,10 +61,11 @@
 
 - (NSString *)description {
   NSString *stateString = [[self class] stringForState:self.state];
-  return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
+  return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCVideoSource)( %p ): %@", self, stateString];
 }
 
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
 }
 
diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
index dd3d172..f1a8d7e 100644
--- a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
+++ b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h
@@ -14,14 +14,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCVideoTrack ()
+@interface RTC_OBJC_TYPE (RTCVideoTrack)
+()
 
-/** VideoTrackInterface created or passed in at construction. */
-@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
+    /** VideoTrackInterface created or passed in at construction. */
+    @property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
 
 /** Initialize an RTCVideoTrack with its source and an id. */
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                         source:(RTCVideoSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                         source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
                         trackId:(NSString *)trackId;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h
index b946889..5382b71 100644
--- a/sdk/objc/api/peerconnection/RTCVideoTrack.h
+++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h
@@ -14,23 +14,24 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@protocol RTCVideoRenderer;
-@class RTCPeerConnectionFactory;
-@class RTCVideoSource;
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
+@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
+@class RTC_OBJC_TYPE(RTCVideoSource);
 
 RTC_OBJC_EXPORT
-@interface RTCVideoTrack : RTCMediaStreamTrack
+@interface RTC_OBJC_TYPE (RTCVideoTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack)
 
 /** The video source for this video track. */
-@property(nonatomic, readonly) RTCVideoSource *source;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source;
 
 - (instancetype)init NS_UNAVAILABLE;
 
 /** Register a renderer that will render all frames received on this track. */
-- (void)addRenderer:(id<RTCVideoRenderer>)renderer;
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
 
 /** Deregister a renderer. */
-- (void)removeRenderer:(id<RTCVideoRenderer>)renderer;
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
 
 @end
 
diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
index 77936a6..3f38dd5 100644
--- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm
+++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm
@@ -16,14 +16,14 @@
 #import "api/RTCVideoRendererAdapter+Private.h"
 #import "helpers/NSString+StdString.h"
 
-@implementation RTCVideoTrack {
+@implementation RTC_OBJC_TYPE (RTCVideoTrack) {
   NSMutableArray *_adapters;
 }
 
 @synthesize source = _source;
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
-                         source:(RTCVideoSource *)source
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+                         source:(RTC_OBJC_TYPE(RTCVideoSource) *)source
                         trackId:(NSString *)trackId {
   NSParameterAssert(factory);
   NSParameterAssert(source);
@@ -38,7 +38,7 @@
   return self;
 }
 
-- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
                     nativeTrack:
                         (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
                            type:(RTCMediaStreamTrackType)type {
@@ -57,19 +57,19 @@
   }
 }
 
-- (RTCVideoSource *)source {
+- (RTC_OBJC_TYPE(RTCVideoSource) *)source {
   if (!_source) {
     rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
         self.nativeVideoTrack->GetSource();
     if (source) {
-      _source =
-          [[RTCVideoSource alloc] initWithFactory:self.factory nativeVideoSource:source.get()];
+      _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory
+                                                     nativeVideoSource:source.get()];
     }
   }
   return _source;
 }
 
-- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
   // Make sure we don't have this renderer yet.
   for (RTCVideoRendererAdapter *adapter in _adapters) {
     if (adapter.videoRenderer == renderer) {
@@ -85,7 +85,7 @@
                                          rtc::VideoSinkWants());
 }
 
-- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
   __block NSUInteger indexToRemove = NSNotFound;
   [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
                                           NSUInteger idx,
diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
index 00786dc..a118b25 100644
--- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
+++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h
@@ -14,12 +14,12 @@
 #import "RTCVideoDecoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoDecoderVP8 : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP8) : NSObject
 
 /* This returns a VP8 decoder that can be returned from an RTCVideoDecoderFactory injected into
  * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it cannot be
  * used independently of the RTCPeerConnectionFactory.
  */
-+ (id<RTCVideoDecoder>)vp8Decoder;
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder;
 
 @end
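
Note: a hypothetical decoder factory using the renamed codec entry points,
assuming the default empty prefix. MyDecoderFactory is illustrative, and the
initWithName:parameters: initializer is assumed from the existing
RTCVideoCodecInfo API; neither is added by this CL.

  #import "RTCVideoCodecInfo.h"
  #import "RTCVideoDecoderFactory.h"
  #import "RTCVideoDecoderVP8.h"

  // Hypothetical factory that only offers the built-in software VP8 decoder.
  @interface MyDecoderFactory : NSObject <RTC_OBJC_TYPE (RTCVideoDecoderFactory)>
  @end

  @implementation MyDecoderFactory
  - (nullable id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
    return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
  }
  - (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
    return @[ [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"VP8" parameters:nil] ];
  }
  @end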
diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
index 9750bd8..91ca3b7 100644
--- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
+++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm
@@ -16,9 +16,9 @@
 
 #include "modules/video_coding/codecs/vp8/include/vp8.h"
 
-@implementation RTCVideoDecoderVP8
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP8)
 
-+ (id<RTCVideoDecoder>)vp8Decoder {
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp8Decoder {
   return [[RTCWrappedNativeVideoDecoder alloc]
       initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP8Decoder::Create())];
 }
diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
index b74c1ef..b3a1743 100644
--- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
+++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h
@@ -14,12 +14,12 @@
 #import "RTCVideoDecoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoDecoderVP9 : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoDecoderVP9) : NSObject
 
 /* This returns a VP9 decoder that can be returned from an RTCVideoDecoderFactory injected into
  * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it cannot be
  * used independently of the RTCPeerConnectionFactory.
  */
-+ (id<RTCVideoDecoder>)vp9Decoder;
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder;
 
 @end
diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
index 48582fe..56041a2 100644
--- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
+++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm
@@ -16,9 +16,9 @@
 
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
 
-@implementation RTCVideoDecoderVP9
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP9)
 
-+ (id<RTCVideoDecoder>)vp9Decoder {
++ (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)vp9Decoder {
   return [[RTCWrappedNativeVideoDecoder alloc]
       initWithNativeDecoder:std::unique_ptr<webrtc::VideoDecoder>(webrtc::VP9Decoder::Create())];
 }
diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
index 8d87a89..e136a5b 100644
--- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
+++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h
@@ -14,12 +14,12 @@
 #import "RTCVideoEncoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderVP8 : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP8) : NSObject
 
 /* This returns a VP8 encoder that can be returned from an RTCVideoEncoderFactory injected into
  * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it cannot be
  * used independently of the RTCPeerConnectionFactory.
  */
-+ (id<RTCVideoEncoder>)vp8Encoder;
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder;
 
 @end
diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
index 677f6dd..1355127 100644
--- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
+++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm
@@ -16,9 +16,9 @@
 
 #include "modules/video_coding/codecs/vp8/include/vp8.h"
 
-@implementation RTCVideoEncoderVP8
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP8)
 
-+ (id<RTCVideoEncoder>)vp8Encoder {
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp8Encoder {
   return [[RTCWrappedNativeVideoEncoder alloc]
       initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP8Encoder::Create())];
 }
diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
index 9efea4b..8f961ef 100644
--- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
+++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h
@@ -14,12 +14,12 @@
 #import "RTCVideoEncoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderVP9 : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoEncoderVP9) : NSObject
 
 /* This returns a VP9 encoder that can be returned from an RTCVideoEncoderFactory injected into
  * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it cannot be
  * used independently of the RTCPeerConnectionFactory.
  */
-+ (id<RTCVideoEncoder>)vp9Encoder;
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder;
 
 @end
diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
index a5d8408..ec9e75a 100644
--- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
+++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm
@@ -16,9 +16,9 @@
 
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
 
-@implementation RTCVideoEncoderVP9
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9)
 
-+ (id<RTCVideoEncoder>)vp9Encoder {
++ (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)vp9Encoder {
   return [[RTCWrappedNativeVideoEncoder alloc]
       initWithNativeEncoder:std::unique_ptr<webrtc::VideoEncoder>(webrtc::VP9Encoder::Create())];
 }
diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
index b5694c7..2241c0c 100644
--- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
+++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h
@@ -15,7 +15,7 @@
 #include "api/video_codecs/video_decoder.h"
 #include "media/base/codec.h"
 
-@interface RTCWrappedNativeVideoDecoder : NSObject <RTCVideoDecoder>
+@interface RTCWrappedNativeVideoDecoder : NSObject <RTC_OBJC_TYPE (RTCVideoDecoder)>
 
 - (instancetype)initWithNativeDecoder:(std::unique_ptr<webrtc::VideoDecoder>)decoder;
 
diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
index dce479c..e4d8dc3 100644
--- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
+++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm
@@ -29,7 +29,7 @@
   return std::move(_wrappedDecoder);
 }
 
-#pragma mark - RTCVideoDecoder
+#pragma mark - RTC_OBJC_TYPE(RTCVideoDecoder)
 
 - (void)setCallback:(RTCVideoDecoderCallback)callback {
   RTC_NOTREACHED();
@@ -45,9 +45,9 @@
   return 0;
 }
 
-- (NSInteger)decode:(RTCEncodedImage *)encodedImage
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
         missingFrames:(BOOL)missingFrames
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
          renderTimeMs:(int64_t)renderTimeMs {
   RTC_NOTREACHED();
   return 0;
diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
index b4ef882..ec16793 100644
--- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
+++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h
@@ -16,7 +16,7 @@
 #include "api/video_codecs/video_encoder.h"
 #include "media/base/codec.h"
 
-@interface RTCWrappedNativeVideoEncoder : NSObject <RTCVideoEncoder>
+@interface RTCWrappedNativeVideoEncoder : NSObject <RTC_OBJC_TYPE (RTCVideoEncoder)>
 
 - (instancetype)initWithNativeEncoder:(std::unique_ptr<webrtc::VideoEncoder>)encoder;
 
diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
index 9afd54f..6feecab 100644
--- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
+++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm
@@ -29,13 +29,13 @@
   return std::move(_wrappedEncoder);
 }
 
-#pragma mark - RTCVideoEncoder
+#pragma mark - RTC_OBJC_TYPE(RTCVideoEncoder)
 
 - (void)setCallback:(RTCVideoEncoderCallback)callback {
   RTC_NOTREACHED();
 }
 
-- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
                        numberOfCores:(int)numberOfCores {
   RTC_NOTREACHED();
   return 0;
@@ -46,8 +46,8 @@
   return 0;
 }
 
-- (NSInteger)encode:(RTCVideoFrame *)frame
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
            frameTypes:(NSArray<NSNumber *> *)frameTypes {
   RTC_NOTREACHED();
   return 0;
@@ -63,7 +63,7 @@
   return nil;
 }
 
-- (nullable RTCVideoEncoderQpThresholds *)scalingSettings {
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
   RTC_NOTREACHED();
   return nil;
 }
diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
index fad08c2..20dc807 100644
--- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
+++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h
@@ -14,7 +14,8 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCI420Buffer () {
+@interface RTC_OBJC_TYPE (RTCI420Buffer)
+() {
  @protected
   rtc::scoped_refptr<webrtc::I420BufferInterface> _i420Buffer;
 }
diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
index 9a904f5..3afe209 100644
--- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
+++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h
@@ -17,7 +17,7 @@
 
 /** RTCI420Buffer implements the RTCI420Buffer protocol */
 RTC_OBJC_EXPORT
-@interface RTCI420Buffer : NSObject<RTCI420Buffer>
+@interface RTC_OBJC_TYPE (RTCI420Buffer) : NSObject<RTC_OBJC_TYPE(RTCI420Buffer)>
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
index d9d5d15..f82f206 100644
--- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
+++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm
@@ -17,7 +17,7 @@
 #include "third_party/libyuv/include/libyuv.h"
 #endif
 
-@implementation RTCI420Buffer
+@implementation RTC_OBJC_TYPE (RTCI420Buffer)
 
 - (instancetype)initWithWidth:(int)width height:(int)height {
   if (self = [super init]) {
@@ -99,7 +99,7 @@
   return _i420Buffer->DataV();
 }
 
-- (id<RTCI420Buffer>)toI420 {
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
   return self;
 }
 
diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
index 6cd5110..053a10a 100644
--- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
+++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h
@@ -18,7 +18,7 @@
 
 /** Mutable version of RTCI420Buffer */
 RTC_OBJC_EXPORT
-@interface RTCMutableI420Buffer : RTCI420Buffer<RTCMutableI420Buffer>
+@interface RTC_OBJC_TYPE (RTCMutableI420Buffer) : RTC_OBJC_TYPE(RTCI420Buffer)<RTC_OBJC_TYPE(RTCMutableI420Buffer)>
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
index 5c6c1ff..1e669bc 100644
--- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
+++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm
@@ -14,7 +14,7 @@
 
 #include "api/video/i420_buffer.h"
 
-@implementation RTCMutableI420Buffer
+@implementation RTC_OBJC_TYPE (RTCMutableI420Buffer)
 
 - (uint8_t *)mutableDataY {
   return static_cast<webrtc::I420Buffer *>(_i420Buffer.get())->MutableDataY();
diff --git a/sdk/objc/base/RTCCodecSpecificInfo.h b/sdk/objc/base/RTCCodecSpecificInfo.h
index e2ae4ca..5e7800e 100644
--- a/sdk/objc/base/RTCCodecSpecificInfo.h
+++ b/sdk/objc/base/RTCCodecSpecificInfo.h
@@ -18,7 +18,7 @@
  *  Corresponds to webrtc::CodecSpecificInfo.
  */
 RTC_OBJC_EXPORT
-@protocol RTCCodecSpecificInfo <NSObject>
-@end
+@protocol RTC_OBJC_TYPE
+(RTCCodecSpecificInfo)<NSObject> @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/base/RTCEncodedImage.h b/sdk/objc/base/RTCEncodedImage.h
index 670c727..5fec8a2 100644
--- a/sdk/objc/base/RTCEncodedImage.h
+++ b/sdk/objc/base/RTCEncodedImage.h
@@ -31,7 +31,7 @@
 
 /** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */
 RTC_OBJC_EXPORT
-@interface RTCEncodedImage : NSObject
+@interface RTC_OBJC_TYPE (RTCEncodedImage) : NSObject
 
 @property(nonatomic, strong) NSData *buffer;
 @property(nonatomic, assign) int32_t encodedWidth;
diff --git a/sdk/objc/base/RTCEncodedImage.m b/sdk/objc/base/RTCEncodedImage.m
index 024a57c..dec9630 100644
--- a/sdk/objc/base/RTCEncodedImage.m
+++ b/sdk/objc/base/RTCEncodedImage.m
@@ -10,7 +10,7 @@
 
 #import "RTCEncodedImage.h"
 
-@implementation RTCEncodedImage
+@implementation RTC_OBJC_TYPE (RTCEncodedImage)
 
 @synthesize buffer = _buffer;
 @synthesize encodedWidth = _encodedWidth;
diff --git a/sdk/objc/base/RTCI420Buffer.h b/sdk/objc/base/RTCI420Buffer.h
index a6c7e41..b97f05a 100644
--- a/sdk/objc/base/RTCI420Buffer.h
+++ b/sdk/objc/base/RTCI420Buffer.h
@@ -16,7 +16,7 @@
 
 /** Protocol for RTCYUVPlanarBuffers containing I420 data */
 RTC_OBJC_EXPORT
-@protocol RTCI420Buffer <RTCYUVPlanarBuffer>
-@end
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)> @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h
index 7f7e64c..e527ff6 100644
--- a/sdk/objc/base/RTCMacros.h
+++ b/sdk/objc/base/RTCMacros.h
@@ -11,6 +11,30 @@
 #ifndef SDK_OBJC_BASE_RTCMACROS_H_
 #define SDK_OBJC_BASE_RTCMACROS_H_
 
+// Internal macros used to correctly concatenate symbols.
+#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
+#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
+
+// RTC_OBJC_TYPE_PREFIX
+//
+// Macro used to prepend a prefix to the API types that are exported with
+// RTC_OBJC_EXPORT.
+//
+// Clients can patch the definition of this macro locally and build
+// WebRTC.framework with their own prefix in case symbol clashing is a
+// problem.
+//
+// This macro must only be defined here and not via compiler flag to
+// ensure it has a unique value.
+#define RTC_OBJC_TYPE_PREFIX
+
+// RTC_OBJC_TYPE
+//
+// Macro used internally to declare API types. Declaring an API type without
+// using this macro will not include the declared type in the set of types
+// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX.
+#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
+
 #define RTC_OBJC_EXPORT __attribute__((visibility("default")))
 
 #if defined(__cplusplus)
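
Note: a short sketch of how the new macros compose, with a hypothetical
MyApp_ prefix standing in for whatever a client patches into RTCMacros.h;
nothing below is added by this CL.

  // Patched locally in RTCMacros.h by a client that needs renamed symbols:
  //   #define RTC_OBJC_TYPE_PREFIX MyApp_
  //
  // The two-level RTC_SYMBOL_CONCAT expands the prefix macro before token
  // pasting, so
  //   RTC_OBJC_TYPE(RTCVideoFrame)   ->   MyApp_RTCVideoFrame
  // and a wrapped declaration such as
  //   @interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject ... @end
  // declares MyApp_RTCVideoFrame, avoiding clashes with another binary that
  // kept the default (empty) prefix and therefore the plain RTCVideoFrame name.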
diff --git a/sdk/objc/base/RTCMutableI420Buffer.h b/sdk/objc/base/RTCMutableI420Buffer.h
index 098fb9a..cde7219 100644
--- a/sdk/objc/base/RTCMutableI420Buffer.h
+++ b/sdk/objc/base/RTCMutableI420Buffer.h
@@ -17,7 +17,7 @@
 
 /** Extension of the I420 buffer with mutable data access */
 RTC_OBJC_EXPORT
-@protocol RTCMutableI420Buffer <RTCI420Buffer, RTCMutableYUVPlanarBuffer>
-@end
+@protocol RTC_OBJC_TYPE
+(RTCMutableI420Buffer)<RTC_OBJC_TYPE(RTCI420Buffer), RTC_OBJC_TYPE(RTCMutableYUVPlanarBuffer)> @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
index 00dfcd9..bd14e3b 100644
--- a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
+++ b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h
@@ -16,9 +16,10 @@
 
 /** Extension of the YUV planar data buffer with mutable data access */
 RTC_OBJC_EXPORT
-@protocol RTCMutableYUVPlanarBuffer <RTCYUVPlanarBuffer>
+@protocol RTC_OBJC_TYPE
+(RTCMutableYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCYUVPlanarBuffer)>
 
-@property(nonatomic, readonly) uint8_t *mutableDataY;
+    @property(nonatomic, readonly) uint8_t *mutableDataY;
 @property(nonatomic, readonly) uint8_t *mutableDataU;
 @property(nonatomic, readonly) uint8_t *mutableDataV;
 
diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.h b/sdk/objc/base/RTCRtpFragmentationHeader.h
index 2e26b08..001b4e9 100644
--- a/sdk/objc/base/RTCRtpFragmentationHeader.h
+++ b/sdk/objc/base/RTCRtpFragmentationHeader.h
@@ -16,7 +16,7 @@
 
 /** Information for header. Corresponds to webrtc::RTPFragmentationHeader. */
 RTC_OBJC_EXPORT
-@interface RTCRtpFragmentationHeader : NSObject
+@interface RTC_OBJC_TYPE (RTCRtpFragmentationHeader) : NSObject
 
 @property(nonatomic, strong) NSArray<NSNumber *> *fragmentationOffset;
 @property(nonatomic, strong) NSArray<NSNumber *> *fragmentationLength;
diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.m b/sdk/objc/base/RTCRtpFragmentationHeader.m
index 8049abc..60e2f5d 100644
--- a/sdk/objc/base/RTCRtpFragmentationHeader.m
+++ b/sdk/objc/base/RTCRtpFragmentationHeader.m
@@ -10,11 +10,11 @@
 
 #import "RTCRtpFragmentationHeader.h"
 
-@implementation RTCRtpFragmentationHeader
+@implementation RTC_OBJC_TYPE (RTCRtpFragmentationHeader)
 
 @synthesize fragmentationOffset = _fragmentationOffset;
 @synthesize fragmentationLength = _fragmentationLength;
 @synthesize fragmentationTimeDiff = _fragmentationTimeDiff;
 @synthesize fragmentationPlType = _fragmentationPlType;
 
-@end
\ No newline at end of file
+@end
diff --git a/sdk/objc/base/RTCVideoCapturer.h b/sdk/objc/base/RTCVideoCapturer.h
index 5212627..a1ffdcf 100644
--- a/sdk/objc/base/RTCVideoCapturer.h
+++ b/sdk/objc/base/RTCVideoCapturer.h
@@ -14,19 +14,21 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCVideoCapturer;
+@class RTC_OBJC_TYPE(RTCVideoCapturer);
 
 RTC_OBJC_EXPORT
-@protocol RTCVideoCapturerDelegate <NSObject>
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame;
+@protocol RTC_OBJC_TYPE
+(RTCVideoCapturerDelegate)<NSObject> -
+    (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame
+    : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 @end
 
 RTC_OBJC_EXPORT
-@interface RTCVideoCapturer : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoCapturer) : NSObject
 
-@property(nonatomic, weak) id<RTCVideoCapturerDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> delegate;
 
-- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate;
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate;
 
 @end
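
Note: the auto-formatted, macro-wrapped protocol declaration above reads
oddly, but nothing semantic changes. Assuming the default empty prefix, the
compiler effectively still sees the original declaration:

  @protocol RTCVideoCapturerDelegate <NSObject>
  - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame;
  @end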
 
diff --git a/sdk/objc/base/RTCVideoCapturer.m b/sdk/objc/base/RTCVideoCapturer.m
index 39cc377..ca31a73 100644
--- a/sdk/objc/base/RTCVideoCapturer.m
+++ b/sdk/objc/base/RTCVideoCapturer.m
@@ -10,11 +10,11 @@
 
 #import "RTCVideoCapturer.h"
 
-@implementation RTCVideoCapturer
+@implementation RTC_OBJC_TYPE (RTCVideoCapturer)
 
 @synthesize delegate = _delegate;
 
-- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
+- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
   if (self = [super init]) {
     _delegate = delegate;
   }
diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h
index 2162caa..fa28958 100644
--- a/sdk/objc/base/RTCVideoCodecInfo.h
+++ b/sdk/objc/base/RTCVideoCodecInfo.h
@@ -16,7 +16,7 @@
 
 /** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */
 RTC_OBJC_EXPORT
-@interface RTCVideoCodecInfo : NSObject <NSCoding>
+@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject <NSCoding>
 
 - (instancetype)init NS_UNAVAILABLE;
 
@@ -26,7 +26,7 @@
                   parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters
     NS_DESIGNATED_INITIALIZER;
 
-- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info;
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
 
 @property(nonatomic, readonly) NSString *name;
 @property(nonatomic, readonly) NSDictionary<NSString *, NSString *> *parameters;
diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m
index 7fb17ca..ce26ae1 100644
--- a/sdk/objc/base/RTCVideoCodecInfo.m
+++ b/sdk/objc/base/RTCVideoCodecInfo.m
@@ -10,7 +10,7 @@
 
 #import "RTCVideoCodecInfo.h"
 
-@implementation RTCVideoCodecInfo
+@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
 
 @synthesize name = _name;
 @synthesize parameters = _parameters;
@@ -29,7 +29,7 @@
   return self;
 }
 
-- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info {
+- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
   if (!info ||
       ![self.name isEqualToString:info.name] ||
       ![self.parameters isEqualToDictionary:info.parameters]) {
diff --git a/sdk/objc/base/RTCVideoDecoder.h b/sdk/objc/base/RTCVideoDecoder.h
index 8077c69..ccddd42 100644
--- a/sdk/objc/base/RTCVideoDecoder.h
+++ b/sdk/objc/base/RTCVideoDecoder.h
@@ -19,18 +19,19 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /** Callback block for decoder. */
-typedef void (^RTCVideoDecoderCallback)(RTCVideoFrame *frame);
+typedef void (^RTCVideoDecoderCallback)(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
 
 /** Protocol for decoder implementations. */
 RTC_OBJC_EXPORT
-@protocol RTCVideoDecoder <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoder)<NSObject>
 
-- (void)setCallback:(RTCVideoDecoderCallback)callback;
+    - (void)setCallback : (RTCVideoDecoderCallback)callback;
 - (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores;
 - (NSInteger)releaseDecoder;
-- (NSInteger)decode:(RTCEncodedImage *)encodedImage
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage
         missingFrames:(BOOL)missingFrames
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
          renderTimeMs:(int64_t)renderTimeMs;
 - (NSString *)implementationName;
 
diff --git a/sdk/objc/base/RTCVideoDecoderFactory.h b/sdk/objc/base/RTCVideoDecoderFactory.h
index 3e24153..8d90138 100644
--- a/sdk/objc/base/RTCVideoDecoderFactory.h
+++ b/sdk/objc/base/RTCVideoDecoderFactory.h
@@ -16,12 +16,16 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. */
+/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory.
+ */
 RTC_OBJC_EXPORT
-@protocol RTCVideoDecoderFactory <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory)<NSObject>
 
-- (nullable id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info;
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs;  // TODO(andersc): "supportedFormats" instead?
+    - (nullable id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder
+    : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+    supportedCodecs;  // TODO(andersc): "supportedFormats" instead?
 
 @end
 
diff --git a/sdk/objc/base/RTCVideoEncoder.h b/sdk/objc/base/RTCVideoEncoder.h
index c525767..7d1a7af 100644
--- a/sdk/objc/base/RTCVideoEncoder.h
+++ b/sdk/objc/base/RTCVideoEncoder.h
@@ -21,20 +21,21 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /** Callback block for encoder. */
-typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame,
-                                        id<RTCCodecSpecificInfo> info,
-                                        RTCRtpFragmentationHeader *header);
+typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame,
+                                        id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> info,
+                                        RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header);
 
 /** Protocol for encoder implementations. */
 RTC_OBJC_EXPORT
-@protocol RTCVideoEncoder <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoder)<NSObject>
 
-- (void)setCallback:(RTCVideoEncoderCallback)callback;
-- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
+    - (void)setCallback : (RTCVideoEncoderCallback)callback;
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
                        numberOfCores:(int)numberOfCores;
 - (NSInteger)releaseEncoder;
-- (NSInteger)encode:(RTCVideoFrame *)frame
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
            frameTypes:(NSArray<NSNumber *> *)frameTypes;
 - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
 - (NSString *)implementationName;
@@ -42,7 +43,7 @@
 /** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to
  *  keep the QP from the encoded images within the given range. Returning nil from this function
  *  disables quality scaling. */
-- (nullable RTCVideoEncoderQpThresholds *)scalingSettings;
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings;
 
 @end
 
diff --git a/sdk/objc/base/RTCVideoEncoderFactory.h b/sdk/objc/base/RTCVideoEncoderFactory.h
index 6ea78a5..b115b2a 100644
--- a/sdk/objc/base/RTCVideoEncoderFactory.h
+++ b/sdk/objc/base/RTCVideoEncoderFactory.h
@@ -20,24 +20,29 @@
  webrtc::VideoEncoderFactory::VideoEncoderSelector.
  */
 RTC_OBJC_EXPORT
-@protocol RTCVideoEncoderSelector <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderSelector)<NSObject>
 
-- (void)registerCurrentEncoderInfo:(RTCVideoCodecInfo *)info;
-- (nullable RTCVideoCodecInfo *)encoderForBitrate:(NSInteger)bitrate;
-- (nullable RTCVideoCodecInfo *)encoderForBrokenEncoder;
+    - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate;
+- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder;
 
 @end
 
-/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. */
+/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory.
+ */
 RTC_OBJC_EXPORT
-@protocol RTCVideoEncoderFactory <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory)<NSObject>
 
-- (nullable id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info;
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs;  // TODO(andersc): "supportedFormats" instead?
+    - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder
+    : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)
+    supportedCodecs;  // TODO(andersc): "supportedFormats" instead?
 
 @optional
-- (NSArray<RTCVideoCodecInfo *> *)implementations;
-- (nullable id<RTCVideoEncoderSelector>)encoderSelector;
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)implementations;
+- (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)>)encoderSelector;
 
 @end
 
diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/sdk/objc/base/RTCVideoEncoderQpThresholds.h
index 2b48f45..1a6e9e8 100644
--- a/sdk/objc/base/RTCVideoEncoderQpThresholds.h
+++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.h
@@ -16,7 +16,7 @@
 
 /** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderQpThresholds : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject
 
 - (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high;
 
diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.m b/sdk/objc/base/RTCVideoEncoderQpThresholds.m
index 5bd06ff..fb7012f 100644
--- a/sdk/objc/base/RTCVideoEncoderQpThresholds.m
+++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.m
@@ -10,7 +10,7 @@
 
 #import "RTCVideoEncoderQpThresholds.h"
 
-@implementation RTCVideoEncoderQpThresholds
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds)
 
 @synthesize low = _low;
 @synthesize high = _high;
diff --git a/sdk/objc/base/RTCVideoEncoderSettings.h b/sdk/objc/base/RTCVideoEncoderSettings.h
index a9403f8..ae792ea 100644
--- a/sdk/objc/base/RTCVideoEncoderSettings.h
+++ b/sdk/objc/base/RTCVideoEncoderSettings.h
@@ -21,7 +21,7 @@
 
 /** Settings for encoder. Corresponds to webrtc::VideoCodec. */
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderSettings : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) : NSObject
 
 @property(nonatomic, strong) NSString *name;
 
diff --git a/sdk/objc/base/RTCVideoEncoderSettings.m b/sdk/objc/base/RTCVideoEncoderSettings.m
index f68bc8c..f66cd2c 100644
--- a/sdk/objc/base/RTCVideoEncoderSettings.m
+++ b/sdk/objc/base/RTCVideoEncoderSettings.m
@@ -10,7 +10,7 @@
 
 #import "RTCVideoEncoderSettings.h"
 
-@implementation RTCVideoEncoderSettings
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings)
 
 @synthesize name = _name;
 @synthesize width = _width;
diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h
index 9aca743..f5638d2 100644
--- a/sdk/objc/base/RTCVideoFrame.h
+++ b/sdk/objc/base/RTCVideoFrame.h
@@ -22,11 +22,12 @@
   RTCVideoRotation_270 = 270,
 };
 
-@protocol RTCVideoFrameBuffer;
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
 
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_OBJC_EXPORT
-@interface RTCVideoFrame : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject
 
 /** Width without rotation applied. */
 @property(nonatomic, readonly) int width;
@@ -41,7 +42,7 @@
 /** Timestamp 90 kHz. */
 @property(nonatomic, assign) int32_t timeStamp;
 
-@property(nonatomic, readonly) id<RTCVideoFrameBuffer> buffer;
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> buffer;
 
 - (instancetype)init NS_UNAVAILABLE;
 - (instancetype) new NS_UNAVAILABLE;
@@ -71,14 +72,14 @@
 
 /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
  */
-- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)frameBuffer
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)frameBuffer
                       rotation:(RTCVideoRotation)rotation
                    timeStampNs:(int64_t)timeStampNs;
 
 /** Return a frame that is guaranteed to be I420, i.e. it is possible to access
  *  the YUV data on it.
  */
-- (RTCVideoFrame *)newI420VideoFrame;
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame;
 
 @end
 
diff --git a/sdk/objc/base/RTCVideoFrame.mm b/sdk/objc/base/RTCVideoFrame.mm
index 0a44b04..e162238 100644
--- a/sdk/objc/base/RTCVideoFrame.mm
+++ b/sdk/objc/base/RTCVideoFrame.mm
@@ -13,7 +13,7 @@
 #import "RTCI420Buffer.h"
 #import "RTCVideoFrameBuffer.h"
 
-@implementation RTCVideoFrame {
+@implementation RTC_OBJC_TYPE (RTCVideoFrame) {
   RTCVideoRotation _rotation;
   int64_t _timeStampNs;
 }
@@ -37,10 +37,10 @@
   return _timeStampNs;
 }
 
-- (RTCVideoFrame *)newI420VideoFrame {
-  return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
-                                      rotation:_rotation
-                                   timeStampNs:_timeStampNs];
+- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame {
+  return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:[_buffer toI420]
+                                                     rotation:_rotation
+                                                  timeStampNs:_timeStampNs];
 }
 
 - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
@@ -63,7 +63,7 @@
   return nil;
 }
 
-- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
+- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)buffer
                       rotation:(RTCVideoRotation)rotation
                    timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
diff --git a/sdk/objc/base/RTCVideoFrameBuffer.h b/sdk/objc/base/RTCVideoFrameBuffer.h
index bb9e6fb..82d057e 100644
--- a/sdk/objc/base/RTCVideoFrameBuffer.h
+++ b/sdk/objc/base/RTCVideoFrameBuffer.h
@@ -14,16 +14,18 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@protocol RTCI420Buffer;
+@protocol RTC_OBJC_TYPE
+(RTCI420Buffer);
 
 // RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer.
 RTC_OBJC_EXPORT
-@protocol RTCVideoFrameBuffer <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer)<NSObject>
 
-@property(nonatomic, readonly) int width;
+    @property(nonatomic, readonly) int width;
 @property(nonatomic, readonly) int height;
 
-- (id<RTCI420Buffer>)toI420;
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420;
 
 @end
 
diff --git a/sdk/objc/base/RTCVideoRenderer.h b/sdk/objc/base/RTCVideoRenderer.h
index 7b359a3..0f76329 100644
--- a/sdk/objc/base/RTCVideoRenderer.h
+++ b/sdk/objc/base/RTCVideoRenderer.h
@@ -17,23 +17,26 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCVideoFrame;
+@class RTC_OBJC_TYPE(RTCVideoFrame);
 
 RTC_OBJC_EXPORT
-@protocol RTCVideoRenderer <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer)<NSObject>
 
-/** The size of the frame. */
-- (void)setSize:(CGSize)size;
+    /** The size of the frame. */
+    - (void)setSize : (CGSize)size;
 
 /** The frame to be displayed. */
-- (void)renderFrame:(nullable RTCVideoFrame *)frame;
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 
 @end
 
 RTC_OBJC_EXPORT
-@protocol RTCVideoViewDelegate
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewDelegate)
 
-- (void)videoView:(id<RTCVideoRenderer>)videoView didChangeVideoSize:(CGSize)size;
+    - (void)videoView : (id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize
+    : (CGSize)size;
 
 @end
 
diff --git a/sdk/objc/base/RTCYUVPlanarBuffer.h b/sdk/objc/base/RTCYUVPlanarBuffer.h
index 8ceb66c..be01b91 100644
--- a/sdk/objc/base/RTCYUVPlanarBuffer.h
+++ b/sdk/objc/base/RTCYUVPlanarBuffer.h
@@ -17,9 +17,10 @@
 
 /** Protocol for RTCVideoFrameBuffers containing YUV planar data. */
 RTC_OBJC_EXPORT
-@protocol RTCYUVPlanarBuffer <RTCVideoFrameBuffer>
+@protocol RTC_OBJC_TYPE
+(RTCYUVPlanarBuffer)<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
 
-@property(nonatomic, readonly) int chromaWidth;
+    @property(nonatomic, readonly) int chromaWidth;
 @property(nonatomic, readonly) int chromaHeight;
 @property(nonatomic, readonly) const uint8_t *dataY;
 @property(nonatomic, readonly) const uint8_t *dataU;
diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
index c81ce1b..b2753f2 100644
--- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
+++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm
@@ -13,17 +13,18 @@
 
 #import "base/RTCLogging.h"
 
-@implementation RTCAudioSession (Configuration)
+@implementation RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
 
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
-                   error:(NSError **)outError {
+    - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
+    : (NSError **)outError {
   return [self setConfiguration:configuration
                          active:NO
                 shouldSetActive:NO
                           error:outError];
 }
 
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
                   active:(BOOL)active
                    error:(NSError **)outError {
   return [self setConfiguration:configuration
@@ -34,7 +35,7 @@
 
 #pragma mark - Private
 
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
                   active:(BOOL)active
          shouldSetActive:(BOOL)shouldSetActive
                    error:(NSError **)outError {
diff --git a/sdk/objc/components/audio/RTCAudioSession+Private.h b/sdk/objc/components/audio/RTCAudioSession+Private.h
index 8cf9339..4c1eb1c 100644
--- a/sdk/objc/components/audio/RTCAudioSession+Private.h
+++ b/sdk/objc/components/audio/RTCAudioSession+Private.h
@@ -12,14 +12,15 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCAudioSessionConfiguration;
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
 
-@interface RTCAudioSession ()
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+()
 
-/** Number of times setActive:YES has succeeded without a balanced call to
- *  setActive:NO.
- */
-@property(nonatomic, readonly) int activationCount;
+    /** Number of times setActive:YES has succeeded without a balanced call to
+     *  setActive:NO.
+     */
+    @property(nonatomic, readonly) int activationCount;
 
 /** The number of times |beginWebRTCSession| was called without a balanced call
  *  to |endWebRTCSession|.
@@ -40,7 +41,7 @@
  *  the list. This delegate will be notified before other delegates of
  *  audio events.
  */
-- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
 
 /** Signals RTCAudioSession that a WebRTC session is about to begin and
  *  audio configuration is needed. Will configure the audio session for WebRTC
diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h
index b5bba2f..f917e32 100644
--- a/sdk/objc/components/audio/RTCAudioSession.h
+++ b/sdk/objc/components/audio/RTCAudioSession.h
@@ -21,78 +21,81 @@
 /** Unknown configuration error occurred. */
 extern NSInteger const kRTCAudioSessionErrorConfiguration;
 
-@class RTCAudioSession;
-@class RTCAudioSessionConfiguration;
+@class RTC_OBJC_TYPE(RTCAudioSession);
+@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration);
 
 // Surfaces AVAudioSession events. WebRTC will listen directly for notifications
 // from AVAudioSession and handle them before calling these delegate methods,
 // at which point applications can perform additional processing if required.
 RTC_OBJC_EXPORT
-@protocol RTCAudioSessionDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionDelegate)<NSObject>
 
-@optional
+    @optional
 /** Called on a system notification thread when AVAudioSession starts an
  *  interruption event.
  */
-- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session;
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
 
 /** Called on a system notification thread when AVAudioSession ends an
  *  interruption event.
  */
-- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
                    shouldResumeSession:(BOOL)shouldResumeSession;
 
 /** Called on a system notification thread when AVAudioSession changes the
  *  route.
  */
-- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
                             reason:(AVAudioSessionRouteChangeReason)reason
                      previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
 
 /** Called on a system notification thread when AVAudioSession media server
  *  terminates.
  */
-- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session;
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
 
 /** Called on a system notification thread when AVAudioSession media server
  *  restarts.
  */
-- (void)audioSessionMediaServerReset:(RTCAudioSession *)session;
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
 
 // TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
 
-- (void)audioSession:(RTCAudioSession *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+    didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
 
 /** Called on a WebRTC thread when the audio device is notified to begin
  *  playback or recording.
  */
-- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session;
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
 
 /** Called on a WebRTC thread when the audio device is notified to stop
  *  playback or recording.
  */
-- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session;
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session;
 
 /** Called when the AVAudioSession output volume value changes. */
-- (void)audioSession:(RTCAudioSession *)audioSession didChangeOutputVolume:(float)outputVolume;
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
+    didChangeOutputVolume:(float)outputVolume;
 
 /** Called when the audio device detects a playout glitch. The argument is the
  *  number of glitches detected so far in the current audio playout session.
  */
-- (void)audioSession:(RTCAudioSession *)audioSession
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
     didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
 
 /** Called when the audio session is about to change the active state.
  */
-- (void)audioSession:(RTCAudioSession *)audioSession willSetActive:(BOOL)active;
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active;
 
 /** Called after the audio session successfully changed the active state.
  */
-- (void)audioSession:(RTCAudioSession *)audioSession didSetActive:(BOOL)active;
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active;
 
 /** Called after the audio session failed to change the active state.
  */
-- (void)audioSession:(RTCAudioSession *)audioSession
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
     failedToSetActive:(BOOL)active
                 error:(NSError *)error;
 
@@ -103,10 +106,11 @@
  *  case of this is when CallKit activates the audio session for the application
  */
 RTC_OBJC_EXPORT
-@protocol RTCAudioSessionActivationDelegate <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCAudioSessionActivationDelegate)<NSObject>
 
-/** Called when the audio session is activated outside of the app by iOS. */
-- (void)audioSessionDidActivate:(AVAudioSession *)session;
+    /** Called when the audio session is activated outside of the app by iOS. */
+    - (void)audioSessionDidActivate : (AVAudioSession *)session;
 
 /** Called when the audio session is deactivated outside of the app by iOS. */
 - (void)audioSessionDidDeactivate:(AVAudioSession *)session;
@@ -121,7 +125,7 @@
  *  activated only once. See |setActive:error:|.
  */
 RTC_OBJC_EXPORT
-@interface RTCAudioSession : NSObject <RTCAudioSessionActivationDelegate>
+@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject <RTC_OBJC_TYPE(RTCAudioSessionActivationDelegate)>
 
 /** Convenience property to access the AVAudioSession singleton. Callers should
  *  not call setters on AVAudioSession directly, but other method invocations
@@ -196,9 +200,9 @@
 - (instancetype)init NS_UNAVAILABLE;
 
 /** Adds a delegate, which is held weakly. */
-- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate;
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
 /** Removes an added delegate. */
-- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate;
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate;
 
 /** Request exclusive access to the audio session for configuration. This call
  *  will block if the lock is held by another object.
@@ -237,19 +241,21 @@
                       error:(NSError **)outError;
 @end
 
-@interface RTCAudioSession (Configuration)
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(Configuration)
 
-/** Applies the configuration to the current session. Attempts to set all
- *  properties even if previous ones fail. Only the last error will be
- *  returned.
- *  |lockForConfiguration| must be called first.
- */
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration error:(NSError **)outError;
+    /** Applies the configuration to the current session. Attempts to set all
+     *  properties even if previous ones fail. Only the last error will be
+     *  returned.
+     *  |lockForConfiguration| must be called first.
+     */
+    - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error
+    : (NSError **)outError;
 
 /** Convenience method that calls both setConfiguration and setActive.
  *  |lockForConfiguration| must be called first.
  */
-- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration
                   active:(BOOL)active
                    error:(NSError **)outError;
 
diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm
index 260529d..74b57ac 100644
--- a/sdk/objc/components/audio/RTCAudioSession.mm
+++ b/sdk/objc/components/audio/RTCAudioSession.mm
@@ -21,20 +21,20 @@
 #import "RTCAudioSessionConfiguration.h"
 #import "base/RTCLogging.h"
 
-
-NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
+NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
 NSInteger const kRTCAudioSessionErrorLockRequired = -1;
 NSInteger const kRTCAudioSessionErrorConfiguration = -2;
 NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
 
-@interface RTCAudioSession ()
-@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+() @property(nonatomic,
+             readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
 @end
 
 // This class needs to be thread-safe because it is accessed from many threads.
 // TODO(tkchin): Consider more granular locking. We're not expecting a lot of
 // lock contention so coarse locks should be fine for now.
-@implementation RTCAudioSession {
+@implementation RTC_OBJC_TYPE (RTCAudioSession) {
   rtc::CriticalSection _crit;
   AVAudioSession *_session;
   volatile int _activationCount;
@@ -54,7 +54,7 @@
 
 + (instancetype)sharedInstance {
   static dispatch_once_t onceToken;
-  static RTCAudioSession *sharedInstance = nil;
+  static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil;
   dispatch_once(&onceToken, ^{
     sharedInstance = [[self alloc] init];
   });
@@ -102,9 +102,9 @@
     [_session addObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
-                  context:(__bridge void*)RTCAudioSession.class];
+                  context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
 
-    RTCLog(@"RTCAudioSession (%p): init.", self);
+    RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
   }
   return self;
 }
@@ -113,25 +113,24 @@
   [[NSNotificationCenter defaultCenter] removeObserver:self];
   [_session removeObserver:self
                 forKeyPath:kRTCAudioSessionOutputVolumeSelector
-                   context:(__bridge void*)RTCAudioSession.class];
-  RTCLog(@"RTCAudioSession (%p): dealloc.", self);
+                   context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
+  RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
 }
 
 - (NSString *)description {
-  NSString *format =
-      @"RTCAudioSession: {\n"
-       "  category: %@\n"
-       "  categoryOptions: %ld\n"
-       "  mode: %@\n"
-       "  isActive: %d\n"
-       "  sampleRate: %.2f\n"
-       "  IOBufferDuration: %f\n"
-       "  outputNumberOfChannels: %ld\n"
-       "  inputNumberOfChannels: %ld\n"
-       "  outputLatency: %f\n"
-       "  inputLatency: %f\n"
-       "  outputVolume: %f\n"
-       "}";
+  NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
+                      "  category: %@\n"
+                      "  categoryOptions: %ld\n"
+                      "  mode: %@\n"
+                      "  isActive: %d\n"
+                      "  sampleRate: %.2f\n"
+                      "  IOBufferDuration: %f\n"
+                      "  outputNumberOfChannels: %ld\n"
+                      "  inputNumberOfChannels: %ld\n"
+                      "  outputLatency: %f\n"
+                      "  inputLatency: %f\n"
+                      "  outputVolume: %f\n"
+                      "}";
   NSString *description = [NSString stringWithFormat:format,
       self.category, (long)self.categoryOptions, self.mode,
       self.isActive, self.sampleRate, self.IOBufferDuration,
@@ -206,7 +205,7 @@
 }
 
 // TODO(tkchin): Check for duplicates.
-- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
+- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
   RTCLog(@"Adding delegate: (%p)", delegate);
   if (!delegate) {
     return;
@@ -217,7 +216,7 @@
   }
 }
 
-- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
+- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
   RTCLog(@"Removing delegate: (%p)", delegate);
   if (!delegate) {
     return;
@@ -621,7 +620,7 @@
   return error;
 }
 
-- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
+- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
   @synchronized(self) {
     // Note: this returns a copy.
     return _delegates;
@@ -629,7 +628,7 @@
 }
 
 // TODO(tkchin): check for duplicates.
-- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
+- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
   @synchronized(self) {
     _delegates.insert(_delegates.begin(), delegate);
   }
@@ -687,7 +686,7 @@
   // acquire lock if it hasn't already been called.
   if (!self.isLocked) {
     if (outError) {
-      *outError = [RTCAudioSession lockError];
+      *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
     }
     return NO;
   }
@@ -730,8 +729,8 @@
   // Configure the AVAudioSession and activate it.
   // Provide an error even if there isn't one so we can log it.
   NSError *error = nil;
-  RTCAudioSessionConfiguration *webRTCConfig =
-      [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
     RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                 error.localizedDescription);
@@ -866,7 +865,7 @@
                       ofObject:(id)object
                         change:(NSDictionary *)change
                        context:(void *)context {
-  if (context == (__bridge void*)RTCAudioSession.class) {
+  if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
     if (object == _session) {
       NSNumber *newVolume = change[NSKeyValueChangeNewKey];
       RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
index 9f3765d..4582b80 100644
--- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
+++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
@@ -23,7 +23,7 @@
 
 // Struct to hold configuration values.
 RTC_OBJC_EXPORT
-@interface RTCAudioSessionConfiguration : NSObject
+@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject
 
 @property(nonatomic, strong) NSString *category;
 @property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions;
@@ -41,7 +41,7 @@
 /** Returns the configuration that WebRTC needs. */
 + (instancetype)webRTCConfiguration;
 /** Provide a way to override the default configuration. */
-+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration;
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration;
 
 @end
 
diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
index 2247e65..39e9ac1 100644
--- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
+++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
@@ -51,9 +51,9 @@
 // TODO(henrika): monitor this size and determine if it should be modified.
 const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
 
-static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
+static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
 
-@implementation RTCAudioSessionConfiguration
+@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration)
 
 @synthesize category = _category;
 @synthesize categoryOptions = _categoryOptions;
@@ -105,9 +105,9 @@
 }
 
 + (instancetype)currentConfiguration {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
-  RTCAudioSessionConfiguration *config =
-      [[RTCAudioSessionConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
+      [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
   config.category = session.category;
   config.categoryOptions = session.categoryOptions;
   config.mode = session.mode;
@@ -120,11 +120,11 @@
 
 + (instancetype)webRTCConfiguration {
   @synchronized(self) {
-    return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
+    return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration;
   }
 }
 
-+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
   @synchronized(self) {
     gWebRTCConfiguration = configuration;
   }
diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
index 7ca2d75..e28f26f 100644
--- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
+++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
@@ -19,7 +19,7 @@
 /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
  *  methods on the AudioSessionObserver.
  */
-@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTCAudioSessionDelegate>
+@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
index aef97b9..daddf31 100644
--- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
+++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
@@ -26,20 +26,20 @@
   return self;
 }
 
-#pragma mark - RTCAudioSessionDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
 
-- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
   _observer->OnInterruptionBegin();
 }
 
-- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
                    shouldResumeSession:(BOOL)shouldResumeSession {
   _observer->OnInterruptionEnd();
 }
 
-- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
-           reason:(AVAudioSessionRouteChangeReason)reason
-    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+                            reason:(AVAudioSessionRouteChangeReason)reason
+                     previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
   switch (reason) {
     case AVAudioSessionRouteChangeReasonUnknown:
     case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
@@ -64,24 +64,24 @@
   }
 }
 
-- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSession:(RTCAudioSession *)session
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session
     didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
   _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
 }
 
-- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
+- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
+- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSession:(RTCAudioSession *)audioSession
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
     didChangeOutputVolume:(float)outputVolume {
   _observer->OnChangedOutputVolume();
 }
diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
index 2b5e56f..fed5a37 100644
--- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
+++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h
@@ -17,10 +17,10 @@
 NS_ASSUME_NONNULL_BEGIN
 
 RTC_OBJC_EXPORT
-// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate
-// (usually RTCVideoSource).
+// Camera capture that implements RTCVideoCapturer. Delivers frames to a
+// RTCVideoCapturerDelegate (usually RTCVideoSource).
 NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.")
-@interface RTCCameraVideoCapturer : RTCVideoCapturer
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
 
 // Capture session that is used for capturing. Valid from initialization to dealloc.
 @property(readonly, nonatomic) AVCaptureSession *captureSession;
diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
index 5cfb616..6edcec8 100644
--- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
+++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m
@@ -25,8 +25,9 @@
 
 const int64_t kNanosecondsPerSecond = 1000000000;
 
-@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
-@property(nonatomic, readonly) dispatch_queue_t frameQueue;
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
+()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic,
+                                                           readonly) dispatch_queue_t frameQueue;
 @property(nonatomic, strong) AVCaptureDevice *currentDevice;
 @property(nonatomic, assign) BOOL hasRetriedOnFatalError;
 @property(nonatomic, assign) BOOL isRunning;
@@ -34,7 +35,7 @@
 @property(nonatomic, assign) BOOL willBeRunning;
 @end
 
-@implementation RTCCameraVideoCapturer {
+@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) {
   AVCaptureVideoDataOutput *_videoDataOutput;
   AVCaptureSession *_captureSession;
   FourCharCode _preferredOutputPixelFormat;
@@ -57,12 +58,12 @@
   return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
 }
 
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
   return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
 }
 
 // This initializer is used for testing.
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
+- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
                   captureSession:(AVCaptureSession *)captureSession {
   if (self = [super initWithDelegate:delegate]) {
     // Create the capture session and all relevant inputs and outputs. We need
@@ -110,9 +111,9 @@
 }
 
 - (void)dealloc {
-  NSAssert(
-      !_willBeRunning,
-      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
+  NSAssert(!_willBeRunning,
+           @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
+           @"call stopCapture?");
   [[NSNotificationCenter defaultCenter] removeObserver:self];
 }
 
@@ -154,7 +155,7 @@
                            fps:(NSInteger)fps
              completionHandler:(nullable void (^)(NSError *))completionHandler {
   _willBeRunning = YES;
-  [RTCDispatcher
+  [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
@@ -196,7 +197,7 @@
 
 - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
   _willBeRunning = NO;
-  [RTCDispatcher
+  [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       RTCLogInfo("Stop");
@@ -225,10 +226,10 @@
 
 #if TARGET_OS_IPHONE
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                               block:^{
-                                 [self updateOrientation];
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                                              block:^{
+                                                [self updateOrientation];
+                                              }];
 }
 #endif
 
@@ -287,12 +288,14 @@
   _rotation = RTCVideoRotation_0;
 #endif
 
-  RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
   int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
       kNanosecondsPerSecond;
-  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
-                                                           rotation:_rotation
-                                                        timeStampNs:timeStampNs];
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+                                                  rotation:_rotation
+                                               timeStampNs:timeStampNs];
   [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
 }
 
@@ -343,29 +346,29 @@
   NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
   RTCLogError(@"Capture session runtime error: %@", error);
 
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                               block:^{
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                                              block:^{
 #if TARGET_OS_IPHONE
-                                 if (error.code == AVErrorMediaServicesWereReset) {
-                                   [self handleNonFatalError];
-                                 } else {
-                                   [self handleFatalError];
-                                 }
+                                                if (error.code == AVErrorMediaServicesWereReset) {
+                                                  [self handleNonFatalError];
+                                                } else {
+                                                  [self handleFatalError];
+                                                }
 #else
-                                [self handleFatalError];
+        [self handleFatalError];
 #endif
-                               }];
+                                              }];
 }
 
 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
   RTCLog(@"Capture session started.");
 
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                               block:^{
-                                 // If we successfully restarted after an unknown error,
-                                 // allow future retries on fatal errors.
-                                 self.hasRetriedOnFatalError = NO;
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                                              block:^{
+                                                // If we successfully restarted after an unknown
+                                                // error, allow future retries on fatal errors.
+                                                self.hasRetriedOnFatalError = NO;
+                                              }];
 }
 
 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
@@ -373,7 +376,7 @@
 }
 
 - (void)handleFatalError {
-  [RTCDispatcher
+  [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       if (!self.hasRetriedOnFatalError) {
@@ -387,13 +390,13 @@
 }
 
 - (void)handleNonFatalError {
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                               block:^{
-                                 RTCLog(@"Restarting capture session after error.");
-                                 if (self.isRunning) {
-                                   [self.captureSession startRunning];
-                                 }
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                                              block:^{
+                                                RTCLog(@"Restarting capture session after error.");
+                                                if (self.isRunning) {
+                                                  [self.captureSession startRunning];
+                                                }
+                                              }];
 }
 
 #if TARGET_OS_IPHONE
@@ -401,13 +404,14 @@
 #pragma mark - UIApplication notifications
 
 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                               block:^{
-                                 if (self.isRunning && !self.captureSession.isRunning) {
-                                   RTCLog(@"Restarting capture session on active.");
-                                   [self.captureSession startRunning];
-                                 }
-                               }];
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                    block:^{
+                      if (self.isRunning && !self.captureSession.isRunning) {
+                        RTCLog(@"Restarting capture session on active.");
+                        [self.captureSession startRunning];
+                      }
+                    }];
 }
 
 #endif  // TARGET_OS_IPHONE
@@ -448,7 +452,8 @@
 
   // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
   // device with the most efficient output format first. Find the first format that we support.
-  NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
+  NSSet<NSNumber *> *supportedPixelFormats =
+      [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
   NSMutableOrderedSet *availablePixelFormats =
       [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
   [availablePixelFormats intersectSet:supportedPixelFormats];
@@ -465,7 +470,7 @@
 
 - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
   FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-  if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
+  if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
     mediaSubType = _preferredOutputPixelFormat;
   }
 
@@ -479,7 +484,7 @@
 #pragma mark - Private, called inside capture queue
 
 - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
-  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
+  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
            @"updateDeviceCaptureFormat must be called on the capture queue.");
   @try {
     _currentDevice.activeFormat = format;
@@ -491,7 +496,7 @@
 }
 
 - (void)reconfigureCaptureSessionInput {
-  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
+  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
            @"reconfigureCaptureSessionInput must be called on the capture queue.");
   NSError *error = nil;
   AVCaptureDeviceInput *input =
@@ -513,7 +518,7 @@
 }
 
 - (void)updateOrientation {
-  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
+  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
            @"updateOrientation must be called on the capture queue.");
 #if TARGET_OS_IPHONE
   _orientation = [UIDevice currentDevice].orientation;
diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.h b/sdk/objc/components/capturer/RTCFileVideoCapturer.h
index 0782588..19262c6 100644
--- a/sdk/objc/components/capturer/RTCFileVideoCapturer.h
+++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.h
@@ -27,7 +27,7 @@
 RTC_OBJC_EXPORT
 
 NS_CLASS_AVAILABLE_IOS(10)
-@interface RTCFileVideoCapturer : RTCVideoCapturer
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
 
 /**
  * Starts asynchronous capture of frames from video file.
diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/sdk/objc/components/capturer/RTCFileVideoCapturer.m
index 2c82ba1..4c39ccd 100644
--- a/sdk/objc/components/capturer/RTCFileVideoCapturer.m
+++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.m
@@ -15,7 +15,8 @@
 #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
 #include "rtc_base/system/gcd_helpers.h"
 
-NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer";
+NSString *const kRTCFileVideoCapturerErrorDomain =
+    @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
 
 typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
   RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
@@ -28,12 +29,12 @@
   RTCFileVideoCapturerStatusStopped
 };
 
-@interface RTCFileVideoCapturer ()
-@property(nonatomic, assign) CMTime lastPresentationTime;
+@interface RTC_OBJC_TYPE (RTCFileVideoCapturer)
+() @property(nonatomic, assign) CMTime lastPresentationTime;
 @property(nonatomic, strong) NSURL *fileURL;
 @end
 
-@implementation RTCFileVideoCapturer {
+@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
   AVAssetReader *_reader;
   AVAssetReaderTrackOutput *_outTrack;
   RTCFileVideoCapturerStatus _status;
@@ -182,11 +183,14 @@
       return;
     }
 
-    RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
     NSTimeInterval timeStampSeconds = CACurrentMediaTime();
     int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
-    RTCVideoFrame *videoFrame =
-        [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
+    RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+        [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+                                                    rotation:0
+                                                 timeStampNs:timeStampNs];
     CFRelease(sampleBuffer);
 
     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
index 6cd7ff3..f4c76fa 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
+++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm
@@ -97,7 +97,7 @@
       cropHeight:(nonnull int *)cropHeight
            cropX:(nonnull int *)cropX
            cropY:(nonnull int *)cropY
-         ofFrame:(nonnull RTCVideoFrame *)frame {
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   *width = frame.width;
   *height = frame.height;
   *cropWidth = frame.width;
@@ -106,7 +106,7 @@
   *cropY = 0;
 }
 
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   if (![super setupTexturesForFrame:frame]) {
     return NO;
   }
@@ -116,7 +116,7 @@
     return NO;
   }
 
-  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+  id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
 
   // Luma (y) texture.
   if (!_descriptor || _width != frame.width || _height != frame.height) {
diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
index 7b61539..f70e2ad 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
@@ -15,9 +15,9 @@
 NS_AVAILABLE_MAC(10.11)
 
 RTC_OBJC_EXPORT
-@interface RTCMTLNSVideoView : NSView <RTCVideoRenderer>
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView <RTC_OBJC_TYPE(RTCVideoRenderer)>
 
-@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
 
 + (BOOL)isMetalAvailable;
 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
index ac5294e..625fb1c 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
+++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m
@@ -17,13 +17,13 @@
 
 #import "RTCMTLI420Renderer.h"
 
-@interface RTCMTLNSVideoView ()<MTKViewDelegate>
-@property(nonatomic) id<RTCMTLRenderer> renderer;
+@interface RTC_OBJC_TYPE (RTCMTLNSVideoView)
+()<MTKViewDelegate> @property(nonatomic) id<RTCMTLRenderer> renderer;
 @property(nonatomic, strong) MTKView *metalView;
-@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
 @end
 
-@implementation RTCMTLNSVideoView {
+@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) {
   id<RTCMTLRenderer> _renderer;
 }
 
@@ -102,7 +102,7 @@
 - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
 }
 
-#pragma mark - RTCVideoRenderer
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
 
 - (void)setSize:(CGSize)size {
   _metalView.drawableSize = size;
@@ -112,7 +112,7 @@
   [_metalView draw];
 }
 
-- (void)renderFrame:(nullable RTCVideoFrame *)frame {
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   if (frame == nil) {
     return;
   }
diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
index 98835cb..7b037c6 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
+++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm
@@ -95,8 +95,8 @@
       cropHeight:(nonnull int *)cropHeight
            cropX:(nonnull int *)cropX
            cropY:(nonnull int *)cropY
-         ofFrame:(nonnull RTCVideoFrame *)frame {
-  RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
   *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
   *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
   *cropWidth = pixelBuffer.cropWidth;
@@ -105,12 +105,12 @@
   *cropY = pixelBuffer.cropY;
 }
 
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
   if (![super setupTexturesForFrame:frame]) {
     return NO;
   }
-  CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
+  CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> lumaTexture = nil;
   id<MTLTexture> chromaTexture = nil;
diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
index eb4c2ba..c6adcd0 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
+++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm
@@ -93,8 +93,8 @@
       cropHeight:(nonnull int *)cropHeight
            cropX:(nonnull int *)cropX
            cropY:(nonnull int *)cropY
-         ofFrame:(nonnull RTCVideoFrame *)frame {
-  RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
   *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
   *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
   *cropWidth = pixelBuffer.cropWidth;
@@ -103,12 +103,12 @@
   *cropY = pixelBuffer.cropY;
 }
 
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
-  RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
   if (![super setupTexturesForFrame:frame]) {
     return NO;
   }
-  CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
+  CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer;
 
   id<MTLTexture> gpuTexture = nil;
   CVMetalTextureRef textureOut = nullptr;
diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
index f442886..916d4d4 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h
@@ -19,7 +19,7 @@
 @interface RTCMTLRenderer (Private)
 - (nullable id<MTLDevice>)currentMetalDevice;
 - (NSString *)shaderSource;
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame;
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 - (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
 - (void)getWidth:(nonnull int *)width
           height:(nonnull int *)height
@@ -27,7 +27,7 @@
       cropHeight:(nonnull int *)cropHeight
            cropX:(nonnull int *)cropX
            cropY:(nonnull int *)cropY
-         ofFrame:(nonnull RTCVideoFrame *)frame;
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
index 9c1f371..aa31545 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h
@@ -28,7 +28,7 @@
  *
  * @param frame The frame to be rendered.
  */
-- (void)drawFrame:(RTCVideoFrame *)frame;
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 
 /**
  * Sets the provided view as rendering destination if possible.
diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
index 63cf225..e8d1613 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
+++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm
@@ -167,11 +167,11 @@
       cropHeight:(int *)cropHeight
            cropX:(int *)cropX
            cropY:(int *)cropY
-         ofFrame:(nonnull RTCVideoFrame *)frame {
+         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
 }
 
-- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
+- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   // Apply rotation override if set.
   RTCVideoRotation rotation;
   NSValue *rotationOverride = self.rotationOverride;
@@ -311,7 +311,7 @@
 
 #pragma mark - RTCMTLRenderer
 
-- (void)drawFrame:(RTCVideoFrame *)frame {
+- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   @autoreleasepool {
     // Wait until the inflight (currently sent to GPU) command buffer
     // has completed the GPU work.
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
index 36cb144..5678112 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h
@@ -27,9 +27,9 @@
 NS_CLASS_AVAILABLE_IOS(9)
 
 RTC_OBJC_EXPORT
-@interface RTCMTLVideoView : UIView<RTCVideoRenderer>
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView<RTC_OBJC_TYPE(RTCVideoRenderer)>
 
-@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
 
 @property(nonatomic) UIViewContentMode videoContentMode;
 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
index c9a622e..f5be7c0 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
+++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m
@@ -29,17 +29,17 @@
 #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
 #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
 
-@interface RTCMTLVideoView () <MTKViewDelegate>
-@property(nonatomic) RTCMTLI420Renderer *rendererI420;
+@interface RTC_OBJC_TYPE (RTCMTLVideoView)
+()<MTKViewDelegate> @property(nonatomic) RTCMTLI420Renderer *rendererI420;
 @property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
 @property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
 @property(nonatomic) MTKView *metalView;
-@property(atomic) RTCVideoFrame *videoFrame;
+@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
 @property(nonatomic) CGSize videoFrameSize;
 @property(nonatomic) int64_t lastFrameTimeNs;
 @end
 
-@implementation RTCMTLVideoView
+@implementation RTC_OBJC_TYPE (RTCMTLVideoView)
 
 @synthesize delegate = _delegate;
 @synthesize rendererI420 = _rendererI420;
@@ -110,9 +110,10 @@
 }
 
 - (void)configure {
-  NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device");
+  NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable],
+           @"Metal not available on this device");
 
-  self.metalView = [RTCMTLVideoView createMetalView:self.bounds];
+  self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds];
   self.metalView.delegate = self;
   self.metalView.contentMode = UIViewContentModeScaleAspectFill;
   [self addSubview:self.metalView];
@@ -140,7 +141,7 @@
 
 - (void)drawInMTKView:(nonnull MTKView *)view {
   NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
-  RTCVideoFrame *videoFrame = self.videoFrame;
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
   // Skip rendering if we've already rendered this frame.
   if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
     return;
@@ -151,12 +152,12 @@
   }
 
   RTCMTLRenderer *renderer;
-  if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
-    RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
+  if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
     const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
     if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
       if (!self.rendererRGB) {
-        self.rendererRGB = [RTCMTLVideoView createRGBRenderer];
+        self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
         if (![self.rendererRGB addRenderingDestination:self.metalView]) {
           self.rendererRGB = nil;
           RTCLogError(@"Failed to create RGB renderer");
@@ -166,7 +167,7 @@
       renderer = self.rendererRGB;
     } else {
       if (!self.rendererNV12) {
-        self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
+        self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer];
         if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
           self.rendererNV12 = nil;
           RTCLogError(@"Failed to create NV12 renderer");
@@ -177,7 +178,7 @@
     }
   } else {
     if (!self.rendererI420) {
-      self.rendererI420 = [RTCMTLVideoView createI420Renderer];
+      self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) createI420Renderer];
       if (![self.rendererI420 addRenderingDestination:self.metalView]) {
         self.rendererI420 = nil;
         RTCLogError(@"Failed to create I420 renderer");
@@ -236,12 +237,12 @@
   }
 }
 
-#pragma mark - RTCVideoRenderer
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
 
 - (void)setSize:(CGSize)size {
-  __weak RTCMTLVideoView *weakSelf = self;
+  __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCMTLVideoView *strongSelf = weakSelf;
+    RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf;
 
     strongSelf.videoFrameSize = size;
     CGSize drawableSize = [strongSelf drawableSize];
@@ -252,7 +253,7 @@
   });
 }
 
-- (void)renderFrame:(nullable RTCVideoFrame *)frame {
+- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   if (!self.isEnabled) {
     return;
   }
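
The view keeps its UIKit-facing API; only the symbol names change. A usage sketch, assuming an iOS view controller and an already-created RTCVideoTrack named remoteTrack (addRenderer: is the usual track-side API and is not shown in this diff):

    // Sketch: attach the wrapped Metal view as a renderer for a remote track.
    RTC_OBJC_TYPE(RTCMTLVideoView) *videoView =
        [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:self.view.bounds];
    videoView.videoContentMode = UIViewContentModeScaleAspectFit;
    [self.view addSubview:videoView];
    [remoteTrack addRenderer:videoView];  // remoteTrack: hypothetical RTCVideoTrack.
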
diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
index 034a22b..71a073a 100644
--- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
+++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h
@@ -12,11 +12,11 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and
- *  RTCEAGLVideoView if no external shader is specified. This shader will render
+/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView
+ *  and RTCEAGLVideoView if no external shader is specified. This shader will render
  *  the video in a rectangle without any color or geometric transformations.
  */
-@interface RTCDefaultShader : NSObject<RTCVideoViewShading>
+@interface RTCDefaultShader : NSObject <RTC_OBJC_TYPE (RTCVideoViewShading)>
 
 @end
 
diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
index 73cd3a1..24b26cd 100644
--- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
+++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h
@@ -17,23 +17,25 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCEAGLVideoView;
+@class RTC_OBJC_TYPE(RTCEAGLVideoView);
 
 /**
- * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
- * bounds using OpenGLES 2.0 or OpenGLES 3.0.
+ * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames
+ * in its bounds using OpenGLES 2.0 or OpenGLES 3.0.
  */
 RTC_OBJC_EXPORT
 NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
-@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView <RTC_OBJC_TYPE(RTCVideoRenderer)>
 
-@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
 
 - (instancetype)initWithFrame:(CGRect)frame
-                       shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+    NS_DESIGNATED_INITIALIZER;
 
 - (instancetype)initWithCoder:(NSCoder *)aDecoder
-                       shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+    NS_DESIGNATED_INITIALIZER;
 
 /** @abstract Wrapped RTCVideoRotation, or nil.
  */
diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
index 6a01d48..a3435a7 100644
--- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
+++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m
@@ -21,7 +21,7 @@
 #import "base/RTCVideoFrameBuffer.h"
 #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
 
-// RTCEAGLVideoView wraps a GLKView which is setup with
+// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is set up with
 // enableSetNeedsDisplay = NO for the purpose of gaining control of
 // exactly when to call -[GLKView display]. This need for extra
 // control is required to avoid triggering method calls on GLKView
@@ -30,23 +30,24 @@
 // error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
 // the method that will trigger the binding of the render
 // buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
-// is disabled for the reasons above, the RTCEAGLVideoView maintains
+// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
 // its own |isDirty| flag.
 
-@interface RTCEAGLVideoView () <GLKViewDelegate>
-// |videoFrame| is set when we receive a frame from a worker thread and is read
-// from the display link callback so atomicity is required.
-@property(atomic, strong) RTCVideoFrame *videoFrame;
+@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
+()<GLKViewDelegate>
+    // |videoFrame| is set when we receive a frame from a worker thread and is read
+    // from the display link callback so atomicity is required.
+    @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
 @property(nonatomic, readonly) GLKView *glkView;
 @end
 
-@implementation RTCEAGLVideoView {
+@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) {
   RTCDisplayLinkTimer *_timer;
   EAGLContext *_glContext;
   // This flag should only be set and read on the main thread (e.g. by
   // setNeedsDisplay)
   BOOL _isDirty;
-  id<RTCVideoViewShading> _shader;
+  id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
   RTCNV12TextureCache *_nv12TextureCache;
   RTCI420TextureCache *_i420TextureCache;
   // As timestamps should be unique between frames, will store last
@@ -67,7 +68,7 @@
   return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
 }
 
-- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTCVideoViewShading>)shader {
+- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
   if (self = [super initWithFrame:frame]) {
     _shader = shader;
     if (![self configure]) {
@@ -77,7 +78,8 @@
   return self;
 }
 
-- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id<RTCVideoViewShading>)shader {
+- (instancetype)initWithCoder:(NSCoder *)aDecoder
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
   if (self = [super initWithCoder:aDecoder]) {
     _shader = shader;
     if (![self configure]) {
@@ -127,11 +129,11 @@
   // Frames are received on a separate thread, so we poll for current frame
   // using a refresh rate proportional to screen refresh frequency. This
   // occurs on the main thread.
-  __weak RTCEAGLVideoView *weakSelf = self;
+  __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
   _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
-      RTCEAGLVideoView *strongSelf = weakSelf;
-      [strongSelf displayLinkTimerDidFire];
-    }];
+    RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
+    [strongSelf displayLinkTimerDidFire];
+  }];
   if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
     [self setupGL];
   }
@@ -182,7 +184,7 @@
 - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
   // The renderer will draw the frame to the framebuffer corresponding to the
   // one used by |view|.
-  RTCVideoFrame *frame = self.videoFrame;
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
   if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) {
     return;
   }
@@ -192,7 +194,7 @@
   }
   [self ensureGLContext];
   glClear(GL_COLOR_BUFFER_BIT);
-  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+  if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     if (!_nv12TextureCache) {
       _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
     }
@@ -223,18 +225,18 @@
   }
 }
 
-#pragma mark - RTCVideoRenderer
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
 
 // These methods may be called on non-main thread.
 - (void)setSize:(CGSize)size {
-  __weak RTCEAGLVideoView *weakSelf = self;
+  __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCEAGLVideoView *strongSelf = weakSelf;
+    RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
     [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
   });
 }
 
-- (void)renderFrame:(RTCVideoFrame *)frame {
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   self.videoFrame = frame;
 }
 
diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
index 07172e7..9fdcc5a 100644
--- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
+++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h
@@ -20,6 +20,6 @@
 - (instancetype)init NS_UNAVAILABLE;
 - (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER;
 
-- (void)uploadFrameToTextures:(RTCVideoFrame *)frame;
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 
 @end
diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
index 865f3a2..5dccd4b 100644
--- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
+++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm
@@ -123,10 +123,10 @@
                uploadPlane);
 }
 
-- (void)uploadFrameToTextures:(RTCVideoFrame *)frame {
+- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
 
-  id<RTCI420Buffer> buffer = [frame.buffer toI420];
+  id<RTC_OBJC_TYPE(RTCI420Buffer)> buffer = [frame.buffer toI420];
 
   const int chromaWidth = buffer.chromaWidth;
   const int chromaHeight = buffer.chromaHeight;
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
index 2540f38..c9ee986 100644
--- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
+++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
@@ -19,20 +19,21 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCNSGLVideoView;
+@class RTC_OBJC_TYPE(RTCNSGLVideoView);
 
 RTC_OBJC_EXPORT
-@protocol RTCNSGLVideoViewDelegate <RTCVideoViewDelegate>
-@end
+@protocol RTC_OBJC_TYPE
+(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
 
 RTC_OBJC_EXPORT
-@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
 
-@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoViewDelegate)> delegate;
 
 - (instancetype)initWithFrame:(NSRect)frameRect
                   pixelFormat:(NSOpenGLPixelFormat *)format
-                       shader:(id<RTCVideoViewShading>)shader NS_DESIGNATED_INITIALIZER;
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+    NS_DESIGNATED_INITIALIZER;
 
 @end
 
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
index 714cae7..de54e36 100644
--- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
+++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
@@ -23,10 +23,12 @@
 #import "base/RTCLogging.h"
 #import "base/RTCVideoFrame.h"
 
-@interface RTCNSGLVideoView ()
-// |videoFrame| is set when we receive a frame from a worker thread and is read
-// from the display link callback so atomicity is required.
-@property(atomic, strong) RTCVideoFrame *videoFrame;
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
+()
+    // |videoFrame| is set when we receive a frame from a worker thread and is read
+    // from the display link callback so atomicity is required.
+    @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *
+    videoFrame;
 @property(atomic, strong) RTCI420TextureCache *i420TextureCache;
 
 - (void)drawFrame;
@@ -38,15 +40,16 @@
                                    CVOptionFlags flagsIn,
                                    CVOptionFlags *flagsOut,
                                    void *displayLinkContext) {
-  RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
+  RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
+      (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
   [view drawFrame];
   return kCVReturnSuccess;
 }
 
-@implementation RTCNSGLVideoView {
+@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
   CVDisplayLinkRef _displayLink;
-  RTCVideoFrame *_lastDrawnFrame;
-  id<RTCVideoViewShading> _shader;
+  RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame;
+  id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
 }
 
 @synthesize delegate = _delegate;
@@ -59,7 +62,7 @@
 
 - (instancetype)initWithFrame:(NSRect)frame
                   pixelFormat:(NSOpenGLPixelFormat *)format
-                       shader:(id<RTCVideoViewShading>)shader {
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
   if (self = [super initWithFrame:frame pixelFormat:format]) {
     _shader = shader;
   }
@@ -105,7 +108,7 @@
   [super clearGLContext];
 }
 
-#pragma mark - RTCVideoRenderer
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
 
 // These methods may be called on non-main thread.
 - (void)setSize:(CGSize)size {
@@ -114,14 +117,14 @@
   });
 }
 
-- (void)renderFrame:(RTCVideoFrame *)frame {
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   self.videoFrame = frame;
 }
 
 #pragma mark - Private
 
 - (void)drawFrame {
-  RTCVideoFrame *frame = self.videoFrame;
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
   if (!frame || frame == _lastDrawnFrame) {
     return;
   }
diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
index 9cba823..f202b83 100644
--- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
+++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h
@@ -10,7 +10,9 @@
 
 #import <GLKit/GLKit.h>
 
-@class RTCVideoFrame;
+#import "base/RTCMacros.h"
+
+@class RTC_OBJC_TYPE(RTCVideoFrame);
 
 NS_ASSUME_NONNULL_BEGIN
 
@@ -22,7 +24,7 @@
 - (instancetype)init NS_UNAVAILABLE;
 - (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER;
 
-- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame;
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
 
 - (void)releaseTextures;
 
diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
index aab62d4..a520ac4 100644
--- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
+++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m
@@ -76,10 +76,10 @@
   return YES;
 }
 
-- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame {
-  NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]],
+- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
            @"frame must be CVPixelBuffer backed");
-  RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
   CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
   return [self loadTexture:&_yTextureRef
                pixelBuffer:pixelBuffer
diff --git a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
index 6876cc3..9df30a8 100644
--- a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
+++ b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h
@@ -15,19 +15,17 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /**
- * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in
- * rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
+ * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders
+ * used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView.
  */
 RTC_OBJC_EXPORT
-@protocol RTCVideoViewShading <NSObject>
+@protocol RTC_OBJC_TYPE
+(RTCVideoViewShading)<NSObject>
 
-/** Callback for I420 frames. Each plane is given as a texture. */
-- (void)applyShadingForFrameWithWidth:(int)width
-                               height:(int)height
-                             rotation:(RTCVideoRotation)rotation
-                               yPlane:(GLuint)yPlane
-                               uPlane:(GLuint)uPlane
-                               vPlane:(GLuint)vPlane;
+    /** Callback for I420 frames. Each plane is given as a texture. */
+    - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation
+    : (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane
+    : (GLuint)vPlane;
 
 /** Callback for NV12 frames. Each plane is given as a texture. */
 - (void)applyShadingForFrameWithWidth:(int)width
diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
index f67fa94..a0cd851 100644
--- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
+++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h
@@ -15,9 +15,10 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /* Interfaces for converting to/from internal C++ formats. */
-@interface RTCCodecSpecificInfoH264 ()
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
+()
 
-- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
+    - (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo;
 
 @end
 
diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
index ece9570..ae3003a 100644
--- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
+++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h
@@ -20,7 +20,7 @@
 };
 
 RTC_OBJC_EXPORT
-@interface RTCCodecSpecificInfoH264 : NSObject <RTCCodecSpecificInfo>
+@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject <RTC_OBJC_TYPE(RTCCodecSpecificInfo)>
 
 @property(nonatomic, assign) RTCH264PacketizationMode packetizationMode;
 
diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
index 57f2411..e38ed30 100644
--- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
+++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm
@@ -13,7 +13,7 @@
 #import "RTCH264ProfileLevelId.h"
 
 // H264 specific settings.
-@implementation RTCCodecSpecificInfoH264
+@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264)
 
 @synthesize packetizationMode = _packetizationMode;
 
diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
index 7ca9463..de5a9c4 100644
--- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
+++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h
@@ -16,10 +16,11 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /** This decoder factory includes support for all codecs bundled with WebRTC. If using custom
- *  codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
+ *  codecs, create custom implementations of RTCVideoEncoderFactory and
+ *  RTCVideoDecoderFactory.
  */
 RTC_OBJC_EXPORT
-@interface RTCDefaultVideoDecoderFactory : NSObject <RTCVideoDecoderFactory>
+@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
index 44445f4..4046cfe 100644
--- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
+++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m
@@ -19,31 +19,33 @@
 #import "api/video_codec/RTCVideoDecoderVP9.h"
 #endif
 
-@implementation RTCDefaultVideoDecoderFactory
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory)
 
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
   NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
     @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedHighInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
-                                   parameters:constrainedHighParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+                                                  parameters:constrainedHighParams];
 
   NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
     @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedBaselineInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
-                                   parameters:constrainedBaselineParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+                                                  parameters:constrainedBaselineParams];
 
-  RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
 
 #if defined(RTC_ENABLE_VP9)
-  RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
 #endif
 
   return @[
@@ -56,14 +58,14 @@
   ];
 }
 
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
   if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
-    return [[RTCVideoDecoderH264 alloc] init];
+    return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
   } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
-    return [RTCVideoDecoderVP8 vp8Decoder];
+    return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder];
 #if defined(RTC_ENABLE_VP9)
   } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
-    return [RTCVideoDecoderVP9 vp9Decoder];
+    return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder];
 #endif
   }
 
diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
index c45e543..92ab40c 100644
--- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
+++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h
@@ -16,14 +16,15 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /** This encoder factory includes support for all codecs bundled with WebRTC. If using custom
- *  codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory.
+ *  codecs, create custom implementations of RTCVideoEncoderFactory and
+ *  RTCVideoDecoderFactory.
  */
 RTC_OBJC_EXPORT
-@interface RTCDefaultVideoEncoderFactory : NSObject <RTCVideoEncoderFactory>
+@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
 
-@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec;
+@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec;
 
-+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs;
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
 
 @end
 
diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
index b72296b..35a1407 100644
--- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
+++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m
@@ -19,33 +19,35 @@
 #import "api/video_codec/RTCVideoEncoderVP9.h"
 #endif
 
-@implementation RTCDefaultVideoEncoderFactory
+@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory)
 
 @synthesize preferredCodec;
 
-+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
++ (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
   NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
     @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedHighInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
-                                   parameters:constrainedHighParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+                                                  parameters:constrainedHighParams];
 
   NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
     @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedBaselineInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
-                                   parameters:constrainedBaselineParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+                                                  parameters:constrainedBaselineParams];
 
-  RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
 
 #if defined(RTC_ENABLE_VP9)
-  RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name];
 #endif
 
   return @[
@@ -58,24 +60,25 @@
   ];
 }
 
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
   if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
-    return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
+    return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
   } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
-    return [RTCVideoEncoderVP8 vp8Encoder];
+    return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder];
 #if defined(RTC_ENABLE_VP9)
   } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
-    return [RTCVideoEncoderVP9 vp9Encoder];
+    return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder];
 #endif
   }
 
   return nil;
 }
 
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
-  NSMutableArray<RTCVideoCodecInfo *> *codecs = [[[self class] supportedCodecs] mutableCopy];
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+  NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
+      [[[self class] supportedCodecs] mutableCopy];
 
-  NSMutableArray<RTCVideoCodecInfo *> *orderedCodecs = [NSMutableArray array];
+  NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
   NSUInteger index = [codecs indexOfObject:self.preferredCodec];
   if (index != NSNotFound) {
     [orderedCodecs addObject:[codecs objectAtIndex:index]];
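
Client code that configured a preferred codec keeps working with the wrapped names; a sketch using only the property and class method declared in the header above:

    // Sketch: prefer the first codec the factory reports as supported.
    RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
        [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
    encoderFactory.preferredCodec =
        [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs].firstObject;
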
diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
index 56b3532..dac7bb5 100644
--- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
+++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h
@@ -48,7 +48,7 @@
 };
 
 RTC_OBJC_EXPORT
-@interface RTCH264ProfileLevelId : NSObject
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject
 
 @property(nonatomic, readonly) RTCH264Profile profile;
 @property(nonatomic, readonly) RTCH264Level level;
diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
index afd9fcb..b985d9d 100644
--- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
+++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm
@@ -75,15 +75,16 @@
 
 }  // namespace
 
-@interface RTCH264ProfileLevelId ()
+@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId)
+()
 
-@property(nonatomic, assign) RTCH264Profile profile;
+    @property(nonatomic, assign) RTCH264Profile profile;
 @property(nonatomic, assign) RTCH264Level level;
 @property(nonatomic, strong) NSString *hexString;
 
 @end
 
-@implementation RTCH264ProfileLevelId
+@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId)
 
 @synthesize profile = _profile;
 @synthesize level = _level;
diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
index 4fcff1d..88bacbb 100644
--- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
+++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h
@@ -14,5 +14,5 @@
 #import "RTCVideoDecoderFactory.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoDecoderFactoryH264 : NSObject <RTCVideoDecoderFactory>
+@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
 @end
diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
index bf399c6..bdae19d 100644
--- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
+++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m
@@ -13,10 +13,10 @@
 #import "RTCH264ProfileLevelId.h"
 #import "RTCVideoDecoderH264.h"
 
-@implementation RTCVideoDecoderFactoryH264
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
 
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
-  NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+  NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
   NSString *codecName = kRTCVideoCodecH264Name;
 
   NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@@ -24,8 +24,9 @@
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedHighInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+                                                  parameters:constrainedHighParams];
   [codecs addObject:constrainedHighInfo];
 
   NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@@ -33,15 +34,16 @@
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedBaselineInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+                                                  parameters:constrainedBaselineParams];
   [codecs addObject:constrainedBaselineInfo];
 
   return [codecs copy];
 }
 
-- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
-  return [[RTCVideoDecoderH264 alloc] init];
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+  return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
 }
 
 @end
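
The factory's two methods are exercised the same way from application code; a sketch based only on the signatures shown in this hunk:

    // Sketch: build a decoder for the first H264 variant the factory advertises.
    RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) *decoderFactory =
        [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init];
    RTC_OBJC_TYPE(RTCVideoCodecInfo) *codecInfo = [decoderFactory supportedCodecs].firstObject;
    id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoderFactory createDecoder:codecInfo];
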
diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
index b860276..a12e421 100644
--- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
+++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h
@@ -14,5 +14,5 @@
 #import "RTCVideoDecoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoDecoderH264 : NSObject <RTCVideoDecoder>
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoDecoder)>
 @end
diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
index 04bdabf..52edefe 100644
--- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
+++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm
@@ -37,8 +37,8 @@
   int64_t timestamp;
 };
 
-@interface RTCVideoDecoderH264 ()
-- (void)setError:(OSStatus)error;
+@interface RTC_OBJC_TYPE (RTCVideoDecoderH264)
+() - (void)setError : (OSStatus)error;
 @end
 
 // This is the callback function that VideoToolbox calls when decode is
@@ -53,23 +53,25 @@
   std::unique_ptr<RTCFrameDecodeParams> decodeParams(
       reinterpret_cast<RTCFrameDecodeParams *>(params));
   if (status != noErr) {
-    RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef;
+    RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder =
+        (__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef;
     [decoder setError:status];
     RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
     return;
   }
   // TODO(tkchin): Handle CVO properly.
-  RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer];
-  RTCVideoFrame *decodedFrame =
-      [[RTCVideoFrame alloc] initWithBuffer:frameBuffer
-                                   rotation:RTCVideoRotation_0
-                                timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+      initWithBuffer:frameBuffer
+            rotation:RTCVideoRotation_0
+         timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec];
   decodedFrame.timeStamp = decodeParams->timestamp;
   decodeParams->callback(decodedFrame);
 }
 
 // Decoder.
-@implementation RTCVideoDecoderH264 {
+@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) {
   CMVideoFormatDescriptionRef _videoFormat;
   CMMemoryPoolRef _memoryPool;
   VTDecompressionSessionRef _decompressionSession;
@@ -96,9 +98,9 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-- (NSInteger)decode:(RTCEncodedImage *)inputImage
+- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage
         missingFrames:(BOOL)missingFrames
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)info
          renderTimeMs:(int64_t)renderTimeMs {
   RTC_DCHECK(inputImage.buffer);
 
diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
index c64405e..45fc4be 100644
--- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
+++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h
@@ -14,5 +14,5 @@
 #import "RTCVideoEncoderFactory.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderFactoryH264 : NSObject <RTCVideoEncoderFactory>
+@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
 @end
diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
index bbc15e9..9843849 100644
--- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
+++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m
@@ -13,10 +13,10 @@
 #import "RTCH264ProfileLevelId.h"
 #import "RTCVideoEncoderH264.h"
 
-@implementation RTCVideoEncoderFactoryH264
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
 
-- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
-  NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
+- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
+  NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
   NSString *codecName = kRTCVideoCodecH264Name;
 
   NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@@ -24,8 +24,9 @@
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedHighInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+                                                  parameters:constrainedHighParams];
   [codecs addObject:constrainedHighInfo];
 
   NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@@ -33,15 +34,16 @@
     @"level-asymmetry-allowed" : @"1",
     @"packetization-mode" : @"1",
   };
-  RTCVideoCodecInfo *constrainedBaselineInfo =
-      [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+                                                  parameters:constrainedBaselineParams];
   [codecs addObject:constrainedBaselineInfo];
 
   return [codecs copy];
 }
 
-- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
-  return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+  return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
 }
 
 @end
diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
index a9c0558..9f4f4c7 100644
--- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
+++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h
@@ -15,8 +15,8 @@
 #import "RTCVideoEncoder.h"
 
 RTC_OBJC_EXPORT
-@interface RTCVideoEncoderH264 : NSObject <RTCVideoEncoder>
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject <RTC_OBJC_TYPE(RTCVideoEncoder)>
 
-- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo;
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo;
 
 @end
diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
index 5b90922..1138064 100644
--- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
+++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm
@@ -40,17 +40,14 @@
 #include "sdk/objc/components/video_codec/nalu_rewriter.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
 
-@interface RTCVideoEncoderH264 ()
+@interface RTC_OBJC_TYPE (RTCVideoEncoderH264)
+()
 
-- (void)frameWasEncoded:(OSStatus)status
-                  flags:(VTEncodeInfoFlags)infoFlags
-           sampleBuffer:(CMSampleBufferRef)sampleBuffer
-      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
-                  width:(int32_t)width
-                 height:(int32_t)height
-           renderTimeMs:(int64_t)renderTimeMs
-              timestamp:(uint32_t)timestamp
-               rotation:(RTCVideoRotation)rotation;
+    - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer
+    : (CMSampleBufferRef)sampleBuffer codecSpecificInfo
+    : (id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo width : (int32_t)width height
+    : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation
+    : (RTCVideoRotation)rotation;
 
 @end
 
@@ -70,8 +67,8 @@
 // Struct that we pass to the encoder per frame to encode. We receive it again
 // in the encoder callback.
 struct RTCFrameEncodeParams {
-  RTCFrameEncodeParams(RTCVideoEncoderH264 *e,
-                       RTCCodecSpecificInfoH264 *csi,
+  RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e,
+                       RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi,
                        int32_t w,
                        int32_t h,
                        int64_t rtms,
@@ -81,12 +78,12 @@
     if (csi) {
       codecSpecificInfo = csi;
     } else {
-      codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init];
+      codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init];
     }
   }
 
-  RTCVideoEncoderH264 *encoder;
-  RTCCodecSpecificInfoH264 *codecSpecificInfo;
+  RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder;
+  RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo;
   int32_t width;
   int32_t height;
   int64_t render_time_ms;
@@ -97,7 +94,8 @@
 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the
 // encoder. This performs the copy and format conversion.
 // TODO(tkchin): See if encoder will accept i420 frames and compare performance.
-bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBufferRef pixelBuffer) {
+bool CopyVideoFrameToNV12PixelBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frameBuffer,
+                                     CVPixelBufferRef pixelBuffer) {
   RTC_DCHECK(pixelBuffer);
   RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat);
   RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height);
@@ -313,8 +311,8 @@
 }
 }  // namespace
 
-@implementation RTCVideoEncoderH264 {
-  RTCVideoCodecInfo *_codecInfo;
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) {
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo;
   std::unique_ptr<webrtc::BitrateAdjuster> _bitrateAdjuster;
   uint32_t _targetBitrateBps;
   uint32_t _encoderBitrateBps;
@@ -340,7 +338,7 @@
 // drastically reduced bitrate, so we want to avoid that. In steady state
 // conditions, 0.95 seems to give us better overall bitrate over long periods
 // of time.
-- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
+- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo {
   if (self = [super init]) {
     _codecInfo = codecInfo;
     _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
@@ -358,7 +356,7 @@
   [self destroyCompressionSession];
 }
 
-- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
+- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
                        numberOfCores:(int)numberOfCores {
   RTC_DCHECK(settings);
   RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]);
@@ -388,8 +386,8 @@
   return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
 }
 
-- (NSInteger)encode:(RTCVideoFrame *)frame
-    codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)codecSpecificInfo
+- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
+    codecSpecificInfo:(nullable id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
            frameTypes:(NSArray<NSNumber *> *)frameTypes {
   RTC_DCHECK_EQ(frame.width, _width);
   RTC_DCHECK_EQ(frame.height, _height);
@@ -404,9 +402,10 @@
   }
 
   CVPixelBufferRef pixelBuffer = nullptr;
-  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+  if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     // Native frame buffer
-    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
     if (![rtcPixelBuffer requiresCropping]) {
       // This pixel buffer might have a higher resolution than what the
       // compression session is configured to. The compression session can
@@ -543,17 +542,18 @@
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame {
+- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   // Use NV12 for non-native frames.
-  if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
-    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+  if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
     return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
   }
 
   return kNV12PixelFormat;
 }
 
-- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame {
+- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   BOOL resetCompressionSession = NO;
 
   // If we're capturing native frames in another pixel format than the compression session is
@@ -755,7 +755,7 @@
 - (void)frameWasEncoded:(OSStatus)status
                   flags:(VTEncodeInfoFlags)infoFlags
            sampleBuffer:(CMSampleBufferRef)sampleBuffer
-      codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
+      codecSpecificInfo:(id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)>)codecSpecificInfo
                   width:(int32_t)width
                  height:(int32_t)height
            renderTimeMs:(int64_t)renderTimeMs
@@ -783,18 +783,19 @@
   }
 
   __block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
-  RTCRtpFragmentationHeader *header;
+  RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header;
   {
     std::unique_ptr<webrtc::RTPFragmentationHeader> header_cpp;
     bool result =
         H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp);
-    header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()];
+    header = [[RTC_OBJC_TYPE(RTCRtpFragmentationHeader) alloc]
+        initWithNativeFragmentationHeader:header_cpp.get()];
     if (!result) {
       return;
     }
   }
 
-  RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
+  RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
   // This assumes ownership of `buffer` and is responsible for freeing it when done.
   frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data()
                                               length:buffer->size()
@@ -825,9 +826,10 @@
   _bitrateAdjuster->Update(frame.buffer.length);
 }
 
-- (nullable RTCVideoEncoderQpThresholds *)scalingSettings {
-  return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold
-                                                               high:kHighH264QpThreshold];
+- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings {
+  return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc]
+      initWithThresholdsLow:kLowH264QpThreshold
+                       high:kHighH264QpThreshold];
 }
 
 @end
diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
index 432a382..17eebd0 100644
--- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
+++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h
@@ -17,7 +17,7 @@
 
 /** RTCVideoFrameBuffer containing a CVPixelBufferRef */
 RTC_OBJC_EXPORT
-@interface RTCCVPixelBuffer : NSObject <RTCVideoFrameBuffer>
+@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject <RTC_OBJC_TYPE(RTCVideoFrameBuffer)>
 
 @property(nonatomic, readonly) CVPixelBufferRef pixelBuffer;
 @property(nonatomic, readonly) int cropX;
diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
index 01b6405..df8077b 100644
--- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
+++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm
@@ -22,7 +22,7 @@
 #import <VideoToolbox/VideoToolbox.h>
 #endif
 
-@implementation RTCCVPixelBuffer {
+@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) {
   int _width;
   int _height;
   int _bufferWidth;
@@ -152,13 +152,13 @@
   return YES;
 }
 
-- (id<RTCI420Buffer>)toI420 {
+- (id<RTC_OBJC_TYPE(RTCI420Buffer)>)toI420 {
   const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer);
 
   CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
 
-  RTCMutableI420Buffer* i420Buffer =
-      [[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]];
+  RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer =
+      [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]];
 
   switch (pixelFormat) {
     case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
diff --git a/sdk/objc/helpers/RTCCameraPreviewView.h b/sdk/objc/helpers/RTCCameraPreviewView.h
index 17f8d33..db9b15a 100644
--- a/sdk/objc/helpers/RTCCameraPreviewView.h
+++ b/sdk/objc/helpers/RTCCameraPreviewView.h
@@ -19,7 +19,7 @@
  *  AVCaptureSession.
  */
 RTC_OBJC_EXPORT
-@interface RTCCameraPreviewView : UIView
+@interface RTC_OBJC_TYPE (RTCCameraPreviewView) : UIView
 
 /** The capture session being rendered in the view. Capture session
  *  is assigned to AVCaptureVideoPreviewLayer async in the same
diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m
index 57dadea..12e87d8 100644
--- a/sdk/objc/helpers/RTCCameraPreviewView.m
+++ b/sdk/objc/helpers/RTCCameraPreviewView.m
@@ -15,7 +15,7 @@
 
 #import "RTCDispatcher+Private.h"
 
-@implementation RTCCameraPreviewView
+@implementation RTC_OBJC_TYPE (RTCCameraPreviewView)
 
 @synthesize captureSession = _captureSession;
 
@@ -48,15 +48,15 @@
     return;
   }
   _captureSession = captureSession;
-  [RTCDispatcher
+  [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeMain
                     block:^{
                       AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
-                      [RTCDispatcher
+                      [RTC_OBJC_TYPE(RTCDispatcher)
                           dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                         block:^{
                                           previewLayer.session = captureSession;
-                                          [RTCDispatcher
+                                          [RTC_OBJC_TYPE(RTCDispatcher)
                                               dispatchAsyncOnType:RTCDispatcherTypeMain
                                                             block:^{
                                                               [self setCorrectVideoOrientation];
diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h
index aa741f4..195c651 100644
--- a/sdk/objc/helpers/RTCDispatcher+Private.h
+++ b/sdk/objc/helpers/RTCDispatcher+Private.h
@@ -10,8 +10,9 @@
 
 #import "RTCDispatcher.h"
 
-@interface RTCDispatcher ()
+@interface RTC_OBJC_TYPE (RTCDispatcher)
+()
 
-+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;
+    + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType;
 
 @end
diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h
index 4f8359b..f8580f9 100644
--- a/sdk/objc/helpers/RTCDispatcher.h
+++ b/sdk/objc/helpers/RTCDispatcher.h
@@ -26,7 +26,7 @@
  *  shared dispatch queue.
  */
 RTC_OBJC_EXPORT
-@interface RTCDispatcher : NSObject
+@interface RTC_OBJC_TYPE (RTCDispatcher) : NSObject
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/helpers/RTCDispatcher.m b/sdk/objc/helpers/RTCDispatcher.m
index b9d64a4..2e83573 100644
--- a/sdk/objc/helpers/RTCDispatcher.m
+++ b/sdk/objc/helpers/RTCDispatcher.m
@@ -13,7 +13,7 @@
 static dispatch_queue_t kAudioSessionQueue = nil;
 static dispatch_queue_t kCaptureSessionQueue = nil;
 
-@implementation RTCDispatcher
+@implementation RTC_OBJC_TYPE (RTCDispatcher)
 
 + (void)initialize {
   static dispatch_once_t onceToken;
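
The nested dispatch calls in RTCCameraPreviewView.m above already show the renamed dispatcher at its call sites. As a minimal usage sketch of the same API (illustration only, not part of this CL; the block bodies are hypothetical), client code hops between the WebRTC queues like this:

  // Run capture-session work off the main thread, then report back on the main queue.
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      // ... reconfigure the AVCaptureSession here (hypothetical work) ...
                      [RTC_OBJC_TYPE(RTCDispatcher)
                          dispatchAsyncOnType:RTCDispatcherTypeMain
                                        block:^{
                                          // ... update UI state (hypothetical work) ...
                                        }];
                    }];
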
diff --git a/sdk/objc/native/api/video_capturer.h b/sdk/objc/native/api/video_capturer.h
index 464d148..9847d81 100644
--- a/sdk/objc/native/api/video_capturer.h
+++ b/sdk/objc/native/api/video_capturer.h
@@ -20,7 +20,7 @@
 namespace webrtc {
 
 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
-    RTCVideoCapturer* objc_video_capturer,
+    RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
     rtc::Thread* signaling_thread,
     rtc::Thread* worker_thread);
 
diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm
index 2618550..6dd0edb 100644
--- a/sdk/objc/native/api/video_capturer.mm
+++ b/sdk/objc/native/api/video_capturer.mm
@@ -17,7 +17,7 @@
 namespace webrtc {
 
 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> ObjCToNativeVideoCapturer(
-    RTCVideoCapturer *objc_video_capturer,
+    RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer,
     rtc::Thread *signaling_thread,
     rtc::Thread *worker_thread) {
   RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init];
diff --git a/sdk/objc/native/api/video_decoder_factory.h b/sdk/objc/native/api/video_decoder_factory.h
index 710bb6e..03d8af3 100644
--- a/sdk/objc/native/api/video_decoder_factory.h
+++ b/sdk/objc/native/api/video_decoder_factory.h
@@ -20,7 +20,7 @@
 namespace webrtc {
 
 std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
-    id<RTCVideoDecoderFactory> objc_video_decoder_factory);
+    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory);
 
 }  // namespace webrtc
 
diff --git a/sdk/objc/native/api/video_decoder_factory.mm b/sdk/objc/native/api/video_decoder_factory.mm
index 8d0e4ab..d418f2f 100644
--- a/sdk/objc/native/api/video_decoder_factory.mm
+++ b/sdk/objc/native/api/video_decoder_factory.mm
@@ -17,7 +17,7 @@
 namespace webrtc {
 
 std::unique_ptr<VideoDecoderFactory> ObjCToNativeVideoDecoderFactory(
-    id<RTCVideoDecoderFactory> objc_video_decoder_factory) {
+    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> objc_video_decoder_factory) {
   return std::make_unique<ObjCVideoDecoderFactory>(objc_video_decoder_factory);
 }
 
diff --git a/sdk/objc/native/api/video_encoder_factory.h b/sdk/objc/native/api/video_encoder_factory.h
index 8dab48c..6e551b2 100644
--- a/sdk/objc/native/api/video_encoder_factory.h
+++ b/sdk/objc/native/api/video_encoder_factory.h
@@ -20,7 +20,7 @@
 namespace webrtc {
 
 std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
-    id<RTCVideoEncoderFactory> objc_video_encoder_factory);
+    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory);
 
 }  // namespace webrtc
 
diff --git a/sdk/objc/native/api/video_encoder_factory.mm b/sdk/objc/native/api/video_encoder_factory.mm
index b582deb..6fa5563 100644
--- a/sdk/objc/native/api/video_encoder_factory.mm
+++ b/sdk/objc/native/api/video_encoder_factory.mm
@@ -17,7 +17,7 @@
 namespace webrtc {
 
 std::unique_ptr<VideoEncoderFactory> ObjCToNativeVideoEncoderFactory(
-    id<RTCVideoEncoderFactory> objc_video_encoder_factory) {
+    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_video_encoder_factory) {
   return std::make_unique<ObjCVideoEncoderFactory>(objc_video_encoder_factory);
 }
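
A brief usage sketch of the bridge declared and defined above (illustration only; BridgeEncoderFactory is a hypothetical helper, and the Objective-C factory is whatever the application supplies):

  // Objective-C++ helper that hands an app-provided factory, typed with the wrapped
  // protocol name, to the native layer. The decoder path is symmetric via
  // webrtc::ObjCToNativeVideoDecoderFactory().
  std::unique_ptr<webrtc::VideoEncoderFactory> BridgeEncoderFactory(
      id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> objc_factory) {
    return webrtc::ObjCToNativeVideoEncoderFactory(objc_factory);
  }
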
 
diff --git a/sdk/objc/native/api/video_frame.h b/sdk/objc/native/api/video_frame.h
index f8dd568..b4416ff 100644
--- a/sdk/objc/native/api/video_frame.h
+++ b/sdk/objc/native/api/video_frame.h
@@ -17,7 +17,7 @@
 
 namespace webrtc {
 
-RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame);
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame);
 
 }  // namespace webrtc
 
diff --git a/sdk/objc/native/api/video_frame.mm b/sdk/objc/native/api/video_frame.mm
index 02dd830..b82994f 100644
--- a/sdk/objc/native/api/video_frame.mm
+++ b/sdk/objc/native/api/video_frame.mm
@@ -14,7 +14,7 @@
 
 namespace webrtc {
 
-RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame) {
+RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame) {
   return ToObjCVideoFrame(frame);
 }
 
diff --git a/sdk/objc/native/api/video_frame_buffer.h b/sdk/objc/native/api/video_frame_buffer.h
index 54a7375..204d65d 100644
--- a/sdk/objc/native/api/video_frame_buffer.h
+++ b/sdk/objc/native/api/video_frame_buffer.h
@@ -19,9 +19,9 @@
 namespace webrtc {
 
 rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
-    id<RTCVideoFrameBuffer> objc_video_frame_buffer);
+    id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer);
 
-id<RTCVideoFrameBuffer> NativeToObjCVideoFrameBuffer(
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
     const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
 
 }  // namespace webrtc
diff --git a/sdk/objc/native/api/video_frame_buffer.mm b/sdk/objc/native/api/video_frame_buffer.mm
index 2abda42..6dc9975 100644
--- a/sdk/objc/native/api/video_frame_buffer.mm
+++ b/sdk/objc/native/api/video_frame_buffer.mm
@@ -15,11 +15,11 @@
 namespace webrtc {
 
 rtc::scoped_refptr<VideoFrameBuffer> ObjCToNativeVideoFrameBuffer(
-    id<RTCVideoFrameBuffer> objc_video_frame_buffer) {
+    id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_video_frame_buffer) {
   return new rtc::RefCountedObject<ObjCFrameBuffer>(objc_video_frame_buffer);
 }
 
-id<RTCVideoFrameBuffer> NativeToObjCVideoFrameBuffer(
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> NativeToObjCVideoFrameBuffer(
     const rtc::scoped_refptr<VideoFrameBuffer> &buffer) {
   return ToObjCVideoFrameBuffer(buffer);
 }
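
For orientation, a round-trip sketch through the two conversion functions above (illustration only; WrapAndUnwrap is a hypothetical helper):

  // Wrap an Objective-C frame buffer for the native pipeline and convert it back.
  void WrapAndUnwrap(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> objc_buffer) {
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> native =
        webrtc::ObjCToNativeVideoFrameBuffer(objc_buffer);
    id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> round_tripped =
        webrtc::NativeToObjCVideoFrameBuffer(native);
    (void)round_tripped;
  }
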
diff --git a/sdk/objc/native/api/video_renderer.h b/sdk/objc/native/api/video_renderer.h
index afa6543..04796b8 100644
--- a/sdk/objc/native/api/video_renderer.h
+++ b/sdk/objc/native/api/video_renderer.h
@@ -21,7 +21,7 @@
 namespace webrtc {
 
 std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
-    id<RTCVideoRenderer> objc_video_renderer);
+    id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer);
 
 }  // namespace webrtc
 
diff --git a/sdk/objc/native/api/video_renderer.mm b/sdk/objc/native/api/video_renderer.mm
index 6631685..e92d47d 100644
--- a/sdk/objc/native/api/video_renderer.mm
+++ b/sdk/objc/native/api/video_renderer.mm
@@ -17,7 +17,7 @@
 namespace webrtc {
 
 std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> ObjCToNativeVideoRenderer(
-    id<RTCVideoRenderer> objc_video_renderer) {
+    id<RTC_OBJC_TYPE(RTCVideoRenderer)> objc_video_renderer) {
   return std::make_unique<ObjCVideoRenderer>(objc_video_renderer);
 }
 
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm
index 6bf1d9b..b70c4d0 100644
--- a/sdk/objc/native/src/audio/audio_device_ios.mm
+++ b/sdk/objc/native/src/audio/audio_device_ios.mm
@@ -152,7 +152,8 @@
   // here. They have not been set and confirmed yet since configureForWebRTC
   // is not called until audio is about to start. However, it makes sense to
   // store the parameters now and then verify at a later stage.
-  RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
   record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
   // Ensure that the audio device buffer (ADB) knows about the internal audio
@@ -532,12 +533,12 @@
     // Allocate new buffers given the potentially new stream format.
     SetupAudioBuffersForActiveAudioSession();
   }
-  UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord);
+  UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
 }
 
 void AudioDeviceIOS::HandleValidRouteChange() {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   RTCLog(@"%@", session);
   HandleSampleRateChange(session.sampleRate);
 }
@@ -565,7 +566,7 @@
 
   // The audio unit is already initialized or started.
   // Check to see if the sample rate or buffer size has changed.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   const double session_sample_rate = session.sampleRate;
   const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
   const size_t session_frames_per_buffer =
@@ -646,7 +647,7 @@
 
   int64_t glitch_count = num_detected_playout_glitches_;
   dispatch_async(dispatch_get_main_queue(), ^{
-    RTCAudioSession* session = [RTCAudioSession sharedInstance];
+    RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
     [session notifyDidDetectPlayoutGlitch:glitch_count];
   });
 }
@@ -678,7 +679,7 @@
 void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
   LOGI() << "SetupAudioBuffersForActiveAudioSession";
   // Verify the current values once the audio session has been activated.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   double sample_rate = session.sampleRate;
   NSTimeInterval io_buffer_duration = session.IOBufferDuration;
   RTCLog(@"%@", session);
@@ -687,7 +688,8 @@
   // hardware sample rate but continue and use the non-ideal sample rate after
   // reinitializing the audio parameters. Most BT headsets only support 8kHz or
   // 16kHz.
-  RTCAudioSessionConfiguration* webRTCConfig = [RTCAudioSessionConfiguration webRTCConfiguration];
+  RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig =
+      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   if (sample_rate != webRTCConfig.sampleRate) {
     RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate";
   }
@@ -797,7 +799,7 @@
   if (should_start_audio_unit) {
     RTCLog(@"Starting audio unit for UpdateAudioUnit");
     // Log session settings before trying to start audio streaming.
-    RTCAudioSession* session = [RTCAudioSession sharedInstance];
+    RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
     RTCLog(@"%@", session);
     if (!audio_unit_->Start()) {
       RTCLogError(@"Failed to start audio unit.");
@@ -827,7 +829,7 @@
     RTCLogWarning(@"Audio session already configured.");
     return false;
   }
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   bool success = [session configureWebRTCSession:nil];
   [session unlockForConfiguration];
@@ -847,7 +849,7 @@
     RTCLogWarning(@"Audio session already unconfigured.");
     return;
   }
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   [session unconfigureWebRTCSession:nil];
   [session endWebRTCSession:nil];
@@ -865,7 +867,7 @@
     return false;
   }
 
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   // Subscribe to audio session events.
   [session pushDelegate:audio_session_observer_];
   is_interrupted_ = session.isInterrupted ? true : false;
@@ -915,7 +917,7 @@
   io_thread_checker_.Detach();
 
   // Remove audio session notification observers.
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session removeDelegate:audio_session_observer_];
 
   // All I/O should be stopped or paused prior to deactivating the audio
diff --git a/sdk/objc/native/src/objc_frame_buffer.h b/sdk/objc/native/src/objc_frame_buffer.h
index f941dad..9c1ff17 100644
--- a/sdk/objc/native/src/objc_frame_buffer.h
+++ b/sdk/objc/native/src/objc_frame_buffer.h
@@ -13,15 +13,18 @@
 
 #import <CoreVideo/CoreVideo.h>
 
+#import "base/RTCMacros.h"
+
 #include "common_video/include/video_frame_buffer.h"
 
-@protocol RTCVideoFrameBuffer;
+@protocol RTC_OBJC_TYPE
+(RTCVideoFrameBuffer);
 
 namespace webrtc {
 
 class ObjCFrameBuffer : public VideoFrameBuffer {
  public:
-  explicit ObjCFrameBuffer(id<RTCVideoFrameBuffer>);
+  explicit ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>);
   ~ObjCFrameBuffer() override;
 
   Type type() const override;
@@ -31,15 +34,15 @@
 
   rtc::scoped_refptr<I420BufferInterface> ToI420() override;
 
-  id<RTCVideoFrameBuffer> wrapped_frame_buffer() const;
+  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> wrapped_frame_buffer() const;
 
  private:
-  id<RTCVideoFrameBuffer> frame_buffer_;
+  id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer_;
   int width_;
   int height_;
 };
 
-id<RTCVideoFrameBuffer> ToObjCVideoFrameBuffer(
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
     const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
 
 }  // namespace webrtc
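
The pattern above (import base/RTCMacros.h, then forward-declare the protocol through the macro) is what headers use when the final symbol name is produced by RTC_OBJC_TYPE. Roughly, the macro pastes a configurable prefix onto the bare name; a sketch of such a definition is below (illustration only; the authoritative definitions live in sdk/objc/base/RTCMacros.h and may differ in detail):

  // Sketch of a prefix-pasting macro. With an empty prefix,
  // RTC_OBJC_TYPE(RTCVideoFrameBuffer) expands to RTCVideoFrameBuffer, so the two-line
  // forward declaration above becomes an ordinary "@protocol RTCVideoFrameBuffer;".
  #define RTC_OBJC_TYPE_PREFIX
  #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
  #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
  #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
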
diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm
index 52e4341..deb38a7 100644
--- a/sdk/objc/native/src/objc_frame_buffer.mm
+++ b/sdk/objc/native/src/objc_frame_buffer.mm
@@ -17,10 +17,10 @@
 
 namespace {
 
-/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTCI420Buffer */
+/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */
 class ObjCI420FrameBuffer : public I420BufferInterface {
  public:
-  explicit ObjCI420FrameBuffer(id<RTCI420Buffer> frame_buffer)
+  explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
       : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
   ~ObjCI420FrameBuffer() override {}
 
@@ -41,14 +41,14 @@
   int StrideV() const override { return frame_buffer_.strideV; }
 
  private:
-  id<RTCI420Buffer> frame_buffer_;
+  id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer_;
   int width_;
   int height_;
 };
 
 }  // namespace
 
-ObjCFrameBuffer::ObjCFrameBuffer(id<RTCVideoFrameBuffer> frame_buffer)
+ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
     : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
 
 ObjCFrameBuffer::~ObjCFrameBuffer() {}
@@ -72,15 +72,16 @@
   return buffer;
 }
 
-id<RTCVideoFrameBuffer> ObjCFrameBuffer::wrapped_frame_buffer() const {
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer() const {
   return frame_buffer_;
 }
 
-id<RTCVideoFrameBuffer> ToObjCVideoFrameBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
   if (buffer->type() == VideoFrameBuffer::Type::kNative) {
     return static_cast<ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
   } else {
-    return [[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()];
+    return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()];
   }
 }
 
diff --git a/sdk/objc/native/src/objc_video_decoder_factory.h b/sdk/objc/native/src/objc_video_decoder_factory.h
index 9911bbf..30ad8c2 100644
--- a/sdk/objc/native/src/objc_video_decoder_factory.h
+++ b/sdk/objc/native/src/objc_video_decoder_factory.h
@@ -11,26 +11,29 @@
 #ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
 #define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_
 
+#import "base/RTCMacros.h"
+
 #include "api/video_codecs/video_decoder_factory.h"
 #include "media/base/codec.h"
 
-@protocol RTCVideoDecoderFactory;
+@protocol RTC_OBJC_TYPE
+(RTCVideoDecoderFactory);
 
 namespace webrtc {
 
 class ObjCVideoDecoderFactory : public VideoDecoderFactory {
  public:
-  explicit ObjCVideoDecoderFactory(id<RTCVideoDecoderFactory>);
+  explicit ObjCVideoDecoderFactory(id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>);
   ~ObjCVideoDecoderFactory() override;
 
-  id<RTCVideoDecoderFactory> wrapped_decoder_factory() const;
+  id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> wrapped_decoder_factory() const;
 
   std::vector<SdpVideoFormat> GetSupportedFormats() const override;
   std::unique_ptr<VideoDecoder> CreateVideoDecoder(
       const SdpVideoFormat& format) override;
 
  private:
-  id<RTCVideoDecoderFactory> decoder_factory_;
+  id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory_;
 };
 
 }  // namespace webrtc
diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm
index 5aca02d..0906054 100644
--- a/sdk/objc/native/src/objc_video_decoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_decoder_factory.mm
@@ -33,7 +33,7 @@
 namespace {
 class ObjCVideoDecoder : public VideoDecoder {
  public:
-  ObjCVideoDecoder(id<RTCVideoDecoder> decoder)
+  ObjCVideoDecoder(id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder)
       : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {}
 
   int32_t InitDecode(const VideoCodec *codec_settings, int32_t number_of_cores) override {
@@ -43,8 +43,8 @@
   int32_t Decode(const EncodedImage &input_image,
                  bool missing_frames,
                  int64_t render_time_ms = -1) override {
-    RTCEncodedImage *encodedImage =
-        [[RTCEncodedImage alloc] initWithNativeEncodedImage:input_image];
+    RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+        [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image];
 
     return [decoder_ decode:encodedImage
               missingFrames:missing_frames
@@ -53,7 +53,7 @@
   }
 
   int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
-    [decoder_ setCallback:^(RTCVideoFrame *frame) {
+    [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
       const rtc::scoped_refptr<VideoFrameBuffer> buffer =
           new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
       VideoFrame videoFrame =
@@ -76,26 +76,27 @@
   const char *ImplementationName() const override { return implementation_name_.c_str(); }
 
  private:
-  id<RTCVideoDecoder> decoder_;
+  id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder_;
   const std::string implementation_name_;
 };
 }  // namespace
 
-ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(id<RTCVideoDecoderFactory> decoder_factory)
+ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(
+    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> decoder_factory)
     : decoder_factory_(decoder_factory) {}
 
 ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {}
 
-id<RTCVideoDecoderFactory> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
   return decoder_factory_;
 }
 
 std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::CreateVideoDecoder(
     const SdpVideoFormat &format) {
   NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()];
-  for (RTCVideoCodecInfo *codecInfo in decoder_factory_.supportedCodecs) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) {
     if ([codecName isEqualToString:codecInfo.name]) {
-      id<RTCVideoDecoder> decoder = [decoder_factory_ createDecoder:codecInfo];
+      id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoder_factory_ createDecoder:codecInfo];
 
       if ([decoder isKindOfClass:[RTCWrappedNativeVideoDecoder class]]) {
         return [(RTCWrappedNativeVideoDecoder *)decoder releaseWrappedDecoder];
@@ -110,7 +111,7 @@
 
 std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats() const {
   std::vector<SdpVideoFormat> supported_formats;
-  for (RTCVideoCodecInfo *supportedCodec in decoder_factory_.supportedCodecs) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) {
     SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
     supported_formats.push_back(format);
   }
diff --git a/sdk/objc/native/src/objc_video_encoder_factory.h b/sdk/objc/native/src/objc_video_encoder_factory.h
index ca7a23a..7e474c9 100644
--- a/sdk/objc/native/src/objc_video_encoder_factory.h
+++ b/sdk/objc/native/src/objc_video_encoder_factory.h
@@ -13,18 +13,21 @@
 
 #import <Foundation/Foundation.h>
 
+#import "base/RTCMacros.h"
+
 #include "api/video_codecs/video_encoder_factory.h"
 
-@protocol RTCVideoEncoderFactory;
+@protocol RTC_OBJC_TYPE
+(RTCVideoEncoderFactory);
 
 namespace webrtc {
 
 class ObjCVideoEncoderFactory : public VideoEncoderFactory {
  public:
-  explicit ObjCVideoEncoderFactory(id<RTCVideoEncoderFactory>);
+  explicit ObjCVideoEncoderFactory(id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>);
   ~ObjCVideoEncoderFactory() override;
 
-  id<RTCVideoEncoderFactory> wrapped_encoder_factory() const;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> wrapped_encoder_factory() const;
 
   std::vector<SdpVideoFormat> GetSupportedFormats() const override;
   std::vector<SdpVideoFormat> GetImplementations() const override;
@@ -34,7 +37,7 @@
   std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
 
  private:
-  id<RTCVideoEncoderFactory> encoder_factory_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory_;
 };
 
 }  // namespace webrtc
diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm
index 027bfb5..0b53ece 100644
--- a/sdk/objc/native/src/objc_video_encoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -12,6 +12,7 @@
 
 #include <string>
 
+#import "base/RTCMacros.h"
 #import "base/RTCVideoEncoder.h"
 #import "base/RTCVideoEncoderFactory.h"
 #import "components/video_codec/RTCCodecSpecificInfoH264+Private.h"
@@ -38,26 +39,27 @@
 
 class ObjCVideoEncoder : public VideoEncoder {
  public:
-  ObjCVideoEncoder(id<RTCVideoEncoder> encoder)
+  ObjCVideoEncoder(id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder)
       : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {}
 
   int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override {
-    RTCVideoEncoderSettings *settings =
-        [[RTCVideoEncoderSettings alloc] initWithNativeVideoCodec:codec_settings];
+    RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
+        [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings];
     return [encoder_ startEncodeWithSettings:settings
                                numberOfCores:encoder_settings.number_of_cores];
   }
 
   int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
-    [encoder_ setCallback:^BOOL(RTCEncodedImage *_Nonnull frame,
-                                id<RTCCodecSpecificInfo> _Nonnull info,
-                                RTCRtpFragmentationHeader *_Nonnull header) {
+    [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
+                                id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info,
+                                RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * _Nonnull header) {
       EncodedImage encodedImage = [frame nativeEncodedImage];
 
       // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
       CodecSpecificInfo codecSpecificInfo;
-      if ([info isKindOfClass:[RTCCodecSpecificInfoH264 class]]) {
-        codecSpecificInfo = [(RTCCodecSpecificInfoH264 *)info nativeCodecSpecificInfo];
+      if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
+        codecSpecificInfo =
+            [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
       }
 
       std::unique_ptr<RTPFragmentationHeader> fragmentationHeader =
@@ -95,7 +97,7 @@
     info.supports_native_handle = true;
     info.implementation_name = implementation_name_;
 
-    RTCVideoEncoderQpThresholds *qp_thresholds = [encoder_ scalingSettings];
+    RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings];
     info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
                                             ScalingSettings::kOff;
 
@@ -105,26 +107,29 @@
   }
 
  private:
-  id<RTCVideoEncoder> encoder_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder_;
   const std::string implementation_name_;
 };
 
 class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface {
  public:
-  ObjcVideoEncoderSelector(id<RTCVideoEncoderSelector> selector) { selector_ = selector; }
+  ObjcVideoEncoderSelector(id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
+    selector_ = selector;
+  }
   void OnCurrentEncoder(const SdpVideoFormat &format) override {
-    RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+        [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
     [selector_ registerCurrentEncoderInfo:info];
   }
   absl::optional<SdpVideoFormat> OnEncoderBroken() override {
-    RTCVideoCodecInfo *info = [selector_ encoderForBrokenEncoder];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder];
     if (info) {
       return [info nativeSdpVideoFormat];
     }
     return absl::nullopt;
   }
   absl::optional<SdpVideoFormat> OnAvailableBitrate(const DataRate &rate) override {
-    RTCVideoCodecInfo *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
+    RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
     if (info) {
       return [info nativeSdpVideoFormat];
     }
@@ -132,23 +137,24 @@
   }
 
  private:
-  id<RTCVideoEncoderSelector> selector_;
+  id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector_;
 };
 
 }  // namespace
 
-ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(id<RTCVideoEncoderFactory> encoder_factory)
+ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(
+    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> encoder_factory)
     : encoder_factory_(encoder_factory) {}
 
 ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {}
 
-id<RTCVideoEncoderFactory> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
   return encoder_factory_;
 }
 
 std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const {
   std::vector<SdpVideoFormat> supported_formats;
-  for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ supportedCodecs]) {
+  for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) {
     SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
     supported_formats.push_back(format);
   }
@@ -159,7 +165,7 @@
 std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const {
   if ([encoder_factory_ respondsToSelector:@selector(implementations)]) {
     std::vector<SdpVideoFormat> supported_formats;
-    for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ implementations]) {
+    for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) {
       SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
       supported_formats.push_back(format);
     }
@@ -183,8 +189,9 @@
 
 std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::CreateVideoEncoder(
     const SdpVideoFormat &format) {
-  RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format];
-  id<RTCVideoEncoder> encoder = [encoder_factory_ createEncoder:info];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
+  id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [encoder_factory_ createEncoder:info];
   if ([encoder isKindOfClass:[RTCWrappedNativeVideoEncoder class]]) {
     return [(RTCWrappedNativeVideoEncoder *)encoder releaseWrappedEncoder];
   } else {
diff --git a/sdk/objc/native/src/objc_video_frame.h b/sdk/objc/native/src/objc_video_frame.h
index fd74aca..c2931cb 100644
--- a/sdk/objc/native/src/objc_video_frame.h
+++ b/sdk/objc/native/src/objc_video_frame.h
@@ -17,7 +17,7 @@
 
 namespace webrtc {
 
-RTCVideoFrame* ToObjCVideoFrame(const VideoFrame& frame);
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame);
 
 }  // namespace webrtc
 
diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm
index 76f7add..2e8ce61 100644
--- a/sdk/objc/native/src/objc_video_frame.mm
+++ b/sdk/objc/native/src/objc_video_frame.mm
@@ -15,11 +15,11 @@
 
 namespace webrtc {
 
-RTCVideoFrame *ToObjCVideoFrame(const VideoFrame &frame) {
-  RTCVideoFrame *videoFrame =
-      [[RTCVideoFrame alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
-                                   rotation:RTCVideoRotation(frame.rotation())
-                                timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
+RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) {
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
+      initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
+            rotation:RTCVideoRotation(frame.rotation())
+         timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
   videoFrame.timeStamp = frame.timestamp();
 
   return videoFrame;
diff --git a/sdk/objc/native/src/objc_video_renderer.h b/sdk/objc/native/src/objc_video_renderer.h
index 9396ab6..f9c35ea 100644
--- a/sdk/objc/native/src/objc_video_renderer.h
+++ b/sdk/objc/native/src/objc_video_renderer.h
@@ -14,20 +14,23 @@
 #import <CoreGraphics/CoreGraphics.h>
 #import <Foundation/Foundation.h>
 
+#import "base/RTCMacros.h"
+
 #include "api/video/video_frame.h"
 #include "api/video/video_sink_interface.h"
 
-@protocol RTCVideoRenderer;
+@protocol RTC_OBJC_TYPE
+(RTCVideoRenderer);
 
 namespace webrtc {
 
 class ObjCVideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
  public:
-  ObjCVideoRenderer(id<RTCVideoRenderer> renderer);
+  ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer);
   void OnFrame(const VideoFrame& nativeVideoFrame) override;
 
  private:
-  id<RTCVideoRenderer> renderer_;
+  id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer_;
   CGSize size_;
 };
 
diff --git a/sdk/objc/native/src/objc_video_renderer.mm b/sdk/objc/native/src/objc_video_renderer.mm
index 486b7e3..4a9b647 100644
--- a/sdk/objc/native/src/objc_video_renderer.mm
+++ b/sdk/objc/native/src/objc_video_renderer.mm
@@ -10,6 +10,7 @@
 
 #include "sdk/objc/native/src/objc_video_renderer.h"
 
+#import "base/RTCMacros.h"
 #import "base/RTCVideoFrame.h"
 #import "base/RTCVideoRenderer.h"
 
@@ -17,11 +18,11 @@
 
 namespace webrtc {
 
-ObjCVideoRenderer::ObjCVideoRenderer(id<RTCVideoRenderer> renderer)
+ObjCVideoRenderer::ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
     : renderer_(renderer), size_(CGSizeZero) {}
 
 void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) {
-  RTCVideoFrame* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
+  RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame);
 
   CGSize current_size = (videoFrame.rotation % 180 == 0) ?
       CGSizeMake(videoFrame.width, videoFrame.height) :
diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h
index 93e7d15..dad6544 100644
--- a/sdk/objc/native/src/objc_video_track_source.h
+++ b/sdk/objc/native/src/objc_video_track_source.h
@@ -17,9 +17,9 @@
 #include "media/base/adapted_video_track_source.h"
 #include "rtc_base/timestamp_aligner.h"
 
-RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);
+RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame));
 
-@interface RTCObjCVideoSourceAdapter : NSObject<RTCVideoCapturerDelegate>
+@interface RTCObjCVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
 @end
 
 namespace webrtc {
@@ -42,7 +42,7 @@
 
   bool remote() const override;
 
-  void OnCapturedFrame(RTCVideoFrame* frame);
+  void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame);
 
   // Called by RTCVideoSource.
   void OnOutputFormatRequest(int width, int height, int fps);
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm
index 580180a..85ad087 100644
--- a/sdk/objc/native/src/objc_video_track_source.mm
+++ b/sdk/objc/native/src/objc_video_track_source.mm
@@ -25,7 +25,8 @@
 
 @synthesize objCVideoTrackSource = _objCVideoTrackSource;
 
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   _objCVideoTrackSource->OnCapturedFrame(frame);
 }
 
@@ -61,7 +62,7 @@
   video_adapter()->OnOutputFormatRequest(format);
 }
 
-void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) {
+void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
   const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
   const int64_t translated_timestamp_us =
       timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
@@ -88,10 +89,11 @@
   if (adapted_width == frame.width && adapted_height == frame.height) {
     // No adaption - optimized path.
     buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
-  } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+  } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
     // Adapted CVPixelBuffer frame.
-    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+        (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
         initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
                adaptedWidth:adapted_width
               adaptedHeight:adapted_height
diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
index a937957..ca3d672 100644
--- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
+++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm
@@ -25,7 +25,7 @@
 #include "rtc_base/ref_counted_object.h"
 #include "sdk/objc/native/api/video_frame.h"
 
-typedef void (^VideoSinkCallback)(RTCVideoFrame *);
+typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *);
 
 namespace {
 
@@ -63,10 +63,13 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
 
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
   const rtc::VideoSinkWants video_sink_wants;
@@ -92,10 +95,13 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
 
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
   const rtc::VideoSinkWants video_sink_wants;
@@ -119,11 +125,13 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
 
-  RTCVideoFrame *frame = [[RTCVideoFrame alloc] initWithBuffer:buffer
-                                                      rotation:RTCVideoRotation_0
-                                                   timeStampNs:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
   const rtc::VideoSinkWants video_sink_wants;
@@ -159,16 +167,19 @@
   CVPixelBufferCreate(
       NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(frame.width, outputFrame.width);
     XCTAssertEqual(frame.height, outputFrame.height);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
     XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
     XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@@ -192,16 +203,19 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 360);
     XCTAssertEqual(outputFrame.height, 640);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(outputBuffer.cropX, 0);
     XCTAssertEqual(outputBuffer.cropY, 0);
     XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@@ -225,16 +239,19 @@
   CVPixelBufferCreate(
       NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 360);
     XCTAssertEqual(outputFrame.height, 640);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(outputBuffer.cropX, 10);
     XCTAssertEqual(outputBuffer.cropY, 0);
     XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
@@ -259,22 +276,25 @@
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
   // Create a frame that's already adapted down.
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                                              adaptedWidth:640
-                                                             adaptedHeight:360
-                                                                 cropWidth:720
-                                                                cropHeight:1280
-                                                                     cropX:0
-                                                                     cropY:0];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:640
+                                                     adaptedHeight:360
+                                                         cropWidth:720
+                                                        cropHeight:1280
+                                                             cropX:0
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 480);
     XCTAssertEqual(outputFrame.height, 270);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(outputBuffer.cropX, 0);
     XCTAssertEqual(outputBuffer.cropY, 0);
     XCTAssertEqual(outputBuffer.cropWidth, 640);
@@ -300,22 +320,25 @@
   CVPixelBufferCreate(
       NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                                              adaptedWidth:370
-                                                             adaptedHeight:640
-                                                                 cropWidth:370
-                                                                cropHeight:640
-                                                                     cropX:10
-                                                                     cropY:0];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:370
+                                                     adaptedHeight:640
+                                                         cropWidth:370
+                                                        cropHeight:640
+                                                             cropX:10
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 360);
     XCTAssertEqual(outputFrame.height, 640);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(outputBuffer.cropX, 14);
     XCTAssertEqual(outputBuffer.cropY, 0);
     XCTAssertEqual(outputBuffer.cropWidth, 360);
@@ -341,22 +364,25 @@
   CVPixelBufferCreate(
       NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                                              adaptedWidth:300
-                                                             adaptedHeight:640
-                                                                 cropWidth:300
-                                                                cropHeight:640
-                                                                     cropX:40
-                                                                     cropY:0];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:300
+                                                     adaptedHeight:640
+                                                         cropWidth:300
+                                                        cropHeight:640
+                                                             cropX:40
+                                                             cropY:0];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 300);
     XCTAssertEqual(outputFrame.height, 534);
 
-    RTCCVPixelBuffer *outputBuffer = outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
     XCTAssertEqual(outputBuffer.cropX, 40);
     XCTAssertEqual(outputBuffer.cropY, 52);
     XCTAssertEqual(outputBuffer.cropWidth, 300);
@@ -379,16 +405,19 @@
 
 - (void)testOnCapturedFrameI420BufferNeedsAdaptation {
   rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
-  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 360);
     XCTAssertEqual(outputFrame.height, 640);
 
-    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
 
     double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
     XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@@ -408,16 +437,19 @@
 
 - (void)testOnCapturedFrameI420BufferNeedsCropping {
   rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
-  RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer];
-  RTCVideoFrame *frame =
-      [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0];
+  RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
 
   XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
-  ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) {
+  ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
     XCTAssertEqual(outputFrame.width, 360);
     XCTAssertEqual(outputFrame.height, 640);
 
-    RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer;
+    RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
 
     double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
     XCTAssertGreaterThanOrEqual(psnr, 40);
diff --git a/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/sdk/objc/unittests/RTCAudioDevice_xctest.mm
index a3db613..c936399 100644
--- a/sdk/objc/unittests/RTCAudioDevice_xctest.mm
+++ b/sdk/objc/unittests/RTCAudioDevice_xctest.mm
@@ -21,7 +21,7 @@
   std::unique_ptr<webrtc::ios_adm::AudioDeviceIOS> _audio_device;
 }
 
-@property(nonatomic) RTCAudioSession *audioSession;
+@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession;
 
 @end
 
@@ -34,7 +34,7 @@
 
   _audioDeviceModule = webrtc::CreateAudioDeviceModule();
   _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS());
-  self.audioSession = [RTCAudioSession sharedInstance];
+  self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
 
   NSError *error = nil;
   [self.audioSession lockForConfiguration];
@@ -61,21 +61,21 @@
 
 // Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly
 // after an iOS AVAudioSessionInterruptionTypeEnded notification event.
-// AudioDeviceIOS listens to RTCAudioSession interrupted notifications by:
+// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by:
 // - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_
-//   callback with RTCAudioSession's delegate list.
-// - When RTCAudioSession receives an iOS audio interrupted notification, it
+//   callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list.
+// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it
 //   passes the notification to callbacks in its delegate list which sets
 //   AudioDeviceIOS's is_interrupted_ flag to true.
 // - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its
 //   audio_session_observer_ callback is removed from RTCAudioSessions's
 //   delegate list.
-//   So if RTCAudioSession receives an iOS end audio interruption notification,
-//   AudioDeviceIOS is not notified as its callback is not in RTCAudioSession's
+//   So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification,
+//   AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s
 //   delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in
 //   the wrong (true) state and the audio session will ignore audio changes.
-// As RTCAudioSession keeps its own interrupted state, the fix is to initialize
-// AudioDeviceIOS's is_interrupted_ flag to RTCAudioSession's isInterrupted
+// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize
+// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted
 // flag in AudioDeviceIOS.InitPlayOrRecord.
 - (void)testInterruptedAudioSession {
   XCTAssertTrue(self.audioSession.isActive);
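
The comment block above explains how AudioDeviceIOS registers with the session's delegate list to track interruptions. A minimal delegate sketch against the renamed types (illustration only, not part of this CL; the class name is hypothetical, and only two callbacks are shown on the assumption that the remaining RTC_OBJC_TYPE(RTCAudioSessionDelegate) methods are optional):

  @interface HypotheticalInterruptionObserver : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
  @end

  @implementation HypotheticalInterruptionObserver
  - (instancetype)init {
    if (self = [super init]) {
      // Register with the shared session so interruption callbacks reach this object.
      [[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance] addDelegate:self];
    }
    return self;
  }
  - (void)dealloc {
    [[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance] removeDelegate:self];
  }
  - (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
    // Pause application audio work while the interruption is active.
  }
  - (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
                     shouldResumeSession:(BOOL)shouldResumeSession {
    // Resume application audio work if shouldResumeSession is YES.
  }
  @end
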
diff --git a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm
index c2140c3..4e309ca 100644
--- a/sdk/objc/unittests/RTCAudioSessionTest.mm
+++ b/sdk/objc/unittests/RTCAudioSessionTest.mm
@@ -20,9 +20,11 @@
 #import "components/audio/RTCAudioSession.h"
 #import "components/audio/RTCAudioSessionConfiguration.h"
 
-@interface RTCAudioSession (UnitTesting)
+@interface RTC_OBJC_TYPE (RTCAudioSession)
+(UnitTesting)
 
-@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
+    @property(nonatomic,
+              readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
 
 - (instancetype)initWithAudioSession:(id)audioSession;
 
@@ -38,7 +40,7 @@
 @synthesize outputVolume = _outputVolume;
 @end
 
-@interface RTCAudioSessionTestDelegate : NSObject <RTCAudioSessionDelegate>
+@interface RTCAudioSessionTestDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
 
 @property (nonatomic, readonly) float outputVolume;
 
@@ -55,31 +57,31 @@
   return self;
 }
 
-- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
+- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
+- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
                    shouldResumeSession:(BOOL)shouldResumeSession {
 }
 
-- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
-           reason:(AVAudioSessionRouteChangeReason)reason
-    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
+- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
+                            reason:(AVAudioSessionRouteChangeReason)reason
+                     previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
 }
 
-- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
+- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
+- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
+- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
+- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
 }
 
-- (void)audioSession:(RTCAudioSession *)audioSession
+- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
     didChangeOutputVolume:(float)outputVolume {
   _outputVolume = outputVolume;
 }
@@ -95,14 +97,14 @@
 
 - (instancetype)init {
   if (self = [super init]) {
-    RTCAudioSession *session = [RTCAudioSession sharedInstance];
+    RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
     [session addDelegate:self];
   }
   return self;
 }
 
 - (void)dealloc {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session removeDelegate:self];
 }
 
@@ -118,7 +120,7 @@
 @implementation RTCAudioSessionTest
 
 - (void)testLockForConfiguration {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
 
   for (size_t i = 0; i < 2; i++) {
     [session lockForConfiguration];
@@ -132,7 +134,7 @@
 }
 
 - (void)testAddAndRemoveDelegates {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   NSMutableArray *delegates = [NSMutableArray array];
   const size_t count = 5;
   for (size_t i = 0; i < count; ++i) {
@@ -151,7 +153,7 @@
 }
 
 - (void)testPushDelegate {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   NSMutableArray *delegates = [NSMutableArray array];
   const size_t count = 2;
   for (size_t i = 0; i < count; ++i) {
@@ -184,7 +186,7 @@
 // Tests that delegates added to the audio session properly zero out. This is
 // checking an implementation detail (that vectors of __weak work as expected).
 - (void)testZeroingWeakDelegate {
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   @autoreleasepool {
     // Add a delegate to the session. There should be one delegate at this
     // point.
@@ -212,12 +214,12 @@
         [[RTCTestRemoveOnDeallocDelegate alloc] init];
     EXPECT_TRUE(delegate);
   }
-  RTCAudioSession *session = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   EXPECT_EQ(0u, session.delegates.size());
 }
 
 - (void)testAudioSessionActivation {
-  RTCAudioSession *audioSession = [RTCAudioSession sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   EXPECT_EQ(0, audioSession.activationCount);
   [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
   EXPECT_EQ(1, audioSession.activationCount);
@@ -255,10 +257,10 @@
       setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]).
       andDo(setActiveBlock);
 
-  id mockAudioSession = OCMPartialMock([RTCAudioSession sharedInstance]);
+  id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
   OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
 
-  RTCAudioSession *audioSession = mockAudioSession;
+  RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
   EXPECT_EQ(0, audioSession.activationCount);
   [audioSession lockForConfiguration];
   EXPECT_TRUE([audioSession checkLock:nil]);
@@ -286,7 +288,8 @@
 
 - (void)testAudioVolumeDidNotify {
   MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init];
-  RTCAudioSession *session = [[RTCAudioSession alloc] initWithAudioSession:mockAVAudioSession];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession];
   RTCAudioSessionTestDelegate *delegate =
       [[RTCAudioSessionTestDelegate alloc] init];
   [session addDelegate:delegate];
@@ -304,8 +307,8 @@
 class AudioSessionTest : public ::testing::Test {
  protected:
   void TearDown() override {
-    RTCAudioSession *session = [RTCAudioSession sharedInstance];
-    for (id<RTCAudioSessionDelegate> delegate : session.delegates) {
+    RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+    for (id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> delegate : session.delegates) {
       [session removeDelegate:delegate];
     }
   }
diff --git a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
index ee97064..3a1ab24 100644
--- a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
+++ b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm
@@ -31,7 +31,8 @@
   CVPixelBufferRef pixelBufferRef = NULL;
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
 
   XCTAssertFalse([buffer requiresCropping]);
 
@@ -42,13 +43,14 @@
   CVPixelBufferRef pixelBufferRef = NULL;
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
-  RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                                                     adaptedWidth:720
-                                                                    adaptedHeight:1280
-                                                                        cropWidth:360
-                                                                       cropHeight:640
-                                                                            cropX:100
-                                                                            cropY:100];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:720
+                                                     adaptedHeight:1280
+                                                         cropWidth:360
+                                                        cropHeight:640
+                                                             cropX:100
+                                                             cropY:100];
 
   XCTAssertTrue([croppedBuffer requiresCropping]);
 
@@ -60,7 +62,8 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
   XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);
 
   CVBufferRelease(pixelBufferRef);
@@ -71,7 +74,8 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
   XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);
 
   CVBufferRelease(pixelBufferRef);
@@ -82,13 +86,14 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                                              adaptedWidth:720
-                                                             adaptedHeight:1280
-                                                                 cropWidth:360
-                                                                cropHeight:640
-                                                                     cropX:100
-                                                                     cropY:100];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
+                                                      adaptedWidth:720
+                                                     adaptedHeight:1280
+                                                         cropWidth:360
+                                                        cropHeight:640
+                                                             cropX:100
+                                                             cropY:100];
   XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);
 
   CVBufferRelease(pixelBufferRef);
@@ -99,7 +104,8 @@
   CVPixelBufferCreate(
       NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
   XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);
 
   CVBufferRelease(pixelBufferRef);
@@ -109,7 +115,8 @@
   CVPixelBufferRef pixelBufferRef = NULL;
   CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
   XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);
 
   CVBufferRelease(pixelBufferRef);
@@ -198,7 +205,8 @@
   rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
   CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
   XCTAssertEqual(buffer.width, 720);
   XCTAssertEqual(buffer.height, 1280);
 
@@ -218,14 +226,14 @@
 
   [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];
 
-  RTCCVPixelBuffer *scaledBuffer =
-      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
   XCTAssertEqual(scaledBuffer.width, outputSize.width);
   XCTAssertEqual(scaledBuffer.height, outputSize.height);
 
   if (outputSize.width > 0 && outputSize.height > 0) {
-    RTCI420Buffer *originalBufferI420 = [buffer toI420];
-    RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
+    RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+    RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
     double psnr =
         I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
     XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@@ -244,14 +252,14 @@
 
   DrawGradientInRGBPixelBuffer(pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer =
-      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef
-                                       adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
-                                      adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
-                                          cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
-                                         cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
-                                              cropX:cropX
-                                              cropY:cropY];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+      initWithPixelBuffer:pixelBufferRef
+             adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
+            adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
+                cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
+               cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
+                    cropX:cropX
+                    cropY:cropY];
 
   XCTAssertEqual(buffer.width, 720);
   XCTAssertEqual(buffer.height, 1280);
@@ -260,13 +268,13 @@
   CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef);
   [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];
 
-  RTCCVPixelBuffer *scaledBuffer =
-      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
   XCTAssertEqual(scaledBuffer.width, 360);
   XCTAssertEqual(scaledBuffer.height, 640);
 
-  RTCI420Buffer *originalBufferI420 = [buffer toI420];
-  RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420];
+  RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
+  RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
   double psnr =
       I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
   XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
@@ -282,8 +290,9 @@
 
   CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
 
-  RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef];
-  RTCI420Buffer *fromCVPixelBuffer = [buffer toI420];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
+  RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420];
 
   double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
   double target = webrtc::kPerfectPSNR;
diff --git a/sdk/objc/unittests/RTCCallbackLogger_xctest.m b/sdk/objc/unittests/RTCCallbackLogger_xctest.m
index ceaa762..1b6fb1c 100644
--- a/sdk/objc/unittests/RTCCallbackLogger_xctest.m
+++ b/sdk/objc/unittests/RTCCallbackLogger_xctest.m
@@ -14,7 +14,7 @@
 
 @interface RTCCallbackLoggerTests : XCTestCase
 
-@property(nonatomic, strong) RTCCallbackLogger *logger;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger;
 
 @end
 
@@ -23,7 +23,7 @@
 @synthesize logger;
 
 - (void)setUp {
-  self.logger = [[RTCCallbackLogger alloc] init];
+  self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
 }
 
 - (void)tearDown {
diff --git a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
index bd74fc7..34551e5 100644
--- a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
+++ b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm
@@ -59,9 +59,11 @@
 
 }
 #endif
-@interface RTCCameraVideoCapturer (Tests)<AVCaptureVideoDataOutputSampleBufferDelegate>
-- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
-                  captureSession:(AVCaptureSession *)captureSession;
+@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
+(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate> -
+    (instancetype)initWithDelegate
+    : (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate captureSession
+    : (AVCaptureSession *)captureSession;
 @end
 
 @interface RTCCameraVideoCapturerTests : NSObject
@@ -69,7 +71,7 @@
 @property(nonatomic, strong) id deviceMock;
 @property(nonatomic, strong) id captureConnectionMock;
 @property(nonatomic, strong) id captureSessionMock;
-@property(nonatomic, strong) RTCCameraVideoCapturer *capturer;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
 @end
 
 @implementation RTCCameraVideoCapturerTests
@@ -80,9 +82,10 @@
 @synthesize capturer = _capturer;
 
 - (void)setup {
-  self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
+  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
   self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
-  self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock];
+  self.capturer =
+      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
   self.deviceMock = [self createDeviceMock];
 }
 
@@ -94,10 +97,11 @@
   OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
   OCMStub([self.captureSessionMock beginConfiguration]);
   OCMStub([self.captureSessionMock commitConfiguration]);
-  self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate));
+  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
   self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
-  self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock
-                                                    captureSession:self.captureSessionMock];
+  self.capturer =
+      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
+                                                       captureSession:self.captureSessionMock];
   self.deviceMock = [self createDeviceMock];
 }
 
@@ -160,7 +164,8 @@
   OCMStub([self.deviceMock formats]).andReturn(formats);
 
   // when
-  NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock];
+  NSArray *supportedFormats =
+      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];
 
   // then
   EXPECT_EQ(supportedFormats.count, 3u);
@@ -199,7 +204,8 @@
 
   // then
   [[self.delegateMock expect] capturer:self.capturer
-                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
+                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+                                                                    expectedFrame) {
                     EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
                     return YES;
                   }]];
@@ -240,22 +246,23 @@
   CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
 
   [[self.delegateMock expect] capturer:self.capturer
-                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
-    if (camera == AVCaptureDevicePositionFront) {
-      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
-        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
-      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
-        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
-      }
-    } else if (camera == AVCaptureDevicePositionBack) {
-      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
-        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
-      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
-        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
-      }
-    }
-    return YES;
-  }]];
+                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+                                                                    expectedFrame) {
+                    if (camera == AVCaptureDevicePositionFront) {
+                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
+                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
+                      }
+                    } else if (camera == AVCaptureDevicePositionBack) {
+                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
+                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
+                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
+                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
+                      }
+                    }
+                    return YES;
+                  }]];
 
   NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
   [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
@@ -298,12 +305,13 @@
   CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
 
   [[self.delegateMock expect] capturer:self.capturer
-                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) {
-    // Front camera and landscape left should return 180. But the frame says its from the back
-    // camera, so rotation should be 0.
-    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
-    return YES;
-  }]];
+                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
+                                                                    expectedFrame) {
+                    // Front camera and landscape left should return 180. But the frame says it's
+                    // from the back camera, so rotation should be 0.
+                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
+                    return YES;
+                  }]];
 
   NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
   [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
diff --git a/sdk/objc/unittests/RTCCertificateTest.mm b/sdk/objc/unittests/RTCCertificateTest.mm
index 5bf1eb3..38c935c 100644
--- a/sdk/objc/unittests/RTCCertificateTest.mm
+++ b/sdk/objc/unittests/RTCCertificateTest.mm
@@ -29,38 +29,39 @@
 @implementation RTCCertificateTest
 
 - (void)testCertificateIsUsedInConfig {
-  RTCConfiguration *originalConfig = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
 
   NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
   originalConfig.iceServers = @[ server ];
 
   // Generate a new certificate.
-  RTCCertificate *originalCertificate = [RTCCertificate generateCertificateWithParams:@{
-    @"expires" : @100000,
-    @"name" : @"RSASSA-PKCS1-v1_5"
-  }];
+  RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate)
+      generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
 
   // Store certificate in configuration.
   originalConfig.certificate = originalCertificate;
 
-  RTCMediaConstraints *contraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
-  RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                           optionalConstraints:nil];
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+      [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
 
   // Create PeerConnection with this certificate.
-  RTCPeerConnection *peerConnection =
+  RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
       [factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil];
 
   // Retrieve certificate from the configuration.
-  RTCConfiguration *retrievedConfig = peerConnection.configuration;
+  RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration;
 
   // Extract PEM strings from original certificate.
   std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String];
   std::string originalCertificateField = [[originalCertificate certificate] UTF8String];
 
   // Extract PEM strings from certificate retrieved from configuration.
-  RTCCertificate *retrievedCertificate = retrievedConfig.certificate;
+  RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate;
   std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String];
   std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String];
 
diff --git a/sdk/objc/unittests/RTCConfigurationTest.mm b/sdk/objc/unittests/RTCConfigurationTest.mm
index 3fb4d42..51e4a70 100644
--- a/sdk/objc/unittests/RTCConfigurationTest.mm
+++ b/sdk/objc/unittests/RTCConfigurationTest.mm
@@ -28,9 +28,10 @@
 
 - (void)testConversionToNativeConfiguration {
   NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
 
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
   config.iceServers = @[ server ];
   config.iceTransportPolicy = RTCIceTransportPolicyRelay;
   config.bundlePolicy = RTCBundlePolicyMaxBundle;
@@ -47,10 +48,11 @@
   config.continualGatheringPolicy =
       RTCContinualGatheringPolicyGatherContinually;
   config.shouldPruneTurnPorts = YES;
-  config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
-                                                 srtpEnableAes128Sha1_32CryptoCipher:YES
-                                              srtpEnableEncryptedRtpHeaderExtensions:YES
-                                                        sframeRequireFrameEncryption:YES];
+  config.cryptoOptions =
+      [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+                                             srtpEnableAes128Sha1_32CryptoCipher:YES
+                                          srtpEnableEncryptedRtpHeaderExtensions:YES
+                                                    sframeRequireFrameEncryption:YES];
   config.rtcpAudioReportIntervalMs = 2500;
   config.rtcpVideoReportIntervalMs = 3750;
 
@@ -89,9 +91,10 @@
 
 - (void)testNativeConversionToConfiguration {
   NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
 
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
   config.iceServers = @[ server ];
   config.iceTransportPolicy = RTCIceTransportPolicyRelay;
   config.bundlePolicy = RTCBundlePolicyMaxBundle;
@@ -108,20 +111,21 @@
   config.continualGatheringPolicy =
       RTCContinualGatheringPolicyGatherContinually;
   config.shouldPruneTurnPorts = YES;
-  config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
-                                                 srtpEnableAes128Sha1_32CryptoCipher:NO
-                                              srtpEnableEncryptedRtpHeaderExtensions:NO
-                                                        sframeRequireFrameEncryption:NO];
+  config.cryptoOptions =
+      [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+                                             srtpEnableAes128Sha1_32CryptoCipher:NO
+                                          srtpEnableEncryptedRtpHeaderExtensions:NO
+                                                    sframeRequireFrameEncryption:NO];
   config.rtcpAudioReportIntervalMs = 1500;
   config.rtcpVideoReportIntervalMs = 2150;
 
   webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
       [config createNativeConfiguration];
-  RTCConfiguration *newConfig = [[RTCConfiguration alloc]
-      initWithNativeConfiguration:*nativeConfig];
+  RTC_OBJC_TYPE(RTCConfiguration) *newConfig =
+      [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig];
   EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
-  RTCIceServer *newServer = newConfig.iceServers[0];
-  RTCIceServer *origServer = config.iceServers[0];
+  RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+  RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
   EXPECT_EQ(origServer.urlStrings.count, server.urlStrings.count);
   std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
   std::string url = newServer.urlStrings.firstObject.UTF8String;
@@ -152,7 +156,7 @@
 }
 
 - (void)testDefaultValues {
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
   EXPECT_EQ(config.cryptoOptions, nil);
 }
 
diff --git a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
index a96ae517..b3461cc 100644
--- a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
+++ b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm
@@ -30,8 +30,8 @@
   int channelId = 4;
   NSString *protocol = @"protocol";
 
-  RTCDataChannelConfiguration *dataChannelConfig =
-      [[RTCDataChannelConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+      [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
   dataChannelConfig.isOrdered = isOrdered;
   dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime;
   dataChannelConfig.maxRetransmits = maxRetransmits;
@@ -50,7 +50,7 @@
 
 @end
 
-TEST(RTCDataChannelConfiguration, NativeDataChannelInitConversionTest) {
+TEST(RTC_OBJC_TYPE(RTCDataChannelConfiguration), NativeDataChannelInitConversionTest) {
   @autoreleasepool {
     RTCDataChannelConfigurationTest *test =
         [[RTCDataChannelConfigurationTest alloc] init];
diff --git a/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/sdk/objc/unittests/RTCEncodedImage_xctest.mm
index 577ecda..84804fe 100644
--- a/sdk/objc/unittests/RTCEncodedImage_xctest.mm
+++ b/sdk/objc/unittests/RTCEncodedImage_xctest.mm
@@ -22,15 +22,15 @@
   webrtc::EncodedImage encoded_image;
   encoded_image.SetEncodedData(encoded_data);
 
-  RTCEncodedImage *encodedImage =
-      [[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image];
+  RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
+      [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
 
   XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data);
 }
 
 - (void)testInitWithNSData {
   NSData *bufferData = [NSData data];
-  RTCEncodedImage *encodedImage = [[RTCEncodedImage alloc] init];
+  RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
   encodedImage.buffer = bufferData;
 
   webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
@@ -39,12 +39,13 @@
 }
 
 - (void)testRetainsNativeEncodedImage {
-  RTCEncodedImage *encodedImage;
+  RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage;
   {
     const auto encoded_data = webrtc::EncodedImageBuffer::Create();
     webrtc::EncodedImage encoded_image;
     encoded_image.SetEncodedData(encoded_data);
-    encodedImage = [[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image];
+    encodedImage =
+        [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
   }
   webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
   XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
diff --git a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
index 01deb68..2407c88 100644
--- a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
+++ b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm
@@ -17,7 +17,7 @@
 NSString *const kTestFileName = @"foreman.mp4";
 static const int kTestTimeoutMs = 5 * 1000;  // 5secs.
 
-@interface MockCapturerDelegate : NSObject <RTCVideoCapturerDelegate>
+@interface MockCapturerDelegate : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
 
 @property(nonatomic, assign) NSInteger capturedFramesCount;
 
@@ -26,7 +26,8 @@
 @implementation MockCapturerDelegate
 @synthesize capturedFramesCount = _capturedFramesCount;
 
-- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
   self.capturedFramesCount++;
 }
 
@@ -35,7 +36,7 @@
 NS_CLASS_AVAILABLE_IOS(10)
 @interface RTCFileVideoCapturerTests : XCTestCase
 
-@property(nonatomic, strong) RTCFileVideoCapturer *capturer;
+@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer;
 @property(nonatomic, strong) MockCapturerDelegate *mockDelegate;
 
 @end
@@ -46,7 +47,7 @@
 
 - (void)setUp {
   self.mockDelegate = [[MockCapturerDelegate alloc] init];
-  self.capturer = [[RTCFileVideoCapturer alloc] initWithDelegate:self.mockDelegate];
+  self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate];
 }
 
 - (void)tearDown {
diff --git a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
index 0669586..ec9dc41 100644
--- a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
+++ b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m
@@ -22,24 +22,26 @@
 @implementation RTCH264ProfileLevelIdTests
 
 - (void)testInitWithString {
-  RTCH264ProfileLevelId *profileLevelId =
-      [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedHigh];
+  RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+      [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
   XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
   XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
 
-  profileLevelId = [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedBaseline];
+  profileLevelId =
+      [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
   XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
   XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
 }
 
 - (void)testInitWithProfileAndLevel {
-  RTCH264ProfileLevelId *profileLevelId =
-      [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedHigh
-                                               level:RTCH264Level3_1];
+  RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
+      [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
+                                                              level:RTCH264Level3_1];
   XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);
 
-  profileLevelId = [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedBaseline
-                                                            level:RTCH264Level3_1];
+  profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+      initWithProfile:RTCH264ProfileConstrainedBaseline
+                level:RTCH264Level3_1];
   XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline);
 }
 
diff --git a/sdk/objc/unittests/RTCIceCandidateTest.mm b/sdk/objc/unittests/RTCIceCandidateTest.mm
index 18dcdad..b0b6cb6 100644
--- a/sdk/objc/unittests/RTCIceCandidateTest.mm
+++ b/sdk/objc/unittests/RTCIceCandidateTest.mm
@@ -30,9 +30,8 @@
                    "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa "
                    "59052 typ host generation 0";
 
-  RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp
-                                                      sdpMLineIndex:0
-                                                             sdpMid:@"audio"];
+  RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
+      [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"];
 
   std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
       candidate.nativeCandidate;
@@ -51,8 +50,8 @@
   webrtc::IceCandidateInterface *nativeCandidate =
       webrtc::CreateIceCandidate("audio", 0, sdp, nullptr);
 
-  RTCIceCandidate *iceCandidate =
-      [[RTCIceCandidate alloc] initWithNativeCandidate:nativeCandidate];
+  RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
+      [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate];
   EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
   EXPECT_EQ(0, iceCandidate.sdpMLineIndex);
 
diff --git a/sdk/objc/unittests/RTCIceServerTest.mm b/sdk/objc/unittests/RTCIceServerTest.mm
index 8ef5195..5dbb92f 100644
--- a/sdk/objc/unittests/RTCIceServerTest.mm
+++ b/sdk/objc/unittests/RTCIceServerTest.mm
@@ -28,8 +28,8 @@
 @implementation RTCIceServerTest
 
 - (void)testOneURLServer {
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
-      @"stun:stun1.example.net" ]];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]];
 
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(1u, iceStruct.urls.size());
@@ -39,8 +39,8 @@
 }
 
 - (void)testTwoURLServer {
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[
-      @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
+  RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
+      initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
 
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(2u, iceStruct.urls.size());
@@ -51,10 +51,10 @@
 }
 
 - (void)testPasswordCredential {
-  RTCIceServer *server = [[RTCIceServer alloc]
-      initWithURLStrings:@[ @"turn1:turn1.example.net" ]
-                username:@"username"
-              credential:@"credential"];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+                                                     username:@"username"
+                                                   credential:@"credential"];
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(1u, iceStruct.urls.size());
   EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -63,11 +63,12 @@
 }
 
 - (void)testHostname {
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
-                                                         username:@"username"
-                                                       credential:@"credential"
-                                                    tlsCertPolicy:RTCTlsCertPolicySecure
-                                                         hostname:@"hostname"];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+                                                     username:@"username"
+                                                   credential:@"credential"
+                                                tlsCertPolicy:RTCTlsCertPolicySecure
+                                                     hostname:@"hostname"];
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(1u, iceStruct.urls.size());
   EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -77,12 +78,13 @@
 }
 
 - (void)testTlsAlpnProtocols {
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
-                                                         username:@"username"
-                                                       credential:@"credential"
-                                                    tlsCertPolicy:RTCTlsCertPolicySecure
-                                                         hostname:@"hostname"
-                                                 tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+                                                     username:@"username"
+                                                   credential:@"credential"
+                                                tlsCertPolicy:RTCTlsCertPolicySecure
+                                                     hostname:@"hostname"
+                                             tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(1u, iceStruct.urls.size());
   EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -93,13 +95,14 @@
 }
 
 - (void)testTlsEllipticCurves {
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
-                                                         username:@"username"
-                                                       credential:@"credential"
-                                                    tlsCertPolicy:RTCTlsCertPolicySecure
-                                                         hostname:@"hostname"
-                                                 tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
-                                                tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
+                                                     username:@"username"
+                                                   credential:@"credential"
+                                                tlsCertPolicy:RTCTlsCertPolicySecure
+                                                     hostname:@"hostname"
+                                             tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
+                                            tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
   webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
   EXPECT_EQ(1u, iceStruct.urls.size());
   EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -121,8 +124,8 @@
   nativeServer.tls_elliptic_curves.push_back("curve1");
   nativeServer.tls_elliptic_curves.push_back("curve2");
 
-  RTCIceServer *iceServer =
-      [[RTCIceServer alloc] initWithNativeServer:nativeServer];
+  RTC_OBJC_TYPE(RTCIceServer) *iceServer =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer];
   EXPECT_EQ(1u, iceServer.urlStrings.count);
   EXPECT_EQ("stun:stun.example.net",
       [NSString stdStringForString:iceServer.urlStrings.firstObject]);
diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m
index d7fa12c..eb519bb 100644
--- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m
+++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m
@@ -21,10 +21,11 @@
 #import "components/renderer/metal/RTCMTLNV12Renderer.h"
 #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
 
-// Extension of RTCMTLVideoView for testing purposes.
-@interface RTCMTLVideoView (Testing)
+// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes.
+@interface RTC_OBJC_TYPE (RTCMTLVideoView)
+(Testing)
 
-@property(nonatomic, readonly) MTKView *metalView;
+    @property(nonatomic, readonly) MTKView *metalView;
 
 + (BOOL)isMetalAvailable;
 + (UIView *)createMetalView:(CGRect)frame;
@@ -48,7 +49,7 @@
 @synthesize frameMock = _frameMock;
 
 - (void)setUp {
-  self.classMock = OCMClassMock([RTCMTLVideoView class]);
+  self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]);
   [self startMockingNilView];
 }
 
@@ -64,15 +65,16 @@
 }
 
 - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer {
-  id frameMock = OCMClassMock([RTCVideoFrame class]);
+  id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
   if (hasCVPixelBuffer) {
     CVPixelBufferRef pixelBufferRef;
     CVPixelBufferCreate(
         kCFAllocatorDefault, 200, 200, kCVPixelFormatType_420YpCbCr8Planar, nil, &pixelBufferRef);
     OCMStub([frameMock buffer])
-        .andReturn([[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]);
+        .andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
   } else {
-    OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]);
+    OCMStub([frameMock buffer])
+        .andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:200 height:200]);
   }
   OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
   return frameMock;
@@ -98,7 +100,8 @@
   // when
   BOOL asserts = NO;
   @try {
-    RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
+    RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+        [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
     (void)realView;
   } @catch (NSException *ex) {
     asserts = YES;
@@ -111,8 +114,9 @@
   // given
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
-  self.frameMock = OCMClassMock([RTCVideoFrame class]);
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
 
   [[self.frameMock reject] buffer];
   [[self.classMock reject] createNV12Renderer];
@@ -137,7 +141,8 @@
   OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
   [[self.classMock reject] createNV12Renderer];
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 
   // when
   [realView renderFrame:self.frameMock];
@@ -158,7 +163,8 @@
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
   [[self.classMock reject] createI420Renderer];
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 
   // when
   [realView renderFrame:self.frameMock];
@@ -178,7 +184,8 @@
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
   [[self.classMock reject] createI420Renderer];
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 
   [realView renderFrame:self.frameMock];
   [realView drawInMTKView:realView.metalView];
@@ -186,7 +193,7 @@
   [self.classMock verify];
 
   // Recreate view.
-  realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
   OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
   // View should reinit renderer.
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
@@ -206,7 +213,8 @@
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
   [[self.classMock reject] createI420Renderer];
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
   [realView renderFrame:self.frameMock];
   [realView drawInMTKView:realView.metalView];
 
@@ -230,7 +238,8 @@
   OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
   [[self.classMock reject] createI420Renderer];
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
   [realView renderFrame:self.frameMock];
   [realView drawInMTKView:realView.metalView];
 
@@ -250,11 +259,12 @@
 - (void)testReportsSizeChangesToDelegate {
   OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
 
-  id delegateMock = OCMProtocolMock(@protocol(RTCVideoViewDelegate));
+  id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
   CGSize size = CGSizeMake(640, 480);
   OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+      [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
   realView.delegate = delegateMock;
   [realView setSize:size];
 
@@ -269,7 +279,7 @@
       createMetalView:CGRectZero];
   OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);
 
-  RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init];
+  RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
   [realView setVideoContentMode:UIViewContentModeScaleAspectFill];
 
   OCMVerify(metalKitView);
diff --git a/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/sdk/objc/unittests/RTCMediaConstraintsTest.mm
index 4d5e450..7664a7e 100644
--- a/sdk/objc/unittests/RTCMediaConstraintsTest.mm
+++ b/sdk/objc/unittests/RTCMediaConstraintsTest.mm
@@ -28,9 +28,9 @@
   NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
   NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
 
-  RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
-      initWithMandatoryConstraints:mandatory
-               optionalConstraints:optional];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory
+                                                           optionalConstraints:optional];
   std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
       [constraints nativeConstraints];
 
diff --git a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
index d5fa65b..7bdc538 100644
--- a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
+++ b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m
@@ -43,10 +43,12 @@
 
 - (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
   CVPixelBufferRef nullPixelBuffer = NULL;
-  RTCCVPixelBuffer *badFrameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:nullPixelBuffer];
-  RTCVideoFrame *badFrame = [[RTCVideoFrame alloc] initWithBuffer:badFrameBuffer
-                                                         rotation:RTCVideoRotation_0
-                                                      timeStampNs:0];
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
+  RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:0];
   [_nv12TextureCache uploadFrameToTextures:badFrame];
 }
 
diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
index 40b3aa0..7d19d40 100644
--- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
+++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm
@@ -39,7 +39,7 @@
 @implementation RTCPeerConnectionFactoryBuilderTest
 
 - (void)testBuilder {
-  id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]);
+  id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
   OCMExpect([factoryMock alloc]).andReturn(factoryMock);
 #ifdef HAVE_NO_MEDIA
   RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]);
@@ -54,13 +54,14 @@
                   mediaTransportFactory:nullptr]);
 #endif
   RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
-  RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory];
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+      [builder createPeerConnectionFactory];
   EXPECT_TRUE(peerConnectionFactory != nil);
   OCMVerifyAll(factoryMock);
 }
 
 - (void)testDefaultComponentsBuilder {
-  id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]);
+  id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
   OCMExpect([factoryMock alloc]).andReturn(factoryMock);
 #ifdef HAVE_NO_MEDIA
   RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]);
@@ -75,7 +76,8 @@
                   mediaTransportFactory:nullptr]);
 #endif
   RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
-  RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory];
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
+      [builder createPeerConnectionFactory];
   EXPECT_TRUE(peerConnectionFactory != nil);
   OCMVerifyAll(factoryMock);
 }
diff --git a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
index 477b541..2737bb6 100644
--- a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
+++ b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m
@@ -30,16 +30,17 @@
 
 - (void)testPeerConnectionLifetime {
   @autoreleasepool {
-    RTCConfiguration *config = [[RTCConfiguration alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
 
-    RTCMediaConstraints *constraints =
-        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
+    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                             optionalConstraints:nil];
 
-    RTCPeerConnectionFactory *factory;
-    RTCPeerConnection *peerConnection;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       peerConnection =
           [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
       [peerConnection close];
@@ -53,11 +54,11 @@
 
 - (void)testMediaStreamLifetime {
   @autoreleasepool {
-    RTCPeerConnectionFactory *factory;
-    RTCMediaStream *mediaStream;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCMediaStream) * mediaStream;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"];
       factory = nil;
     }
@@ -69,17 +70,19 @@
 
 - (void)testDataChannelLifetime {
   @autoreleasepool {
-    RTCConfiguration *config = [[RTCConfiguration alloc] init];
-    RTCMediaConstraints *constraints =
-        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
-    RTCDataChannelConfiguration *dataChannelConfig = [[RTCDataChannelConfiguration alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                             optionalConstraints:nil];
+    RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
+        [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
 
-    RTCPeerConnectionFactory *factory;
-    RTCPeerConnection *peerConnection;
-    RTCDataChannel *dataChannel;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+    RTC_OBJC_TYPE(RTCDataChannel) * dataChannel;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       peerConnection =
           [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
       dataChannel =
@@ -97,18 +100,20 @@
 
 - (void)testRTCRtpTransceiverLifetime {
   @autoreleasepool {
-    RTCConfiguration *config = [[RTCConfiguration alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
-    RTCMediaConstraints *contraints =
-        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
-    RTCRtpTransceiverInit *init = [[RTCRtpTransceiverInit alloc] init];
+    RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                             optionalConstraints:nil];
+    RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
+        [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
 
-    RTCPeerConnectionFactory *factory;
-    RTCPeerConnection *peerConnection;
-    RTCRtpTransceiver *tranceiver;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+    RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       peerConnection =
           [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
       tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
@@ -125,16 +130,17 @@
 
 - (void)testRTCRtpSenderLifetime {
   @autoreleasepool {
-    RTCConfiguration *config = [[RTCConfiguration alloc] init];
-    RTCMediaConstraints *constraints =
-        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
+    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                             optionalConstraints:nil];
 
-    RTCPeerConnectionFactory *factory;
-    RTCPeerConnection *peerConnection;
-    RTCRtpSender *sender;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
+    RTC_OBJC_TYPE(RTCRtpSender) * sender;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       peerConnection =
           [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
       sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
@@ -151,19 +157,20 @@
 
 - (void)testRTCRtpReceiverLifetime {
   @autoreleasepool {
-    RTCConfiguration *config = [[RTCConfiguration alloc] init];
-    RTCMediaConstraints *constraints =
-        [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil];
+    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                             optionalConstraints:nil];
 
-    RTCPeerConnectionFactory *factory;
-    RTCPeerConnection *pc1;
-    RTCPeerConnection *pc2;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
+    RTC_OBJC_TYPE(RTCPeerConnection) * pc2;
 
-    NSArray<RTCRtpReceiver *> *receivers1;
-    NSArray<RTCRtpReceiver *> *receivers2;
+    NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers1;
+    NSArray<RTC_OBJC_TYPE(RTCRtpReceiver) *> *receivers2;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
       [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
 
@@ -197,11 +204,11 @@
 
 - (void)testAudioSourceLifetime {
   @autoreleasepool {
-    RTCPeerConnectionFactory *factory;
-    RTCAudioSource *audioSource;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCAudioSource) * audioSource;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       audioSource = [factory audioSourceWithConstraints:nil];
       XCTAssertNotNil(audioSource);
       factory = nil;
@@ -214,11 +221,11 @@
 
 - (void)testVideoSourceLifetime {
   @autoreleasepool {
-    RTCPeerConnectionFactory *factory;
-    RTCVideoSource *videoSource;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCVideoSource) * videoSource;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       videoSource = [factory videoSource];
       XCTAssertNotNil(videoSource);
       factory = nil;
@@ -231,11 +238,11 @@
 
 - (void)testAudioTrackLifetime {
   @autoreleasepool {
-    RTCPeerConnectionFactory *factory;
-    RTCAudioTrack *audioTrack;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       audioTrack = [factory audioTrackWithTrackId:@"audioTrack"];
       XCTAssertNotNil(audioTrack);
       factory = nil;
@@ -248,11 +255,11 @@
 
 - (void)testVideoTrackLifetime {
   @autoreleasepool {
-    RTCPeerConnectionFactory *factory;
-    RTCVideoTrack *videoTrack;
+    RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
+    RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack;
 
     @autoreleasepool {
-      factory = [[RTCPeerConnectionFactory alloc] init];
+      factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
       videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
       XCTAssertNotNil(videoTrack);
       factory = nil;
@@ -263,20 +270,20 @@
   XCTAssertTrue(true, "Expect test does not crash");
 }
 
-- (bool)negotiatePeerConnection:(RTCPeerConnection *)pc1
-             withPeerConnection:(RTCPeerConnection *)pc2
+- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1
+             withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2
              negotiationTimeout:(NSTimeInterval)timeout {
-  __weak RTCPeerConnection *weakPC1 = pc1;
-  __weak RTCPeerConnection *weakPC2 = pc2;
-  RTCMediaConstraints *sdpConstraints =
-      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{
+  __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
+  __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
+  RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
         kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
       }
-                                            optionalConstraints:nil];
+                                                           optionalConstraints:nil];
 
   dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
   [weakPC1 offerForConstraints:sdpConstraints
-             completionHandler:^(RTCSessionDescription *offer, NSError *error) {
+             completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
                XCTAssertNil(error);
                XCTAssertNotNil(offer);
                [weakPC1
@@ -289,8 +296,9 @@
                                 XCTAssertNil(error);
                                 [weakPC2
                                     answerForConstraints:sdpConstraints
-                                       completionHandler:^(RTCSessionDescription *answer,
-                                                           NSError *error) {
+                                       completionHandler:^(
+                                           RTC_OBJC_TYPE(RTCSessionDescription) * answer,
+                                           NSError * error) {
                                          XCTAssertNil(error);
                                          XCTAssertNotNil(answer);
                                          [weakPC2
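
For context on the negotiatePeerConnection:withPeerConnection:negotiationTimeout: helper modified in the file above: it signals a dispatch semaphore from nested offer/answer completion handlers while the caller waits up to the given timeout. A minimal sketch of that wait pattern, assuming a helper name of my own (WaitWithTimeout) rather than anything defined in this test file:

#include <dispatch/dispatch.h>
#include <stdbool.h>

/* Hypothetical helper, not part of the patch: returns true if the semaphore
 * was signaled before the timeout elapsed. */
static bool WaitWithTimeout(dispatch_semaphore_t sem, double timeoutSeconds) {
  dispatch_time_t deadline =
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutSeconds * NSEC_PER_SEC));
  return dispatch_semaphore_wait(sem, deadline) == 0;
}

int main(void) {
  dispatch_semaphore_t done = dispatch_semaphore_create(0);
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    dispatch_semaphore_signal(done);
  });
  /* Exit 0 if the background block signaled within 5 seconds. */
  return WaitWithTimeout(done, 5.0) ? 0 : 1;
}

dispatch_semaphore_wait returns zero only when the semaphore is signaled before the deadline, which is the kind of check a timeout-bounded negotiation helper such as the one above can build on.
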
diff --git a/sdk/objc/unittests/RTCPeerConnectionTest.mm b/sdk/objc/unittests/RTCPeerConnectionTest.mm
index 53fe27b..e45ca93 100644
--- a/sdk/objc/unittests/RTCPeerConnectionTest.mm
+++ b/sdk/objc/unittests/RTCPeerConnectionTest.mm
@@ -34,9 +34,10 @@
 
 - (void)testConfigurationGetter {
   NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
 
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
   config.iceServers = @[ server ];
   config.iceTransportPolicy = RTCIceTransportPolicyRelay;
   config.bundlePolicy = RTCBundlePolicyMaxBundle;
@@ -54,18 +55,21 @@
       RTCContinualGatheringPolicyGatherContinually;
   config.shouldPruneTurnPorts = YES;
   config.activeResetSrtpParams = YES;
-  config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES
-                                                 srtpEnableAes128Sha1_32CryptoCipher:YES
-                                              srtpEnableEncryptedRtpHeaderExtensions:NO
-                                                        sframeRequireFrameEncryption:NO];
+  config.cryptoOptions =
+      [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
+                                             srtpEnableAes128Sha1_32CryptoCipher:YES
+                                          srtpEnableEncryptedRtpHeaderExtensions:NO
+                                                    sframeRequireFrameEncryption:NO];
 
-  RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{}
-      optionalConstraints:nil];
-  RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                           optionalConstraints:nil];
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+      [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
 
-  RTCConfiguration *newConfig;
+  RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
   @autoreleasepool {
-    RTCPeerConnection *peerConnection =
+    RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
         [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
     newConfig = peerConnection.configuration;
 
@@ -78,8 +82,8 @@
   }
 
   EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]);
-  RTCIceServer *newServer = newConfig.iceServers[0];
-  RTCIceServer *origServer = config.iceServers[0];
+  RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
+  RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
   std::string origUrl = origServer.urlStrings.firstObject.UTF8String;
   std::string url = newServer.urlStrings.firstObject.UTF8String;
   EXPECT_EQ(origUrl, url);
@@ -109,19 +113,22 @@
 
 - (void)testWithDependencies {
   NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
-  RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings];
+  RTC_OBJC_TYPE(RTCIceServer) *server =
+      [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
 
-  RTCConfiguration *config = [[RTCConfiguration alloc] init];
+  RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
   config.iceServers = @[ server ];
-  RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{}
-                                                                          optionalConstraints:nil];
-  RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
+  RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+                                                           optionalConstraints:nil];
+  RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
+      [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
 
-  RTCConfiguration *newConfig;
+  RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
   std::unique_ptr<webrtc::PeerConnectionDependencies> pc_dependencies =
       std::make_unique<webrtc::PeerConnectionDependencies>(nullptr);
   @autoreleasepool {
-    RTCPeerConnection *peerConnection =
+    RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
         [factory peerConnectionWithDependencies:config
                                     constraints:contraints
                                    dependencies:std::move(pc_dependencies)
diff --git a/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/sdk/objc/unittests/RTCSessionDescriptionTest.mm
index 0807eed..ee65649 100644
--- a/sdk/objc/unittests/RTCSessionDescriptionTest.mm
+++ b/sdk/objc/unittests/RTCSessionDescriptionTest.mm
@@ -24,19 +24,18 @@
 @implementation RTCSessionDescriptionTest
 
 /**
- * Test conversion of an Objective-C RTCSessionDescription to a native
+ * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native
  * SessionDescriptionInterface (based on the types and SDP strings being equal).
  */
 - (void)testSessionDescriptionConversion {
-  RTCSessionDescription *description =
-      [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
-                                              sdp:[self sdp]];
+  RTC_OBJC_TYPE(RTCSessionDescription) *description =
+      [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]];
 
   webrtc::SessionDescriptionInterface *nativeDescription =
       description.nativeDescription;
 
   EXPECT_EQ(RTCSdpTypeAnswer,
-      [RTCSessionDescription typeForStdString:nativeDescription->type()]);
+            [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]);
 
   std::string sdp;
   nativeDescription->ToString(&sdp);
@@ -51,11 +50,10 @@
       [self sdp].stdString,
       nullptr);
 
-  RTCSessionDescription *description =
-      [[RTCSessionDescription alloc] initWithNativeDescription:
-      nativeDescription];
+  RTC_OBJC_TYPE(RTCSessionDescription) *description =
+      [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription];
   EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
-      [RTCSessionDescription stdStringForType:description.type]);
+            [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
   EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
 }
 
diff --git a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
index bd31a6e..cc31f67 100644
--- a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
@@ -13,6 +13,7 @@
 
 #include "sdk/objc/native/src/objc_video_decoder_factory.h"
 
+#import "base/RTCMacros.h"
 #import "base/RTCVideoDecoder.h"
 #import "base/RTCVideoDecoderFactory.h"
 #include "media/base/codec.h"
@@ -20,8 +21,8 @@
 #include "modules/video_coding/include/video_error_codes.h"
 #include "rtc_base/gunit.h"
 
-id<RTCVideoDecoderFactory> CreateDecoderFactoryReturning(int return_code) {
-  id decoderMock = OCMProtocolMock(@protocol(RTCVideoDecoder));
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
+  id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
   OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
   OCMStub([decoderMock decode:[OCMArg any]
                     missingFrames:NO
@@ -30,22 +31,24 @@
       .andReturn(return_code);
   OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
 
-  id decoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoDecoderFactory));
-  RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
+  id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
+  RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
   OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
   OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
   return decoderFactoryMock;
 }
 
-id<RTCVideoDecoderFactory> CreateOKDecoderFactory() {
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateOKDecoderFactory() {
   return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
 }
 
-id<RTCVideoDecoderFactory> CreateErrorDecoderFactory() {
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateErrorDecoderFactory() {
   return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
 }
 
-std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(id<RTCVideoDecoderFactory> factory) {
+std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
+    id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
   webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
   return decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName));
 }
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 452c815..728dc01 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -25,8 +25,8 @@
 #include "rtc_base/gunit.h"
 #include "sdk/objc/native/src/objc_frame_buffer.h"
 
-id<RTCVideoEncoderFactory> CreateEncoderFactoryReturning(int return_code) {
-  id encoderMock = OCMProtocolMock(@protocol(RTCVideoEncoder));
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
+  id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
   OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
       .andReturn(return_code);
   OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
@@ -34,23 +34,25 @@
   OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
   OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
 
-  id encoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoEncoderFactory));
-  RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil];
+  id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
+  RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
   OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
   OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
   OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
   return encoderFactoryMock;
 }
 
-id<RTCVideoEncoderFactory> CreateOKEncoderFactory() {
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateOKEncoderFactory() {
   return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK);
 }
 
-id<RTCVideoEncoderFactory> CreateErrorEncoderFactory() {
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateErrorEncoderFactory() {
   return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
 }
 
-std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(id<RTCVideoEncoderFactory> factory) {
+std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
+    id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> factory) {
   webrtc::ObjCVideoEncoderFactory encoder_factory(factory);
   webrtc::SdpVideoFormat format("H264");
   return encoder_factory.CreateVideoEncoder(format);
@@ -83,7 +85,7 @@
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
-          [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
+          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_rotation(webrtc::kVideoRotation_0)
@@ -101,7 +103,7 @@
   CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::ObjCFrameBuffer>(
-          [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]);
+          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_rotation(webrtc::kVideoRotation_0)