Replace rtc::Optional with absl::optional

This is a no-op change because rtc::Optional is an alias to absl::optional

This CL was generated by running the following script, passing all top-level directories except rtc_base and api:

find $@ -type f \( -name \*.h -o -name \*.cc -o -name \*.mm \) \
-exec sed -i 's|rtc::Optional|absl::optional|g' {} \+ \
-exec sed -i 's|rtc::nullopt|absl::nullopt|g' {} \+ \
-exec sed -i 's|#include "api/optional.h"|#include "absl/types/optional.h"|' {} \+

find $@ -type f -name BUILD.gn \
-exec sed -r -i 's|"[\./api]*:optional"|"//third_party/abseil-cpp/absl/types:optional"|' {} \+;

git cl format

Bug: webrtc:9078
Change-Id: I9465c172e65ba6e6ed4e4fdc35b0b265038d6f71
Reviewed-on: https://webrtc-review.googlesource.com/84584
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23697}
diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn
index 32cca43..89ede38 100644
--- a/common_audio/BUILD.gn
+++ b/common_audio/BUILD.gn
@@ -64,7 +64,6 @@
     ":sinc_resampler",
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
     "../rtc_base:checks",
     "../rtc_base:gtest_prod",
     "../rtc_base:rtc_base_approved",
@@ -72,6 +71,7 @@
     "../rtc_base/memory:aligned_malloc",
     "../system_wrappers",
     "../system_wrappers:cpu_features_api",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 
   defines = []
diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h
index dec6ea5..712049f 100644
--- a/common_audio/mocks/mock_smoothing_filter.h
+++ b/common_audio/mocks/mock_smoothing_filter.h
@@ -19,7 +19,7 @@
 class MockSmoothingFilter : public SmoothingFilter {
  public:
   MOCK_METHOD1(AddSample, void(float));
-  MOCK_METHOD0(GetAverage, rtc::Optional<float>());
+  MOCK_METHOD0(GetAverage, absl::optional<float>());
   MOCK_METHOD1(SetTimeConstantMs, bool(int));
 };
 
diff --git a/common_audio/smoothing_filter.cc b/common_audio/smoothing_filter.cc
index ecfb5c2..d426bda 100644
--- a/common_audio/smoothing_filter.cc
+++ b/common_audio/smoothing_filter.cc
@@ -52,10 +52,10 @@
   last_sample_ = sample;
 }
 
-rtc::Optional<float> SmoothingFilterImpl::GetAverage() {
+absl::optional<float> SmoothingFilterImpl::GetAverage() {
   if (!init_end_time_ms_) {
     // |init_end_time_ms_| undefined since we have not received any sample.
-    return rtc::nullopt;
+    return absl::nullopt;
   }
   ExtrapolateLastSample(rtc::TimeMillis());
   return state_;
diff --git a/common_audio/smoothing_filter.h b/common_audio/smoothing_filter.h
index b8ab4e5..cff7469 100644
--- a/common_audio/smoothing_filter.h
+++ b/common_audio/smoothing_filter.h
@@ -11,7 +11,7 @@
 #ifndef COMMON_AUDIO_SMOOTHING_FILTER_H_
 #define COMMON_AUDIO_SMOOTHING_FILTER_H_
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "rtc_base/constructormagic.h"
 #include "system_wrappers/include/clock.h"
 
@@ -21,7 +21,7 @@
  public:
   virtual ~SmoothingFilter() = default;
   virtual void AddSample(float sample) = 0;
-  virtual rtc::Optional<float> GetAverage() = 0;
+  virtual absl::optional<float> GetAverage() = 0;
   virtual bool SetTimeConstantMs(int time_constant_ms) = 0;
 };
 
@@ -44,7 +44,7 @@
   ~SmoothingFilterImpl() override;
 
   void AddSample(float sample) override;
-  rtc::Optional<float> GetAverage() override;
+  absl::optional<float> GetAverage() override;
   bool SetTimeConstantMs(int time_constant_ms) override;
 
   // Methods used for unittests.
@@ -58,7 +58,7 @@
   const float init_factor_;
   const float init_const_;
 
-  rtc::Optional<int64_t> init_end_time_ms_;
+  absl::optional<int64_t> init_end_time_ms_;
   float last_sample_;
   float alpha_;
   float state_;
diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn
index 650b6ad..0f4d400 100644
--- a/common_video/BUILD.gn
+++ b/common_video/BUILD.gn
@@ -57,7 +57,6 @@
   deps = [
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
     "../api/video:video_bitrate_allocation",
     "../api/video:video_frame",
     "../api/video:video_frame_i420",
@@ -67,6 +66,7 @@
     "../rtc_base:rtc_base",
     "../rtc_base:rtc_task_queue",
     "../rtc_base:safe_minmax",
+    "//third_party/abseil-cpp/absl/types:optional",
     "//third_party/libyuv",
   ]
 }
diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc
index 3bb0694..163c4b1 100644
--- a/common_video/bitrate_adjuster.cc
+++ b/common_video/bitrate_adjuster.cc
@@ -68,7 +68,7 @@
   return adjusted_bitrate_bps_;
 }
 
-rtc::Optional<uint32_t> BitrateAdjuster::GetEstimatedBitrateBps() {
+absl::optional<uint32_t> BitrateAdjuster::GetEstimatedBitrateBps() {
   rtc::CritScope cs(&crit_);
   return bitrate_tracker_.Rate(rtc::TimeMillis());
 }
diff --git a/common_video/h264/h264_bitstream_parser.cc b/common_video/h264/h264_bitstream_parser.cc
index 031fcde..d8f8a62 100644
--- a/common_video/h264/h264_bitstream_parser.cc
+++ b/common_video/h264/h264_bitstream_parser.cc
@@ -44,7 +44,7 @@
   if (!sps_ || !pps_)
     return kInvalidStream;
 
-  last_slice_qp_delta_ = rtc::nullopt;
+  last_slice_qp_delta_ = absl::nullopt;
   const std::vector<uint8_t> slice_rbsp =
       H264::ParseRbsp(source, source_length);
   if (slice_rbsp.size() < H264::kNaluTypeSize)
diff --git a/common_video/h264/h264_bitstream_parser.h b/common_video/h264/h264_bitstream_parser.h
index 4ef6b40..b3fac7b 100644
--- a/common_video/h264/h264_bitstream_parser.h
+++ b/common_video/h264/h264_bitstream_parser.h
@@ -13,7 +13,7 @@
 #include <stddef.h>
 #include <stdint.h>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "common_video/h264/pps_parser.h"
 #include "common_video/h264/sps_parser.h"
 
@@ -53,11 +53,11 @@
                                   uint8_t nalu_type);
 
   // SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices.
-  rtc::Optional<SpsParser::SpsState> sps_;
-  rtc::Optional<PpsParser::PpsState> pps_;
+  absl::optional<SpsParser::SpsState> sps_;
+  absl::optional<PpsParser::PpsState> pps_;
 
   // Last parsed slice QP.
-  rtc::Optional<int32_t> last_slice_qp_delta_;
+  absl::optional<int32_t> last_slice_qp_delta_;
 };
 
 }  // namespace webrtc
diff --git a/common_video/h264/pps_parser.cc b/common_video/h264/pps_parser.cc
index 7bc0ff7..5bc29f3 100644
--- a/common_video/h264/pps_parser.cc
+++ b/common_video/h264/pps_parser.cc
@@ -19,7 +19,7 @@
 
 #define RETURN_EMPTY_ON_FAIL(x) \
   if (!(x)) {                   \
-    return rtc::nullopt;        \
+    return absl::nullopt;       \
   }
 
 namespace {
@@ -33,8 +33,8 @@
 // You can find it on this page:
 // http://www.itu.int/rec/T-REC-H.264
 
-rtc::Optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data,
-                                                       size_t length) {
+absl::optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data,
+                                                        size_t length) {
   // First, parse out rbsp, which is basically the source buffer minus emulation
   // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
   // section 7.3.1 of the H.264 standard.
@@ -57,26 +57,26 @@
   return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id);
 }
 
-rtc::Optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
-                                                       size_t length) {
+absl::optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
+                                                        size_t length) {
   std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
   rtc::BitBuffer slice_reader(unpacked_buffer.data(), unpacked_buffer.size());
 
   uint32_t golomb_tmp;
   // first_mb_in_slice: ue(v)
   if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
-    return rtc::nullopt;
+    return absl::nullopt;
   // slice_type: ue(v)
   if (!slice_reader.ReadExponentialGolomb(&golomb_tmp))
-    return rtc::nullopt;
+    return absl::nullopt;
   // pic_parameter_set_id: ue(v)
   uint32_t slice_pps_id;
   if (!slice_reader.ReadExponentialGolomb(&slice_pps_id))
-    return rtc::nullopt;
+    return absl::nullopt;
   return slice_pps_id;
 }
 
-rtc::Optional<PpsParser::PpsState> PpsParser::ParseInternal(
+absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
     rtc::BitBuffer* bit_buffer) {
   PpsState pps;
 
diff --git a/common_video/h264/pps_parser.h b/common_video/h264/pps_parser.h
index 571af97..d6c31b0 100644
--- a/common_video/h264/pps_parser.h
+++ b/common_video/h264/pps_parser.h
@@ -11,7 +11,7 @@
 #ifndef COMMON_VIDEO_H264_PPS_PARSER_H_
 #define COMMON_VIDEO_H264_PPS_PARSER_H_
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 
 namespace rtc {
 class BitBuffer;
@@ -38,20 +38,20 @@
   };
 
   // Unpack RBSP and parse PPS state from the supplied buffer.
-  static rtc::Optional<PpsState> ParsePps(const uint8_t* data, size_t length);
+  static absl::optional<PpsState> ParsePps(const uint8_t* data, size_t length);
 
   static bool ParsePpsIds(const uint8_t* data,
                           size_t length,
                           uint32_t* pps_id,
                           uint32_t* sps_id);
 
-  static rtc::Optional<uint32_t> ParsePpsIdFromSlice(const uint8_t* data,
-                                                     size_t length);
+  static absl::optional<uint32_t> ParsePpsIdFromSlice(const uint8_t* data,
+                                                      size_t length);
 
  protected:
   // Parse the PPS state, for a bit buffer where RBSP decoding has already been
   // performed.
-  static rtc::Optional<PpsState> ParseInternal(rtc::BitBuffer* bit_buffer);
+  static absl::optional<PpsState> ParseInternal(rtc::BitBuffer* bit_buffer);
   static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer,
                                   uint32_t* pps_id,
                                   uint32_t* sps_id);
diff --git a/common_video/h264/pps_parser_unittest.cc b/common_video/h264/pps_parser_unittest.cc
index 14a425f..9fdbf7e 100644
--- a/common_video/h264/pps_parser_unittest.cc
+++ b/common_video/h264/pps_parser_unittest.cc
@@ -192,7 +192,7 @@
 
   PpsParser::PpsState generated_pps_;
   rtc::Buffer buffer_;
-  rtc::Optional<PpsParser::PpsState> parsed_pps_;
+  absl::optional<PpsParser::PpsState> parsed_pps_;
 };
 
 TEST_F(PpsParserTest, ZeroPps) {
@@ -215,7 +215,7 @@
 }
 
 TEST_F(PpsParserTest, PpsIdFromSlice) {
-  rtc::Optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
+  absl::optional<uint32_t> pps_id = PpsParser::ParsePpsIdFromSlice(
       kH264BitstreamChunk, sizeof(kH264BitstreamChunk));
   ASSERT_TRUE(pps_id);
   EXPECT_EQ(2u, *pps_id);
diff --git a/common_video/h264/profile_level_id_unittest.cc b/common_video/h264/profile_level_id_unittest.cc
index d7b7cfb..66ad300 100644
--- a/common_video/h264/profile_level_id_unittest.cc
+++ b/common_video/h264/profile_level_id_unittest.cc
@@ -125,7 +125,7 @@
 }
 
 TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) {
-  const rtc::Optional<ProfileLevelId> profile_level_id =
+  const absl::optional<ProfileLevelId> profile_level_id =
       ParseSdpProfileLevelId(CodecParameterMap());
   EXPECT_TRUE(profile_level_id);
   EXPECT_EQ(kProfileConstrainedBaseline, profile_level_id->profile);
@@ -135,7 +135,7 @@
 TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) {
   CodecParameterMap params;
   params["profile-level-id"] = "640c2a";
-  const rtc::Optional<ProfileLevelId> profile_level_id =
+  const absl::optional<ProfileLevelId> profile_level_id =
       ParseSdpProfileLevelId(params);
   EXPECT_TRUE(profile_level_id);
   EXPECT_EQ(kProfileConstrainedHigh, profile_level_id->profile);
diff --git a/common_video/h264/sps_parser.cc b/common_video/h264/sps_parser.cc
index c921972..b313f48 100644
--- a/common_video/h264/sps_parser.cc
+++ b/common_video/h264/sps_parser.cc
@@ -18,7 +18,7 @@
 #include "rtc_base/logging.h"
 
 namespace {
-typedef rtc::Optional<webrtc::SpsParser::SpsState> OptionalSps;
+typedef absl::optional<webrtc::SpsParser::SpsState> OptionalSps;
 
 #define RETURN_EMPTY_ON_FAIL(x) \
   if (!(x)) {                   \
@@ -38,14 +38,14 @@
 // http://www.itu.int/rec/T-REC-H.264
 
 // Unpack RBSP and parse SPS state from the supplied buffer.
-rtc::Optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data,
-                                                       size_t length) {
+absl::optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data,
+                                                        size_t length) {
   std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
   rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size());
   return ParseSpsUpToVui(&bit_buffer);
 }
 
-rtc::Optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
+absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
     rtc::BitBuffer* buffer) {
   // Now, we need to use a bit buffer to parse through the actual AVC SPS
   // format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the
diff --git a/common_video/h264/sps_parser.h b/common_video/h264/sps_parser.h
index 1fddc0c..d4294b2 100644
--- a/common_video/h264/sps_parser.h
+++ b/common_video/h264/sps_parser.h
@@ -11,7 +11,7 @@
 #ifndef COMMON_VIDEO_H264_SPS_PARSER_H_
 #define COMMON_VIDEO_H264_SPS_PARSER_H_
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 
 namespace rtc {
 class BitBuffer;
@@ -41,12 +41,12 @@
   };
 
   // Unpack RBSP and parse SPS state from the supplied buffer.
-  static rtc::Optional<SpsState> ParseSps(const uint8_t* data, size_t length);
+  static absl::optional<SpsState> ParseSps(const uint8_t* data, size_t length);
 
  protected:
   // Parse the SPS state, up till the VUI part, for a bit buffer where RBSP
   // decoding has already been performed.
-  static rtc::Optional<SpsState> ParseSpsUpToVui(rtc::BitBuffer* buffer);
+  static absl::optional<SpsState> ParseSpsUpToVui(rtc::BitBuffer* buffer);
 };
 
 }  // namespace webrtc
diff --git a/common_video/h264/sps_parser_unittest.cc b/common_video/h264/sps_parser_unittest.cc
index 6856c1b..50227ed 100644
--- a/common_video/h264/sps_parser_unittest.cc
+++ b/common_video/h264/sps_parser_unittest.cc
@@ -112,7 +112,7 @@
   H264SpsParserTest() {}
   virtual ~H264SpsParserTest() {}
 
-  rtc::Optional<SpsParser::SpsState> sps_;
+  absl::optional<SpsParser::SpsState> sps_;
 };
 
 TEST_F(H264SpsParserTest, TestSampleSPSHdLandscape) {
diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc
index c346865..749b62e 100644
--- a/common_video/h264/sps_vui_rewriter.cc
+++ b/common_video/h264/sps_vui_rewriter.cc
@@ -72,13 +72,13 @@
 SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
     const uint8_t* buffer,
     size_t length,
-    rtc::Optional<SpsParser::SpsState>* sps,
+    absl::optional<SpsParser::SpsState>* sps,
     rtc::Buffer* destination) {
   // Create temporary RBSP decoded buffer of the payload (exlcuding the
   // leading nalu type header byte (the SpsParser uses only the payload).
   std::vector<uint8_t> rbsp_buffer = H264::ParseRbsp(buffer, length);
   rtc::BitBuffer source_buffer(rbsp_buffer.data(), rbsp_buffer.size());
-  rtc::Optional<SpsParser::SpsState> sps_state =
+  absl::optional<SpsParser::SpsState> sps_state =
       SpsParser::ParseSpsUpToVui(&source_buffer);
   if (!sps_state)
     return ParseResult::kFailure;
diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h
index f639c0d..233051d 100644
--- a/common_video/h264/sps_vui_rewriter.h
+++ b/common_video/h264/sps_vui_rewriter.h
@@ -12,7 +12,7 @@
 #ifndef COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
 #define COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "common_video/h264/sps_parser.h"
 #include "rtc_base/buffer.h"
 
@@ -43,10 +43,11 @@
   // SPS state. This function assumes that any previous headers
   // (NALU start, type, Stap-A, etc) have already been parsed and that RBSP
   // decoding has been performed.
-  static ParseResult ParseAndRewriteSps(const uint8_t* buffer,
-                                        size_t length,
-                                        rtc::Optional<SpsParser::SpsState>* sps,
-                                        rtc::Buffer* destination);
+  static ParseResult ParseAndRewriteSps(
+      const uint8_t* buffer,
+      size_t length,
+      absl::optional<SpsParser::SpsState>* sps,
+      rtc::Buffer* destination);
 };
 
 }  // namespace webrtc
diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc
index 0de5d33..9464de8 100644
--- a/common_video/h264/sps_vui_rewriter_unittest.cc
+++ b/common_video/h264/sps_vui_rewriter_unittest.cc
@@ -159,7 +159,7 @@
   index.payload_start_offset += H264::kNaluTypeSize;
   index.payload_size -= H264::kNaluTypeSize;
 
-  rtc::Optional<SpsParser::SpsState> sps;
+  absl::optional<SpsParser::SpsState> sps;
   rtc::Buffer out_buffer;
   SpsVuiRewriter::ParseResult result =
       SpsVuiRewriter::ParseAndRewriteSps(&buffer[index.payload_start_offset],
diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h
index 76ce9e9..ee312e4 100644
--- a/common_video/include/bitrate_adjuster.h
+++ b/common_video/include/bitrate_adjuster.h
@@ -44,7 +44,7 @@
   uint32_t GetAdjustedBitrateBps() const;
 
   // Returns what we think the current bitrate is.
-  rtc::Optional<uint32_t> GetEstimatedBitrateBps();
+  absl::optional<uint32_t> GetEstimatedBitrateBps();
 
   // This should be called after each frame is encoded. The timestamp at which
   // it is called is used to estimate the output bitrate of the encoder.
diff --git a/common_video/incoming_video_stream.cc b/common_video/incoming_video_stream.cc
index b5a2c31..efca514 100644
--- a/common_video/incoming_video_stream.cc
+++ b/common_video/incoming_video_stream.cc
@@ -69,7 +69,7 @@
 void IncomingVideoStream::Dequeue() {
   TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue");
   RTC_DCHECK(incoming_render_queue_.IsCurrent());
-  rtc::Optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender();
+  absl::optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender();
   if (frame_to_render)
     callback_->OnFrame(*frame_to_render);
 
diff --git a/common_video/video_render_frames.cc b/common_video/video_render_frames.cc
index 4fa9ef7..982923c 100644
--- a/common_video/video_render_frames.cc
+++ b/common_video/video_render_frames.cc
@@ -73,8 +73,8 @@
   return static_cast<int32_t>(incoming_frames_.size());
 }
 
-rtc::Optional<VideoFrame> VideoRenderFrames::FrameToRender() {
-  rtc::Optional<VideoFrame> render_frame;
+absl::optional<VideoFrame> VideoRenderFrames::FrameToRender() {
+  absl::optional<VideoFrame> render_frame;
   // Get the newest frame that can be released for rendering.
   while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) {
     render_frame = std::move(incoming_frames_.front());
diff --git a/common_video/video_render_frames.h b/common_video/video_render_frames.h
index af254f2..31a4634 100644
--- a/common_video/video_render_frames.h
+++ b/common_video/video_render_frames.h
@@ -15,7 +15,7 @@
 
 #include <list>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "api/video/video_frame.h"
 
 namespace webrtc {
@@ -30,7 +30,7 @@
   int32_t AddFrame(VideoFrame&& new_frame);
 
   // Get a frame for rendering, or false if it's not time to render.
-  rtc::Optional<VideoFrame> FrameToRender();
+  absl::optional<VideoFrame> FrameToRender();
 
   // Returns the number of ms to next frame to render
   uint32_t TimeToNextFrameRelease();
diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc
index b7f06aa..89c8984 100644
--- a/examples/peerconnection/client/conductor.cc
+++ b/examples/peerconnection/client/conductor.cc
@@ -270,7 +270,7 @@
       }
       return;
     }
-    rtc::Optional<webrtc::SdpType> type_maybe =
+    absl::optional<webrtc::SdpType> type_maybe =
         webrtc::SdpTypeFromString(type_str);
     if (!type_maybe) {
       RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str;
diff --git a/logging/rtc_event_log/rtc_event_log_parser.h b/logging/rtc_event_log/rtc_event_log_parser.h
index af4b2de..862a48d 100644
--- a/logging/rtc_event_log/rtc_event_log_parser.h
+++ b/logging/rtc_event_log/rtc_event_log_parser.h
@@ -57,8 +57,8 @@
   struct BweProbeResultEvent {
     uint64_t timestamp;
     uint32_t id;
-    rtc::Optional<uint64_t> bitrate_bps;
-    rtc::Optional<ProbeFailureReason> failure_reason;
+    absl::optional<uint64_t> bitrate_bps;
+    absl::optional<ProbeFailureReason> failure_reason;
   };
 
   struct BweDelayBasedUpdate {
diff --git a/logging/rtc_event_log/rtc_event_log_unittest.cc b/logging/rtc_event_log/rtc_event_log_unittest.cc
index 21f9cf3..f40722d 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest.cc
+++ b/logging/rtc_event_log/rtc_event_log_unittest.cc
@@ -748,8 +748,8 @@
   EXPECT_GT(parsed_log.GetNumberOfEvents(), 2u);
 
   RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0);
-  rtc::Optional<int64_t> last_timestamp;
-  rtc::Optional<uint32_t> last_ssrc;
+  absl::optional<int64_t> last_timestamp;
+  absl::optional<uint32_t> last_ssrc;
   for (size_t i = 1; i < parsed_log.GetNumberOfEvents() - 1; i++) {
     EXPECT_EQ(parsed_log.GetEventType(i),
               ParsedRtcEventLogNew::EventType::AUDIO_PLAYOUT_EVENT);
diff --git a/modules/BUILD.gn b/modules/BUILD.gn
index 7a50406..12345b8 100644
--- a/modules/BUILD.gn
+++ b/modules/BUILD.gn
@@ -37,7 +37,7 @@
   deps = [
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
@@ -53,13 +53,13 @@
     "..:webrtc_common",
     "../:typedefs",
     "../api:libjingle_peerconnection_api",
-    "../api:optional",
     "../api/transport:network_control",
     "../api/video:video_frame",
     "../api/video:video_frame_i420",
     "../rtc_base:deprecation",
     "../rtc_base:rtc_base_approved",
     "video_coding:codec_globals_headers",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn
index 4b58779..96f53b8 100644
--- a/modules/audio_device/BUILD.gn
+++ b/modules/audio_device/BUILD.gn
@@ -204,9 +204,9 @@
       ":audio_device_api",
       ":audio_device_buffer",
       ":windows_core_audio_utility",
-      "../../api:optional",
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base_approved",
+      "//third_party/abseil-cpp/absl/types:optional",
     ]
   }
 }
@@ -483,7 +483,6 @@
       ":audio_device_impl",
       ":mock_audio_device",
       "../../api:array_view",
-      "../../api:optional",
       "../../common_audio",
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base_approved",
@@ -491,6 +490,7 @@
       "../../test:fileutils",
       "../../test:test_support",
       "../utility:utility",
+      "//third_party/abseil-cpp/absl/types:optional",
     ]
     if (is_linux || is_mac || is_win) {
       sources += [ "audio_device_unittest.cc" ]
diff --git a/modules/audio_device/audio_device_unittest.cc b/modules/audio_device/audio_device_unittest.cc
index 40b6987..d9ebc8f 100644
--- a/modules/audio_device/audio_device_unittest.cc
+++ b/modules/audio_device/audio_device_unittest.cc
@@ -13,8 +13,8 @@
 #include <memory>
 #include <numeric>
 
+#include "absl/types/optional.h"
 #include "api/array_view.h"
-#include "api/optional.h"
 #include "modules/audio_device/audio_device_impl.h"
 #include "modules/audio_device/include/audio_device.h"
 #include "modules/audio_device/include/mock_audio_transport.h"
@@ -294,7 +294,7 @@
   rtc::ThreadChecker read_thread_checker_;
   rtc::ThreadChecker write_thread_checker_;
 
-  rtc::Optional<int64_t> pulse_time_ RTC_GUARDED_BY(lock_);
+  absl::optional<int64_t> pulse_time_ RTC_GUARDED_BY(lock_);
   std::vector<int> latencies_ RTC_GUARDED_BY(race_checker_);
   size_t read_count_ RTC_GUARDED_BY(read_thread_checker_) = 0;
   size_t write_count_ RTC_GUARDED_BY(write_thread_checker_) = 0;
diff --git a/modules/audio_device/win/core_audio_input_win.cc b/modules/audio_device/win/core_audio_input_win.cc
index 1322f1c..fe11e02 100644
--- a/modules/audio_device/win/core_audio_input_win.cc
+++ b/modules/audio_device/win/core_audio_input_win.cc
@@ -315,10 +315,10 @@
   return true;
 }
 
-rtc::Optional<int> CoreAudioInput::EstimateLatencyMillis(
+absl::optional<int> CoreAudioInput::EstimateLatencyMillis(
     uint64_t capture_time_100ns) {
   if (!qpc_to_100ns_) {
-    return rtc::nullopt;
+    return absl::nullopt;
   }
   // Input parameter |capture_time_100ns| contains the performance counter at
   // the time that the audio endpoint device recorded the device position of
@@ -329,7 +329,7 @@
   // - subtracting |capture_time_100ns| from now_time_100ns.
   LARGE_INTEGER perf_counter_now = {};
   if (!::QueryPerformanceCounter(&perf_counter_now)) {
-    return rtc::nullopt;
+    return absl::nullopt;
   }
   uint64_t qpc_now_raw = perf_counter_now.QuadPart;
   uint64_t now_time_100ns = qpc_now_raw * (*qpc_to_100ns_);
diff --git a/modules/audio_device/win/core_audio_input_win.h b/modules/audio_device/win/core_audio_input_win.h
index 5cfbf2a..0dd2e37 100644
--- a/modules/audio_device/win/core_audio_input_win.h
+++ b/modules/audio_device/win/core_audio_input_win.h
@@ -14,7 +14,7 @@
 #include <memory>
 #include <string>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/audio_device/win/audio_device_module_win.h"
 #include "modules/audio_device/win/core_audio_base_win.h"
 
@@ -53,11 +53,11 @@
 
  private:
   bool OnDataCallback(uint64_t device_frequency);
-  rtc::Optional<int> EstimateLatencyMillis(uint64_t capture_time_100ns);
+  absl::optional<int> EstimateLatencyMillis(uint64_t capture_time_100ns);
 
   std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
   Microsoft::WRL::ComPtr<IAudioCaptureClient> audio_capture_client_;
-  rtc::Optional<double> qpc_to_100ns_;
+  absl::optional<double> qpc_to_100ns_;
 };
 
 }  // namespace webrtc_win
diff --git a/modules/include/module_common_types.h b/modules/include/module_common_types.h
index a1889d4..b5b023d 100644
--- a/modules/include/module_common_types.h
+++ b/modules/include/module_common_types.h
@@ -17,7 +17,7 @@
 #include <algorithm>
 #include <limits>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "api/rtp_headers.h"
 #include "api/transport/network_types.h"
 #include "api/video/video_rotation.h"
diff --git a/modules/include/module_common_types_public.h b/modules/include/module_common_types_public.h
index f1ae3de..2afd9af 100644
--- a/modules/include/module_common_types_public.h
+++ b/modules/include/module_common_types_public.h
@@ -13,7 +13,7 @@
 
 #include <limits>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "typedefs.h"  // NOLINT(build/include)
 
 namespace webrtc {
@@ -78,7 +78,7 @@
   }
 
  private:
-  rtc::Optional<int64_t> last_value_;
+  absl::optional<int64_t> last_value_;
 };
 
 using SequenceNumberUnwrapper = Unwrapper<uint16_t>;
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 1e3bd62..37764b4 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -304,12 +304,12 @@
         ":videoframebuffer_objc",
         ":videosource_objc",
         "../api:libjingle_peerconnection_api",
-        "../api:optional",
         "../api/video:video_frame",
         "../common_video",
         "../media:rtc_media_base",
         "../rtc_base:checks",
         "../rtc_base:rtc_base",
+        "//third_party/abseil-cpp/absl/types:optional",
       ]
 
       configs += [
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 031a20f..498c7dc 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -87,11 +87,11 @@
     ":internal_jni",
     ":native_api_jni",
     "../../api:libjingle_peerconnection_api",
-    "../../api:optional",
     "../../rtc_base:checks",
     "../../rtc_base:rtc_base",
     "../../rtc_base:rtc_base_approved",
     "../../system_wrappers:metrics_api",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
@@ -145,11 +145,11 @@
     ":base_jni",
     ":generated_audio_device_module_base_jni",
     ":native_api_jni",
-    "../../api:optional",
     "../../modules/audio_device:audio_device_buffer",
     "../../rtc_base:checks",
     "../../rtc_base:rtc_base_approved",
     "../../system_wrappers:metrics_api",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
@@ -170,13 +170,13 @@
       ":audio_device_module_base",
       ":base_jni",
       "../../api:array_view",
-      "../../api:optional",
       "../../modules/audio_device:audio_device",
       "../../modules/audio_device:audio_device_buffer",
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base",
       "../../rtc_base:rtc_base_approved",
       "../../system_wrappers",
+      "//third_party/abseil-cpp/absl/types:optional",
     ]
   }
 }
@@ -196,11 +196,11 @@
     ":audio_device_module_base",
     ":base_jni",
     "../../api:array_view",
-    "../../api:optional",
     "../../modules/audio_device:audio_device",
     "../../modules/audio_device:audio_device_buffer",
     "../../rtc_base:checks",
     "../../rtc_base:rtc_base_approved",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
@@ -217,12 +217,12 @@
     ":audio_device_module_base",
     ":base_jni",
     ":generated_java_audio_device_module_native_jni",
-    "../../api:optional",
     "../../modules/audio_device:audio_device",
     "../../modules/audio_device:audio_device_buffer",
     "../../rtc_base:checks",
     "../../rtc_base:rtc_base_approved",
     "../../system_wrappers:metrics_api",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
@@ -1148,9 +1148,9 @@
     ":generated_external_classes_jni",
     ":generated_native_api_jni",
     ":internal_jni",
-    "//api:optional",
     "//rtc_base:checks",
     "//rtc_base:rtc_base_approved",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 }
 
diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc
index fbc35aa..df0a229 100644
--- a/sdk/android/native_api/jni/java_types.cc
+++ b/sdk/android/native_api/jni/java_types.cc
@@ -125,18 +125,18 @@
   return JNI_Long::Java_Long_longValue(env, j_long);
 }
 
-rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
-                                             const JavaRef<jobject>& boolean) {
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+                                              const JavaRef<jobject>& boolean) {
   if (IsNull(jni, boolean))
-    return rtc::nullopt;
+    return absl::nullopt;
   return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean);
 }
 
-rtc::Optional<int32_t> JavaToNativeOptionalInt(
+absl::optional<int32_t> JavaToNativeOptionalInt(
     JNIEnv* jni,
     const JavaRef<jobject>& integer) {
   if (IsNull(jni, integer))
-    return rtc::nullopt;
+    return absl::nullopt;
   return JNI_Integer::Java_Integer_intValue(jni, integer);
 }
 
@@ -196,13 +196,13 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaInteger(
     JNIEnv* jni,
-    const rtc::Optional<int32_t>& optional_int) {
+    const absl::optional<int32_t>& optional_int) {
   return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr;
 }
 
 ScopedJavaLocalRef<jstring> NativeToJavaString(
     JNIEnv* jni,
-    const rtc::Optional<std::string>& str) {
+    const absl::optional<std::string>& str) {
   return str ? NativeToJavaString(jni, *str) : nullptr;
 }
 
diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h
index a84b7d9..3b85704 100644
--- a/sdk/android/native_api/jni/java_types.h
+++ b/sdk/android/native_api/jni/java_types.h
@@ -22,7 +22,7 @@
 #include <string>
 #include <vector>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/thread_checker.h"
 #include "sdk/android/native_api/jni/scoped_java_ref.h"
@@ -126,10 +126,11 @@
 
 int64_t JavaToNativeLong(JNIEnv* env, const JavaRef<jobject>& j_long);
 
-rtc::Optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
-                                             const JavaRef<jobject>& boolean);
-rtc::Optional<int32_t> JavaToNativeOptionalInt(JNIEnv* jni,
-                                               const JavaRef<jobject>& integer);
+absl::optional<bool> JavaToNativeOptionalBool(JNIEnv* jni,
+                                              const JavaRef<jobject>& boolean);
+absl::optional<int32_t> JavaToNativeOptionalInt(
+    JNIEnv* jni,
+    const JavaRef<jobject>& integer);
 
 // Given a (UTF-16) jstring return a new UTF-8 native string.
 std::string JavaToNativeString(JNIEnv* jni, const JavaRef<jstring>& j_string);
@@ -196,10 +197,10 @@
 
 ScopedJavaLocalRef<jobject> NativeToJavaInteger(
     JNIEnv* jni,
-    const rtc::Optional<int32_t>& optional_int);
+    const absl::optional<int32_t>& optional_int);
 ScopedJavaLocalRef<jstring> NativeToJavaString(
     JNIEnv* jni,
-    const rtc::Optional<std::string>& str);
+    const absl::optional<std::string>& str);
 
 // Helper function for converting std::vector<T> into a Java array.
 template <typename T, typename Convert>
diff --git a/sdk/android/native_api/video/videosource.cc b/sdk/android/native_api/video/videosource.cc
index 9470feb..4c302da 100644
--- a/sdk/android/native_api/video/videosource.cc
+++ b/sdk/android/native_api/video/videosource.cc
@@ -74,7 +74,7 @@
     return android_video_track_source_->is_screencast();
   }
 
-  rtc::Optional<bool> needs_denoising() const override {
+  absl::optional<bool> needs_denoising() const override {
     return android_video_track_source_->needs_denoising();
   }
 
diff --git a/sdk/android/src/jni/androidmediadecoder.cc b/sdk/android/src/jni/androidmediadecoder.cc
index a5f47da..1ca9e06 100644
--- a/sdk/android/src/jni/androidmediadecoder.cc
+++ b/sdk/android/src/jni/androidmediadecoder.cc
@@ -124,7 +124,7 @@
   int current_delay_time_ms_;     // Overall delay time in the current second.
   int32_t max_pending_frames_;    // Maximum number of pending input frames.
   H264BitstreamParser h264_bitstream_parser_;
-  std::deque<rtc::Optional<uint8_t>> pending_frame_qps_;
+  std::deque<absl::optional<uint8_t>> pending_frame_qps_;
 
   // State that is constant for the lifetime of this object once the ctor
   // returns.
@@ -506,7 +506,7 @@
   // Save input image timestamps for later output.
   frames_received_++;
   current_bytes_ += inputImage._length;
-  rtc::Optional<uint8_t> qp;
+  absl::optional<uint8_t> qp;
   if (codecType_ == kVideoCodecVP8) {
     int qp_int;
     if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
@@ -743,7 +743,7 @@
     decoded_frame.set_timestamp(output_timestamps_ms);
     decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
 
-    rtc::Optional<uint8_t> qp = pending_frame_qps_.front();
+    absl::optional<uint8_t> qp = pending_frame_qps_.front();
     pending_frame_qps_.pop_front();
     callback_->Decoded(decoded_frame, decode_time_ms, qp);
   }
diff --git a/sdk/android/src/jni/androidmediaencoder.cc b/sdk/android/src/jni/androidmediaencoder.cc
index de2743b..9edbf2d 100644
--- a/sdk/android/src/jni/androidmediaencoder.cc
+++ b/sdk/android/src/jni/androidmediaencoder.cc
@@ -347,7 +347,7 @@
   // Check allowed H.264 profile
   profile_ = H264::Profile::kProfileBaseline;
   if (codec_type == kVideoCodecH264) {
-    const rtc::Optional<H264::ProfileLevelId> profile_level_id =
+    const absl::optional<H264::ProfileLevelId> profile_level_id =
         H264::ParseSdpProfileLevelId(codec_.params);
     RTC_DCHECK(profile_level_id);
     profile_ = profile_level_id->profile;
diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc
index 1d75a4f..41d4278 100644
--- a/sdk/android/src/jni/androidvideotracksource.cc
+++ b/sdk/android/src/jni/androidvideotracksource.cc
@@ -38,7 +38,7 @@
   return is_screencast_;
 }
 
-rtc::Optional<bool> AndroidVideoTrackSource::needs_denoising() const {
+absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
   return false;
 }
 
diff --git a/sdk/android/src/jni/androidvideotracksource.h b/sdk/android/src/jni/androidvideotracksource.h
index 3dbcb2a..3c4d1ef 100644
--- a/sdk/android/src/jni/androidvideotracksource.h
+++ b/sdk/android/src/jni/androidvideotracksource.h
@@ -37,7 +37,7 @@
   // Indicates that the encoder should denoise video before encoding it.
   // If it is not set, the default configuration is used which is different
   // depending on video codec.
-  rtc::Optional<bool> needs_denoising() const override;
+  absl::optional<bool> needs_denoising() const override;
 
   // Called by the native capture observer
   void SetState(SourceState state);
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.cc b/sdk/android/src/jni/audio_device/aaudio_player.cc
index e6bcddd..f32c265 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.cc
+++ b/sdk/android/src/jni/audio_device/aaudio_player.cc
@@ -135,16 +135,16 @@
   return -1;
 }
 
-rtc::Optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+  return absl::nullopt;
 }
 
-rtc::Optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+  return absl::nullopt;
 }
 
-rtc::Optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+  return absl::nullopt;
 }
 
 void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {
diff --git a/sdk/android/src/jni/audio_device/aaudio_player.h b/sdk/android/src/jni/audio_device/aaudio_player.h
index 92d1800..b43b5b3 100644
--- a/sdk/android/src/jni/audio_device/aaudio_player.h
+++ b/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -14,7 +14,7 @@
 #include <aaudio/AAudio.h>
 #include <memory>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/messagehandler.h"
@@ -73,9 +73,9 @@
   // Not implemented in AAudio.
   bool SpeakerVolumeIsAvailable() override;
   int SetSpeakerVolume(uint32_t volume) override;
-  rtc::Optional<uint32_t> SpeakerVolume() const override;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+  absl::optional<uint32_t> SpeakerVolume() const override;
+  absl::optional<uint32_t> MaxSpeakerVolume() const override;
+  absl::optional<uint32_t> MinSpeakerVolume() const override;
 
  protected:
   // AAudioObserverInterface implementation.
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc
index 0b18fe4..196c655 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.cc
+++ b/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -341,7 +341,7 @@
     RTC_LOG(INFO) << __FUNCTION__;
     if (!initialized_)
       return -1;
-    rtc::Optional<uint32_t> volume = output_->SpeakerVolume();
+    absl::optional<uint32_t> volume = output_->SpeakerVolume();
     if (!volume)
       return -1;
     *output_volume = *volume;
@@ -353,7 +353,7 @@
     RTC_LOG(INFO) << __FUNCTION__;
     if (!initialized_)
       return -1;
-    rtc::Optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
+    absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
     if (!max_volume)
       return -1;
     *output_max_volume = *max_volume;
@@ -364,7 +364,7 @@
     RTC_LOG(INFO) << __FUNCTION__;
     if (!initialized_)
       return -1;
-    rtc::Optional<uint32_t> min_volume = output_->MinSpeakerVolume();
+    absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
     if (!min_volume)
       return -1;
     *output_min_volume = *min_volume;
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h
index c8fdfc3..cddd3e0 100644
--- a/sdk/android/src/jni/audio_device/audio_device_module.h
+++ b/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -13,7 +13,7 @@
 
 #include <memory>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/audio_device/audio_device_buffer.h"
 #include "sdk/android/native_api/jni/scoped_java_ref.h"
 
@@ -58,9 +58,9 @@
   virtual bool Playing() const = 0;
   virtual bool SpeakerVolumeIsAvailable() = 0;
   virtual int SetSpeakerVolume(uint32_t volume) = 0;
-  virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0;
-  virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0;
-  virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0;
+  virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
+  virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
+  virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
   virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
 };
 
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 1b49de6..03959d6 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -144,17 +144,17 @@
              : -1;
 }
 
-rtc::Optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
 }
 
-rtc::Optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   return 0;
 }
 
-rtc::Optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
+absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
   RTC_DCHECK(thread_checker_.CalledOnValidThread());
   const uint32_t volume =
       Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.h b/sdk/android/src/jni/audio_device/audio_track_jni.h
index 1225caf..25c6b6f 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.h
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.h
@@ -14,7 +14,7 @@
 #include <jni.h>
 #include <memory>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
 #include "rtc_base/thread_checker.h"
@@ -62,9 +62,9 @@
 
   bool SpeakerVolumeIsAvailable() override;
   int SetSpeakerVolume(uint32_t volume) override;
-  rtc::Optional<uint32_t> SpeakerVolume() const override;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+  absl::optional<uint32_t> SpeakerVolume() const override;
+  absl::optional<uint32_t> MaxSpeakerVolume() const override;
+  absl::optional<uint32_t> MinSpeakerVolume() const override;
 
   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
diff --git a/sdk/android/src/jni/audio_device/opensles_player.cc b/sdk/android/src/jni/audio_device/opensles_player.cc
index f5f56bf..3e831a9 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.cc
+++ b/sdk/android/src/jni/audio_device/opensles_player.cc
@@ -182,16 +182,16 @@
   return -1;
 }
 
-rtc::Optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
+  return absl::nullopt;
 }
 
-rtc::Optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
+  return absl::nullopt;
 }
 
-rtc::Optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
-  return rtc::nullopt;
+absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
+  return absl::nullopt;
 }
 
 void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h
index d5f4a68..d8befe5 100644
--- a/sdk/android/src/jni/audio_device/opensles_player.h
+++ b/sdk/android/src/jni/audio_device/opensles_player.h
@@ -16,7 +16,7 @@
 #include <SLES/OpenSLES_AndroidConfiguration.h>
 
 #include <memory>
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/audio_device/audio_device_buffer.h"
 #include "modules/audio_device/fine_audio_buffer.h"
 #include "modules/audio_device/include/audio_device_defines.h"
@@ -75,9 +75,9 @@
 
   bool SpeakerVolumeIsAvailable() override;
   int SetSpeakerVolume(uint32_t volume) override;
-  rtc::Optional<uint32_t> SpeakerVolume() const override;
-  rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
-  rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+  absl::optional<uint32_t> SpeakerVolume() const override;
+  absl::optional<uint32_t> MaxSpeakerVolume() const override;
+  absl::optional<uint32_t> MinSpeakerVolume() const override;
 
   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
 
diff --git a/sdk/android/src/jni/pc/icecandidate.cc b/sdk/android/src/jni/pc/icecandidate.cc
index 84e0f6e..b9d66a4 100644
--- a/sdk/android/src/jni/pc/icecandidate.cc
+++ b/sdk/android/src/jni/pc/icecandidate.cc
@@ -207,13 +207,13 @@
   return PeerConnectionInterface::kTlsCertPolicySecure;
 }
 
-rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
     JNIEnv* jni,
     const JavaRef<jobject>& j_network_preference) {
   std::string enum_name = GetJavaEnumName(jni, j_network_preference);
 
   if (enum_name == "UNKNOWN")
-    return rtc::nullopt;
+    return absl::nullopt;
 
   if (enum_name == "ETHERNET")
     return rtc::ADAPTER_TYPE_ETHERNET;
@@ -231,7 +231,7 @@
     return rtc::ADAPTER_TYPE_LOOPBACK;
 
   RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
-  return rtc::nullopt;
+  return absl::nullopt;
 }
 
 }  // namespace jni
diff --git a/sdk/android/src/jni/pc/icecandidate.h b/sdk/android/src/jni/pc/icecandidate.h
index be4d27c..662b649 100644
--- a/sdk/android/src/jni/pc/icecandidate.h
+++ b/sdk/android/src/jni/pc/icecandidate.h
@@ -75,7 +75,7 @@
     JNIEnv* jni,
     const JavaRef<jobject>& j_ice_server_tls_cert_policy);
 
-rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
     JNIEnv* jni,
     const JavaRef<jobject>& j_network_preference);
 
diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc
index ab5bf55..5730c20 100644
--- a/sdk/android/src/jni/pc/peerconnectionfactory.cc
+++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc
@@ -428,7 +428,7 @@
   if (key_type != rtc::KT_DEFAULT) {
     rtc::scoped_refptr<rtc::RTCCertificate> certificate =
         rtc::RTCCertificateGenerator::GenerateCertificate(
-            rtc::KeyParams(key_type), rtc::nullopt);
+            rtc::KeyParams(key_type), absl::nullopt);
     if (!certificate) {
       RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
                         << key_type;
diff --git a/sdk/android/src/jni/pc/rtptransceiver.cc b/sdk/android/src/jni/pc/rtptransceiver.cc
index 0a115c2..fa20d80 100644
--- a/sdk/android/src/jni/pc/rtptransceiver.cc
+++ b/sdk/android/src/jni/pc/rtptransceiver.cc
@@ -89,7 +89,7 @@
     JNIEnv* jni,
     const base::android::JavaParamRef<jclass>&,
     jlong j_rtp_transceiver_pointer) {
-  rtc::Optional<std::string> mid =
+  absl::optional<std::string> mid =
       reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
           ->mid();
   return NativeToJavaString(jni, mid);
@@ -133,7 +133,7 @@
     JNIEnv* jni,
     const base::android::JavaParamRef<jclass>&,
     jlong j_rtp_transceiver_pointer) {
-  rtc::Optional<RtpTransceiverDirection> direction =
+  absl::optional<RtpTransceiverDirection> direction =
       reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
           ->current_direction();
   return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)
diff --git a/sdk/android/src/jni/pc/sessiondescription.cc b/sdk/android/src/jni/pc/sessiondescription.cc
index 6bdbfa1..bd3806d 100644
--- a/sdk/android/src/jni/pc/sessiondescription.cc
+++ b/sdk/android/src/jni/pc/sessiondescription.cc
@@ -27,7 +27,7 @@
       jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
   std::string std_description =
       JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
-  rtc::Optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
+  absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
   if (!sdp_type_maybe) {
     RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
     return nullptr;
diff --git a/sdk/android/src/jni/videodecoderwrapper.cc b/sdk/android/src/jni/videodecoderwrapper.cc
index a7aee04..5fbd72f 100644
--- a/sdk/android/src/jni/videodecoderwrapper.cc
+++ b/sdk/android/src/jni/videodecoderwrapper.cc
@@ -30,9 +30,9 @@
 const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
 
 template <typename Dst, typename Src>
-inline rtc::Optional<Dst> cast_optional(const rtc::Optional<Src>& value) {
-  return value ? rtc::Optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
-               : rtc::nullopt;
+inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+  return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+               : absl::nullopt;
 }
 }  // namespace
 
@@ -106,7 +106,7 @@
   frame_extra_info.timestamp_rtp = input_image._timeStamp;
   frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
   frame_extra_info.qp =
-      qp_parsing_enabled_ ? ParseQP(input_image) : rtc::nullopt;
+      qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
   {
     rtc::CritScope cs(&frame_extra_infos_lock_);
     frame_extra_infos_.push_back(frame_extra_info);
@@ -183,10 +183,10 @@
       JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
   frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
 
-  rtc::Optional<int32_t> decoding_time_ms =
+  absl::optional<int32_t> decoding_time_ms =
       JavaToNativeOptionalInt(env, j_decode_time_ms);
 
-  rtc::Optional<uint8_t> decoder_qp =
+  absl::optional<uint8_t> decoder_qp =
       cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
   // If the decoder provides QP values itself, no need to parse the bitstream.
   // Enable QP parsing if decoder does not provide QP values itself.
@@ -226,13 +226,13 @@
   return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
 
-rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP(
+absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
     const EncodedImage& input_image) {
   if (input_image.qp_ != -1) {
     return input_image.qp_;
   }
 
-  rtc::Optional<uint8_t> qp;
+  absl::optional<uint8_t> qp;
   switch (codec_settings_.codecType) {
     case kVideoCodecVP8: {
       int qp_int;
diff --git a/sdk/android/src/jni/videodecoderwrapper.h b/sdk/android/src/jni/videodecoderwrapper.h
index b56a3a7..c719aa4 100644
--- a/sdk/android/src/jni/videodecoderwrapper.h
+++ b/sdk/android/src/jni/videodecoderwrapper.h
@@ -66,7 +66,7 @@
 
     uint32_t timestamp_rtp;
     int64_t timestamp_ntp;
-    rtc::Optional<uint8_t> qp;
+    absl::optional<uint8_t> qp;
 
     FrameExtraInfo();
     FrameExtraInfo(const FrameExtraInfo&);
@@ -82,7 +82,7 @@
                            const char* method_name)
       RTC_RUN_ON(decoder_thread_checker_);
 
-  rtc::Optional<uint8_t> ParseQP(const EncodedImage& input_image)
+  absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
       RTC_RUN_ON(decoder_thread_checker_);
 
   const ScopedJavaGlobalRef<jobject> decoder_;
diff --git a/sdk/android/src/jni/videoencoderwrapper.cc b/sdk/android/src/jni/videoencoderwrapper.cc
index ce48f56..94719ea 100644
--- a/sdk/android/src/jni/videoencoderwrapper.cc
+++ b/sdk/android/src/jni/videoencoderwrapper.cc
@@ -165,10 +165,10 @@
   if (!isOn)
     return ScalingSettings::kOff;
 
-  rtc::Optional<int> low = JavaToNativeOptionalInt(
+  absl::optional<int> low = JavaToNativeOptionalInt(
       jni,
       Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
-  rtc::Optional<int> high = JavaToNativeOptionalInt(
+  absl::optional<int> high = JavaToNativeOptionalInt(
       jni,
       Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));
 
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
index 4bd0450..a357085 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
@@ -171,8 +171,8 @@
   // Generate non-default certificate.
   if (keyType != rtc::KT_DEFAULT) {
     rtc::scoped_refptr<rtc::RTCCertificate> certificate =
-        rtc::RTCCertificateGenerator::GenerateCertificate(
-            rtc::KeyParams(keyType), rtc::Optional<uint64_t>());
+        rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+                                                          absl::optional<uint64_t>());
     if (!certificate) {
       RTCLogError(@"Failed to generate certificate.");
       return nullptr;
@@ -184,14 +184,13 @@
   nativeConfig->presume_writable_when_fully_relayed =
       _shouldPresumeWritableWhenFullyRelayed ? true : false;
   if (_iceCheckMinInterval != nil) {
-    nativeConfig->ice_check_min_interval =
-        rtc::Optional<int>(_iceCheckMinInterval.intValue);
+    nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
   }
   if (_iceRegatherIntervalRange != nil) {
     std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
         _iceRegatherIntervalRange.nativeIntervalRange);
     nativeConfig->ice_regather_interval_range =
-        rtc::Optional<rtc::IntervalRange>(*nativeIntervalRange);
+        absl::optional<rtc::IntervalRange>(*nativeIntervalRange);
   }
   nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
   if (_turnCustomizer) {
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
index 33f9ae9..04a5689 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
@@ -31,7 +31,7 @@
   if (self = [super init]) {
     self.hexString = hexString;
 
-    rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+    absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
         webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
     if (profile_level_id.has_value()) {
       self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
@@ -46,7 +46,7 @@
     self.profile = profile;
     self.level = level;
 
-    rtc::Optional<std::string> hex_string =
+    absl::optional<std::string> hex_string =
         webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
             static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level)));
     self.hexString =
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
index c169422..bea0ede 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
@@ -470,13 +470,13 @@
               maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
   webrtc::PeerConnectionInterface::BitrateParameters params;
   if (minBitrateBps != nil) {
-    params.min_bitrate_bps = rtc::Optional<int>(minBitrateBps.intValue);
+    params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
   }
   if (currentBitrateBps != nil) {
-    params.current_bitrate_bps = rtc::Optional<int>(currentBitrateBps.intValue);
+    params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
   }
   if (maxBitrateBps != nil) {
-    params.max_bitrate_bps = rtc::Optional<int>(maxBitrateBps.intValue);
+    params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
   }
   return _peerConnection->SetBitrate(params).ok();
 }
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
index b6baee6..7951cee 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
@@ -93,10 +93,10 @@
     RTC_NOTREACHED();
   }
   if (_clockRate != nil) {
-    parameters.clock_rate = rtc::Optional<int>(_clockRate.intValue);
+    parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
   }
   if (_numChannels != nil) {
-    parameters.num_channels = rtc::Optional<int>(_numChannels.intValue);
+    parameters.num_channels = absl::optional<int>(_numChannels.intValue);
   }
   for (NSString *paramKey in _parameters.allKeys) {
     std::string key = [NSString stdStringForString:paramKey];
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
index 8521862..299e318 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
@@ -44,13 +44,13 @@
   webrtc::RtpEncodingParameters parameters;
   parameters.active = _isActive;
   if (_maxBitrateBps != nil) {
-    parameters.max_bitrate_bps = rtc::Optional<int>(_maxBitrateBps.intValue);
+    parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
   }
   if (_minBitrateBps != nil) {
-    parameters.min_bitrate_bps = rtc::Optional<int>(_minBitrateBps.intValue);
+    parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
   }
   if (_ssrc != nil) {
-    parameters.ssrc = rtc::Optional<uint32_t>(_ssrc.unsignedLongValue);
+    parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
   }
   return parameters;
 }
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
index 63be2dc..d752126 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
@@ -43,9 +43,9 @@
 using namespace webrtc::H264;
 
 NSString *MaxSupportedLevelForProfile(Profile profile) {
-  const rtc::Optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
+  const absl::optional<ProfileLevelId> profileLevelId = [UIDevice maxSupportedH264Profile];
   if (profileLevelId && profileLevelId->profile >= profile) {
-    const rtc::Optional<std::string> profileString =
+    const absl::optional<std::string> profileString =
         ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level));
     if (profileString) {
       return [NSString stringForStdString:*profileString];
diff --git a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
index 3caf144..c5fbde1 100644
--- a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
+++ b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
@@ -20,7 +20,7 @@
 #import "RTCShader.h"
 #import "WebRTC/RTCLogging.h"
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 
 static const int kYTextureUnit = 0;
 static const int kUTextureUnit = 1;
@@ -73,7 +73,7 @@
   GLuint _vertexBuffer;
   GLuint _vertexArray;
   // Store current rotation and only upload new vertex data when rotation changes.
-  rtc::Optional<RTCVideoRotation> _currentRotation;
+  absl::optional<RTCVideoRotation> _currentRotation;
 
   GLuint _i420Program;
   GLuint _nv12Program;
@@ -144,7 +144,7 @@
 #endif
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<RTCVideoRotation>(rotation);
+    _currentRotation = absl::optional<RTCVideoRotation>(rotation);
     RTCSetVertexData(*_currentRotation);
   }
   return YES;
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
index 03ea780..bb6f6ce 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
@@ -14,6 +14,6 @@
 
 @interface UIDevice (H264Profile)
 
-+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
++ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile;
 
 @end
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
index ef94c14..196e34e 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
@@ -85,7 +85,7 @@
     {RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}},   // https://support.apple.com/kb/SP762
 };
 
-rtc::Optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
+absl::optional<ProfileLevelId> FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
   const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
                                     std::end(kH264MaxSupportedProfiles),
                                     [deviceType](const SupportedH264Profile& supportedProfile) {
@@ -94,14 +94,14 @@
   if (result != std::end(kH264MaxSupportedProfiles)) {
     return result->profile;
   }
-  return rtc::nullopt;
+  return absl::nullopt;
 }
 
 }  // namespace
 
 @implementation UIDevice (H264Profile)
 
-+ (rtc::Optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
++ (absl::optional<webrtc::H264::ProfileLevelId>)maxSupportedH264Profile {
   return FindMaxSupportedProfileForDevice([self deviceType]);
 }
 
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
index 66e9b61..27dcdee 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
@@ -172,7 +172,7 @@
 // returned. The user must initialize the encoder with a resolution and
 // framerate conforming to the selected H264 level regardless.
 CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
-  const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+  const absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
       webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
   RTC_DCHECK(profile_level_id);
   switch (profile_level_id->profile) {
diff --git a/sdk/objc/Framework/Native/src/objc_video_track_source.h b/sdk/objc/Framework/Native/src/objc_video_track_source.h
index 1062e96..d237980 100644
--- a/sdk/objc/Framework/Native/src/objc_video_track_source.h
+++ b/sdk/objc/Framework/Native/src/objc_video_track_source.h
@@ -36,7 +36,7 @@
   // Indicates that the encoder should denoise video before encoding it.
   // If it is not set, the default configuration is used which is different
   // depending on video codec.
-  rtc::Optional<bool> needs_denoising() const override { return false; }
+  absl::optional<bool> needs_denoising() const override { return false; }
 
   SourceState state() const override { return SourceState::kLive; }
 
diff --git a/system_wrappers/BUILD.gn b/system_wrappers/BUILD.gn
index 9b178f9..5e2858f 100644
--- a/system_wrappers/BUILD.gn
+++ b/system_wrappers/BUILD.gn
@@ -40,10 +40,10 @@
     ":runtime_enabled_features_api",
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
     "../modules:module_api_public",
     "../rtc_base:checks",
     "../rtc_base/synchronization:rw_lock_wrapper",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 
   if (is_posix || is_fuchsia) {
diff --git a/system_wrappers/include/rtp_to_ntp_estimator.h b/system_wrappers/include/rtp_to_ntp_estimator.h
index 7c0757c..62a79a5 100644
--- a/system_wrappers/include/rtp_to_ntp_estimator.h
+++ b/system_wrappers/include/rtp_to_ntp_estimator.h
@@ -13,7 +13,7 @@
 
 #include <list>
 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/include/module_common_types_public.h"
 #include "rtc_base/numerics/moving_median_filter.h"
 #include "system_wrappers/include/ntp_time.h"
@@ -72,7 +72,7 @@
   bool Estimate(int64_t rtp_timestamp, int64_t* rtp_timestamp_ms) const;
 
   // Returns estimated rtp to ntp linear transform parameters.
-  const rtc::Optional<Parameters> params() const;
+  const absl::optional<Parameters> params() const;
 
   static const int kMaxInvalidSamples = 3;
 
diff --git a/system_wrappers/source/rtp_to_ntp_estimator.cc b/system_wrappers/source/rtp_to_ntp_estimator.cc
index 5af102a..730c4f6 100644
--- a/system_wrappers/source/rtp_to_ntp_estimator.cc
+++ b/system_wrappers/source/rtp_to_ntp_estimator.cc
@@ -193,9 +193,9 @@
   return true;
 }
 
-const rtc::Optional<RtpToNtpEstimator::Parameters> RtpToNtpEstimator::params()
+const absl::optional<RtpToNtpEstimator::Parameters> RtpToNtpEstimator::params()
     const {
-  rtc::Optional<Parameters> res;
+  absl::optional<Parameters> res;
   if (params_calculated_) {
     res.emplace(smoothing_filter_.GetFilteredValue());
   }