Delete support for has_internal_source

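Removes the has_internal_source flag from VideoEncoder::EncoderInfo and
VideoEncoderFactory::CodecInfo, the QueryVideoEncoder() overrides in the
affected factories, and the internal-source code paths in
FrameEncodeMetadataWriter and VideoStreamEncoder (the dummy-frame
keyframe request, the frame-dropper bookkeeping and the capture-time
workaround for encoders with internal sources). Frames are now always
expected to be delivered via VideoEncoder::Encode(), and
FrameEncodeMetadataWriter::OnEncoderInit() no longer takes an
internal_source argument. Tests exercising internal-source encoders are
deleted.

For encoder implementations the migration is mechanical. A minimal
sketch (MakeEncoderInfo is illustrative only, not part of this change):

  #include "api/video_codecs/video_encoder.h"

  // After this change, simply stop assigning has_internal_source; the
  // remaining EncoderInfo fields are unchanged.
  webrtc::VideoEncoder::EncoderInfo MakeEncoderInfo() {
    webrtc::VideoEncoder::EncoderInfo info;
    info.implementation_name = "example";  // hypothetical name
    info.is_hardware_accelerated = false;
    // info.has_internal_source = false;   // field no longer exists
    return info;
  }
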
Bug: webrtc:12875
Change-Id: I9683e71e1fe5b24802033ffcb32a531ca685fc6f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/179220
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35353}
diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h
index 1aa1463..7985109 100644
--- a/api/test/mock_video_encoder_factory.h
+++ b/api/test/mock_video_encoder_factory.h
@@ -28,10 +28,6 @@
               GetSupportedFormats,
               (),
               (const, override));
-  MOCK_METHOD(CodecInfo,
-              QueryVideoEncoder,
-              (const SdpVideoFormat&),
-              (const, override));
   MOCK_METHOD(std::unique_ptr<VideoEncoder>,
               CreateVideoEncoder,
               (const SdpVideoFormat&),
diff --git a/api/video_codecs/builtin_video_encoder_factory.cc b/api/video_codecs/builtin_video_encoder_factory.cc
index 9463a9c..78e5f8b 100644
--- a/api/video_codecs/builtin_video_encoder_factory.cc
+++ b/api/video_codecs/builtin_video_encoder_factory.cc
@@ -32,15 +32,6 @@
   BuiltinVideoEncoderFactory()
       : internal_encoder_factory_(new InternalEncoderFactory()) {}
 
-  VideoEncoderFactory::CodecInfo QueryVideoEncoder(
-      const SdpVideoFormat& format) const override {
-    // Format must be one of the internal formats.
-    RTC_DCHECK(
-        format.IsCodecInList(internal_encoder_factory_->GetSupportedFormats()));
-    VideoEncoderFactory::CodecInfo info;
-    return info;
-  }
-
   std::unique_ptr<VideoEncoder> CreateVideoEncoder(
       const SdpVideoFormat& format) override {
     // Try creating internal encoder.
diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
index 1db1908..2150a76 100644
--- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
+++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
@@ -71,13 +71,6 @@
   return info;
 }
 
-VideoEncoder::EncoderInfo GetEncoderInfoWithInternalSource(
-    bool internal_source) {
-  VideoEncoder::EncoderInfo info;
-  info.has_internal_source = internal_source;
-  return info;
-}
-
 class FakeEncodedImageCallback : public EncodedImageCallback {
  public:
   Result OnEncodedImage(const EncodedImage& encoded_image,
@@ -803,35 +796,6 @@
   EXPECT_FALSE(wrapper->GetEncoderInfo().is_hardware_accelerated);
 }
 
-TEST(SoftwareFallbackEncoderTest, ReportsInternalSource) {
-  auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
-  auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
-  EXPECT_CALL(*sw_encoder, GetEncoderInfo())
-      .WillRepeatedly(Return(GetEncoderInfoWithInternalSource(false)));
-  EXPECT_CALL(*hw_encoder, GetEncoderInfo())
-      .WillRepeatedly(Return(GetEncoderInfoWithInternalSource(true)));
-
-  std::unique_ptr<VideoEncoder> wrapper =
-      CreateVideoEncoderSoftwareFallbackWrapper(
-          std::unique_ptr<VideoEncoder>(sw_encoder),
-          std::unique_ptr<VideoEncoder>(hw_encoder));
-  EXPECT_TRUE(wrapper->GetEncoderInfo().has_internal_source);
-
-  VideoCodec codec_ = {};
-  codec_.width = 100;
-  codec_.height = 100;
-  wrapper->InitEncode(&codec_, kSettings);
-
-  // Trigger fallback to software.
-  EXPECT_CALL(*hw_encoder, Encode)
-      .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
-  VideoFrame frame = VideoFrame::Builder()
-                         .set_video_frame_buffer(I420Buffer::Create(100, 100))
-                         .build();
-  wrapper->Encode(frame, nullptr);
-  EXPECT_FALSE(wrapper->GetEncoderInfo().has_internal_source);
-}
-
 class PreferTemporalLayersFallbackTest : public ::testing::Test {
  public:
   PreferTemporalLayersFallbackTest() {}
diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc
index ac5d84b..83c3291 100644
--- a/api/video_codecs/video_encoder.cc
+++ b/api/video_codecs/video_encoder.cc
@@ -99,7 +99,6 @@
       implementation_name("unknown"),
       has_trusted_rate_controller(false),
       is_hardware_accelerated(true),
-      has_internal_source(false),
       fps_allocation{absl::InlinedVector<uint8_t, kMaxTemporalStreams>(
           1,
           kMaxFramerateFraction)},
@@ -133,7 +132,6 @@
          ", has_trusted_rate_controller = "
       << has_trusted_rate_controller
       << ", is_hardware_accelerated = " << is_hardware_accelerated
-      << ", has_internal_source = " << has_internal_source
       << ", fps_allocation = [";
   size_t num_spatial_layer_with_fps_allocation = 0;
   for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
@@ -214,8 +212,7 @@
   if (supports_native_handle != rhs.supports_native_handle ||
       implementation_name != rhs.implementation_name ||
       has_trusted_rate_controller != rhs.has_trusted_rate_controller ||
-      is_hardware_accelerated != rhs.is_hardware_accelerated ||
-      has_internal_source != rhs.has_internal_source) {
+      is_hardware_accelerated != rhs.is_hardware_accelerated) {
     return false;
   }
 
diff --git a/api/video_codecs/video_encoder.h b/api/video_codecs/video_encoder.h
index 3035dd7..94d7287 100644
--- a/api/video_codecs/video_encoder.h
+++ b/api/video_codecs/video_encoder.h
@@ -207,13 +207,6 @@
     // thresholds will be used in CPU adaptation.
     bool is_hardware_accelerated;
 
-    // If this field is true, the encoder uses internal camera sources, meaning
-    // that it does not require/expect frames to be delivered via
-    // webrtc::VideoEncoder::Encode.
-    // Internal source encoders are deprecated and support for them will be
-    // phased out.
-    bool has_internal_source;
-
     // For each spatial layer (simulcast stream or SVC layer), represented as an
     // element in `fps_allocation` a vector indicates how many temporal layers
     // the encoder is using for that spatial layer.
diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h
index 2768079..b1bf3fd 100644
--- a/api/video_codecs/video_encoder_factory.h
+++ b/api/video_codecs/video_encoder_factory.h
@@ -29,12 +29,6 @@
  public:
   // TODO(magjed): Try to get rid of this struct.
   struct CodecInfo {
-    // `has_internal_source` is true if encoders created by this factory of the
-    // given codec will use internal camera sources, meaning that they don't
-    // require/expect frames to be delivered via webrtc::VideoEncoder::Encode.
-    // This flag is used as the internal_source parameter to
-    // webrtc::ViEExternalCodec::RegisterExternalSendCodec.
-    bool has_internal_source = false;
   };
 
   struct CodecSupport {
diff --git a/media/engine/encoder_simulcast_proxy_unittest.cc b/media/engine/encoder_simulcast_proxy_unittest.cc
index e5eb7a3..fbd9d55 100644
--- a/media/engine/encoder_simulcast_proxy_unittest.cc
+++ b/media/engine/encoder_simulcast_proxy_unittest.cc
@@ -184,35 +184,5 @@
   EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().is_hardware_accelerated);
 }
 
-TEST(EncoderSimulcastProxy, ForwardsInternalSource) {
-  auto mock_encoder_owned = std::make_unique<NiceMock<MockVideoEncoder>>();
-  NiceMock<MockVideoEncoder>* mock_encoder = mock_encoder_owned.get();
-  NiceMock<MockVideoEncoderFactory> simulcast_factory;
-
-  EXPECT_CALL(*mock_encoder, InitEncode(_, _))
-      .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK));
-
-  EXPECT_CALL(simulcast_factory, CreateVideoEncoder)
-      .Times(1)
-      .WillOnce(Return(ByMove(std::move(mock_encoder_owned))));
-
-  EncoderSimulcastProxy simulcast_enabled_proxy(&simulcast_factory,
-                                                SdpVideoFormat("VP8"));
-  VideoCodec codec_settings;
-  webrtc::test::CodecSettings(kVideoCodecVP8, &codec_settings);
-  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
-            simulcast_enabled_proxy.InitEncode(&codec_settings, kSettings));
-
-  VideoEncoder::EncoderInfo info;
-
-  info.has_internal_source = false;
-  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
-  EXPECT_FALSE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
-
-  info.has_internal_source = true;
-  EXPECT_CALL(*mock_encoder, GetEncoderInfo()).WillOnce(Return(info));
-  EXPECT_TRUE(simulcast_enabled_proxy.GetEncoderInfo().has_internal_source);
-}
-
 }  // namespace testing
 }  // namespace webrtc
diff --git a/media/engine/fake_webrtc_video_engine.cc b/media/engine/fake_webrtc_video_engine.cc
index 6b042f7..7383c26 100644
--- a/media/engine/fake_webrtc_video_engine.cc
+++ b/media/engine/fake_webrtc_video_engine.cc
@@ -167,7 +167,6 @@
     const {
   EncoderInfo info;
   info.is_hardware_accelerated = true;
-  info.has_internal_source = false;
   return info;
 }
 
@@ -188,7 +187,6 @@
 // Video encoder factory.
 FakeWebRtcVideoEncoderFactory::FakeWebRtcVideoEncoderFactory()
     : num_created_encoders_(0),
-      encoders_have_internal_sources_(false),
       vp8_factory_mode_(false) {}
 
 std::vector<webrtc::SdpVideoFormat>
@@ -227,14 +225,6 @@
   return encoder;
 }
 
-webrtc::VideoEncoderFactory::CodecInfo
-FakeWebRtcVideoEncoderFactory::QueryVideoEncoder(
-    const webrtc::SdpVideoFormat& format) const {
-  webrtc::VideoEncoderFactory::CodecInfo info;
-  info.has_internal_source = encoders_have_internal_sources_;
-  return info;
-}
-
 bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders(
     int num_encoders) {
   int64_t start_offset_ms = rtc::TimeMillis();
@@ -254,11 +244,6 @@
                   encoders_.end());
 }
 
-void FakeWebRtcVideoEncoderFactory::set_encoders_have_internal_sources(
-    bool internal_source) {
-  encoders_have_internal_sources_ = internal_source;
-}
-
 void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodec(
     const webrtc::SdpVideoFormat& format) {
   formats_.push_back(format);
diff --git a/media/engine/fake_webrtc_video_engine.h b/media/engine/fake_webrtc_video_engine.h
index 831e616..37ef53a 100644
--- a/media/engine/fake_webrtc_video_engine.h
+++ b/media/engine/fake_webrtc_video_engine.h
@@ -116,8 +116,6 @@
   std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
   std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
       const webrtc::SdpVideoFormat& format) override;
-  CodecInfo QueryVideoEncoder(
-      const webrtc::SdpVideoFormat& format) const override;
 
   bool WaitForCreatedVideoEncoders(int num_encoders);
   void EncoderDestroyed(FakeWebRtcVideoEncoder* encoder);
@@ -133,7 +131,6 @@
   std::vector<webrtc::SdpVideoFormat> formats_;
   std::vector<FakeWebRtcVideoEncoder*> encoders_ RTC_GUARDED_BY(mutex_);
   int num_created_encoders_ RTC_GUARDED_BY(mutex_);
-  bool encoders_have_internal_sources_;
   bool vp8_factory_mode_;
 };
 
diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc
index d793390..9143361 100644
--- a/media/engine/simulcast_encoder_adapter.cc
+++ b/media/engine/simulcast_encoder_adapter.cc
@@ -896,7 +896,6 @@
           encoder_impl_info.has_trusted_rate_controller;
       encoder_info.is_hardware_accelerated =
           encoder_impl_info.is_hardware_accelerated;
-      encoder_info.has_internal_source = encoder_impl_info.has_internal_source;
       encoder_info.is_qp_trusted = encoder_impl_info.is_qp_trusted;
     } else {
       encoder_info.implementation_name += ", ";
@@ -917,9 +916,6 @@
       encoder_info.is_hardware_accelerated |=
           encoder_impl_info.is_hardware_accelerated;
 
-      // Has internal source only if all encoders have it.
-      encoder_info.has_internal_source &= encoder_impl_info.has_internal_source;
-
       // Treat QP from frame/slice/tile header as average QP only if all
       // encoders report it as average QP.
       encoder_info.is_qp_trusted =
diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc
index f28a1af..bb2335c 100644
--- a/media/engine/simulcast_encoder_adapter_unittest.cc
+++ b/media/engine/simulcast_encoder_adapter_unittest.cc
@@ -242,7 +242,6 @@
         apply_alignment_to_all_simulcast_layers_;
     info.has_trusted_rate_controller = has_trusted_rate_controller_;
     info.is_hardware_accelerated = is_hardware_accelerated_;
-    info.has_internal_source = has_internal_source_;
     info.fps_allocation[0] = fps_allocation_;
     info.supports_simulcast = supports_simulcast_;
     info.is_qp_trusted = is_qp_trusted_;
@@ -295,10 +294,6 @@
     is_hardware_accelerated_ = is_hardware_accelerated;
   }
 
-  void set_has_internal_source(bool has_internal_source) {
-    has_internal_source_ = has_internal_source;
-  }
-
   void set_fps_allocation(const FramerateFractions& fps_allocation) {
     fps_allocation_ = fps_allocation;
   }
@@ -330,7 +325,6 @@
   bool apply_alignment_to_all_simulcast_layers_ = false;
   bool has_trusted_rate_controller_ = false;
   bool is_hardware_accelerated_ = false;
-  bool has_internal_source_ = false;
   int32_t init_encode_return_value_ = 0;
   VideoEncoder::RateControlParameters last_set_rates_;
   FramerateFractions fps_allocation_;
@@ -1379,28 +1373,6 @@
           VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
 }
 
-TEST_F(TestSimulcastEncoderAdapterFake, ReportsInternalSource) {
-  SimulcastTestFixtureImpl::DefaultSettings(
-      &codec_, static_cast<const int*>(kTestTemporalLayerProfile),
-      kVideoCodecVP8);
-  codec_.numberOfSimulcastStreams = 3;
-  adapter_->RegisterEncodeCompleteCallback(this);
-  EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
-  ASSERT_EQ(3u, helper_->factory()->encoders().size());
-
-  // All encoders have internal source, simulcast adapter reports true.
-  for (MockVideoEncoder* encoder : helper_->factory()->encoders()) {
-    encoder->set_has_internal_source(true);
-  }
-  EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
-  EXPECT_TRUE(adapter_->GetEncoderInfo().has_internal_source);
-
-  // One encoder does not have internal source, simulcast adapter reports false.
-  helper_->factory()->encoders()[2]->set_has_internal_source(false);
-  EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings));
-  EXPECT_FALSE(adapter_->GetEncoderInfo().has_internal_source);
-}
-
 TEST_F(TestSimulcastEncoderAdapterFake, ReportsIsQpTrusted) {
   SimulcastTestFixtureImpl::DefaultSettings(
       &codec_, static_cast<const int*>(kTestTemporalLayerProfile),
diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc
index 2265dd9..01ba354 100644
--- a/media/engine/webrtc_video_engine_unittest.cc
+++ b/media/engine/webrtc_video_engine_unittest.cc
@@ -1211,11 +1211,7 @@
                                       /*lntf_expected=*/false);
 
   // Mock encoder creation. `engine` take ownership of the encoder.
-  webrtc::VideoEncoderFactory::CodecInfo codec_info;
-  codec_info.has_internal_source = false;
   const webrtc::SdpVideoFormat format("VP8");
-  EXPECT_CALL(*encoder_factory, QueryVideoEncoder(format))
-      .WillRepeatedly(Return(codec_info));
   EXPECT_CALL(*encoder_factory, CreateVideoEncoder(format)).WillOnce([&] {
     return std::make_unique<FakeWebRtcVideoEncoder>(nullptr);
   });
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index b8c6916..13dcba8 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -635,7 +635,6 @@
   info.scaling_settings =
       VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
   info.is_hardware_accelerated = false;
-  info.has_internal_source = false;
   info.supports_simulcast = true;
   info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420};
   return info;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
index 10f4517..80744e2 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc
@@ -141,8 +141,6 @@
       encoder_info_.apply_alignment_to_all_simulcast_layers = true;
     }
 
-    encoder_info_.has_internal_source = false;
-
     encoders_.emplace_back(std::move(encoder));
   }
   encoder_info_.implementation_name += ")";
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index 44a379f..8661725 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -1215,7 +1215,6 @@
   info.has_trusted_rate_controller =
       rate_control_settings_.LibvpxVp8TrustedRateController();
   info.is_hardware_accelerated = false;
-  info.has_internal_source = false;
   info.supports_simulcast = true;
   if (!resolution_bitrate_limits_.empty()) {
     info.resolution_bitrate_limits = resolution_bitrate_limits_;
diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 00258fe..2e2597e 100644
--- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -520,7 +520,6 @@
 
   EXPECT_FALSE(info.supports_native_handle);
   EXPECT_FALSE(info.is_hardware_accelerated);
-  EXPECT_FALSE(info.has_internal_source);
   EXPECT_TRUE(info.supports_simulcast);
   EXPECT_EQ(info.implementation_name, "libvpx");
   EXPECT_EQ(info.requested_resolution_alignment, 1);
diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
index 443efc2..75c2f16 100644
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc
@@ -1735,7 +1735,6 @@
   }
   info.has_trusted_rate_controller = trusted_rate_controller_;
   info.is_hardware_accelerated = false;
-  info.has_internal_source = false;
   if (inited_) {
     // Find the max configured fps of any active spatial layer.
     float max_fps = 0.0;
diff --git a/sdk/android/src/jni/video_encoder_wrapper.cc b/sdk/android/src/jni/video_encoder_wrapper.cc
index b919db7..1a841e6 100644
--- a/sdk/android/src/jni/video_encoder_wrapper.cc
+++ b/sdk/android/src/jni/video_encoder_wrapper.cc
@@ -101,7 +101,6 @@
 
 void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) {
   encoder_info_.supports_native_handle = true;
-  encoder_info_.has_internal_source = false;
 
   encoder_info_.implementation_name = JavaToStdString(
       jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm
index 06515e5..f04aa89 100644
--- a/sdk/objc/native/src/objc_video_encoder_factory.mm
+++ b/sdk/objc/native/src/objc_video_encoder_factory.mm
@@ -101,7 +101,6 @@
     info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
     info.supports_native_handle = encoder_.supportsNativeHandle;
     info.is_hardware_accelerated = true;
-    info.has_internal_source = false;
     return info;
   }
 
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
index 64df2b8..d3fbab0 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
@@ -372,12 +372,6 @@
   return delegate_->GetSupportedFormats();
 }
 
-VideoEncoderFactory::CodecInfo
-QualityAnalyzingVideoEncoderFactory::QueryVideoEncoder(
-    const SdpVideoFormat& format) const {
-  return delegate_->QueryVideoEncoder(format);
-}
-
 std::unique_ptr<VideoEncoder>
 QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
     const SdpVideoFormat& format) {
diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
index 11d3be3..135c85c 100644
--- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
+++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
@@ -176,8 +176,6 @@
 
   // Methods of VideoEncoderFactory interface.
   std::vector<SdpVideoFormat> GetSupportedFormats() const override;
-  VideoEncoderFactory::CodecInfo QueryVideoEncoder(
-      const SdpVideoFormat& format) const override;
   std::unique_ptr<VideoEncoder> CreateVideoEncoder(
       const SdpVideoFormat& format) override;
 
diff --git a/test/peer_scenario/peer_scenario_client.cc b/test/peer_scenario/peer_scenario_client.cc
index cd5beee..7692b8a 100644
--- a/test/peer_scenario/peer_scenario_client.cc
+++ b/test/peer_scenario/peer_scenario_client.cc
@@ -172,11 +172,6 @@
   std::vector<SdpVideoFormat> GetSupportedFormats() const override {
     return {SdpVideoFormat("VP8")};
   }
-  CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override {
-    RTC_CHECK_EQ(format.name, "VP8");
-    CodecInfo info;
-    return info;
-  }
   std::unique_ptr<VideoEncoder> CreateVideoEncoder(
       const SdpVideoFormat& format) override {
     return std::make_unique<FakeVp8Encoder>(clock_);
diff --git a/test/video_encoder_proxy_factory.h b/test/video_encoder_proxy_factory.h
index f40e69d..ffa8efc 100644
--- a/test/video_encoder_proxy_factory.h
+++ b/test/video_encoder_proxy_factory.h
@@ -38,7 +38,6 @@
         encoder_selector_(encoder_selector),
         num_simultaneous_encoder_instances_(0),
         max_num_simultaneous_encoder_instances_(0) {
-    codec_info_.has_internal_source = false;
   }
 
   // Unused by tests.
@@ -47,10 +46,6 @@
     return {};
   }
 
-  CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override {
-    return codec_info_;
-  }
-
   std::unique_ptr<VideoEncoder> CreateVideoEncoder(
       const SdpVideoFormat& format) override {
     ++num_simultaneous_encoder_instances_;
@@ -69,10 +64,6 @@
     return nullptr;
   }
 
-  void SetHasInternalSource(bool has_internal_source) {
-    codec_info_.has_internal_source = has_internal_source;
-  }
-
   int GetMaxNumberOfSimultaneousEncoderInstances() {
     return max_num_simultaneous_encoder_instances_;
   }
@@ -151,7 +142,6 @@
 
   VideoEncoder* const encoder_;
   EncoderSelectorInterface* const encoder_selector_;
-  CodecInfo codec_info_;
 
   int num_simultaneous_encoder_instances_;
   int max_num_simultaneous_encoder_instances_;
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index ff2034e..51f09b0 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -50,7 +50,6 @@
 FrameEncodeMetadataWriter::FrameEncodeMetadataWriter(
     EncodedImageCallback* frame_drop_callback)
     : frame_drop_callback_(frame_drop_callback),
-      internal_source_(false),
       framerate_fps_(0),
       last_timing_frame_time_ms_(-1),
       reordered_frames_logged_messages_(0),
@@ -59,12 +58,9 @@
 }
 FrameEncodeMetadataWriter::~FrameEncodeMetadataWriter() {}
 
-void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec,
-                                              bool internal_source) {
+void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec) {
   MutexLock lock(&lock_);
   codec_settings_ = codec;
-  internal_source_ = internal_source;
-
   size_t num_spatial_layers = codec_settings_.numberOfSimulcastStreams;
   if (codec_settings_.codecType == kVideoCodecVP9) {
     num_spatial_layers = std::max(
@@ -96,9 +92,6 @@
 
 void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) {
   MutexLock lock(&lock_);
-  if (internal_source_) {
-    return;
-  }
 
   timing_frames_info_.resize(num_spatial_layers_);
   FrameMetadata metadata;
@@ -148,12 +141,8 @@
 
   int64_t encode_done_ms = rtc::TimeMillis();
 
-  // Encoders with internal sources do not call OnEncodeStarted
-  // `timing_frames_info_` may be not filled here.
-  if (!internal_source_) {
-    encode_start_ms =
-        ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image);
-  }
+  encode_start_ms =
+      ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image);
 
   if (timing_frames_info_.size() > simulcast_svc_idx) {
     size_t target_bitrate =
@@ -187,21 +176,6 @@
     last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
   }
 
-  // Workaround for chromoting encoder: it passes encode start and finished
-  // timestamps in `timing_` field, but they (together with capture timestamp)
-  // are not in the WebRTC clock.
-  if (internal_source_ && encoded_image->timing_.encode_finish_ms > 0 &&
-      encoded_image->timing_.encode_start_ms > 0) {
-    int64_t clock_offset_ms =
-        encode_done_ms - encoded_image->timing_.encode_finish_ms;
-    // Translate capture timestamp to local WebRTC clock.
-    encoded_image->capture_time_ms_ += clock_offset_ms;
-    encoded_image->SetTimestamp(
-        static_cast<uint32_t>(encoded_image->capture_time_ms_ * 90));
-    encode_start_ms.emplace(encoded_image->timing_.encode_start_ms +
-                            clock_offset_ms);
-  }
-
   // If encode start is not available that means that encoder uses internal
   // source. In that case capture timestamp may be from a different clock with a
   // drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
diff --git a/video/frame_encode_metadata_writer.h b/video/frame_encode_metadata_writer.h
index 80e5c5e..afebca8 100644
--- a/video/frame_encode_metadata_writer.h
+++ b/video/frame_encode_metadata_writer.h
@@ -28,7 +28,7 @@
   explicit FrameEncodeMetadataWriter(EncodedImageCallback* frame_drop_callback);
   ~FrameEncodeMetadataWriter();
 
-  void OnEncoderInit(const VideoCodec& codec, bool internal_source = false);
+  void OnEncoderInit(const VideoCodec& codec);
   void OnSetRates(const VideoBitrateAllocation& bitrate_allocation,
                   uint32_t framerate_fps);
 
@@ -67,7 +67,6 @@
   Mutex lock_;
   EncodedImageCallback* const frame_drop_callback_;
   VideoCodec codec_settings_ RTC_GUARDED_BY(&lock_);
-  bool internal_source_ RTC_GUARDED_BY(&lock_);
   uint32_t framerate_fps_ RTC_GUARDED_BY(&lock_);
 
   size_t num_spatial_layers_ RTC_GUARDED_BY(&lock_);
diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc
index 8b60a8c..e151282 100644
--- a/video/frame_encode_metadata_writer_unittest.cc
+++ b/video/frame_encode_metadata_writer_unittest.cc
@@ -78,7 +78,7 @@
   codec_settings.numberOfSimulcastStreams = num_streams;
   codec_settings.timing_frame_thresholds = {delay_ms,
                                             kDefaultOutlierFrameSizePercent};
-  encode_timer.OnEncoderInit(codec_settings, false);
+  encode_timer.OnEncoderInit(codec_settings);
   const size_t kFramerate = 30;
   VideoBitrateAllocation bitrate_allocation;
   for (int si = 0; si < num_streams; ++si) {
@@ -205,7 +205,7 @@
   VideoCodec codec_settings;
   // Make all frames timing frames.
   codec_settings.timing_frame_thresholds.delay_ms = 1;
-  encode_timer.OnEncoderInit(codec_settings, false);
+  encode_timer.OnEncoderInit(codec_settings);
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
   encode_timer.OnSetRates(bitrate_allocation, 30);
@@ -228,51 +228,6 @@
   EXPECT_FALSE(IsTimingFrame(image));
 }
 
-TEST(FrameEncodeMetadataWriterTest,
-     AdjustsCaptureTimeForInternalSourceEncoder) {
-  const int64_t kEncodeStartDelayMs = 2;
-  const int64_t kEncodeFinishDelayMs = 10;
-  constexpr size_t kFrameSize = 500;
-
-  int64_t timestamp = 1;
-  EncodedImage image;
-  image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize));
-  image.capture_time_ms_ = timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
-
-  FakeEncodedImageCallback sink;
-  FrameEncodeMetadataWriter encode_timer(&sink);
-
-  VideoCodec codec_settings;
-  // Make all frames timing frames.
-  codec_settings.timing_frame_thresholds.delay_ms = 1;
-  encode_timer.OnEncoderInit(codec_settings, true);
-
-  VideoBitrateAllocation bitrate_allocation;
-  bitrate_allocation.SetBitrate(0, 0, 500000);
-  encode_timer.OnSetRates(bitrate_allocation, 30);
-
-  // Verify a single frame without encode timestamps isn't a timing frame.
-  encode_timer.FillTimingInfo(0, &image);
-  EXPECT_FALSE(IsTimingFrame(image));
-
-  // New frame, but this time with encode timestamps set in timing_.
-  // This should be a timing frame.
-  image.capture_time_ms_ = ++timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
-  image.timing_ = EncodedImage::Timing();
-  image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs;
-  image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
-
-  encode_timer.FillTimingInfo(0, &image);
-  EXPECT_TRUE(IsTimingFrame(image));
-
-  // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
-  // capture timestamp should be kEncodeFinishDelayMs in the past.
-  EXPECT_NEAR(image.capture_time_ms_, rtc::TimeMillis() - kEncodeFinishDelayMs,
-              1);
-}
-
 TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) {
   const int64_t kTimestampMs1 = 47721840;
   const int64_t kTimestampMs2 = 47721850;
@@ -281,7 +236,7 @@
 
   FakeEncodedImageCallback sink;
   FrameEncodeMetadataWriter encode_timer(&sink);
-  encode_timer.OnEncoderInit(VideoCodec(), false);
+  encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
@@ -338,7 +293,7 @@
   FakeEncodedImageCallback sink;
 
   FrameEncodeMetadataWriter encode_timer(&sink);
-  encode_timer.OnEncoderInit(VideoCodec(), false);
+  encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
@@ -363,7 +318,7 @@
   FakeEncodedImageCallback sink;
 
   FrameEncodeMetadataWriter encode_timer(&sink);
-  encode_timer.OnEncoderInit(VideoCodec(), false);
+  encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
@@ -389,7 +344,7 @@
   FrameEncodeMetadataWriter encode_timer(&sink);
   VideoCodec codec;
   codec.mode = VideoCodecMode::kScreensharing;
-  encode_timer.OnEncoderInit(codec, false);
+  encode_timer.OnEncoderInit(codec);
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
@@ -413,7 +368,7 @@
   FakeEncodedImageCallback sink;
 
   FrameEncodeMetadataWriter encode_timer(&sink);
-  encode_timer.OnEncoderInit(VideoCodec(), false);
+  encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
@@ -440,7 +395,7 @@
   FakeEncodedImageCallback sink;
 
   FrameEncodeMetadataWriter encode_timer(&sink);
-  encode_timer.OnEncoderInit(VideoCodec(), false);
+  encode_timer.OnEncoderInit(VideoCodec());
   // Any non-zero bitrate needed to be set before the first frame.
   VideoBitrateAllocation bitrate_allocation;
   bitrate_allocation.SetBitrate(0, 0, 500000);
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index 485072e..4dc5f57 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -105,13 +105,6 @@
   kVP8,
 };
 
-VideoFrame CreateVideoFrame(int width, int height, int64_t timestamp_ms) {
-  return webrtc::VideoFrame::Builder()
-      .set_video_frame_buffer(I420Buffer::Create(width, height))
-      .set_rotation(webrtc::kVideoRotation_0)
-      .set_timestamp_ms(timestamp_ms)
-      .build();
-}
 }  // namespace
 
 class VideoSendStreamTest : public test::CallTest {
@@ -2206,145 +2199,6 @@
   absl::optional<int> bitrate_kbps_ RTC_GUARDED_BY(mutex_);
 };
 
-// This test that if the encoder use an internal source, VideoEncoder::SetRates
-// will be called with zero bitrate during initialization and that
-// VideoSendStream::Stop also triggers VideoEncoder::SetRates Start to be called
-// with zero bitrate.
-TEST_F(VideoSendStreamTest, VideoSendStreamStopSetEncoderRateToZero) {
-  test::NullTransport transport;
-  StartStopBitrateObserver encoder;
-  test::VideoEncoderProxyFactory encoder_factory(&encoder);
-  encoder_factory.SetHasInternalSource(true);
-  test::FrameForwarder forwarder;
-
-  SendTask(RTC_FROM_HERE, task_queue(),
-           [this, &transport, &encoder_factory, &forwarder]() {
-             CreateSenderCall();
-             CreateSendConfig(1, 0, 0, &transport);
-
-             sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
-                                                     kNetworkUp);
-             GetVideoSendConfig()->encoder_settings.encoder_factory =
-                 &encoder_factory;
-
-             CreateVideoStreams();
-             // Inject a frame, to force encoder creation.
-             GetVideoSendStream()->Start();
-             GetVideoSendStream()->SetSource(&forwarder,
-                                             DegradationPreference::DISABLED);
-             forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
-           });
-
-  EXPECT_TRUE(encoder.WaitForEncoderInit());
-
-  SendTask(RTC_FROM_HERE, task_queue(),
-           [this]() { GetVideoSendStream()->Start(); });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  SendTask(RTC_FROM_HERE, task_queue(),
-           [this]() { GetVideoSendStream()->Stop(); });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
-
-  SendTask(RTC_FROM_HERE, task_queue(),
-           [this]() { GetVideoSendStream()->Start(); });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    DestroyStreams();
-    DestroyCalls();
-  });
-}
-
-// Tests that when the encoder uses an internal source, the VideoEncoder will
-// be updated with a new bitrate when turning the VideoSendStream on/off with
-// VideoSendStream::UpdateActiveSimulcastLayers, and when the VideoStreamEncoder
-// is reconfigured with new active layers.
-TEST_F(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) {
-  test::NullTransport transport;
-  StartStopBitrateObserver encoder;
-  test::VideoEncoderProxyFactory encoder_factory(&encoder);
-  encoder_factory.SetHasInternalSource(true);
-  test::FrameForwarder forwarder;
-
-  SendTask(RTC_FROM_HERE, task_queue(),
-           [this, &transport, &encoder_factory, &forwarder]() {
-             CreateSenderCall();
-             // Create two simulcast streams.
-             CreateSendConfig(2, 0, 0, &transport);
-
-             sender_call_->SignalChannelNetworkState(MediaType::VIDEO,
-                                                     kNetworkUp);
-             GetVideoSendConfig()->encoder_settings.encoder_factory =
-                 &encoder_factory;
-
-             CreateVideoStreams();
-
-             EXPECT_FALSE(GetVideoSendStream()->started());
-
-             // Inject a frame, to force encoder creation.
-             GetVideoSendStream()->Start();
-             GetVideoSendStream()->SetSource(&forwarder,
-                                             DegradationPreference::DISABLED);
-             forwarder.IncomingCapturedFrame(CreateVideoFrame(640, 480, 4));
-           });
-
-  EXPECT_TRUE(encoder.WaitForEncoderInit());
-
-  // When we turn on the simulcast layers it will update the BitrateAllocator,
-  // which in turn updates the VideoEncoder's bitrate.
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->UpdateActiveSimulcastLayers({true, true});
-    EXPECT_TRUE(GetVideoSendStream()->started());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  GetVideoEncoderConfig()->simulcast_layers[0].active = true;
-  GetVideoEncoderConfig()->simulcast_layers[1].active = false;
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->ReconfigureVideoEncoder(
-        GetVideoEncoderConfig()->Copy());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  // Turning off both simulcast layers should trigger a bitrate change of 0.
-  GetVideoEncoderConfig()->simulcast_layers[0].active = false;
-  GetVideoEncoderConfig()->simulcast_layers[1].active = false;
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->UpdateActiveSimulcastLayers({false, false});
-    EXPECT_FALSE(GetVideoSendStream()->started());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
-
-  // Re-activating a layer should resume sending and trigger a bitrate change.
-  GetVideoEncoderConfig()->simulcast_layers[0].active = true;
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->UpdateActiveSimulcastLayers({true, false});
-    EXPECT_TRUE(GetVideoSendStream()->started());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  // Stop the stream and make sure the bit rate goes to zero again.
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->Stop();
-    EXPECT_FALSE(GetVideoSendStream()->started());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kZero));
-
-  // One last test to verify that after `Stop()` we can still implicitly start
-  // the stream if needed. This is what will happen when a send stream gets
-  // re-used. See crbug.com/1241213.
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    GetVideoSendStream()->UpdateActiveSimulcastLayers({true, true});
-    EXPECT_TRUE(GetVideoSendStream()->started());
-  });
-  EXPECT_TRUE(encoder.WaitBitrateChanged(WaitUntil::kNonZero));
-
-  SendTask(RTC_FROM_HERE, task_queue(), [this]() {
-    DestroyStreams();
-    DestroyCalls();
-  });
-}
-
 TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
   class EncoderStateObserver : public test::SendTest, public VideoEncoder {
    public:
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 560fcb7..c6870a0 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -839,21 +839,12 @@
         max_data_payload_length_ = max_data_payload_length;
         pending_encoder_reconfiguration_ = true;
 
-        // Reconfigure the encoder now if the encoder has an internal source or
-        // if the frame resolution is known. Otherwise, the reconfiguration is
-        // deferred until the next frame to minimize the number of
-        // reconfigurations. The codec configuration depends on incoming video
-        // frame size.
+        // Reconfigure the encoder now if the frame resolution is known.
+        // Otherwise, the reconfiguration is deferred until the next frame to
+        // minimize the number of reconfigurations. The codec configuration
+        // depends on incoming video frame size.
         if (last_frame_info_) {
           ReconfigureEncoder();
-        } else {
-          codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
-              encoder_config_.video_format);
-          if (HasInternalSource()) {
-            last_frame_info_ = VideoFrameInfo(kDefaultInputPixelsWidth,
-                                              kDefaultInputPixelsHeight, false);
-            ReconfigureEncoder();
-          }
         }
       });
 }
@@ -884,9 +875,6 @@
 
     encoder_->SetFecControllerOverride(fec_controller_override_);
 
-    codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
-        encoder_config_.video_format);
-
     encoder_reset_required = true;
   }
 
@@ -1158,8 +1146,7 @@
     } else {
       encoder_initialized_ = true;
       encoder_->RegisterEncodeCompleteCallback(this);
-      frame_encode_metadata_writer_.OnEncoderInit(send_codec_,
-                                                  HasInternalSource());
+      frame_encode_metadata_writer_.OnEncoderInit(send_codec_);
       next_frame_types_.clear();
       next_frame_types_.resize(
           std::max(static_cast<int>(codec.numberOfSimulcastStreams), 1),
@@ -1481,15 +1468,12 @@
   }
 
   // `bitrate_allocation` is 0 it means that the network is down or the send
-  // pacer is full. We currently only report this if the encoder has an internal
-  // source. If the encoder does not have an internal source, higher levels
-  // are expected to not call AddVideoFrame. We do this since it is unclear
-  // how current encoder implementations behave when given a zero target
+  // pacer is full. We currently don't pass this on to the encoder since it is
+  // unclear how current encoder implementations behave when given a zero target
   // bitrate.
   // TODO(perkj): Make sure all known encoder implementations handle zero
   // target bitrate and remove this check.
-  if (!HasInternalSource() &&
-      rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
+  if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
     return;
   }
 
@@ -1839,29 +1823,6 @@
   // TODO(webrtc:10615): Map keyframe request to spatial layer.
   std::fill(next_frame_types_.begin(), next_frame_types_.end(),
             VideoFrameType::kVideoFrameKey);
-
-  if (HasInternalSource()) {
-    // Try to request the frame if we have an external encoder with
-    // internal source since AddVideoFrame never will be called.
-
-    // TODO(nisse): Used only with internal source. Delete as soon as
-    // that feature is removed. The only implementation I've been able
-    // to find ignores what's in the frame. With one exception: It seems
-    // a few test cases, e.g.,
-    // VideoSendStreamTest.VideoSendStreamStopSetEncoderRateToZero, set
-    // internal_source to true and use FakeEncoder. And the latter will
-    // happily encode this 1x1 frame and pass it on down the pipeline.
-    if (encoder_->Encode(VideoFrame::Builder()
-                             .set_video_frame_buffer(I420Buffer::Create(1, 1))
-                             .set_rotation(kVideoRotation_0)
-                             .set_timestamp_us(0)
-                             .build(),
-                         &next_frame_types_) == WEBRTC_VIDEO_CODEC_OK) {
-      // Try to remove just-performed keyframe request, if stream still exists.
-      std::fill(next_frame_types_.begin(), next_frame_types_.end(),
-                VideoFrameType::kVideoFrameDelta);
-    }
-  }
 }
 
 void VideoStreamEncoder::OnLossNotification(
@@ -2204,16 +2165,6 @@
     frame_dropper_.Fill(frame_size.bytes(), !keyframe);
   }
 
-  if (HasInternalSource()) {
-    // Update frame dropper after the fact for internal sources.
-    input_framerate_.Update(1u, clock_->TimeInMilliseconds());
-    frame_dropper_.Leak(GetInputFramerateFps());
-    // Signal to encoder to drop next frame.
-    if (frame_dropper_.DropFrame()) {
-      pending_frame_drops_.fetch_add(1);
-    }
-  }
-
   stream_resource_manager_.OnEncodeCompleted(encoded_image, time_sent_us,
                                              encode_duration_us, frame_size);
   if (bitrate_adjuster_) {
@@ -2222,12 +2173,6 @@
   }
 }
 
-bool VideoStreamEncoder::HasInternalSource() const {
-  // TODO(sprang): Checking both info from encoder and from encoder factory
-  // until we have deprecated and removed the encoder factory info.
-  return codec_info_.has_internal_source || encoder_info_.has_internal_source;
-}
-
 void VideoStreamEncoder::ReleaseEncoder() {
   if (!encoder_ || !encoder_initialized_) {
     return;
diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h
index 10833c5..e0eda70 100644
--- a/video/video_stream_encoder.h
+++ b/video/video_stream_encoder.h
@@ -228,7 +228,6 @@
                      int64_t time_sent_us,
                      int temporal_index,
                      DataSize frame_size);
-  bool HasInternalSource() const RTC_RUN_ON(&encoder_queue_);
   void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_);
   // After calling this function `resource_adaptation_processor_` will be null.
   void ShutdownResourceAdaptationQueue();
@@ -334,7 +333,6 @@
   absl::optional<int64_t> last_encode_info_ms_ RTC_GUARDED_BY(&encoder_queue_);
 
   VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(&encoder_queue_);
-  VideoEncoderFactory::CodecInfo codec_info_ RTC_GUARDED_BY(&encoder_queue_);
   VideoCodec send_codec_ RTC_GUARDED_BY(&encoder_queue_);
 
   FrameDropper frame_dropper_ RTC_GUARDED_BY(&encoder_queue_);
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index 1331b8c..ba29d0b 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -7240,68 +7240,6 @@
   video_stream_encoder_->Stop();
 }
 
-TEST_F(VideoStreamEncoderTest, RequestKeyframeInternalSource) {
-  // Configure internal source factory and setup test again.
-  encoder_factory_.SetHasInternalSource(true);
-  ResetEncoder("VP8", 1, 1, 1, false);
-  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
-      kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
-
-  // Call encoder directly, simulating internal source where encoded frame
-  // callback in VideoStreamEncoder is called despite no OnFrame().
-  fake_encoder_.InjectFrame(CreateFrame(1, nullptr), true);
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
-
-  const std::vector<VideoFrameType> kDeltaFrame = {
-      VideoFrameType::kVideoFrameDelta};
-  // Need to set timestamp manually since manually for injected frame.
-  VideoFrame frame = CreateFrame(101, nullptr);
-  frame.set_timestamp(101);
-  fake_encoder_.InjectFrame(frame, false);
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameDelta}));
-
-  // Request key-frame. The forces a dummy frame down into the encoder.
-  fake_encoder_.ExpectNullFrame();
-  video_stream_encoder_->SendKeyFrame();
-  EXPECT_TRUE(WaitForFrame(kDefaultTimeoutMs));
-  EXPECT_THAT(
-      fake_encoder_.LastFrameTypes(),
-      ::testing::ElementsAre(VideoFrameType{VideoFrameType::kVideoFrameKey}));
-
-  video_stream_encoder_->Stop();
-}
-
-TEST_F(VideoStreamEncoderTest, AdjustsTimestampInternalSource) {
-  // Configure internal source factory and setup test again.
-  encoder_factory_.SetHasInternalSource(true);
-  ResetEncoder("VP8", 1, 1, 1, false);
-  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
-      kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
-
-  int64_t timestamp = 1;
-  EncodedImage image;
-  image.capture_time_ms_ = ++timestamp;
-  image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
-  const int64_t kEncodeFinishDelayMs = 10;
-  image.timing_.encode_start_ms = timestamp;
-  image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
-  fake_encoder_.InjectEncodedImage(image, /*codec_specific_info=*/nullptr);
-  // Wait for frame without incrementing clock.
-  EXPECT_TRUE(sink_.WaitForFrame(kDefaultTimeoutMs));
-  // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
-  // capture timestamp should be kEncodeFinishDelayMs in the past.
-  EXPECT_EQ(sink_.GetLastCaptureTimeMs(),
-            CurrentTimeMs() - kEncodeFinishDelayMs);
-
-  video_stream_encoder_->Stop();
-}
-
 TEST_F(VideoStreamEncoderTest, DoesNotRewriteH264BitstreamWithOptimalSps) {
   // SPS contains VUI with restrictions on the maximum number of reordered
   // pictures, there is no need to rewrite the bitstream to enable faster