This is an initial cleanup step, aiming to delete the
webrtc::VideoRenderer class, replacing it with rtc::VideoSinkInterface.

The next step is to convert all places where a renderer is attached so
that they use rtc::VideoSourceInterface; at that point, the
SmoothsRenderedFrames method can be replaced by the flag
rtc::VideoSinkWants::smoothed_frames.

Delete unused method IsTextureSupported.
Delete unused time argument to RenderFrame.
Let webrtc::VideoRenderer inherit rtc::VideoSinkInterface. Rename RenderFrame --> OnFrame.

TBR=kjellander@webrtc.org
BUG=webrtc:5426

Review URL: https://codereview.webrtc.org/1814763002

Cr-Commit-Position: refs/heads/master@{#12070}
diff --git a/webrtc/DEPS b/webrtc/DEPS
index 292c996..aebafa7 100644
--- a/webrtc/DEPS
+++ b/webrtc/DEPS
@@ -44,4 +44,7 @@
   "video_frame\.h": [
     "+webrtc/common_video",
   ],
+  "video_renderer\.h": [
+    "+webrtc/media/base",
+  ],
 }
diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc
index beb05a0..7501ada 100644
--- a/webrtc/call/call_perf_tests.cc
+++ b/webrtc/call/call_perf_tests.cc
@@ -88,8 +88,7 @@
         first_time_in_sync_(-1),
         receive_stream_(nullptr) {}
 
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     VideoReceiveStream::Stats stats;
     {
       rtc::CritScope lock(&crit_);
@@ -129,8 +128,6 @@
     }
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   void set_receive_stream(VideoReceiveStream* receive_stream) {
     rtc::CritScope lock(&crit_);
     receive_stream_ = receive_stream;
@@ -377,8 +374,7 @@
           nullptr, this, test::PacketTransport::kReceiver, net_config_);
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       if (video_frame.ntp_time_ms() <= 0) {
         // Haven't got enough RTCP SR in order to calculate the capture ntp
@@ -417,8 +413,6 @@
       EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_);
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
       rtc::CritScope lock(&crit_);
       RTPHeader header;
diff --git a/webrtc/media/engine/fakewebrtccall.cc b/webrtc/media/engine/fakewebrtccall.cc
index 3277e75..5b2f0a7 100644
--- a/webrtc/media/engine/fakewebrtccall.cc
+++ b/webrtc/media/engine/fakewebrtccall.cc
@@ -202,9 +202,8 @@
   return receiving_;
 }
 
-void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame,
-                                         int time_to_render_ms) {
-  config_.renderer->RenderFrame(frame, time_to_render_ms);
+void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame) {
+  config_.renderer->OnFrame(frame);
 }
 
 webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const {
diff --git a/webrtc/media/engine/fakewebrtccall.h b/webrtc/media/engine/fakewebrtccall.h
index 41a92df..cf96cc5 100644
--- a/webrtc/media/engine/fakewebrtccall.h
+++ b/webrtc/media/engine/fakewebrtccall.h
@@ -159,7 +159,7 @@
 
   bool IsReceiving() const;
 
-  void InjectFrame(const webrtc::VideoFrame& frame, int time_to_render_ms);
+  void InjectFrame(const webrtc::VideoFrame& frame);
 
   void SetStats(const webrtc::VideoReceiveStream::Stats& stats);
 
diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc
index 2d8cf00..395dcb5 100644
--- a/webrtc/media/engine/webrtcvideoengine2.cc
+++ b/webrtc/media/engine/webrtcvideoengine2.cc
@@ -2358,9 +2358,8 @@
   allocated_decoders->clear();
 }
 
-void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
-    const webrtc::VideoFrame& frame,
-    int time_to_render_ms) {
+void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
+    const webrtc::VideoFrame& frame) {
   rtc::CritScope crit(&sink_lock_);
 
   if (first_frame_timestamp_ < 0)
@@ -2387,10 +2386,6 @@
   sink_->OnFrame(render_frame);
 }
 
-bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsTextureSupported() const {
-  return true;
-}
-
 bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::SmoothsRenderedFrames()
     const {
   return disable_prerenderer_smoothing_;
diff --git a/webrtc/media/engine/webrtcvideoengine2.h b/webrtc/media/engine/webrtcvideoengine2.h
index 171f26a..27eadb2 100644
--- a/webrtc/media/engine/webrtcvideoengine2.h
+++ b/webrtc/media/engine/webrtcvideoengine2.h
@@ -417,9 +417,7 @@
                                webrtc::RtcpMode rtcp_mode);
     void SetRecvParameters(const ChangedRecvParameters& recv_params);
 
-    void RenderFrame(const webrtc::VideoFrame& frame,
-                     int time_to_render_ms) override;
-    bool IsTextureSupported() const override;
+    void OnFrame(const webrtc::VideoFrame& frame) override;
     bool SmoothsRenderedFrames() const override;
     bool IsDefaultStream() const;
 
diff --git a/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index 21c4fd2..81f0e5b 100644
--- a/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -2068,7 +2068,7 @@
   video_frame.set_timestamp(kInitialTimestamp);
   // Initial NTP time is not available on the first frame, but should still be
   // able to be estimated.
-  stream->InjectFrame(video_frame, 0);
+  stream->InjectFrame(video_frame);
 
   EXPECT_EQ(1, renderer.num_rendered_frames());
 
@@ -2077,7 +2077,7 @@
   // here.
   video_frame.set_timestamp(kFrameOffsetMs * 90 - 1);
   video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs);
-  stream->InjectFrame(video_frame, 0);
+  stream->InjectFrame(video_frame);
 
   EXPECT_EQ(2, renderer.num_rendered_frames());
 
diff --git a/webrtc/test/gl/gl_renderer.cc b/webrtc/test/gl/gl_renderer.cc
index ff87d99..8ce373b 100644
--- a/webrtc/test/gl/gl_renderer.cc
+++ b/webrtc/test/gl/gl_renderer.cc
@@ -69,8 +69,7 @@
                GL_UNSIGNED_INT_8_8_8_8, static_cast<GLvoid*>(buffer_));
 }
 
-void GlRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                             int /*render_delay_ms*/) {
+void GlRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   assert(is_init_);
 
   if (static_cast<size_t>(frame.width()) != width_ ||
diff --git a/webrtc/test/gl/gl_renderer.h b/webrtc/test/gl/gl_renderer.h
index 7682d3c..01dc18d 100644
--- a/webrtc/test/gl/gl_renderer.h
+++ b/webrtc/test/gl/gl_renderer.h
@@ -26,8 +26,7 @@
 
 class GlRenderer : public VideoRenderer {
  public:
-  void RenderFrame(const webrtc::VideoFrame& frame,
-                   int time_to_render_ms) override;
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  protected:
   GlRenderer();
diff --git a/webrtc/test/linux/glx_renderer.cc b/webrtc/test/linux/glx_renderer.cc
index c5071de..d6979c8 100644
--- a/webrtc/test/linux/glx_renderer.cc
+++ b/webrtc/test/linux/glx_renderer.cc
@@ -144,8 +144,7 @@
   XConfigureWindow(display_, window_, CWWidth | CWHeight, &wc);
 }
 
-void GlxRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                              int /*render_delay_ms*/) {
+void GlxRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   if (static_cast<size_t>(frame.width()) != width_ ||
       static_cast<size_t>(frame.height()) != height_) {
     Resize(static_cast<size_t>(frame.width()),
@@ -168,7 +167,7 @@
     }
   }
 
-  GlRenderer::RenderFrame(frame, 0);
+  GlRenderer::OnFrame(frame);
   glXSwapBuffers(display_, window_);
 
   if (!glXMakeCurrent(display_, None, NULL)) {
diff --git a/webrtc/test/linux/glx_renderer.h b/webrtc/test/linux/glx_renderer.h
index c117281..4a758db 100644
--- a/webrtc/test/linux/glx_renderer.h
+++ b/webrtc/test/linux/glx_renderer.h
@@ -26,8 +26,7 @@
                              size_t height);
   virtual ~GlxRenderer();
 
-  void RenderFrame(const webrtc::VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  private:
   GlxRenderer(size_t width, size_t height);
diff --git a/webrtc/test/mac/video_renderer_mac.h b/webrtc/test/mac/video_renderer_mac.h
index 7baf794..702f5ae 100644
--- a/webrtc/test/mac/video_renderer_mac.h
+++ b/webrtc/test/mac/video_renderer_mac.h
@@ -27,8 +27,7 @@
   bool Init(const char* window_title, int width, int height);
 
   // Implements GlRenderer.
-  void RenderFrame(const VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const VideoFrame& frame) override;
 
  private:
   CocoaWindow* window_;
diff --git a/webrtc/test/mac/video_renderer_mac.mm b/webrtc/test/mac/video_renderer_mac.mm
index 9cde95a..f79aecf 100644
--- a/webrtc/test/mac/video_renderer_mac.mm
+++ b/webrtc/test/mac/video_renderer_mac.mm
@@ -125,9 +125,9 @@
   return true;
 }
 
-void MacRenderer::RenderFrame(const VideoFrame& frame, int /*delta*/) {
+void MacRenderer::OnFrame(const VideoFrame& frame) {
   [window_ makeCurrentContext];
-  GlRenderer::RenderFrame(frame, 0);
+  GlRenderer::OnFrame(frame);
 }
 
 }  // test
diff --git a/webrtc/test/video_renderer.cc b/webrtc/test/video_renderer.cc
index c7b60e5..1af1ff8 100644
--- a/webrtc/test/video_renderer.cc
+++ b/webrtc/test/video_renderer.cc
@@ -17,9 +17,7 @@
 namespace test {
 
 class NullRenderer : public VideoRenderer {
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {}
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const VideoFrame& video_frame) override {}
 };
 
 VideoRenderer* VideoRenderer::Create(const char* window_title,
diff --git a/webrtc/test/win/d3d_renderer.cc b/webrtc/test/win/d3d_renderer.cc
index 86900e9..e09b32d 100644
--- a/webrtc/test/win/d3d_renderer.cc
+++ b/webrtc/test/win/d3d_renderer.cc
@@ -191,8 +191,7 @@
   vertex_buffer_->Unlock();
 }
 
-void D3dRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                              int /*render_delay_ms*/) {
+void D3dRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   if (static_cast<size_t>(frame.width()) != width_ ||
       static_cast<size_t>(frame.height()) != height_) {
     Resize(static_cast<size_t>(frame.width()),
diff --git a/webrtc/test/win/d3d_renderer.h b/webrtc/test/win/d3d_renderer.h
index cf2319e..575d9c7 100644
--- a/webrtc/test/win/d3d_renderer.h
+++ b/webrtc/test/win/d3d_renderer.h
@@ -27,8 +27,7 @@
                              size_t height);
   virtual ~D3dRenderer();
 
-  void RenderFrame(const webrtc::VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  private:
   D3dRenderer(size_t width, size_t height);
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index 812c38e..b86b2c9 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -127,12 +127,7 @@
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
-      event_.Set();
-    }
-
-    bool IsTextureSupported() const override { return false; }
+    void OnFrame(const VideoFrame& video_frame) override { event_.Set(); }
 
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
 
@@ -195,11 +190,7 @@
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
-      event_.Set();
-    }
-    bool IsTextureSupported() const override { return false; }
+    void OnFrame(const VideoFrame& video_frame) override { event_.Set(); }
 
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
 
@@ -272,15 +263,12 @@
       (*receive_configs)[0].decoders[0].decoder = decoder_.get();
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       const int kRequiredFrames = 500;
       if (++frame_counter_ == kRequiredFrames)
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
    private:
     std::unique_ptr<webrtc::VideoEncoder> encoder_;
     std::unique_ptr<webrtc::VideoDecoder> decoder_;
@@ -328,15 +316,12 @@
       (*receive_configs)[0].decoders[0].decoder = decoder_.get();
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       const int kRequiredFrames = 500;
       if (++frame_counter_ == kRequiredFrames)
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
    private:
     std::unique_ptr<webrtc::VideoEncoder> encoder_;
     std::unique_ptr<webrtc::VideoDecoder> decoder_;
@@ -521,8 +506,7 @@
       return SEND_PACKET;
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       // Rendering frame with timestamp of packet that was dropped -> FEC
       // protection worked.
@@ -530,8 +514,6 @@
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     enum {
       kFirstPacket,
       kDropEveryOtherPacketUntilFec,
@@ -850,16 +832,13 @@
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       EXPECT_EQ(0, *video_frame.buffer(kYPlane))
           << "Rendered frame should have zero luma which is applied by the "
              "pre-render callback.";
       event_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
     rtc::Event event_;
   } renderer;
@@ -997,8 +976,7 @@
       return SEND_PACKET;
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       if (received_pli_ &&
           video_frame.timestamp() > highest_dropped_timestamp_) {
@@ -1008,8 +986,6 @@
         frames_to_drop_ = kPacketsToDrop;
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     void ModifyVideoConfigs(
         VideoSendStream::Config* send_config,
         std::vector<VideoReceiveStream::Config>* receive_configs,
@@ -1323,8 +1299,7 @@
           frame_generator_(frame_generator),
           done_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       EXPECT_EQ(settings_.width, video_frame.width());
       EXPECT_EQ(settings_.height, video_frame.height());
       (*frame_generator_)->Stop();
@@ -1333,8 +1308,6 @@
 
     uint32_t Ssrc() { return ssrc_; }
 
-    bool IsTextureSupported() const override { return false; }
-
     bool Wait() { return done_.Wait(kDefaultTimeoutMs); }
 
    private:
diff --git a/webrtc/video/replay.cc b/webrtc/video/replay.cc
index 52b6ff6..3bfa7ff 100644
--- a/webrtc/video/replay.cc
+++ b/webrtc/video/replay.cc
@@ -152,10 +152,9 @@
   }
 
  private:
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     if (renderer_ != nullptr)
-      renderer_->RenderFrame(video_frame, time_to_render_ms);
+      renderer_->OnFrame(video_frame);
     if (basename_.empty())
       return;
     if (last_width_ != video_frame.width() ||
@@ -182,8 +181,6 @@
     PrintVideoFrame(video_frame, file_);
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   const std::string basename_;
   VideoRenderer* const renderer_;
   FILE* file_;
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index 18decb2..e355a74 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -47,7 +47,7 @@
   // TODO(pbos): Remove local rendering, it should be handled by the client code
   // if required.
   if (local_renderer_)
-    local_renderer_->RenderFrame(video_frame, 0);
+    local_renderer_->OnFrame(video_frame);
 
   stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());
 
diff --git a/webrtc/video/video_quality_test.cc b/webrtc/video/video_quality_test.cc
index 8dfbc79..e2eb913 100644
--- a/webrtc/video/video_quality_test.cc
+++ b/webrtc/video/video_quality_test.cc
@@ -205,8 +205,7 @@
       encoded_frame_size_.AddSample(frame.length_);
   }
 
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     int64_t render_time_ms =
         Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
 
@@ -246,8 +245,6 @@
     last_rendered_frame_ = video_frame;
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   void Wait() {
     // Frame comparisons can be very expensive. Wait for test to be done, but
     // at time-out check if frames_processed is going up. If so, give it more
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index 6cc2794..cb2515b 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -388,14 +388,8 @@
   if (vie_sync_.GetStreamSyncOffsetInMs(video_frame, &sync_offset_ms))
     stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms);
 
-  // TODO(pbos): Wire up config_.render->IsTextureSupported() and convert if not
-  // supported. Or provide methods for converting a texture frame in
-  // VideoFrame.
-
   if (config_.renderer != nullptr)
-    config_.renderer->RenderFrame(
-        video_frame,
-        video_frame.render_time_ms() - clock_->TimeInMilliseconds());
+    config_.renderer->OnFrame(video_frame);
 
   stats_proxy_.OnRenderedFrame(video_frame);
 
diff --git a/webrtc/video_renderer.h b/webrtc/video_renderer.h
index 7cb9ed1..85c4ac5 100644
--- a/webrtc/video_renderer.h
+++ b/webrtc/video_renderer.h
@@ -11,20 +11,14 @@
 #ifndef WEBRTC_VIDEO_RENDERER_H_
 #define WEBRTC_VIDEO_RENDERER_H_
 
+#include "webrtc/media/base/videosinkinterface.h"
+
 namespace webrtc {
 
 class VideoFrame;
 
-class VideoRenderer {
+class VideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
  public:
-  // This function should return as soon as possible and not block until it's
-  // time to render the frame.
-  // TODO(mflodman) Remove time_to_render_ms when VideoFrame contains NTP.
-  virtual void RenderFrame(const VideoFrame& video_frame,
-                           int time_to_render_ms) = 0;
-
-  virtual bool IsTextureSupported() const = 0;
-
   // This function returns true if WebRTC should not delay frames for
   // smoothness. In general, this case means the renderer can schedule frames to
   // optimize smoothness.