Removed unused cricket::VideoCapturer methods:
void UpdateAspectRatio(int ratio_w, int ratio_h);
void ClearAspectRatio();
bool Pause(bool paused);
bool Restart(const VideoFormat& capture_format);
bool MuteToBlackThenPause(bool muted);
bool IsMuted() const;
void set_square_pixel_aspect_ratio(bool square_pixel_aspect_ratio);
bool square_pixel_aspect_ratio();
This CL also removes the use of messages and the posting of state changes; SignalStateChange is now fired synchronously from SetCaptureState, as sketched below.
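For reference, a minimal sketch of what that means in practice (the real change is in media/base/videocapturer.cc below): SetCaptureState no longer posts MSG_STATE_CHANGE through rtc::MessageHandler but invokes SignalStateChange directly on the calling thread.

  void VideoCapturer::SetCaptureState(CaptureState state) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    if (state == capture_state_) {
      // Don't trigger a state changed callback if the state hasn't changed.
      return;
    }
    capture_state_ = state;
    // Delivered synchronously; observers run on the capturer's thread.
    SignalStateChange(this, capture_state_);
  }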
Furthermore, a thread checker is added to ensure that these methods are called on only one thread; construction may still happen on a separate thread.
It does not, however, restrict which thread frames are delivered on.
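A minimal sketch of the thread-checker pattern used here (illustrative class name, not part of this CL): detach in the constructor so construction may happen on any thread, then DCHECK in every method that must stay on the capture thread.

  #include "webrtc/base/checks.h"
  #include "webrtc/base/thread_checker.h"

  class SingleThreadedCapturer {
   public:
    SingleThreadedCapturer() {
      // Allow construction on a separate thread; the checker binds to the
      // thread of the first checked call rather than the constructing thread.
      thread_checker_.DetachFromThread();
    }

    bool StartCapturing() {
      RTC_DCHECK(thread_checker_.CalledOnValidThread());
      // ... start the device on the calling (capture) thread ...
      return true;
    }

   private:
    rtc::ThreadChecker thread_checker_;
  };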
There are more features in VideoCapturer::OnFrame related to ARGB screen sharing that can probably be cleaned up in a follow-up CL.
BUG=webrtc:5426
Review URL: https://codereview.webrtc.org/1733673002
Cr-Original-Commit-Position: refs/heads/master@{#11773}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: e9c0cdff2dad2553b6ff6820c0c7429cb2854861
diff --git a/api/androidvideocapturer.cc b/api/androidvideocapturer.cc
index a24d55c..db94289 100644
--- a/api/androidvideocapturer.cc
+++ b/api/androidvideocapturer.cc
@@ -152,7 +152,7 @@
delegate_->Stop();
current_state_ = cricket::CS_STOPPED;
- SignalStateChange(this, current_state_);
+ SetCaptureState(current_state_);
}
bool AndroidVideoCapturer::IsRunning() {
@@ -173,11 +173,7 @@
if (new_state == current_state_)
return;
current_state_ = new_state;
-
- // TODO(perkj): SetCaptureState can not be used since it posts to |thread_|.
- // But |thread_ | is currently just the thread that happened to create the
- // cricket::VideoCapturer.
- SignalStateChange(this, new_state);
+ SetCaptureState(new_state);
}
void AndroidVideoCapturer::OnIncomingFrame(
diff --git a/api/test/fakeperiodicvideocapturer.h b/api/test/fakeperiodicvideocapturer.h
index 2563618..65f83ed 100644
--- a/api/test/fakeperiodicvideocapturer.h
+++ b/api/test/fakeperiodicvideocapturer.h
@@ -19,7 +19,8 @@
namespace webrtc {
-class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer {
+class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer,
+ public rtc::MessageHandler {
public:
FakePeriodicVideoCapturer() {
std::vector<cricket::VideoFormat> formats;
@@ -55,8 +56,6 @@
GetCaptureFormat()->interval / rtc::kNumNanosecsPerMillisec),
this, MSG_CREATEFRAME);
}
- } else {
- FakeVideoCapturer::OnMessage(msg);
}
}
diff --git a/api/videosource.cc b/api/videosource.cc
index 3a55cd5..7d72e2a 100644
--- a/api/videosource.cc
+++ b/api/videosource.cc
@@ -55,7 +55,6 @@
case cricket::CS_RUNNING:
return MediaSourceInterface::kLive;
case cricket::CS_FAILED:
- case cricket::CS_NO_DEVICE:
case cricket::CS_STOPPED:
return MediaSourceInterface::kEnded;
case cricket::CS_PAUSED:
@@ -403,7 +402,7 @@
// This signal is triggered for all video capturers. Not only the one we are
// interested in.
void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
- cricket::CaptureState capture_state) {
+ cricket::CaptureState capture_state) {
if (capturer == video_capturer_.get()) {
SetState(GetReadyState(capture_state));
}
diff --git a/media/base/capturemanager.cc b/media/base/capturemanager.cc
index 3628fb3..0e72477 100644
--- a/media/base/capturemanager.cc
+++ b/media/base/capturemanager.cc
@@ -198,62 +198,6 @@
return true;
}
-bool CaptureManager::RestartVideoCapture(
- VideoCapturer* video_capturer,
- const VideoFormat& previous_format,
- const VideoFormat& desired_format,
- CaptureManager::RestartOptions options) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!IsCapturerRegistered(video_capturer)) {
- LOG(LS_ERROR) << "RestartVideoCapture: video_capturer is not registered.";
- return false;
- }
- // Start the new format first. This keeps the capturer running.
- if (!StartVideoCapture(video_capturer, desired_format)) {
- LOG(LS_ERROR) << "RestartVideoCapture: unable to start video capture with "
- "desired_format=" << desired_format.ToString();
- return false;
- }
- // Stop the old format.
- if (!StopVideoCapture(video_capturer, previous_format)) {
- LOG(LS_ERROR) << "RestartVideoCapture: unable to stop video capture with "
- "previous_format=" << previous_format.ToString();
- // Undo the start request we just performed.
- StopVideoCapture(video_capturer, desired_format);
- return false;
- }
-
- switch (options) {
- case kForceRestart: {
- VideoCapturerState* capture_state = GetCaptureState(video_capturer);
- ASSERT(capture_state && capture_state->start_count() > 0);
- // Try a restart using the new best resolution.
- VideoFormat highest_asked_format =
- capture_state->GetHighestFormat(video_capturer);
- VideoFormat capture_format;
- if (video_capturer->GetBestCaptureFormat(highest_asked_format,
- &capture_format)) {
- if (!video_capturer->Restart(capture_format)) {
- LOG(LS_ERROR) << "RestartVideoCapture: Restart failed.";
- }
- } else {
- LOG(LS_WARNING)
- << "RestartVideoCapture: Couldn't find a best capture format for "
- << highest_asked_format.ToString();
- }
- break;
- }
- case kRequestRestart:
- // TODO(ryanpetrie): Support restart requests. Should this
- // to-be-implemented logic be used for {Start,Stop}VideoCapture as well?
- break;
- default:
- LOG(LS_ERROR) << "Unknown/unimplemented RestartOption";
- break;
- }
- return true;
-}
-
void CaptureManager::AddVideoSink(VideoCapturer* video_capturer,
rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/media/base/capturemanager.h b/media/base/capturemanager.h
index 93f8e18..9445f27 100644
--- a/media/base/capturemanager.h
+++ b/media/base/capturemanager.h
@@ -53,15 +53,6 @@
virtual bool StopVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& format);
- // Possibly restarts the capturer. If |options| is set to kRequestRestart,
- // the CaptureManager chooses whether this request can be handled with the
- // current state or if a restart is actually needed. If |options| is set to
- // kForceRestart, the capturer is restarted.
- virtual bool RestartVideoCapture(VideoCapturer* video_capturer,
- const VideoFormat& previous_format,
- const VideoFormat& desired_format,
- RestartOptions options);
-
virtual void AddVideoSink(VideoCapturer* video_capturer,
rtc::VideoSinkInterface<VideoFrame>* sink);
virtual void RemoveVideoSink(VideoCapturer* video_capturer,
diff --git a/media/base/capturemanager_unittest.cc b/media/base/capturemanager_unittest.cc
index 2feaf15..e643d9d 100644
--- a/media/base/capturemanager_unittest.cc
+++ b/media/base/capturemanager_unittest.cc
@@ -160,41 +160,3 @@
EXPECT_FALSE(capture_manager_.StopVideoCapture(&video_capturer_,
format_vga_));
}
-
-TEST_F(CaptureManagerTest, TestForceRestart) {
- EXPECT_TRUE(capture_manager_.StartVideoCapture(&video_capturer_,
- format_qvga_));
- capture_manager_.AddVideoSink(&video_capturer_, &video_renderer_);
- EXPECT_EQ_WAIT(1, callback_count(), kMsCallbackWait);
- EXPECT_TRUE(video_capturer_.CaptureFrame());
- EXPECT_EQ(1, NumFramesRendered());
- EXPECT_TRUE(WasRenderedResolution(format_qvga_));
- // Now restart with vga.
- EXPECT_TRUE(capture_manager_.RestartVideoCapture(
- &video_capturer_, format_qvga_, format_vga_,
- cricket::CaptureManager::kForceRestart));
- EXPECT_TRUE(video_capturer_.CaptureFrame());
- EXPECT_EQ(2, NumFramesRendered());
- EXPECT_TRUE(WasRenderedResolution(format_vga_));
- EXPECT_TRUE(capture_manager_.StopVideoCapture(&video_capturer_,
- format_vga_));
-}
-
-TEST_F(CaptureManagerTest, TestRequestRestart) {
- EXPECT_TRUE(capture_manager_.StartVideoCapture(&video_capturer_,
- format_vga_));
- capture_manager_.AddVideoSink(&video_capturer_, &video_renderer_);
- EXPECT_EQ_WAIT(1, callback_count(), kMsCallbackWait);
- EXPECT_TRUE(video_capturer_.CaptureFrame());
- EXPECT_EQ(1, NumFramesRendered());
- EXPECT_TRUE(WasRenderedResolution(format_vga_));
- // Now request restart with qvga.
- EXPECT_TRUE(capture_manager_.RestartVideoCapture(
- &video_capturer_, format_vga_, format_qvga_,
- cricket::CaptureManager::kRequestRestart));
- EXPECT_TRUE(video_capturer_.CaptureFrame());
- EXPECT_EQ(2, NumFramesRendered());
- EXPECT_TRUE(WasRenderedResolution(format_vga_));
- EXPECT_TRUE(capture_manager_.StopVideoCapture(&video_capturer_,
- format_qvga_));
-}
diff --git a/media/base/videocapturer.cc b/media/base/videocapturer.cc
index 2d6a15e..cdaf8ba 100644
--- a/media/base/videocapturer.cc
+++ b/media/base/videocapturer.cc
@@ -19,34 +19,18 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/systeminfo.h"
#include "webrtc/media/base/videoframefactory.h"
-
-#if defined(HAVE_WEBRTC_VIDEO)
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/media/engine/webrtcvideoframefactory.h"
-#endif // HAVE_WEBRTC_VIDEO
namespace cricket {
namespace {
-// TODO(thorcarpenter): This is a BIG hack to flush the system with black
-// frames. Frontends should coordinate to update the video state of a muted
-// user. When all frontends to this consider removing the black frame business.
-const int kNumBlackFramesOnMute = 30;
-
-// MessageHandler constants.
-enum {
- MSG_DO_PAUSE = 0,
- MSG_DO_UNPAUSE,
- MSG_STATE_CHANGE
-};
-
static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
#ifdef WEBRTC_LINUX
static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
#endif
static const int kDefaultScreencastFps = 5;
-typedef rtc::TypedMessageData<CaptureState> StateChangeParams;
// Limit stats data collections to ~20 seconds of 30fps data before dropping
// old data in case stats aren't reset for long periods of time.
@@ -81,23 +65,16 @@
// Implementation of class VideoCapturer
/////////////////////////////////////////////////////////////////////
VideoCapturer::VideoCapturer()
- : thread_(rtc::Thread::Current()),
- adapt_frame_drops_data_(kMaxAccumulatorSize),
+ : adapt_frame_drops_data_(kMaxAccumulatorSize),
frame_time_data_(kMaxAccumulatorSize),
apply_rotation_(true) {
- Construct();
-}
-
-VideoCapturer::VideoCapturer(rtc::Thread* thread)
- : thread_(thread),
- adapt_frame_drops_data_(kMaxAccumulatorSize),
- frame_time_data_(kMaxAccumulatorSize),
- apply_rotation_(true) {
+ thread_checker_.DetachFromThread();
Construct();
}
void VideoCapturer::Construct() {
- ClearAspectRatio();
+ ratio_w_ = 0;
+ ratio_h_ = 0;
enable_camera_list_ = false;
square_pixel_aspect_ratio_ = false;
capture_state_ = CS_STOPPED;
@@ -108,19 +85,15 @@
SignalVideoFrame.connect(this, &VideoCapturer::OnFrame);
scaled_width_ = 0;
scaled_height_ = 0;
- muted_ = false;
- black_frame_count_down_ = kNumBlackFramesOnMute;
enable_video_adapter_ = true;
adapt_frame_drops_ = 0;
previous_frame_time_ = 0.0;
-#ifdef HAVE_WEBRTC_VIDEO
// There are lots of video capturers out there that don't call
// set_frame_factory. We can either go change all of them, or we
// can set this default.
// TODO(pthatcher): Remove this hack and require the frame factory
// to be passed in the constructor.
set_frame_factory(new WebRtcVideoFrameFactory());
-#endif
}
const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
@@ -128,6 +101,7 @@
}
bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
previous_frame_time_ = frame_length_time_reporter_.TimerNow();
CaptureState result = Start(capture_format);
const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
@@ -140,104 +114,18 @@
return true;
}
-void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
- if (ratio_w == 0 || ratio_h == 0) {
- LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
- << ratio_w << "x" << ratio_h;
- return;
- }
- ratio_w_ = ratio_w;
- ratio_h_ = ratio_h;
-}
-
-void VideoCapturer::ClearAspectRatio() {
- ratio_w_ = 0;
- ratio_h_ = 0;
-}
-
-// Override this to have more control of how your device is started/stopped.
-bool VideoCapturer::Pause(bool pause) {
- if (pause) {
- if (capture_state() == CS_PAUSED) {
- return true;
- }
- bool is_running = capture_state() == CS_STARTING ||
- capture_state() == CS_RUNNING;
- if (!is_running) {
- LOG(LS_ERROR) << "Cannot pause a stopped camera.";
- return false;
- }
- LOG(LS_INFO) << "Pausing a camera.";
- rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
- capture_format_ ? new VideoFormat(*capture_format_) : NULL);
- Stop();
- SetCaptureState(CS_PAUSED);
- // If you override this function be sure to restore the capture format
- // after calling Stop().
- SetCaptureFormat(capture_format_when_paused.get());
- } else { // Unpause.
- if (capture_state() != CS_PAUSED) {
- LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
- return false;
- }
- if (!capture_format_) {
- LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
- return false;
- }
- if (muted_) {
- LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
- return false;
- }
- LOG(LS_INFO) << "Unpausing a camera.";
- if (!Start(*capture_format_)) {
- LOG(LS_ERROR) << "Camera failed to start when unpausing.";
- return false;
- }
- }
- return true;
-}
-
-bool VideoCapturer::Restart(const VideoFormat& capture_format) {
- if (!IsRunning()) {
- return StartCapturing(capture_format);
- }
-
- if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
- // The reqested format is the same; nothing to do.
- return true;
- }
-
- Stop();
- return StartCapturing(capture_format);
-}
-
-bool VideoCapturer::MuteToBlackThenPause(bool muted) {
- if (muted == IsMuted()) {
- return true;
- }
-
- LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
- muted_ = muted; // Do this before calling Pause().
- if (muted) {
- // Reset black frame count down.
- black_frame_count_down_ = kNumBlackFramesOnMute;
- // Following frames will be overritten with black, then the camera will be
- // paused.
- return true;
- }
- // Start the camera.
- thread_->Clear(this, MSG_DO_PAUSE);
- return Pause(false);
-}
-
void VideoCapturer::SetSupportedFormats(
const std::vector<VideoFormat>& formats) {
+ // This method is OK to call during initialization on a separate thread.
+ RTC_DCHECK(capture_state_ == CS_STOPPED ||
+ thread_checker_.CalledOnValidThread());
supported_formats_ = formats;
UpdateFilteredSupportedFormats();
}
bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
VideoFormat* best_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
// TODO(fbarchard): Directly support max_format.
UpdateFilteredSupportedFormats();
const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
@@ -276,6 +164,7 @@
}
void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
max_format_.reset(new VideoFormat(max_format));
LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
UpdateFilteredSupportedFormats();
@@ -319,17 +208,20 @@
void VideoCapturer::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
}
void VideoCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
apply_rotation_ = wants.rotation_applied;
if (frame_factory_) {
frame_factory_->SetApplyRotation(apply_rotation_);
@@ -338,37 +230,28 @@
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
const CapturedFrame* captured_frame) {
- if (muted_) {
- if (black_frame_count_down_ == 0) {
- thread_->Post(this, MSG_DO_PAUSE, NULL);
- } else {
- --black_frame_count_down_;
- }
- }
-
if (!broadcaster_.frame_wanted()) {
return;
}
// Use a temporary buffer to scale
rtc::scoped_ptr<uint8_t[]> scale_buffer;
-
if (IsScreencast()) {
int scaled_width, scaled_height;
- int desired_screencast_fps = capture_format_.get() ?
- VideoFormat::IntervalToFps(capture_format_->interval) :
- kDefaultScreencastFps;
+ int desired_screencast_fps =
+ capture_format_.get()
+ ? VideoFormat::IntervalToFps(capture_format_->interval)
+ : kDefaultScreencastFps;
ComputeScale(captured_frame->width, captured_frame->height,
desired_screencast_fps, &scaled_width, &scaled_height);
if (FOURCC_ARGB == captured_frame->fourcc &&
(scaled_width != captured_frame->width ||
- scaled_height != captured_frame->height)) {
+ scaled_height != captured_frame->height)) {
if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
- LOG(LS_INFO) << "Scaling Screencast from "
- << captured_frame->width << "x"
- << captured_frame->height << " to "
- << scaled_width << "x" << scaled_height;
+ LOG(LS_INFO) << "Scaling Screencast from " << captured_frame->width
+ << "x" << captured_frame->height << " to " << scaled_width
+ << "x" << scaled_height;
scaled_width_ = scaled_width;
scaled_height_ = scaled_height;
}
@@ -397,7 +280,6 @@
const int kArgbBpp = 4;
// TODO(fbarchard): Make a helper function to adjust pixels to square.
// TODO(fbarchard): Hook up experiment to scaling.
- // TODO(fbarchard): Avoid scale and convert if muted.
// Temporary buffer is scoped here so it will persist until i420_frame.Init()
// makes a copy of the frame, converting to I420.
rtc::scoped_ptr<uint8_t[]> temp_buffer;
@@ -525,10 +407,6 @@
return;
}
- if (muted_) {
- // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
- adapted_frame->SetToBlack();
- }
SignalVideoFrame(this, adapted_frame.get());
UpdateStats(captured_frame);
}
@@ -538,35 +416,13 @@
}
void VideoCapturer::SetCaptureState(CaptureState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (state == capture_state_) {
// Don't trigger a state changed callback if the state hasn't changed.
return;
}
- StateChangeParams* state_params = new StateChangeParams(state);
capture_state_ = state;
- thread_->Post(this, MSG_STATE_CHANGE, state_params);
-}
-
-void VideoCapturer::OnMessage(rtc::Message* message) {
- switch (message->message_id) {
- case MSG_STATE_CHANGE: {
- rtc::scoped_ptr<StateChangeParams> p(
- static_cast<StateChangeParams*>(message->pdata));
- SignalStateChange(this, p->data());
- break;
- }
- case MSG_DO_PAUSE: {
- Pause(true);
- break;
- }
- case MSG_DO_UNPAUSE: {
- Pause(false);
- break;
- }
- default: {
- ASSERT(false);
- }
- }
+ SignalStateChange(this, capture_state_);
}
// Get the distance between the supported and desired formats.
@@ -578,6 +434,7 @@
// otherwise, we use preference.
int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
const VideoFormat& supported) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
int64_t distance = kMaxDistance;
// Check fourcc.
@@ -688,6 +545,7 @@
}
bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (!enable_camera_list_) {
return false;
}
diff --git a/media/base/videocapturer.h b/media/base/videocapturer.h
index 543af02..b904582 100644
--- a/media/base/videocapturer.h
+++ b/media/base/videocapturer.h
@@ -20,12 +20,11 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/media/base/videosourceinterface.h"
-#include "webrtc/base/messagehandler.h"
#include "webrtc/base/rollingaccumulator.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
-#include "webrtc/base/thread.h"
#include "webrtc/base/timing.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videobroadcaster.h"
@@ -37,8 +36,6 @@
namespace cricket {
// Current state of the capturer.
-// TODO(hellner): CS_NO_DEVICE is an error code not a capture state. Separate
-// error codes and states.
enum CaptureState {
CS_STOPPED, // The capturer has been stopped or hasn't started yet.
CS_STARTING, // The capturer is in the process of starting. Note, it may
@@ -47,7 +44,6 @@
// capturing.
CS_PAUSED, // The capturer has been paused.
CS_FAILED, // The capturer failed to start.
- CS_NO_DEVICE, // The capturer has no device and consequently failed to start.
};
class VideoFrame;
@@ -91,13 +87,13 @@
// The captured frames may need to be adapted (for example, cropping).
// Video adaptation is built into and enabled by default. After a frame has
// been captured from the device, it is sent to the video adapter, then out to
-// the encoder.
+// the sinks.
//
// Programming model:
// Create an object of a subclass of VideoCapturer
// Initialize
// SignalStateChange.connect()
-// SignalFrameCaptured.connect()
+// AddOrUpdateSink()
// Find the capture format for Start() by either calling GetSupportedFormats()
// and selecting one of the supported or calling GetBestCaptureFormat().
// video_adapter()->OnOutputFormatRequest(desired_encoding_format)
@@ -111,13 +107,10 @@
// thread safe.
//
class VideoCapturer : public sigslot::has_slots<>,
- public rtc::MessageHandler,
public rtc::VideoSourceInterface<cricket::VideoFrame> {
public:
- // All signals are marshalled to |thread| or the creating thread if
- // none is provided.
VideoCapturer();
- explicit VideoCapturer(rtc::Thread* thread);
+
virtual ~VideoCapturer() {}
// Gets the id of the underlying device, which is available after the capturer
@@ -163,12 +156,6 @@
// CS_FAILED: if the capturer failes to start..
// CS_NO_DEVICE: if the capturer has no device and fails to start.
virtual CaptureState Start(const VideoFormat& capture_format) = 0;
- // Sets the desired aspect ratio. If the capturer is capturing at another
- // aspect ratio it will crop the width or the height so that asked for
- // aspect ratio is acheived. Note that ratio_w and ratio_h do not need to be
- // relatively prime.
- void UpdateAspectRatio(int ratio_w, int ratio_h);
- void ClearAspectRatio();
// Get the current capture format, which is set by the Start() call.
// Note that the width and height of the captured frames may differ from the
@@ -178,23 +165,10 @@
return capture_format_.get();
}
- // Pause the video capturer.
- virtual bool Pause(bool paused);
// Stop the video capturer.
virtual void Stop() = 0;
// Check if the video capturer is running.
virtual bool IsRunning() = 0;
- // Restart the video capturer with the new |capture_format|.
- // Default implementation stops and starts the capturer.
- virtual bool Restart(const VideoFormat& capture_format);
- // TODO(thorcarpenter): This behavior of keeping the camera open just to emit
- // black frames is a total hack and should be fixed.
- // When muting, produce black frames then pause the camera.
- // When unmuting, start the camera. Camera starts unmuted.
- virtual bool MuteToBlackThenPause(bool muted);
- virtual bool IsMuted() const {
- return muted_;
- }
CaptureState capture_state() const {
return capture_state_;
}
@@ -219,14 +193,6 @@
return enable_camera_list_;
}
- // Enable scaling to ensure square pixels.
- void set_square_pixel_aspect_ratio(bool square_pixel_aspect_ratio) {
- square_pixel_aspect_ratio_ = square_pixel_aspect_ratio;
- }
- bool square_pixel_aspect_ratio() {
- return square_pixel_aspect_ratio_;
- }
-
// Signal all capture state changes that are not a direct result of calling
// Start().
sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange;
@@ -288,9 +254,6 @@
void SetCaptureState(CaptureState state);
- // Marshals SignalStateChange onto thread_.
- void OnMessage(rtc::Message* message) override;
-
// subclasses override this virtual method to provide a vector of fourccs, in
// order of preference, that are expected by the media engine.
virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) = 0;
@@ -339,7 +302,7 @@
const rtc::RollingAccumulator<T>& data,
VariableInfo<T>* stats);
- rtc::Thread* thread_;
+ rtc::ThreadChecker thread_checker_;
std::string id_;
CaptureState capture_state_;
rtc::scoped_ptr<VideoFrameFactory> frame_factory_;
@@ -354,8 +317,6 @@
bool square_pixel_aspect_ratio_; // Enable scaling to square pixels.
int scaled_width_; // Current output size from ComputeScale.
int scaled_height_;
- bool muted_;
- int black_frame_count_down_;
rtc::VideoBroadcaster broadcaster_;
bool enable_video_adapter_;
diff --git a/media/base/videocapturer_unittest.cc b/media/base/videocapturer_unittest.cc
index f385d59..849fc0d 100644
--- a/media/base/videocapturer_unittest.cc
+++ b/media/base/videocapturer_unittest.cc
@@ -26,7 +26,6 @@
const int kMsCallbackWait = 500;
// For HD only the height matters.
const int kMinHdHeight = 720;
-const uint32_t kTimeout = 5000U;
} // namespace
@@ -75,90 +74,6 @@
EXPECT_EQ(2, num_state_changes());
}
-TEST_F(VideoCapturerTest, TestRestart) {
- EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
- 640,
- 480,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420)));
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
- EXPECT_EQ(1, num_state_changes());
- EXPECT_TRUE(capturer_.Restart(cricket::VideoFormat(
- 320,
- 240,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420)));
- EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_GE(1, num_state_changes());
- capturer_.Stop();
- rtc::Thread::Current()->ProcessMessages(100);
- EXPECT_FALSE(capturer_.IsRunning());
-}
-
-TEST_F(VideoCapturerTest, TestStartingWithRestart) {
- EXPECT_FALSE(capturer_.IsRunning());
- EXPECT_TRUE(capturer_.Restart(cricket::VideoFormat(
- 640,
- 480,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420)));
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
-}
-
-TEST_F(VideoCapturerTest, TestRestartWithSameFormat) {
- cricket::VideoFormat format(640, 480,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420);
- EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(format));
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
- EXPECT_EQ(1, num_state_changes());
- EXPECT_TRUE(capturer_.Restart(format));
- EXPECT_EQ(cricket::CS_RUNNING, capture_state());
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ(1, num_state_changes());
-}
-
-TEST_F(VideoCapturerTest, CameraOffOnMute) {
- EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
- 640,
- 480,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420)));
- EXPECT_TRUE(capturer_.IsRunning());
- EXPECT_EQ(0, renderer_.num_rendered_frames());
- EXPECT_TRUE(capturer_.CaptureFrame());
- EXPECT_EQ(1, renderer_.num_rendered_frames());
- EXPECT_FALSE(capturer_.IsMuted());
-
- // Mute the camera and expect black output frame.
- capturer_.MuteToBlackThenPause(true);
- EXPECT_TRUE(capturer_.IsMuted());
- for (int i = 0; i < 31; ++i) {
- EXPECT_TRUE(capturer_.CaptureFrame());
- EXPECT_TRUE(renderer_.black_frame());
- }
- EXPECT_EQ(32, renderer_.num_rendered_frames());
- EXPECT_EQ_WAIT(cricket::CS_PAUSED,
- capturer_.capture_state(), kTimeout);
-
- // Verify that the camera is off.
- EXPECT_FALSE(capturer_.CaptureFrame());
- EXPECT_EQ(32, renderer_.num_rendered_frames());
-
- // Unmute the camera and expect non-black output frame.
- capturer_.MuteToBlackThenPause(false);
- EXPECT_FALSE(capturer_.IsMuted());
- EXPECT_EQ_WAIT(cricket::CS_RUNNING,
- capturer_.capture_state(), kTimeout);
- EXPECT_TRUE(capturer_.CaptureFrame());
- EXPECT_FALSE(renderer_.black_frame());
- EXPECT_EQ(33, renderer_.num_rendered_frames());
-}
-
TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
capturer_.SetScreencast(true);
@@ -196,8 +111,6 @@
capturer_.ResetSupportedFormats(formats);
// capturer_ should compensate rotation as default.
- capturer_.UpdateAspectRatio(400, 200);
-
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
@@ -249,8 +162,6 @@
wants.rotation_applied = false;
capturer_.AddOrUpdateSink(&renderer_, wants);
- capturer_.UpdateAspectRatio(400, 200);
-
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
@@ -300,8 +211,6 @@
wants.rotation_applied = false;
capturer_.AddOrUpdateSink(&renderer_, wants);
- capturer_.UpdateAspectRatio(400, 200);
-
EXPECT_EQ(cricket::CS_RUNNING,
capturer_.Start(cricket::VideoFormat(
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
diff --git a/media/engine/webrtcvideocapturer.cc b/media/engine/webrtcvideocapturer.cc
index 1145564..a1848e9 100644
--- a/media/engine/webrtcvideocapturer.cc
+++ b/media/engine/webrtcvideocapturer.cc
@@ -262,7 +262,7 @@
CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
if (!module_) {
LOG(LS_ERROR) << "The capturer has not been initialized";
- return CS_NO_DEVICE;
+ return CS_FAILED;
}
if (start_thread_) {
LOG(LS_ERROR) << "The capturer is already running";
diff --git a/media/engine/webrtcvideocapturer_unittest.cc b/media/engine/webrtcvideocapturer_unittest.cc
index 6d88bcc..17c93fc 100644
--- a/media/engine/webrtcvideocapturer_unittest.cc
+++ b/media/engine/webrtcvideocapturer_unittest.cc
@@ -127,7 +127,7 @@
TEST_F(WebRtcVideoCapturerTest, TestCaptureWithoutInit) {
cricket::VideoFormat format;
- EXPECT_EQ(cricket::CS_NO_DEVICE, capturer_->Start(format));
+ EXPECT_EQ(cricket::CS_FAILED, capturer_->Start(format));
EXPECT_TRUE(capturer_->GetCaptureFormat() == NULL);
EXPECT_FALSE(capturer_->IsRunning());
}
diff --git a/media/engine/webrtcvideoengine2.cc b/media/engine/webrtcvideoengine2.cc
index c6bdafe..5964654 100644
--- a/media/engine/webrtcvideoengine2.cc
+++ b/media/engine/webrtcvideoengine2.cc
@@ -1980,14 +1980,12 @@
info.adapt_reason = CoordinatedVideoAdapter::ADAPTREASON_NONE;
if (capturer_ != NULL) {
- if (!capturer_->IsMuted()) {
- VideoFormat last_captured_frame_format;
- capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
- &info.capturer_frame_time,
- &last_captured_frame_format);
- info.input_frame_width = last_captured_frame_format.width;
- info.input_frame_height = last_captured_frame_format.height;
- }
+ VideoFormat last_captured_frame_format;
+ capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops,
+ &info.capturer_frame_time,
+ &last_captured_frame_format);
+ info.input_frame_width = last_captured_frame_format.width;
+ info.input_frame_height = last_captured_frame_format.height;
if (capturer_->video_adapter() != nullptr) {
info.adapt_changes += capturer_->video_adapter()->adaptation_changes();
info.adapt_reason = capturer_->video_adapter()->adapt_reason();
diff --git a/pc/channelmanager.cc b/pc/channelmanager.cc
index fe0564b..92be70c 100644
--- a/pc/channelmanager.cc
+++ b/pc/channelmanager.cc
@@ -455,16 +455,6 @@
capture_manager_.get(), capturer, video_format));
}
-bool ChannelManager::MuteToBlackThenPause(
- VideoCapturer* video_capturer, bool muted) {
- if (!initialized_) {
- return false;
- }
- worker_thread_->Invoke<void>(
- Bind(&VideoCapturer::MuteToBlackThenPause, video_capturer, muted));
- return true;
-}
-
bool ChannelManager::StopVideoCapture(
VideoCapturer* capturer, const VideoFormat& video_format) {
return initialized_ && worker_thread_->Invoke<bool>(
@@ -472,16 +462,6 @@
capture_manager_.get(), capturer, video_format));
}
-bool ChannelManager::RestartVideoCapture(
- VideoCapturer* video_capturer,
- const VideoFormat& previous_format,
- const VideoFormat& desired_format,
- CaptureManager::RestartOptions options) {
- return initialized_ && worker_thread_->Invoke<bool>(
- Bind(&CaptureManager::RestartVideoCapture, capture_manager_.get(),
- video_capturer, previous_format, desired_format, options));
-}
-
void ChannelManager::AddVideoSink(
VideoCapturer* capturer, rtc::VideoSinkInterface<VideoFrame>* sink) {
if (initialized_)
diff --git a/pc/channelmanager.h b/pc/channelmanager.h
index bd1bc2f..cb0b2a5 100644
--- a/pc/channelmanager.h
+++ b/pc/channelmanager.h
@@ -129,9 +129,6 @@
// formats a a pseudo-handle.
bool StartVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& video_format);
- // When muting, produce black frames then pause the camera.
- // When unmuting, start the camera. Camera starts unmuted.
- bool MuteToBlackThenPause(VideoCapturer* video_capturer, bool muted);
bool StopVideoCapture(VideoCapturer* video_capturer,
const VideoFormat& video_format);
bool RestartVideoCapture(VideoCapturer* video_capturer,