Delete unused and almost unused frame-related methods.
webrtc::VideoFrame::set_video_frame_buffer
webrtc::VideoFrame::ConvertNativeToI420Frame
cricket::WebRtcVideoFrame::InitToBlack
VideoFrameBuffer::data
VideoFrameBuffer::stride
VideoFrameBuffer::MutableData
TBR=tkchin@webrtc.org # Refactoring affecting RTCVideoFrame
BUG=webrtc:5682
Review-Url: https://codereview.webrtc.org/2065733003
Cr-Original-Commit-Position: refs/heads/master@{#13183}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 76270de4bc2dac188f10f805e6e2fb86693ef864
diff --git a/api/java/jni/androidmediadecoder_jni.cc b/api/java/jni/androidmediadecoder_jni.cc
index 3793756..2ec222f 100644
--- a/api/java/jni/androidmediadecoder_jni.cc
+++ b/api/java/jni/androidmediadecoder_jni.cc
@@ -794,12 +794,12 @@
libyuv::I420Copy(y_ptr, stride,
u_ptr, uv_stride,
v_ptr, uv_stride,
- frame_buffer->MutableData(webrtc::kYPlane),
- frame_buffer->stride(webrtc::kYPlane),
- frame_buffer->MutableData(webrtc::kUPlane),
- frame_buffer->stride(webrtc::kUPlane),
- frame_buffer->MutableData(webrtc::kVPlane),
- frame_buffer->stride(webrtc::kVPlane),
+ frame_buffer->MutableDataY(),
+ frame_buffer->StrideY(),
+ frame_buffer->MutableDataU(),
+ frame_buffer->StrideU(),
+ frame_buffer->MutableDataV(),
+ frame_buffer->StrideV(),
width, height);
} else {
// All other supported formats are nv12.
@@ -808,12 +808,12 @@
libyuv::NV12ToI420(
y_ptr, stride,
uv_ptr, stride,
- frame_buffer->MutableData(webrtc::kYPlane),
- frame_buffer->stride(webrtc::kYPlane),
- frame_buffer->MutableData(webrtc::kUPlane),
- frame_buffer->stride(webrtc::kUPlane),
- frame_buffer->MutableData(webrtc::kVPlane),
- frame_buffer->stride(webrtc::kVPlane),
+ frame_buffer->MutableDataY(),
+ frame_buffer->StrideY(),
+ frame_buffer->MutableDataU(),
+ frame_buffer->StrideU(),
+ frame_buffer->MutableDataV(),
+ frame_buffer->StrideV(),
width, height);
}
// Return output byte buffer back to codec.
diff --git a/api/java/jni/androidmediaencoder_jni.cc b/api/java/jni/androidmediaencoder_jni.cc
index ce1ebc1..20dc150 100644
--- a/api/java/jni/androidmediaencoder_jni.cc
+++ b/api/java/jni/androidmediaencoder_jni.cc
@@ -670,7 +670,8 @@
}
consecutive_full_queue_frame_drops_ = 0;
- VideoFrame input_frame = frame;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
+ frame.video_frame_buffer());
if (scale_) {
// Check framerate before spatial resolution change.
quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
@@ -678,21 +679,22 @@
quality_scaler_.GetScaledResolution();
if (scaled_resolution.width != frame.width() ||
scaled_resolution.height != frame.height()) {
- if (frame.video_frame_buffer()->native_handle() != nullptr) {
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
- static_cast<AndroidTextureBuffer*>(
- frame.video_frame_buffer().get())->CropScaleAndRotate(
- frame.width(), frame.height(), 0, 0,
- scaled_resolution.width, scaled_resolution.height,
- webrtc::kVideoRotation_0));
- input_frame.set_video_frame_buffer(scaled_buffer);
+ if (input_buffer->native_handle() != nullptr) {
+ input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
+ ->CropScaleAndRotate(frame.width(), frame.height(),
+ 0, 0,
+ scaled_resolution.width,
+ scaled_resolution.height,
+ webrtc::kVideoRotation_0);
} else {
- input_frame.set_video_frame_buffer(
- quality_scaler_.GetScaledBuffer(frame.video_frame_buffer()));
+ input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
}
}
}
+ VideoFrame input_frame(input_buffer, frame.timestamp(),
+ frame.render_time_ms(), frame.rotation());
+
if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
ALOGE << "Failed to reconfigure encoder.";
return WEBRTC_VIDEO_CODEC_ERROR;
diff --git a/api/java/jni/androidvideocapturer_jni.cc b/api/java/jni/androidvideocapturer_jni.cc
index 4f3d64b..15cd2d4 100644
--- a/api/java/jni/androidvideocapturer_jni.cc
+++ b/api/java/jni/androidvideocapturer_jni.cc
@@ -215,10 +215,10 @@
libyuv::NV12ToI420Rotate(
y_plane + width * crop_y + crop_x, width,
uv_plane + uv_width * crop_y + crop_x, width,
- buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+ buffer->MutableDataY(), buffer->StrideY(),
// Swap U and V, since we have NV21, not NV12.
- buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
- buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+ buffer->MutableDataV(), buffer->StrideV(),
+ buffer->MutableDataU(), buffer->StrideU(),
crop_width, crop_height, static_cast<libyuv::RotationMode>(
capturer_->apply_rotation() ? rotation : 0));
diff --git a/common_video/corevideo_frame_buffer.cc b/common_video/corevideo_frame_buffer.cc
index 55dc00d..a58ddc7 100644
--- a/common_video/corevideo_frame_buffer.cc
+++ b/common_video/corevideo_frame_buffer.cc
@@ -46,9 +46,9 @@
int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
int ret = libyuv::NV12ToI420(
src_y, src_y_stride, src_uv, src_uv_stride,
- buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
- buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
- buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+ buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(),
width, height);
CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
if (ret) {
diff --git a/common_video/include/video_frame_buffer.h b/common_video/include/video_frame_buffer.h
index dfffb9a..328c0ca 100644
--- a/common_video/include/video_frame_buffer.h
+++ b/common_video/include/video_frame_buffer.h
@@ -48,27 +48,20 @@
// Returns pointer to the pixel data for a given plane. The memory is owned by
// the VideoFrameBuffer object and must not be freed by the caller.
- virtual const uint8_t* DataY() const;
- virtual const uint8_t* DataU() const;
- virtual const uint8_t* DataV() const;
- // Deprecated method.
- // TODO(nisse): Delete after all users are updated.
- virtual const uint8_t* data(PlaneType type) const;
+ virtual const uint8_t* DataY() const = 0;
+ virtual const uint8_t* DataU() const = 0;
+ virtual const uint8_t* DataV() const = 0;
// TODO(nisse): Move MutableData methods to the I420Buffer subclass.
// Non-const data access.
virtual uint8_t* MutableDataY();
virtual uint8_t* MutableDataU();
virtual uint8_t* MutableDataV();
- // Deprecated method. TODO(nisse): Delete after all users are updated.
- virtual uint8_t* MutableData(PlaneType type);
// Returns the number of bytes between successive rows for a given plane.
- virtual int StrideY() const;
- virtual int StrideU() const;
- virtual int StrideV() const;
- // Deprecated method. TODO(nisse): Delete after all users are updated.
- virtual int stride(PlaneType type) const;
+ virtual int StrideY() const = 0;
+ virtual int StrideU() const = 0;
+ virtual int StrideV() const = 0;
// Return the handle of the underlying video frame. This is used when the
// frame is backed by a texture.
diff --git a/common_video/video_frame.cc b/common_video/video_frame.cc
index bfac3a6..463e8ed 100644
--- a/common_video/video_frame.cc
+++ b/common_video/video_frame.cc
@@ -155,20 +155,6 @@
return video_frame_buffer_;
}
-void VideoFrame::set_video_frame_buffer(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
- RTC_DCHECK(buffer);
- video_frame_buffer_ = buffer;
-}
-
-VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
- RTC_DCHECK(video_frame_buffer_->native_handle());
- VideoFrame frame;
- frame.ShallowCopy(*this);
- frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());
- return frame;
-}
-
size_t EncodedImage::GetBufferPaddingBytes(VideoCodecType codec_type) {
switch (codec_type) {
case kVideoCodecVP8:
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index 14e19dc..60ecae3 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -30,54 +30,6 @@
} // namespace
-const uint8_t* VideoFrameBuffer::data(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return DataY();
- case kUPlane:
- return DataU();
- case kVPlane:
- return DataV();
- default:
- RTC_NOTREACHED();
- return nullptr;
- }
-}
-
-const uint8_t* VideoFrameBuffer::DataY() const {
- return data(kYPlane);
-}
-const uint8_t* VideoFrameBuffer::DataU() const {
- return data(kUPlane);
-}
-const uint8_t* VideoFrameBuffer::DataV() const {
- return data(kVPlane);
-}
-
-int VideoFrameBuffer::stride(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return StrideY();
- case kUPlane:
- return StrideU();
- case kVPlane:
- return StrideV();
- default:
- RTC_NOTREACHED();
- return 0;
- }
-}
-
-int VideoFrameBuffer::StrideY() const {
- return stride(kYPlane);
-}
-int VideoFrameBuffer::StrideU() const {
- return stride(kUPlane);
-}
-int VideoFrameBuffer::StrideV() const {
- return stride(kVPlane);
-}
-
uint8_t* VideoFrameBuffer::MutableDataY() {
RTC_NOTREACHED();
return nullptr;
@@ -91,20 +43,6 @@
return nullptr;
}
-uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
- switch (type) {
- case kYPlane:
- return MutableDataY();
- case kUPlane:
- return MutableDataU();
- case kVPlane:
- return MutableDataV();
- default:
- RTC_NOTREACHED();
- return nullptr;
- }
-}
-
VideoFrameBuffer::~VideoFrameBuffer() {}
I420Buffer::I420Buffer(int width, int height)
diff --git a/media/engine/webrtcvideoframe.cc b/media/engine/webrtcvideoframe.cc
index f77ca70..4f89c8b 100644
--- a/media/engine/webrtcvideoframe.cc
+++ b/media/engine/webrtcvideoframe.cc
@@ -65,20 +65,6 @@
frame->rotation, apply_rotation);
}
-// TODO(nisse): Deprecated, delete as soon as Chrome is updated.
-bool WebRtcVideoFrame::InitToBlack(int w, int h,
- int64_t time_stamp_ns) {
- rtc::scoped_refptr<webrtc::I420Buffer> buffer(
- new rtc::RefCountedObject<webrtc::I420Buffer>(w, h));
- buffer->SetToBlack();
-
- video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
- SetTimeStamp(time_stamp_ns);
- rotation_ = webrtc::kVideoRotation_0;
-
- return true;
-}
-
int WebRtcVideoFrame::width() const {
return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}
diff --git a/media/engine/webrtcvideoframe.h b/media/engine/webrtcvideoframe.h
index 7f2a5c6..487e32e 100644
--- a/media/engine/webrtcvideoframe.h
+++ b/media/engine/webrtcvideoframe.h
@@ -63,9 +63,6 @@
void InitToEmptyBuffer(int w, int h);
void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
- // TODO(nisse): Deprecated, delete as soon as Chrome is updated.
- bool InitToBlack(int w, int h, int64_t time_stamp_ns);
-
int width() const override;
int height() const override;
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index f560a37..563df37 100644
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -123,11 +123,16 @@
// The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
// of a video frame and will be set up to reference |video_frame|'s buffers.
- VideoFrame* video_frame = new VideoFrame();
+
+ // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
  // Refactor to avoid using a VideoFrame object at all.
+
// FFmpeg expects the initial allocation to be zero-initialized according to
// http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
- video_frame->set_video_frame_buffer(
- decoder->pool_.CreateBuffer(width, height));
+ VideoFrame* video_frame = new VideoFrame(
+ decoder->pool_.CreateBuffer(width, height),
+ 0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
+
// DCHECK that we have a continuous buffer as is required.
RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
video_frame->video_frame_buffer()->DataY() +
@@ -355,22 +360,30 @@
video_frame->video_frame_buffer()->DataV());
video_frame->set_timestamp(input_image._timeStamp);
+ int32_t ret;
+
// The decoded image may be larger than what is supposed to be visible, see
// |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
// without copying the underlying buffer.
rtc::scoped_refptr<VideoFrameBuffer> buf = video_frame->video_frame_buffer();
if (av_frame_->width != buf->width() || av_frame_->height != buf->height()) {
- video_frame->set_video_frame_buffer(
+ rtc::scoped_refptr<VideoFrameBuffer> cropped_buf(
new rtc::RefCountedObject<WrappedI420Buffer>(
av_frame_->width, av_frame_->height,
buf->DataY(), buf->StrideY(),
buf->DataU(), buf->StrideU(),
buf->DataV(), buf->StrideV(),
rtc::KeepRefUntilDone(buf)));
+ VideoFrame cropped_frame(
+ cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
+ video_frame->rotation());
+    // TODO(nisse): Timestamp and rotation are both zero here. Change decoder
+ // interface to pass a VideoFrameBuffer instead of a VideoFrame?
+ ret = decoded_image_callback_->Decoded(cropped_frame);
+ } else {
+ // Return decoded frame.
+ ret = decoded_image_callback_->Decoded(*video_frame);
}
-
- // Return decoded frame.
- int32_t ret = decoded_image_callback_->Decoded(*video_frame);
// Stop referencing it, possibly freeing |video_frame|.
av_frame_unref(av_frame_.get());
video_frame = nullptr;
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index 750f742..52ea1d6 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -960,9 +960,9 @@
// release |img_buffer|.
rtc::KeepRefUntilDone(img_buffer)));
- VideoFrame decoded_image;
- decoded_image.set_video_frame_buffer(img_wrapped_buffer);
- decoded_image.set_timestamp(timestamp);
+ VideoFrame decoded_image(img_wrapped_buffer, timestamp,
+ 0 /* render_time_ms */, webrtc::kVideoRotation_0);
+
int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0)
return ret;
diff --git a/modules/video_coding/video_sender.cc b/modules/video_coding/video_sender.cc
index b9c5ea5..e2c0d1f 100644
--- a/modules/video_coding/video_sender.cc
+++ b/modules/video_coding/video_sender.cc
@@ -288,9 +288,17 @@
!_encoder->SupportsNativeHandle()) {
// This module only supports software encoding.
// TODO(pbos): Offload conversion from the encoder thread.
- converted_frame = converted_frame.ConvertNativeToI420Frame();
- RTC_CHECK(!converted_frame.IsZeroSize())
- << "Frame conversion failed, won't be able to encode frame.";
+ rtc::scoped_refptr<VideoFrameBuffer> converted_buffer(
+ converted_frame.video_frame_buffer()->NativeToI420Buffer());
+
+ if (!converted_buffer) {
+ LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
+ return VCM_PARAMETER_ERROR;
+ }
+ converted_frame = VideoFrame(converted_buffer,
+ converted_frame.timestamp(),
+ converted_frame.render_time_ms(),
+ converted_frame.rotation());
}
int32_t ret =
_encoder->Encode(converted_frame, codecSpecificInfo, next_frame_types);
diff --git a/modules/video_processing/frame_preprocessor.cc b/modules/video_processing/frame_preprocessor.cc
index 1d21340..e86bbbb 100644
--- a/modules/video_processing/frame_preprocessor.cc
+++ b/modules/video_processing/frame_preprocessor.cc
@@ -96,19 +96,22 @@
const VideoFrame* current_frame = &frame;
if (denoiser_) {
- rtc::scoped_refptr<I420Buffer>* denoised_frame = &denoised_buffer_[0];
- rtc::scoped_refptr<I420Buffer>* denoised_frame_prev = &denoised_buffer_[1];
+ rtc::scoped_refptr<I420Buffer>* denoised_buffer = &denoised_buffer_[0];
+ rtc::scoped_refptr<I420Buffer>* denoised_buffer_prev = &denoised_buffer_[1];
// Swap the buffer to save one memcpy in DenoiseFrame.
if (denoised_frame_toggle_) {
- denoised_frame = &denoised_buffer_[1];
- denoised_frame_prev = &denoised_buffer_[0];
+ denoised_buffer = &denoised_buffer_[1];
+ denoised_buffer_prev = &denoised_buffer_[0];
}
// Invert the flag.
denoised_frame_toggle_ ^= 1;
- denoiser_->DenoiseFrame(current_frame->video_frame_buffer(), denoised_frame,
- denoised_frame_prev, true);
- denoised_frame_.ShallowCopy(*current_frame);
- denoised_frame_.set_video_frame_buffer(*denoised_frame);
+ denoiser_->DenoiseFrame(current_frame->video_frame_buffer(),
+ denoised_buffer,
+ denoised_buffer_prev, true);
+ denoised_frame_ = VideoFrame(*denoised_buffer,
+ current_frame->timestamp(),
+ current_frame->render_time_ms(),
+ current_frame->rotation());
current_frame = &denoised_frame_;
}
diff --git a/modules/video_processing/spatial_resampler.cc b/modules/video_processing/spatial_resampler.cc
index 74a570f..7c4aae2 100644
--- a/modules/video_processing/spatial_resampler.cc
+++ b/modules/video_processing/spatial_resampler.cc
@@ -58,10 +58,10 @@
scaled_buffer->CropAndScaleFrom(inFrame.video_frame_buffer());
- outFrame->set_video_frame_buffer(scaled_buffer);
- // Setting time parameters to the output frame.
- outFrame->set_timestamp(inFrame.timestamp());
- outFrame->set_render_time_ms(inFrame.render_time_ms());
+ *outFrame = VideoFrame(scaled_buffer,
+ inFrame.timestamp(),
+ inFrame.render_time_ms(),
+ inFrame.rotation());
return VPM_OK;
}
diff --git a/sdk/objc/Framework/Classes/RTCVideoFrame.mm b/sdk/objc/Framework/Classes/RTCVideoFrame.mm
index 872f6be..5b2d258 100644
--- a/sdk/objc/Framework/Classes/RTCVideoFrame.mm
+++ b/sdk/objc/Framework/Classes/RTCVideoFrame.mm
@@ -40,42 +40,42 @@
if (!self.i420Buffer) {
return nullptr;
}
- return self.i420Buffer->data(webrtc::kYPlane);
+ return self.i420Buffer->DataY();
}
- (const uint8_t *)uPlane {
if (!self.i420Buffer) {
return nullptr;
}
- return self.i420Buffer->data(webrtc::kUPlane);
+ return self.i420Buffer->DataU();
}
- (const uint8_t *)vPlane {
if (!self.i420Buffer) {
return nullptr;
}
- return self.i420Buffer->data(webrtc::kVPlane);
+ return self.i420Buffer->DataV();
}
- (int32_t)yPitch {
if (!self.i420Buffer) {
return 0;
}
- return self.i420Buffer->stride(webrtc::kYPlane);
+ return self.i420Buffer->StrideY();
}
- (int32_t)uPitch {
if (!self.i420Buffer) {
return 0;
}
- return self.i420Buffer->stride(webrtc::kUPlane);
+ return self.i420Buffer->StrideU();
}
- (int32_t)vPitch {
if (!self.i420Buffer) {
return 0;
}
- return self.i420Buffer->stride(webrtc::kVPlane);
+ return self.i420Buffer->StrideV();
}
- (int64_t)timeStamp {
diff --git a/video_frame.h b/video_frame.h
index b9ba69b..0f2b9a6 100644
--- a/video_frame.h
+++ b/video_frame.h
@@ -121,14 +121,6 @@
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
const;
- // Set the underlying buffer.
- void set_video_frame_buffer(
- const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer);
-
- // Convert native-handle frame to memory-backed I420 frame. Should not be
- // called on a non-native-handle frame.
- VideoFrame ConvertNativeToI420Frame() const;
-
// Return true if the frame is stored in a texture.
bool is_texture() {
return video_frame_buffer() &&