New helper function test::ReadI420Buffer, refactor FrameReader to use it.
This change reduces the number of places where we first fread an I420
frame into a uint8_t buffer, followed by a copy into a frame buffer
object.
BUG=None
Review-Url: https://codereview.webrtc.org/2362683002
Cr-Original-Commit-Position: refs/heads/master@{#14456}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: 115bd153c7f5a64eabbac222bad66e223abbfe2b
diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc
index 7545168..98546f0 100644
--- a/common_video/libyuv/libyuv_unittest.cc
+++ b/common_video/libyuv/libyuv_unittest.cc
@@ -15,6 +15,7 @@
#include "webrtc/test/gtest.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_frame.h"
@@ -35,8 +36,7 @@
virtual void TearDown();
FILE* source_file_;
- VideoFrame orig_frame_;
- std::unique_ptr<uint8_t[]> orig_buffer_;
+ std::unique_ptr<VideoFrame> orig_frame_;
const int width_;
const int height_;
const int size_y_;
@@ -51,9 +51,7 @@
height_(288),
size_y_(width_ * height_),
size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
- frame_length_(CalcBufferSize(kI420, 352, 288)) {
- orig_buffer_.reset(new uint8_t[frame_length_]);
-}
+ frame_length_(CalcBufferSize(kI420, 352, 288)) {}
void TestLibYuv::SetUp() {
const std::string input_file_name = webrtc::test::ResourcePath("foreman_cif",
@@ -62,16 +60,10 @@
ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
input_file_name << "\n";
- EXPECT_EQ(frame_length_,
- fread(orig_buffer_.get(), 1, frame_length_, source_file_));
- orig_frame_.CreateFrame(orig_buffer_.get(),
- orig_buffer_.get() + size_y_,
- orig_buffer_.get() +
- size_y_ + size_uv_,
- width_, height_,
- width_, (width_ + 1) / 2,
- (width_ + 1) / 2,
- kVideoRotation_0);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer(
+ test::ReadI420Buffer(width_, height_, source_file_));
+
+ orig_frame_.reset(new VideoFrame(buffer, kVideoRotation_0, 0));
}
void TestLibYuv::TearDown() {
@@ -100,7 +92,7 @@
printf("\nConvert #%d I420 <-> I420 \n", j);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
- EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
+ EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0, out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
@@ -108,7 +100,7 @@
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
j++;
@@ -120,7 +112,7 @@
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
- EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
+ EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@@ -129,7 +121,7 @@
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// Optimization Speed- quality trade-off => 45 dB only (platform dependant).
EXPECT_GT(ceil(psnr), 44);
@@ -137,11 +129,11 @@
printf("\nConvert #%d I420 <-> UYVY\n", j);
std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
- EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
+ EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@@ -150,7 +142,7 @@
printf("\nConvert #%d I420 <-> YUY2\n", j);
std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
- EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
+ EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0,
@@ -160,14 +152,14 @@
return;
}
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
std::unique_ptr<uint8_t[]> out_rgb565_buffer(
new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0,
- ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
+ ConvertFromI420(*orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0,
@@ -177,7 +169,7 @@
}
j++;
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
// Another example is I420ToRGB24, the psnr is 44
// TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@@ -187,7 +179,7 @@
std::unique_ptr<uint8_t[]> out_argb8888_buffer(
new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0,
- ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
+ ConvertFromI420(*orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@@ -197,7 +189,7 @@
return;
}
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang) Investigate the right psnr should be set for
// I420ToARGB8888,
EXPECT_GT(ceil(psnr), 42);
@@ -221,7 +213,7 @@
rtc::scoped_refptr<I420Buffer> res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
- EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
+ EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@@ -230,11 +222,10 @@
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
- psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
+ psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
}
-
TEST_F(TestLibYuv, RotateTest) {
// Use ConvertToI420 for multiple rotations - see that nothing breaks, all
// memory is properly allocated and end result is equal to the starting point.
@@ -242,18 +233,22 @@
int rotated_height = width_;
int stride_y;
int stride_uv;
+
+ // Assume compact layout, no padding.
+ const uint8_t *orig_buffer = orig_frame_->video_frame_buffer()->DataY();
+
Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
- EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+ EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_90,
rotated_res_i420_buffer.get()));
- EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+ EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_270,
rotated_res_i420_buffer.get()));
rotated_res_i420_buffer = I420Buffer::Create(
width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
- EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
+ EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_180,
rotated_res_i420_buffer.get()));
}
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index 7b0c8e3..8436eb0 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -56,7 +56,6 @@
stats_(stats),
encode_callback_(NULL),
decode_callback_(NULL),
- source_buffer_(NULL),
first_key_frame_has_been_excluded_(false),
last_frame_missing_(false),
initialized_(false),
@@ -81,7 +80,6 @@
// Initialize data structures used by the encoder/decoder APIs
size_t frame_length_in_bytes = frame_reader_->FrameLength();
- source_buffer_ = new uint8_t[frame_length_in_bytes];
last_successful_frame_buffer_ = new uint8_t[frame_length_in_bytes];
// Set fixed properties common for all frames.
// To keep track of spatial resize actions by encoder.
@@ -143,7 +141,6 @@
}
VideoProcessorImpl::~VideoProcessorImpl() {
- delete[] source_buffer_;
delete[] last_successful_frame_buffer_;
encoder_->RegisterEncodeCompleteCallback(NULL);
delete encode_callback_;
@@ -190,17 +187,15 @@
if (frame_number == 0) {
prev_time_stamp_ = -1;
}
- if (frame_reader_->ReadFrame(source_buffer_)) {
- // Copy the source frame to the newly read frame data.
- source_frame_.CreateFrame(source_buffer_, config_.codec_settings->width,
- config_.codec_settings->height, kVideoRotation_0);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer(frame_reader_->ReadFrame());
+ if (buffer) {
+ // Use the frame number as "timestamp" to identify frames
+ VideoFrame source_frame(buffer, frame_number, 0, webrtc::kVideoRotation_0);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
encode_start_ns_ = rtc::TimeNanos();
- // Use the frame number as "timestamp" to identify frames
- source_frame_.set_timestamp(frame_number);
// Decide if we're going to force a keyframe:
std::vector<FrameType> frame_types(1, kVideoFrameDelta);
@@ -213,7 +208,7 @@
encoded_frame_size_ = 0;
encoded_frame_type_ = kVideoFrameDelta;
- int32_t encode_result = encoder_->Encode(source_frame_, NULL, &frame_types);
+ int32_t encode_result = encoder_->Encode(source_frame, NULL, &frame_types);
if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
diff --git a/modules/video_coding/codecs/test/videoprocessor.h b/modules/video_coding/codecs/test/videoprocessor.h
index 21ed55e..0887f4c 100644
--- a/modules/video_coding/codecs/test/videoprocessor.h
+++ b/modules/video_coding/codecs/test/videoprocessor.h
@@ -199,8 +199,6 @@
EncodedImageCallback* encode_callback_;
DecodedImageCallback* decode_callback_;
- // Buffer used for reading the source video file:
- uint8_t* source_buffer_;
// Keep track of the last successful frame, since we need to write that
// when decoding fails:
uint8_t* last_successful_frame_buffer_;
diff --git a/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 214020b..3d54d5d 100644
--- a/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -226,7 +226,8 @@
break;
}
frame_reader_ = new webrtc::test::FrameReaderImpl(
- config_.input_filename, config_.frame_length_in_bytes);
+ config_.input_filename, config_.codec_settings->width,
+ config_.codec_settings->height);
frame_writer_ = new webrtc::test::FrameWriterImpl(
config_.output_filename, config_.frame_length_in_bytes);
ASSERT_TRUE(frame_reader_->Init());
diff --git a/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/modules/video_coding/codecs/test/videoprocessor_unittest.cc
index 77ea472..92445d6 100644
--- a/modules/video_coding/codecs/test/videoprocessor_unittest.cc
+++ b/modules/video_coding/codecs/test/videoprocessor_unittest.cc
@@ -75,7 +75,8 @@
TEST_F(VideoProcessorTest, ProcessFrame) {
ExpectInit();
EXPECT_CALL(encoder_mock_, Encode(_, _, _)).Times(1);
- EXPECT_CALL(frame_reader_mock_, ReadFrame(_)).WillOnce(Return(true));
+ EXPECT_CALL(frame_reader_mock_, ReadFrame())
+ .WillOnce(Return(I420Buffer::Create(50, 50)));
// Since we don't return any callback from the mock, the decoder will not
// be more than initialized...
VideoProcessorImpl video_processor(
diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index 252eb71..40203a4 100644
--- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -17,6 +17,7 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -128,14 +129,11 @@
decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get());
// Using a QCIF image (aligned stride (u,v planes) > width).
// Processing only one frame.
- length_source_frame_ = CalcBufferSize(kI420, kWidth, kHeight);
- source_buffer_.reset(new uint8_t[length_source_frame_]);
source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL);
- // Set input frame.
- ASSERT_EQ(
- fread(source_buffer_.get(), 1, length_source_frame_, source_file_),
- length_source_frame_);
+ rtc::scoped_refptr<VideoFrameBuffer> compact_buffer(
+ test::ReadI420Buffer(kWidth, kHeight, source_file_));
+ ASSERT_TRUE(compact_buffer);
codec_inst_.width = kWidth;
codec_inst_.height = kHeight;
const int kFramerate = 30;
@@ -147,15 +145,15 @@
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);
- rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
- codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv);
- // Using ConvertToI420 to add stride to the image.
- EXPECT_EQ(
- 0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
- codec_inst_.height, 0, kVideoRotation_0,
- buffer.get()));
+ rtc::scoped_refptr<I420Buffer> stride_buffer(
+ I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv));
+
+ // No scaling in our case, just a copy, to add stride to the image.
+ stride_buffer->ScaleFrom(compact_buffer);
+
input_frame_.reset(
- new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
+ new VideoFrame(stride_buffer, kVideoRotation_0, 0));
+ input_frame_->set_timestamp(kTestTimestamp);
}
void SetUpEncodeDecode() {
@@ -202,7 +200,6 @@
std::unique_ptr<VideoDecoder> decoder_;
EncodedImage encoded_frame_;
VideoFrame decoded_frame_;
- size_t length_source_frame_;
VideoCodec codec_inst_;
};
diff --git a/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 8a374db..8eb091e 100644
--- a/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -163,19 +163,16 @@
int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1;
int frames_processed = 0;
- rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
- webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
-
- while (!feof(input_file) &&
- (num_frames == -1 || frames_processed < num_frames)) {
- if (fread(frame_buffer.get(), 1, length, input_file) != length)
- continue;
+ while (num_frames == -1 || frames_processed < num_frames) {
+ rtc::scoped_refptr<VideoFrameBuffer> buffer(
+ test::ReadI420Buffer(width, height, input_file));
+ if (!buffer) {
+ // EOF or read error.
+ break;
+ }
if (frame_cnt >= start_frame) {
- webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
- height, 0, webrtc::kVideoRotation_0, &i420_buffer);
- webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
- webrtc::kVideoRotation_0);
- encoder->Encode(input_frame, NULL, NULL);
+ encoder->Encode(VideoFrame(buffer, webrtc::kVideoRotation_0, 0),
+ NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed;
}
diff --git a/modules/video_processing/test/denoiser_test.cc b/modules/video_processing/test/denoiser_test.cc
index a968859..e6a7fa6 100644
--- a/modules/video_processing/test/denoiser_test.cc
+++ b/modules/video_processing/test/denoiser_test.cc
@@ -125,6 +125,8 @@
EXPECT_EQ(COPY_BLOCK, decision);
}
+// TODO(nisse): Refactor to not use test fixture. Can use some static
+// helper method to open the input file.
TEST_F(VideoProcessingTest, Denoiser) {
// Used in swap buffer.
int denoised_frame_toggle = 0;
@@ -137,14 +139,11 @@
rtc::scoped_refptr<I420Buffer> denoised_frame_sse_neon;
rtc::scoped_refptr<I420Buffer> denoised_frame_prev_sse_neon;
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- // Using ConvertToI420 to add stride to the image.
- rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
- I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, input_buffer.get()));
+ for (;;) {
+ rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer(
+ test::ReadI420Buffer(width_, height_, source_file_));
+ if (!video_frame_buffer)
+ break;
rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@@ -159,9 +158,11 @@
p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
}
- denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c,
+ denoiser_c.DenoiseFrame(video_frame_buffer,
+ p_denoised_c, p_denoised_prev_c,
false);
- denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon,
+ denoiser_sse_neon.DenoiseFrame(video_frame_buffer,
+ p_denoised_sse_neon,
p_denoised_prev_sse_neon, false);
// Invert the flag.
denoised_frame_toggle ^= 1;
diff --git a/modules/video_processing/test/video_processing_unittest.cc b/modules/video_processing/test/video_processing_unittest.cc
index 8c1154a..43238bc 100644
--- a/modules/video_processing/test/video_processing_unittest.cc
+++ b/modules/video_processing/test/video_processing_unittest.cc
@@ -18,6 +18,7 @@
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -34,10 +35,8 @@
int target_height,
VideoProcessing* vpm,
const VideoFrame* out_frame);
-rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
- const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
- int source_width,
- int source_height,
+static rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
+ rtc::scoped_refptr<VideoFrameBuffer> source_buffer,
int offset_x,
int offset_y,
int cropped_width,
@@ -48,7 +47,7 @@
// verified under the same conditions.
static void TestSize(
const VideoFrame& source_frame,
- const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
+ const VideoFrameBuffer& cropped_source_buffer,
int target_width,
int target_height,
double expected_psnr,
@@ -133,15 +132,9 @@
vp_->EnableTemporalDecimation(false);
// Reading test frame
- rtc::scoped_refptr<webrtc::I420Buffer> buffer =
- I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
-
- ASSERT_EQ(static_cast<size_t>(size_y_),
- fread(buffer->MutableDataY(), 1, size_y_, source_file_));
- ASSERT_EQ(static_cast<size_t>(size_uv_),
- fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
- ASSERT_EQ(static_cast<size_t>(size_uv_),
- fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
+ rtc::scoped_refptr<VideoFrameBuffer> video_buffer(
+ test::ReadI420Buffer(width_, height_, source_file_));
+ ASSERT_TRUE(video_buffer);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer.
@@ -149,36 +142,36 @@
// Init the sourceFrame with a timestamp.
int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
- VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
+ VideoFrame video_frame(video_buffer, time_start_ms * 90, time_start_ms,
webrtc::kVideoRotation_0);
// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling:
- TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
- TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
+ TestSize(video_frame, *video_buffer, width_ / 4, height_ / 4, 25.2, vp_);
+ TestSize(video_frame, *video_buffer, width_ / 2, height_ / 2, 28.1, vp_);
// No resampling:
- TestSize(video_frame, buffer, width_, height_, -1, vp_);
- TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
+ TestSize(video_frame, *video_buffer, width_, height_, -1, vp_);
+ TestSize(video_frame, *video_buffer, 2 * width_, 2 * height_, 32.2, vp_);
// Scaling and cropping. The cropped source frame is the largest center
// aligned region that can be used from the source while preserving aspect
// ratio.
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 56, 352, 176),
100, 50, 24.0, vp_);
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 30, 352, 225),
400, 256, 31.3, vp_);
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
+ TestSize(video_frame, *CropBuffer(video_buffer, 68, 0, 216, 288),
480, 640, 32.15, vp_);
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 12, 352, 264),
960, 720, 32.2, vp_);
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 44, 352, 198),
1280, 720, 32.15, vp_);
// Upsampling to odd size.
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 26, 352, 233),
501, 333, 32.05, vp_);
// Downsample to odd size.
- TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
+ TestSize(video_frame, *CropBuffer(video_buffer, 0, 34, 352, 219),
281, 175, 29.3, vp_);
// Stop timer.
@@ -219,20 +212,18 @@
}
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
- const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
- int source_width,
- int source_height,
+ rtc::scoped_refptr<VideoFrameBuffer> source_buffer,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height) {
// Force even.
- offset_x &= 1;
- offset_y &= 1;
+ offset_x &= ~1;
+ offset_y &= ~1;
size_t y_start = offset_x + offset_y * source_buffer->StrideY();
size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
- size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
+ size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideV();
return rtc::scoped_refptr<VideoFrameBuffer>(
new rtc::RefCountedObject<WrappedI420Buffer>(
@@ -243,7 +234,7 @@
}
void TestSize(const VideoFrame& source_frame,
- const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
+ const VideoFrameBuffer& cropped_source,
int target_width,
int target_height,
double expected_psnr,
@@ -260,14 +251,14 @@
// Scale |resampled_source_frame| back to the source scale.
VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame);
- PreprocessFrameAndVerify(resampled_source_frame,
- cropped_source_buffer->width(),
- cropped_source_buffer->height(), vpm, out_frame);
+ // Compute PSNR against the cropped source frame and check expectation.
+ PreprocessFrameAndVerify(resampled_source_frame, cropped_source.width(),
+ cropped_source.height(), vpm, out_frame);
WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
// Compute PSNR against the cropped source frame and check expectation.
double psnr =
- I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
+ I420PSNR(cropped_source, *out_frame->video_frame_buffer());
EXPECT_GT(psnr, expected_psnr);
printf(
"PSNR: %f. PSNR is between source of size %d %d, and a modified "
diff --git a/test/BUILD.gn b/test/BUILD.gn
index 94cf7be..8de7ae1 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -128,6 +128,7 @@
]
deps = [
+ ":video_test_common",
"../base:gtest_prod",
"../base:rtc_base_approved",
"../common_video",
@@ -320,7 +321,6 @@
deps = [
":rtp_test_utils",
":test_support",
- ":video_test_common",
"..:webrtc_common",
"../audio",
"../base:rtc_base_approved",
diff --git a/test/frame_generator.cc b/test/frame_generator.cc
index 27935e4..2f911d8 100644
--- a/test/frame_generator.cc
+++ b/test/frame_generator.cc
@@ -18,7 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "libyuv/convert.h"
+#include "webrtc/test/frame_utils.h"
namespace webrtc {
namespace test {
@@ -101,35 +101,21 @@
return temp_frame_.get();
}
- // TODO(nisse): Have a frame reader in one place. And read directly
- // into the planes of an I420Buffer, the extra copying below is silly.
void ReadNextFrame() {
- size_t bytes_read =
- fread(frame_buffer_.get(), 1, frame_size_, files_[file_index_]);
- if (bytes_read < frame_size_) {
+ last_read_buffer_ =
+ test::ReadI420Buffer(static_cast<int>(width_),
+ static_cast<int>(height_),
+ files_[file_index_]);
+ if (!last_read_buffer_) {
// No more frames to read in this file, rewind and move to next file.
rewind(files_[file_index_]);
file_index_ = (file_index_ + 1) % files_.size();
- bytes_read = fread(frame_buffer_.get(), 1, frame_size_,
- files_[file_index_]);
- assert(bytes_read >= frame_size_);
+ last_read_buffer_ =
+ test::ReadI420Buffer(static_cast<int>(width_),
+ static_cast<int>(height_),
+ files_[file_index_]);
+ RTC_CHECK(last_read_buffer_);
}
-
- size_t half_width = (width_ + 1) / 2;
- size_t size_y = width_ * height_;
- size_t size_uv = half_width * ((height_ + 1) / 2);
- last_read_buffer_ = I420Buffer::Create(
- static_cast<int>(width_), static_cast<int>(height_),
- static_cast<int>(width_), static_cast<int>(half_width),
- static_cast<int>(half_width));
- libyuv::I420Copy(
- frame_buffer_.get(), static_cast<int>(width_),
- frame_buffer_.get() + size_y, static_cast<int>(half_width),
- frame_buffer_.get() + size_y + size_uv, static_cast<int>(half_width),
- last_read_buffer_->MutableDataY(), last_read_buffer_->StrideY(),
- last_read_buffer_->MutableDataU(), last_read_buffer_->StrideU(),
- last_read_buffer_->MutableDataV(), last_read_buffer_->StrideV(),
- static_cast<int>(width_), static_cast<int>(height_));
}
private:
diff --git a/test/frame_utils.cc b/test/frame_utils.cc
index 0fad3ad..b332b8f 100644
--- a/test/frame_utils.cc
+++ b/test/frame_utils.cc
@@ -72,5 +72,22 @@
half_width, half_height);
}
+rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE *f) {
+ int half_width = (width + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> buffer(
+ // Explicit stride, no padding between rows.
+ I420Buffer::Create(width, height, width, half_width, half_width));
+ size_t size_y = static_cast<size_t>(width) * height;
+ size_t size_uv = static_cast<size_t>(half_width) * ((height + 1) / 2);
+
+ if (fread(buffer->MutableDataY(), 1, size_y, f) < size_y)
+ return nullptr;
+ if (fread(buffer->MutableDataU(), 1, size_uv, f) < size_uv)
+ return nullptr;
+ if (fread(buffer->MutableDataV(), 1, size_uv, f) < size_uv)
+ return nullptr;
+ return buffer;
+}
+
} // namespace test
} // namespace webrtc
diff --git a/test/frame_utils.h b/test/frame_utils.h
index aef3c9f..28a7e45 100644
--- a/test/frame_utils.h
+++ b/test/frame_utils.h
@@ -14,6 +14,7 @@
#include "webrtc/base/scoped_ref_ptr.h"
namespace webrtc {
+class I420Buffer;
class VideoFrame;
class VideoFrameBuffer;
namespace test {
@@ -38,6 +39,8 @@
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2);
+rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE *);
+
} // namespace test
} // namespace webrtc
diff --git a/test/test.gyp b/test/test.gyp
index 1d476a9..c2223a8 100644
--- a/test/test.gyp
+++ b/test/test.gyp
@@ -81,6 +81,7 @@
'<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ 'video_test_common',
],
'sources': [
'gmock.h',
@@ -211,7 +212,6 @@
'<(webrtc_root)/webrtc.gyp:webrtc',
'rtp_test_utils',
'test_support',
- 'video_test_common',
],
},
{
diff --git a/test/testsupport/frame_reader.cc b/test/testsupport/frame_reader.cc
index b03616a..dc0d822 100644
--- a/test/testsupport/frame_reader.cc
+++ b/test/testsupport/frame_reader.cc
@@ -12,15 +12,17 @@
#include <assert.h>
+#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
namespace test {
FrameReaderImpl::FrameReaderImpl(std::string input_filename,
- size_t frame_length_in_bytes)
+ int width, int height)
: input_filename_(input_filename),
- frame_length_in_bytes_(frame_length_in_bytes),
+ width_(width), height_(height),
input_file_(NULL) {
}
@@ -29,11 +31,14 @@
}
bool FrameReaderImpl::Init() {
- if (frame_length_in_bytes_ <= 0) {
- fprintf(stderr, "Frame length must be >0, was %zu\n",
- frame_length_in_bytes_);
+ if (width_ <= 0 || height_ <= 0) {
+ fprintf(stderr, "Frame width and height must be >0, was %d x %d\n",
+ width_, height_);
return false;
}
+ frame_length_in_bytes_ =
+ width_ * height_ + 2 * ((width_ + 1) / 2) * ((height_ + 1) / 2);
+
input_file_ = fopen(input_filename_.c_str(), "rb");
if (input_file_ == NULL) {
fprintf(stderr, "Couldn't open input file for reading: %s\n",
@@ -58,24 +63,18 @@
}
}
-bool FrameReaderImpl::ReadFrame(uint8_t* source_buffer) {
- assert(source_buffer);
+rtc::scoped_refptr<I420Buffer> FrameReaderImpl::ReadFrame() {
if (input_file_ == NULL) {
fprintf(stderr, "FrameReader is not initialized (input file is NULL)\n");
- return false;
+ return nullptr;
}
- size_t nbr_read = fread(source_buffer, 1, frame_length_in_bytes_,
- input_file_);
- if (nbr_read != static_cast<unsigned int>(frame_length_in_bytes_) &&
- ferror(input_file_)) {
+ rtc::scoped_refptr<I420Buffer> buffer(
+ ReadI420Buffer(width_, height_, input_file_));
+ if (!buffer && ferror(input_file_)) {
fprintf(stderr, "Error reading from input file: %s\n",
input_filename_.c_str());
- return false;
}
- if (feof(input_file_) != 0) {
- return false; // No more frames to process.
- }
- return true;
+ return buffer;
}
size_t FrameReaderImpl::FrameLength() { return frame_length_in_bytes_; }
diff --git a/test/testsupport/frame_reader.h b/test/testsupport/frame_reader.h
index 9b50ec7..13800cd 100644
--- a/test/testsupport/frame_reader.h
+++ b/test/testsupport/frame_reader.h
@@ -15,12 +15,14 @@
#include <string>
+#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+class I420Buffer;
namespace test {
-// Handles reading of frames from video files.
+// Handles reading of I420 frames from video files.
class FrameReader {
public:
virtual ~FrameReader() {}
@@ -30,11 +32,9 @@
// Returns false if an error has occurred, in addition to printing to stderr.
virtual bool Init() = 0;
- // Reads a frame into the supplied buffer, which must contain enough space
- // for the frame size.
- // Returns true if there are more frames to read, false if we've already
- // read the last frame (in the previous call).
- virtual bool ReadFrame(uint8_t* source_buffer) = 0;
+ // Reads a frame from the input file. On success, returns the frame.
+ // Returns nullptr if encountering end of file or a read error.
+ virtual rtc::scoped_refptr<I420Buffer> ReadFrame() = 0;
// Closes the input file if open. Essentially makes this class impossible
// to use anymore. Will also be invoked by the destructor.
@@ -51,12 +51,11 @@
// Creates a file handler. The input file is assumed to exist and be readable.
// Parameters:
// input_filename The file to read from.
- // frame_length_in_bytes The size of each frame.
- // For YUV this is 3 * width * height / 2
- FrameReaderImpl(std::string input_filename, size_t frame_length_in_bytes);
+ // width, height Size of each frame to read.
+ FrameReaderImpl(std::string input_filename, int width, int height);
~FrameReaderImpl() override;
bool Init() override;
- bool ReadFrame(uint8_t* source_buffer) override;
+ rtc::scoped_refptr<I420Buffer> ReadFrame() override;
void Close() override;
size_t FrameLength() override;
int NumberOfFrames() override;
@@ -64,6 +63,8 @@
private:
std::string input_filename_;
size_t frame_length_in_bytes_;
+ int width_;
+ int height_;
int number_of_frames_;
FILE* input_file_;
};
diff --git a/test/testsupport/frame_reader_unittest.cc b/test/testsupport/frame_reader_unittest.cc
index 8b16926..d0b286e 100644
--- a/test/testsupport/frame_reader_unittest.cc
+++ b/test/testsupport/frame_reader_unittest.cc
@@ -12,14 +12,13 @@
#include "webrtc/test/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
namespace test {
const std::string kInputFileContents = "baz";
-// Setting the kFrameLength value to a value much larger than the
-// file to test causes the ReadFrame test to fail on Windows.
-const size_t kFrameLength = 1000;
+const size_t kFrameLength = 3;
class FrameReaderTest: public testing::Test {
protected:
@@ -33,7 +32,7 @@
fprintf(dummy, "%s", kInputFileContents.c_str());
fclose(dummy);
- frame_reader_ = new FrameReaderImpl(temp_filename_, kFrameLength);
+ frame_reader_ = new FrameReaderImpl(temp_filename_, 1, 1);
ASSERT_TRUE(frame_reader_->Init());
}
void TearDown() {
@@ -46,25 +45,25 @@
};
TEST_F(FrameReaderTest, InitSuccess) {
- FrameReaderImpl frame_reader(temp_filename_, kFrameLength);
+ FrameReaderImpl frame_reader(temp_filename_, 1, 1);
ASSERT_TRUE(frame_reader.Init());
ASSERT_EQ(kFrameLength, frame_reader.FrameLength());
- ASSERT_EQ(0, frame_reader.NumberOfFrames());
+ ASSERT_EQ(1, frame_reader.NumberOfFrames());
}
TEST_F(FrameReaderTest, ReadFrame) {
- uint8_t buffer[3];
- bool result = frame_reader_->ReadFrame(buffer);
- ASSERT_FALSE(result); // No more files to read.
- ASSERT_EQ(kInputFileContents[0], buffer[0]);
- ASSERT_EQ(kInputFileContents[1], buffer[1]);
- ASSERT_EQ(kInputFileContents[2], buffer[2]);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer;
+ buffer = frame_reader_->ReadFrame();
+ ASSERT_TRUE(buffer);
+ ASSERT_EQ(kInputFileContents[0], buffer->DataY()[0]);
+ ASSERT_EQ(kInputFileContents[1], buffer->DataU()[0]);
+ ASSERT_EQ(kInputFileContents[2], buffer->DataV()[0]);
+ ASSERT_FALSE(frame_reader_->ReadFrame()); // End of file.
}
TEST_F(FrameReaderTest, ReadFrameUninitialized) {
- uint8_t buffer[3];
- FrameReaderImpl file_reader(temp_filename_, kFrameLength);
- ASSERT_FALSE(file_reader.ReadFrame(buffer));
+ FrameReaderImpl file_reader(temp_filename_, 1, 1);
+ ASSERT_FALSE(file_reader.ReadFrame());
}
} // namespace test
diff --git a/test/testsupport/metrics/video_metrics.cc b/test/testsupport/metrics/video_metrics.cc
index 75bbc61..3013b61 100644
--- a/test/testsupport/metrics/video_metrics.cc
+++ b/test/testsupport/metrics/video_metrics.cc
@@ -17,6 +17,7 @@
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/video_frame.h"
#include "libyuv/convert.h"
@@ -110,44 +111,17 @@
int frame_number = 0;
// Read reference and test frames.
- const size_t frame_length = 3 * width * height >> 1;
- rtc::scoped_refptr<I420Buffer> ref_i420_buffer;
- rtc::scoped_refptr<I420Buffer> test_i420_buffer;
- std::unique_ptr<uint8_t[]> ref_buffer(new uint8_t[frame_length]);
- std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[frame_length]);
+ for (;;) {
+ rtc::scoped_refptr<I420Buffer> ref_i420_buffer(
+ test::ReadI420Buffer(width, height, ref_fp));
+ if (!ref_i420_buffer)
+ break;
- // Set decoded image parameters.
- int half_width = (width + 1) / 2;
- ref_i420_buffer =
- I420Buffer::Create(width, height, width, half_width, half_width);
- test_i420_buffer =
- I420Buffer::Create(width, height, width, half_width, half_width);
+ rtc::scoped_refptr<I420Buffer> test_i420_buffer(
+ test::ReadI420Buffer(width, height, test_fp));
- // TODO(nisse): Have a frame reader in one place. And read directly
- // into the planes of an I420Buffer, the extra copying below is silly.
- size_t ref_bytes = fread(ref_buffer.get(), 1, frame_length, ref_fp);
- size_t test_bytes = fread(test_buffer.get(), 1, frame_length, test_fp);
- while (ref_bytes == frame_length && test_bytes == frame_length) {
- // Converting from buffer to plane representation.
- size_t size_y = width * height;
- size_t size_uv = half_width * ((height + 1) / 2);
- libyuv::I420Copy(
- ref_buffer.get(), width,
- ref_buffer.get() + size_y, half_width,
- ref_buffer.get() + size_y + size_uv, half_width,
- ref_i420_buffer->MutableDataY(), ref_i420_buffer->StrideY(),
- ref_i420_buffer->MutableDataU(), ref_i420_buffer->StrideU(),
- ref_i420_buffer->MutableDataV(), ref_i420_buffer->StrideV(),
- width, height);
-
- libyuv::I420Copy(
- test_buffer.get(), width,
- test_buffer.get() + size_y, half_width,
- test_buffer.get() + size_y + size_uv, half_width,
- test_i420_buffer->MutableDataY(), test_i420_buffer->StrideY(),
- test_i420_buffer->MutableDataU(), test_i420_buffer->StrideU(),
- test_i420_buffer->MutableDataV(), test_i420_buffer->StrideV(),
- width, height);
+ if (!test_i420_buffer)
+ break;
switch (video_metrics_type) {
case kPSNR:
@@ -166,8 +140,6 @@
break;
}
frame_number++;
- ref_bytes = fread(ref_buffer.get(), 1, frame_length, ref_fp);
- test_bytes = fread(test_buffer.get(), 1, frame_length, test_fp);
}
int return_code = 0;
if (frame_number == 0) {
diff --git a/test/testsupport/mock/mock_frame_reader.h b/test/testsupport/mock/mock_frame_reader.h
index 8d1c669..5fd2873 100644
--- a/test/testsupport/mock/mock_frame_reader.h
+++ b/test/testsupport/mock/mock_frame_reader.h
@@ -21,7 +21,7 @@
class MockFrameReader : public FrameReader {
public:
MOCK_METHOD0(Init, bool());
- MOCK_METHOD1(ReadFrame, bool(uint8_t* source_buffer));
+ MOCK_METHOD0(ReadFrame, rtc::scoped_refptr<I420Buffer>());
MOCK_METHOD0(Close, void());
MOCK_METHOD0(FrameLength, size_t());
MOCK_METHOD0(NumberOfFrames, int());