Reland "Add multiplex case to webrtc_perf_tests"
This is a reland of d90a7e842437f5760a34bbfa283b3c4182963889
Original change's description:
> Add multiplex case to webrtc_perf_tests
>
> This CL adds two new tests to webrtc_perf_tests, covering I420 and I420A input
> to the multiplex codec. To generate the correct input, it adds an I420A case to
> SquareGenerator and the corresponding PSNR and SSIM calculations.
>
> Bug: webrtc:7671
> Change-Id: I9735d725bbfba457e804e29907cee55406ae5c8d
> Reviewed-on: https://webrtc-review.googlesource.com/52180
> Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
> Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#22330}
Bug: webrtc:7671
Change-Id: Iba2e89aee73a73a0372edea26933d6a7ea2e0ec9
TBR: niklas.enbom@webrtc.org, phoglund@webrtc.org, sprang@webrtc.org
Reviewed-on: https://webrtc-review.googlesource.com/60600
Reviewed-by: Emircan Uysaler <emircan@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22336}
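
For reference, an informal summary of the alpha-aware quality metrics introduced in
common_video/libyuv/webrtc_libyuv.cc below (assuming libyuv::SumSquareErrorToPsnr
implements the usual 10*log10(255^2*N/SSE) definition):

  \mathrm{SSE} = \mathrm{SSE}_Y + \mathrm{SSE}_U + \mathrm{SSE}_V + \mathrm{SSE}_A,
  \qquad N = 2wh + 2\,\lceil w/2\rceil\,\lceil h/2\rceil
  \mathrm{PSNR}_{\mathrm{I420A}} = \min\!\bigl(10\log_{10}(255^2 N/\mathrm{SSE}),\ \mathrm{kPerfectPSNR}\bigr)
  \mathrm{SSIM}_{\mathrm{I420A}} = \bigl(\mathrm{SSIM}_{YUV} + 0.8\,\mathrm{SSIM}_A\bigr)/1.8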
diff --git a/call/bitrate_estimator_tests.cc b/call/bitrate_estimator_tests.cc
index 4c39787..b833034 100644
--- a/call/bitrate_estimator_tests.cc
+++ b/call/bitrate_estimator_tests.cc
@@ -172,8 +172,8 @@
test_->video_encoder_config_.Copy());
RTC_DCHECK_EQ(1, test_->video_encoder_config_.number_of_streams);
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
- kDefaultWidth, kDefaultHeight, kDefaultFramerate,
- Clock::GetRealTimeClock()));
+ kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt,
+ kDefaultFramerate, Clock::GetRealTimeClock()));
send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);
diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc
index 676dad3..01d93bb 100644
--- a/common_video/libyuv/webrtc_libyuv.cc
+++ b/common_video/libyuv/webrtc_libyuv.cc
@@ -13,6 +13,8 @@
#include <string.h>
#include "api/video/i420_buffer.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "third_party/libyuv/include/libyuv.h"
@@ -203,6 +205,78 @@
ConvertVideoType(dst_video_type));
}
+// Helper functions for keeping references alive.
+void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
+
+rtc::scoped_refptr<I420ABufferInterface> ScaleI420ABuffer(
+ const I420ABufferInterface& buffer,
+ int target_width,
+ int target_height) {
+ rtc::scoped_refptr<I420Buffer> yuv_buffer =
+ I420Buffer::Create(target_width, target_height);
+ yuv_buffer->ScaleFrom(buffer);
+ rtc::scoped_refptr<I420Buffer> axx_buffer =
+ I420Buffer::Create(target_width, target_height);
+ libyuv::ScalePlane(buffer.DataA(), buffer.StrideA(), buffer.width(),
+ buffer.height(), axx_buffer->MutableDataY(),
+ axx_buffer->StrideY(), target_width, target_height,
+ libyuv::kFilterBox);
+ rtc::scoped_refptr<I420ABufferInterface> merged_buffer = WrapI420ABuffer(
+ yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
+ yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
+ yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(),
+ axx_buffer->StrideY(),
+ rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer));
+ return merged_buffer;
+}
+
+// Compute PSNR for an I420A frame (all planes). Can upscale test frame.
+double I420APSNR(const I420ABufferInterface& ref_buffer,
+ const I420ABufferInterface& test_buffer) {
+ RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
+ RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
+ if ((ref_buffer.width() != test_buffer.width()) ||
+ (ref_buffer.height() != test_buffer.height())) {
+ rtc::scoped_refptr<I420ABufferInterface> scaled_buffer =
+ ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height());
+ return I420APSNR(ref_buffer, *scaled_buffer);
+ }
+ const int width = test_buffer.width();
+ const int height = test_buffer.height();
+ const uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(),
+ test_buffer.StrideY(), width, height);
+ const int width_uv = (width + 1) >> 1;
+ const int height_uv = (height + 1) >> 1;
+ const uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(),
+ test_buffer.StrideU(), width_uv, height_uv);
+ const uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(),
+ test_buffer.StrideV(), width_uv, height_uv);
+ const uint64_t sse_a = libyuv::ComputeSumSquareErrorPlane(
+ ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(),
+ test_buffer.StrideA(), width, height);
+ const uint64_t samples = 2 * (uint64_t)width * (uint64_t)height +
+ 2 * ((uint64_t)width_uv * (uint64_t)height_uv);
+ const uint64_t sse = sse_y + sse_u + sse_v + sse_a;
+ const double psnr = libyuv::SumSquareErrorToPsnr(sse, samples);
+ return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
+}
+
+// Compute PSNR for an I420A frame (all planes)
+double I420APSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
+ if (!ref_frame || !test_frame)
+ return -1;
+ RTC_DCHECK(ref_frame->video_frame_buffer()->type() ==
+ VideoFrameBuffer::Type::kI420A);
+ RTC_DCHECK(test_frame->video_frame_buffer()->type() ==
+ VideoFrameBuffer::Type::kI420A);
+ return I420APSNR(*ref_frame->video_frame_buffer()->GetI420A(),
+ *test_frame->video_frame_buffer()->GetI420A());
+}
+
// Compute PSNR for an I420 frame (all planes). Can upscale test frame.
double I420PSNR(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer) {
@@ -234,6 +308,41 @@
*test_frame->video_frame_buffer()->ToI420());
}
+// Compute SSIM for an I420A frame (all planes). Can upscale test frame.
+double I420ASSIM(const I420ABufferInterface& ref_buffer,
+ const I420ABufferInterface& test_buffer) {
+ RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width());
+ RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height());
+ if ((ref_buffer.width() != test_buffer.width()) ||
+ (ref_buffer.height() != test_buffer.height())) {
+ rtc::scoped_refptr<I420ABufferInterface> scaled_buffer =
+ ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height());
+ return I420ASSIM(ref_buffer, *scaled_buffer);
+ }
+ const double yuv_ssim = libyuv::I420Ssim(
+ ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
+ ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
+ test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(),
+ test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
+ test_buffer.width(), test_buffer.height());
+ const double a_ssim = libyuv::CalcFrameSsim(
+ ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(),
+ test_buffer.StrideA(), test_buffer.width(), test_buffer.height());
+ return (yuv_ssim + (a_ssim * 0.8)) / 1.8;
+}
+
+// Compute SSIM for an I420A frame (all planes)
+double I420ASSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
+ if (!ref_frame || !test_frame)
+ return -1;
+ RTC_DCHECK(ref_frame->video_frame_buffer()->type() ==
+ VideoFrameBuffer::Type::kI420A);
+ RTC_DCHECK(test_frame->video_frame_buffer()->type() ==
+ VideoFrameBuffer::Type::kI420A);
+ return I420ASSIM(*ref_frame->video_frame_buffer()->GetI420A(),
+ *test_frame->video_frame_buffer()->GetI420A());
+}
+
// Compute SSIM for an I420 frame (all planes). Can upscale test_buffer.
double I420SSIM(const I420BufferInterface& ref_buffer,
const I420BufferInterface& test_buffer) {
@@ -253,6 +362,7 @@
test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(),
test_buffer.width(), test_buffer.height());
}
+
double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
if (!ref_frame || !test_frame)
return -1;
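
A minimal, hypothetical caller for the new VideoFrame overloads added above (the
include path is assumed to be the existing common_video/libyuv/include/webrtc_libyuv.h
header; both frames must already hold kI420A buffers or the RTC_DCHECKs above fire):

#include <utility>

#include "api/video/video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"

// Compares two alpha-carrying frames and returns {psnr, ssim}.
std::pair<double, double> CompareI420AFrames(const webrtc::VideoFrame& ref,
                                             const webrtc::VideoFrame& test) {
  const double psnr = webrtc::I420APSNR(&ref, &test);  // Capped at kPerfectPSNR.
  const double ssim = webrtc::I420ASSIM(&ref, &test);  // (yuv + 0.8 * a) / 1.8.
  return {psnr, ssim};
}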
diff --git a/test/BUILD.gn b/test/BUILD.gn
index fcde65f..0b22f64 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -613,6 +613,7 @@
"../common_video",
"../logging:rtc_event_log_api",
"../logging:rtc_event_log_impl_base",
+ "../media:rtc_internal_video_codecs",
"../media:rtc_media_base",
"../modules/audio_device:mock_audio_device",
"../modules/audio_mixer:audio_mixer_impl",
@@ -623,6 +624,7 @@
"../modules/video_coding:video_codec_interface",
"../modules/video_coding:video_coding_utility",
"../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",
"../modules/video_coding:webrtc_vp9",
"../rtc_base:checks",
diff --git a/test/DEPS b/test/DEPS
index d55be1b..f3dbff1 100644
--- a/test/DEPS
+++ b/test/DEPS
@@ -6,6 +6,7 @@
"+common_video",
"+logging/rtc_event_log",
"+media/base",
+ "+media/engine",
"+modules/audio_coding",
"+modules/audio_device",
"+modules/audio_mixer",
diff --git a/test/call_test.cc b/test/call_test.cc
index 80bdd02..2311fc7 100644
--- a/test/call_test.cc
+++ b/test/call_test.cc
@@ -316,7 +316,7 @@
int width,
int height) {
frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
- width, height, framerate * speed, clock));
+ width, height, rtc::nullopt, rtc::nullopt, framerate * speed, clock));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);
@@ -325,8 +325,8 @@
void CallTest::CreateFrameGeneratorCapturer(int framerate,
int width,
int height) {
- frame_generator_capturer_.reset(
- test::FrameGeneratorCapturer::Create(width, height, framerate, clock_));
+ frame_generator_capturer_.reset(test::FrameGeneratorCapturer::Create(
+ width, height, rtc::nullopt, rtc::nullopt, framerate, clock_));
video_send_stream_->SetSource(
frame_generator_capturer_.get(),
VideoSendStream::DegradationPreference::kMaintainFramerate);
diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc
index 685ec63..83dea9c 100644
--- a/test/encoder_settings.cc
+++ b/test/encoder_settings.cc
@@ -12,7 +12,9 @@
#include <algorithm>
#include <string>
+#include "media/engine/internaldecoderfactory.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/refcountedobject.h"
@@ -102,6 +104,9 @@
decoder.decoder = VP8Decoder::Create().release();
} else if (encoder_settings.payload_name == "VP9") {
decoder.decoder = VP9Decoder::Create().release();
+ } else if (encoder_settings.payload_name == "multiplex") {
+ decoder.decoder = new MultiplexDecoderAdapter(
+ new InternalDecoderFactory(), SdpVideoFormat(cricket::kVp9CodecName));
} else {
decoder.decoder = new FakeDecoder();
}
diff --git a/test/frame_generator.cc b/test/frame_generator.cc
index d6cc4e5..41e1971 100644
--- a/test/frame_generator.cc
+++ b/test/frame_generator.cc
@@ -28,12 +28,17 @@
namespace test {
namespace {
+// Helper method for keeping a reference to passed pointers.
+void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
+
// SquareGenerator is a FrameGenerator that draws a given amount of randomly
// sized and colored squares. Between each new generated frame, the squares
// are moved slightly towards the lower right corner.
class SquareGenerator : public FrameGenerator {
public:
- SquareGenerator(int width, int height, int num_squares) {
+ SquareGenerator(int width, int height, OutputType type, int num_squares)
+ : type_(type) {
ChangeResolution(width, height);
for (int i = 0; i < num_squares; ++i) {
squares_.emplace_back(new Square(width, height, i + 1));
@@ -48,16 +53,39 @@
RTC_CHECK(height_ > 0);
}
- VideoFrame* NextFrame() override {
- rtc::CritScope lock(&crit_);
-
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width_, height_));
-
- memset(buffer->MutableDataY(), 127, height_ * buffer->StrideY());
+ rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height) {
+ rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
+ memset(buffer->MutableDataY(), 127, height * buffer->StrideY());
memset(buffer->MutableDataU(), 127,
buffer->ChromaHeight() * buffer->StrideU());
memset(buffer->MutableDataV(), 127,
buffer->ChromaHeight() * buffer->StrideV());
+ return buffer;
+ }
+
+ VideoFrame* NextFrame() override {
+ rtc::CritScope lock(&crit_);
+
+ rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
+ switch (type_) {
+ case OutputType::I420: {
+ buffer = CreateI420Buffer(width_, height_);
+ break;
+ }
+ case OutputType::I420A: {
+ rtc::scoped_refptr<I420Buffer> yuv_buffer =
+ CreateI420Buffer(width_, height_);
+ rtc::scoped_refptr<I420Buffer> axx_buffer =
+ CreateI420Buffer(width_, height_);
+ buffer = WrapI420ABuffer(
+ yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
+ yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
+ yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(),
+ axx_buffer->StrideY(),
+ rtc::Bind(&KeepBufferRefs, yuv_buffer, axx_buffer));
+ break;
+ }
+ }
for (const auto& square : squares_)
square->Draw(buffer);
@@ -76,25 +104,41 @@
length_(random_generator_.Rand(1, width > 4 ? width / 4 : 1)),
yuv_y_(random_generator_.Rand(0, 255)),
yuv_u_(random_generator_.Rand(0, 255)),
- yuv_v_(random_generator_.Rand(0, 255)) {}
+ yuv_v_(random_generator_.Rand(0, 255)),
+ yuv_a_(random_generator_.Rand(0, 255)) {}
- void Draw(const rtc::scoped_refptr<I420Buffer>& buffer) {
+ void Draw(const rtc::scoped_refptr<VideoFrameBuffer>& frame_buffer) {
+ RTC_DCHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 ||
+ frame_buffer->type() == VideoFrameBuffer::Type::kI420A);
+ rtc::scoped_refptr<I420BufferInterface> buffer = frame_buffer->ToI420();
x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length_);
y_ = (y_ + random_generator_.Rand(0, 4)) % (buffer->height() - length_);
- for (int y = y_; y < y_ + length_; ++y) {
- uint8_t* pos_y =
- (buffer->MutableDataY() + x_ + y * buffer->StrideY());
- memset(pos_y, yuv_y_, length_);
- }
+ for (int y = y_; y < y_ + length_; ++y) {
+ uint8_t* pos_y = (const_cast<uint8_t*>(buffer->DataY()) + x_ +
+ y * buffer->StrideY());
+ memset(pos_y, yuv_y_, length_);
+ }
- for (int y = y_; y < y_ + length_; y = y + 2) {
- uint8_t* pos_u =
- (buffer->MutableDataU() + x_ / 2 + y / 2 * buffer->StrideU());
- memset(pos_u, yuv_u_, length_ / 2);
- uint8_t* pos_v =
- (buffer->MutableDataV() + x_ / 2 + y / 2 * buffer->StrideV());
- memset(pos_v, yuv_v_, length_ / 2);
- }
+ for (int y = y_; y < y_ + length_; y = y + 2) {
+ uint8_t* pos_u = (const_cast<uint8_t*>(buffer->DataU()) + x_ / 2 +
+ y / 2 * buffer->StrideU());
+ memset(pos_u, yuv_u_, length_ / 2);
+ uint8_t* pos_v = (const_cast<uint8_t*>(buffer->DataV()) + x_ / 2 +
+ y / 2 * buffer->StrideV());
+ memset(pos_v, yuv_v_, length_ / 2);
+ }
+
+ if (frame_buffer->type() == VideoFrameBuffer::Type::kI420)
+ return;
+
+ // Optionally draw on alpha plane if given.
+ const webrtc::I420ABufferInterface* yuva_buffer =
+ frame_buffer->GetI420A();
+ for (int y = y_; y < y_ + length_; ++y) {
+ uint8_t* pos_y = (const_cast<uint8_t*>(yuva_buffer->DataA()) + x_ +
+ y * yuva_buffer->StrideA());
+ memset(pos_y, yuv_a_, length_);
+ }
}
private:
@@ -105,9 +149,11 @@
const uint8_t yuv_y_;
const uint8_t yuv_u_;
const uint8_t yuv_v_;
+ const uint8_t yuv_a_;
};
rtc::CriticalSection crit_;
+ const OutputType type_;
int width_ RTC_GUARDED_BY(&crit_);
int height_ RTC_GUARDED_BY(&crit_);
std::vector<std::unique_ptr<Square>> squares_ RTC_GUARDED_BY(&crit_);
@@ -396,15 +442,12 @@
std::unique_ptr<FrameGenerator> FrameGenerator::CreateSquareGenerator(
int width,
- int height) {
+ int height,
+ rtc::Optional<OutputType> type,
+ rtc::Optional<int> num_squares) {
return std::unique_ptr<FrameGenerator>(
- new SquareGenerator(width, height, 10));
-}
-
-std::unique_ptr<FrameGenerator>
-FrameGenerator::CreateSquareGenerator(int width, int height, int num_squares) {
- return std::unique_ptr<FrameGenerator>(
- new SquareGenerator(width, height, num_squares));
+ new SquareGenerator(width, height, type.value_or(OutputType::I420),
+ num_squares.value_or(10)));
}
std::unique_ptr<FrameGenerator> FrameGenerator::CreateSlideGenerator(
diff --git a/test/frame_generator.h b/test/frame_generator.h
index e7e2c12..d12072a 100644
--- a/test/frame_generator.h
+++ b/test/frame_generator.h
@@ -58,13 +58,14 @@
RTC_NOTREACHED();
}
+ enum class OutputType { I420, I420A };
// Creates a frame generator that produces frames with small squares that
// move randomly towards the lower right corner.
- static std::unique_ptr<FrameGenerator> CreateSquareGenerator(int width,
- int height);
- static std::unique_ptr<FrameGenerator> CreateSquareGenerator(int width,
- int height,
- int num_squares);
+ static std::unique_ptr<FrameGenerator> CreateSquareGenerator(
+ int width,
+ int height,
+ rtc::Optional<OutputType> type,
+ rtc::Optional<int> num_squares);
// Creates a frame generator that repeatedly plays a set of yuv files.
// The frame_repeat_count determines how many times each frame is shown,
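
A minimal sketch of the widened factory (hypothetical call sites; names as declared
above, with test/frame_generator.h providing rtc::Optional). Existing callers keep
the old behavior by passing rtc::nullopt for both new parameters, which default to
I420 output and 10 squares:

#include <memory>

#include "test/frame_generator.h"

void SketchSquareGenerators() {
  // Previous default behavior: I420 frames, 10 squares.
  std::unique_ptr<webrtc::test::FrameGenerator> plain =
      webrtc::test::FrameGenerator::CreateSquareGenerator(
          352, 288, rtc::nullopt, rtc::nullopt);
  // Alpha-capable generator with the default square count.
  std::unique_ptr<webrtc::test::FrameGenerator> alpha =
      webrtc::test::FrameGenerator::CreateSquareGenerator(
          352, 288, webrtc::test::FrameGenerator::OutputType::I420A,
          rtc::nullopt);
}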
diff --git a/test/frame_generator_capturer.cc b/test/frame_generator_capturer.cc
index a8983b7..a9e80d7 100644
--- a/test/frame_generator_capturer.cc
+++ b/test/frame_generator_capturer.cc
@@ -19,7 +19,6 @@
#include "rtc_base/task_queue.h"
#include "rtc_base/timeutils.h"
#include "system_wrappers/include/clock.h"
-#include "test/frame_generator.h"
#include "call/video_send_stream.h"
namespace webrtc {
@@ -85,25 +84,16 @@
int64_t intended_run_time_ms_;
};
-FrameGeneratorCapturer* FrameGeneratorCapturer::Create(int width,
- int height,
- int target_fps,
- Clock* clock) {
+FrameGeneratorCapturer* FrameGeneratorCapturer::Create(
+ int width,
+ int height,
+ rtc::Optional<FrameGenerator::OutputType> type,
+ rtc::Optional<int> num_squares,
+ int target_fps,
+ Clock* clock) {
std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
- clock, FrameGenerator::CreateSquareGenerator(width, height), target_fps));
- if (!capturer->Init())
- return nullptr;
-
- return capturer.release();
-}
-
-FrameGeneratorCapturer* FrameGeneratorCapturer::Create(int width,
- int height,
- int num_squares,
- int target_fps,
- Clock* clock) {
- std::unique_ptr<FrameGeneratorCapturer> capturer(new FrameGeneratorCapturer(
- clock, FrameGenerator::CreateSquareGenerator(width, height, num_squares),
+ clock,
+ FrameGenerator::CreateSquareGenerator(width, height, type, num_squares),
target_fps));
if (!capturer->Init())
return nullptr;
diff --git a/test/frame_generator_capturer.h b/test/frame_generator_capturer.h
index 3a3fe82..8eb563e 100644
--- a/test/frame_generator_capturer.h
+++ b/test/frame_generator_capturer.h
@@ -16,6 +16,7 @@
#include "api/video/video_frame.h"
#include "rtc_base/criticalsection.h"
#include "rtc_base/task_queue.h"
+#include "test/frame_generator.h"
#include "test/video_capturer.h"
#include "typedefs.h" // NOLINT(build/include)
@@ -40,16 +41,13 @@
virtual ~SinkWantsObserver() {}
};
- static FrameGeneratorCapturer* Create(int width,
- int height,
- int target_fps,
- Clock* clock);
-
- static FrameGeneratorCapturer* Create(int width,
- int height,
- int num_squares,
- int target_fps,
- Clock* clock);
+ static FrameGeneratorCapturer* Create(
+ int width,
+ int height,
+ rtc::Optional<FrameGenerator::OutputType> type,
+ rtc::Optional<int> num_squares,
+ int target_fps,
+ Clock* clock);
static FrameGeneratorCapturer* CreateFromYuvFile(const std::string& file_name,
size_t width,
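
The two previous Create() overloads are collapsed into a single signature with two
rtc::Optional parameters, which is why every existing call site in this CL now passes
rtc::nullopt twice. A hypothetical alpha-producing capturer would look like:

#include <memory>

#include "system_wrappers/include/clock.h"
#include "test/frame_generator_capturer.h"

void SketchAlphaCapturer() {
  // Create() returns a raw pointer (nullptr if Init() fails), so own it here.
  std::unique_ptr<webrtc::test::FrameGeneratorCapturer> capturer(
      webrtc::test::FrameGeneratorCapturer::Create(
          352, 288, webrtc::test::FrameGenerator::OutputType::I420A,
          rtc::nullopt, /*target_fps=*/30, webrtc::Clock::GetRealTimeClock()));
}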
diff --git a/video/BUILD.gn b/video/BUILD.gn
index ab47bfc..fc2a750 100644
--- a/video/BUILD.gn
+++ b/video/BUILD.gn
@@ -118,6 +118,7 @@
"../modules/audio_mixer:audio_mixer_impl",
"../modules/rtp_rtcp",
"../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",
"../modules/video_coding:webrtc_vp9",
"../rtc_base:rtc_base_approved",
diff --git a/video/end_to_end_tests/call_operation_tests.cc b/video/end_to_end_tests/call_operation_tests.cc
index f6f335e..361bdf7 100644
--- a/video/end_to_end_tests/call_operation_tests.cc
+++ b/video/end_to_end_tests/call_operation_tests.cc
@@ -131,7 +131,8 @@
// Create frames that are smaller than the send width/height, this is done
// to check that the callbacks are done after processing video.
std::unique_ptr<test::FrameGenerator> frame_generator(
- test::FrameGenerator::CreateSquareGenerator(kWidth, kHeight));
+ test::FrameGenerator::CreateSquareGenerator(
+ kWidth, kHeight, rtc::nullopt, rtc::nullopt));
video_send_stream_->SetSource(
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
@@ -188,7 +189,7 @@
Start();
frame_generator = test::FrameGenerator::CreateSquareGenerator(
- kDefaultWidth, kDefaultHeight);
+ kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt);
video_send_stream_->SetSource(
&frame_forwarder,
VideoSendStream::DegradationPreference::kMaintainFramerate);
@@ -267,7 +268,7 @@
Start();
frame_generator = test::FrameGenerator::CreateSquareGenerator(
- kDefaultWidth, kDefaultHeight);
+ kDefaultWidth, kDefaultHeight, rtc::nullopt, rtc::nullopt);
video_send_stream_->SetSource(
&forwarder, VideoSendStream::DegradationPreference::kMaintainFramerate);
forwarder.IncomingCapturedFrame(*frame_generator->NextFrame());
diff --git a/video/end_to_end_tests/multi_stream_tester.cc b/video/end_to_end_tests/multi_stream_tester.cc
index 2d013a8..87f0ea2 100644
--- a/video/end_to_end_tests/multi_stream_tester.cc
+++ b/video/end_to_end_tests/multi_stream_tester.cc
@@ -100,7 +100,8 @@
receive_streams[i]->Start();
frame_generators[i] = test::FrameGeneratorCapturer::Create(
- width, height, 30, Clock::GetRealTimeClock());
+ width, height, rtc::nullopt, rtc::nullopt, 30,
+ Clock::GetRealTimeClock());
send_streams[i]->SetSource(
frame_generators[i],
VideoSendStream::DegradationPreference::kMaintainFramerate);
diff --git a/video/full_stack_tests.cc b/video/full_stack_tests.cc
index 9c8d3a6..4acd961 100644
--- a/video/full_stack_tests.cc
+++ b/video/full_stack_tests.cc
@@ -73,6 +73,29 @@
foreman_cif.pipe.queue_delay_ms = 50;
RunTest(foreman_cif);
}
+
+TEST_F(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) {
+ VideoQualityTest::Params foreman_cif;
+ foreman_cif.call.send_side_bwe = true;
+ foreman_cif.video[0] = {true, 352, 288, 30, 700000,
+ 700000, 700000, false, "multiplex", 1,
+ 0, 0, false, false, "foreman_cif"};
+ foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0,
+ kFullStackTestDurationSecs};
+ RunTest(foreman_cif);
+}
+
+TEST_F(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420AFrame) {
+ VideoQualityTest::Params foreman_cif;
+ foreman_cif.call.send_side_bwe = true;
+ foreman_cif.video[0] = {true, 352, 288, 30, 700000,
+ 700000, 700000, false, "multiplex", 1,
+ 0, 0, false, false, "GeneratorI420A"};
+ foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0,
+ kFullStackTestDurationSecs};
+ RunTest(foreman_cif);
+}
+
#endif // !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
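
Note on the two new full-stack tests above: they are identical except for clip_name.
"foreman_cif" feeds the stock CIF clip (plain I420 input), while "GeneratorI420A" is
matched in the video_quality_test.cc hunk below and mapped to a square-generator
capturer with OutputType::I420A, so the multiplex codec is exercised both with and
without an alpha plane.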
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc
index dcb8fb9..01729cd 100644
--- a/video/video_quality_test.cc
+++ b/video/video_quality_test.cc
@@ -24,6 +24,7 @@
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/cpu_time.h"
@@ -1386,6 +1387,10 @@
} else if (params_.video[video_idx].codec == "VP9") {
video_encoders_[video_idx] = VP9Encoder::Create();
payload_type = kPayloadTypeVP9;
+ } else if (params_.video[video_idx].codec == "multiplex") {
+ video_encoders_[video_idx] = rtc::MakeUnique<MultiplexEncoderAdapter>(
+ new InternalEncoderFactory(), SdpVideoFormat(cricket::kVp9CodecName));
+ payload_type = kPayloadTypeVP9;
} else {
RTC_NOTREACHED() << "Codec not supported!";
return;
@@ -1701,7 +1706,7 @@
for (size_t i = 0; i < num_thumbnail_streams; ++i) {
thumbnail_capturers_.emplace_back(test::FrameGeneratorCapturer::Create(
static_cast<int>(thumbnail.width), static_cast<int>(thumbnail.height),
- thumbnail.max_framerate, clock_));
+ rtc::nullopt, rtc::nullopt, thumbnail.max_framerate, clock_));
RTC_DCHECK(thumbnail_capturers_.back());
}
}
@@ -1767,7 +1772,13 @@
if (params_.video[video_idx].clip_name == "Generator") {
video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
+ static_cast<int>(params_.video[video_idx].height), rtc::nullopt,
+ rtc::nullopt, params_.video[video_idx].fps, clock_));
+ } else if (params_.video[video_idx].clip_name == "GeneratorI420A") {
+ video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
+ static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
+ test::FrameGenerator::OutputType::I420A, rtc::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name.empty()) {
video_capturers_[video_idx].reset(test::VcmCapturer::Create(
@@ -1780,7 +1791,8 @@
test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
- params_.video[video_idx].fps, clock_));
+ rtc::nullopt, rtc::nullopt, params_.video[video_idx].fps,
+ clock_));
}
} else {
video_capturers_[video_idx].reset(