/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_

#include <algorithm>
#include <map>
#include <memory>
#include <vector>

#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/api/video/video_frame.h"
#include "webrtc/common_video/include/video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/test/gtest.h"

using ::testing::_;
using ::testing::AllOf;
using ::testing::Field;
using ::testing::Return;

namespace webrtc {
namespace testing {

const int kDefaultWidth = 1280;
const int kDefaultHeight = 720;
const int kNumberOfSimulcastStreams = 3;
const int kColorY = 66;
const int kColorU = 22;
const int kColorV = 33;
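// The bitrate constants below are in kbps; SetRates() converts them to bps
// before querying the rate allocator.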
const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200};
const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600};
const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000};
const int kDefaultTemporalLayerProfile[3] = {3, 3, 3};

template <typename T>
void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) {
  expected_values[0] = value0;
  expected_values[1] = value1;
  expected_values[2] = value2;
}

enum PlaneType {
  kYPlane = 0,
  kUPlane = 1,
  kVPlane = 2,
  kNumOfPlanes = 3,
};

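// Records the last picture id and, per simulcast stream, the temporal index
// and layer-sync flag of the most recently encoded frame; also keeps a copy
// of the latest base-layer (stream 0) key frame and delta frame.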
class Vp8TestEncodedImageCallback : public EncodedImageCallback {
 public:
  Vp8TestEncodedImageCallback() : picture_id_(-1) {
    memset(temporal_layer_, -1, sizeof(temporal_layer_));
    memset(layer_sync_, false, sizeof(layer_sync_));
  }

  ~Vp8TestEncodedImageCallback() {
    delete[] encoded_key_frame_._buffer;
    delete[] encoded_frame_._buffer;
  }

  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
                                const CodecSpecificInfo* codec_specific_info,
                                const RTPFragmentationHeader* fragmentation) {
    // Only store the base layer.
    if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
      if (encoded_image._frameType == kVideoFrameKey) {
        delete[] encoded_key_frame_._buffer;
        encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_key_frame_._size = encoded_image._size;
        encoded_key_frame_._length = encoded_image._length;
        encoded_key_frame_._frameType = kVideoFrameKey;
        encoded_key_frame_._completeFrame = encoded_image._completeFrame;
        memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      } else {
        delete[] encoded_frame_._buffer;
        encoded_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_frame_._size = encoded_image._size;
        encoded_frame_._length = encoded_image._length;
        memcpy(encoded_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      }
    }
    picture_id_ = codec_specific_info->codecSpecific.VP8.pictureId;
    layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.layerSync;
    temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.temporalIdx;
    return Result(Result::OK, encoded_image._timeStamp);
  }
  void GetLastEncodedFrameInfo(int* picture_id,
                               int* temporal_layer,
                               bool* layer_sync,
                               int stream) {
    *picture_id = picture_id_;
    *temporal_layer = temporal_layer_[stream];
    *layer_sync = layer_sync_[stream];
  }
  void GetLastEncodedKeyFrame(EncodedImage* encoded_key_frame) {
    *encoded_key_frame = encoded_key_frame_;
  }
  void GetLastEncodedFrame(EncodedImage* encoded_frame) {
    *encoded_frame = encoded_frame_;
  }

 private:
  EncodedImage encoded_key_frame_;
  EncodedImage encoded_frame_;
  int picture_id_;
  int temporal_layer_[kNumberOfSimulcastStreams];
  bool layer_sync_[kNumberOfSimulcastStreams];
};

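// Counts decoded frames and checks that the first row of each plane of the
// decoded image matches the expected solid test color within a small
// tolerance.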
class Vp8TestDecodedImageCallback : public DecodedImageCallback {
 public:
  Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
  int32_t Decoded(VideoFrame& decoded_image) override {
    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
        decoded_image.video_frame_buffer()->ToI420();
    for (int i = 0; i < decoded_image.width(); ++i) {
      EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1);
    }

    // TODO(mikhal): Verify the difference between U,V and the original.
    for (int i = 0; i < i420_buffer->ChromaWidth(); ++i) {
      EXPECT_NEAR(kColorU, i420_buffer->DataU()[i], 4);
      EXPECT_NEAR(kColorV, i420_buffer->DataV()[i], 4);
    }
    decoded_frames_++;
    return 0;
  }
  int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return -1;
  }
  void Decoded(VideoFrame& decoded_image,
               rtc::Optional<int32_t> decode_time_ms,
               rtc::Optional<uint8_t> qp) override {
    Decoded(decoded_image);
  }
  int DecodedFrames() { return decoded_frames_; }

 private:
  int decoded_frames_;
};

class TestVp8Simulcast : public ::testing::Test {
 public:
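  // Fills the |width| x |height| area of |data| with |value|, zeroing the
  // padding between |width| and |stride| on every row.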
  static void SetPlane(uint8_t* data,
                       uint8_t value,
                       int width,
                       int height,
                       int stride) {
    for (int i = 0; i < height; i++, data += stride) {
      // Setting allocated area to zero - setting only image size to
      // requested values - will make it easier to distinguish between image
      // size and frame size (accounting for stride).
      memset(data, value, width);
      memset(data + width, 0, stride - width);
    }
  }

  // Fills in an I420Buffer from |plane_colors|.
  static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
                          int plane_colors[kNumOfPlanes]) {
    SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(),
             buffer->height(), buffer->StrideY());

    SetPlane(buffer->MutableDataU(), plane_colors[1], buffer->ChromaWidth(),
             buffer->ChromaHeight(), buffer->StrideU());

    SetPlane(buffer->MutableDataV(), plane_colors[2], buffer->ChromaWidth(),
             buffer->ChromaHeight(), buffer->StrideV());
  }

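  // Fills |settings| with a three-stream simulcast configuration at quarter,
  // half, and full resolution, using the bitrate and temporal-layer constants
  // defined above.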
  static void DefaultSettings(VideoCodec* settings,
                              const int* temporal_layer_profile) {
    RTC_CHECK(settings);
    memset(settings, 0, sizeof(VideoCodec));
    strncpy(settings->plName, "VP8", 4);
    settings->codecType = kVideoCodecVP8;
    // 96 to 127 dynamic payload types for video codecs
    settings->plType = 120;
    settings->startBitrate = 300;
    settings->minBitrate = 30;
    settings->maxBitrate = 0;
    settings->maxFramerate = 30;
    settings->width = kDefaultWidth;
    settings->height = kDefaultHeight;
    settings->numberOfSimulcastStreams = kNumberOfSimulcastStreams;
    ASSERT_EQ(3, kNumberOfSimulcastStreams);
    settings->timing_frame_thresholds = {kDefaultTimingFramesDelayMs,
                                         kDefaultOutlierFrameSizePercent};
    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, kMaxBitrates[0],
                    kMinBitrates[0], kTargetBitrates[0],
                    &settings->simulcastStream[0], temporal_layer_profile[0]);
    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, kMaxBitrates[1],
                    kMinBitrates[1], kTargetBitrates[1],
                    &settings->simulcastStream[1], temporal_layer_profile[1]);
    ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2],
                    kMinBitrates[2], kTargetBitrates[2],
                    &settings->simulcastStream[2], temporal_layer_profile[2]);
    settings->VP8()->resilience = kResilientStream;
    settings->VP8()->denoisingOn = true;
    settings->VP8()->errorConcealmentOn = false;
    settings->VP8()->automaticResizeOn = false;
    settings->VP8()->frameDroppingOn = true;
    settings->VP8()->keyFrameInterval = 3000;
  }

  static void ConfigureStream(int width,
                              int height,
                              int max_bitrate,
                              int min_bitrate,
                              int target_bitrate,
                              SimulcastStream* stream,
                              int num_temporal_layers) {
    assert(stream);
    stream->width = width;
    stream->height = height;
    stream->maxBitrate = max_bitrate;
    stream->minBitrate = min_bitrate;
    stream->targetBitrate = target_bitrate;
    stream->numberOfTemporalLayers = num_temporal_layers;
    stream->qpMax = 45;
  }

 protected:
  virtual VP8Encoder* CreateEncoder() = 0;
  virtual VP8Decoder* CreateDecoder() = 0;

  void SetUp() override {
    encoder_.reset(CreateEncoder());
    decoder_.reset(CreateDecoder());
    SetUpCodec(kDefaultTemporalLayerProfile);
  }

  void TearDown() override {
    encoder_->Release();
    decoder_->Release();
    encoder_.reset();
    decoder_.reset();
  }

  void SetUpCodec(const int* temporal_layer_profile) {
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
    decoder_->RegisterDecodeCompleteCallback(&decoder_callback_);
    DefaultSettings(&settings_, temporal_layer_profile);
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight);
    input_buffer_->InitializeData();
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
  }

  void SetUpRateAllocator() {
    TemporalLayersFactory* tl_factory = new TemporalLayersFactory();
    rate_allocator_.reset(new SimulcastRateAllocator(
        settings_, std::unique_ptr<TemporalLayersFactory>(tl_factory)));
    settings_.VP8()->tl_factory = tl_factory;
  }

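  // Converts |bitrate_kbps| to bps, asks the rate allocator for a per-stream
  // and per-temporal-layer allocation, and applies it to the encoder.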
  void SetRates(uint32_t bitrate_kbps, uint32_t fps) {
    encoder_->SetRateAllocation(
        rate_allocator_->GetAllocation(bitrate_kbps * 1000, fps), fps);
  }

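  // Sets gmock expectations on |encoder_callback_|: exactly one encoded image
  // of |frame_type| per expected stream, each at that stream's downscaled
  // resolution (1/4, 1/2, and full size).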
  void ExpectStreams(FrameType frame_type, int expected_video_streams) {
    ASSERT_GE(expected_video_streams, 0);
    ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
    if (expected_video_streams >= 1) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 2) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 3) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
  }

  void VerifyTemporalIdxAndSyncForAllSpatialLayers(
      Vp8TestEncodedImageCallback* encoder_callback,
      const int* expected_temporal_idx,
      const bool* expected_layer_sync,
      int num_spatial_layers) {
    int picture_id = -1;
    int temporal_layer = -1;
    bool layer_sync = false;
    for (int i = 0; i < num_spatial_layers; i++) {
      encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                                &layer_sync, i);
      EXPECT_EQ(expected_temporal_idx[i], temporal_layer);
      EXPECT_EQ(expected_layer_sync[i], layer_sync);
    }
  }

  // We currently expect all active streams to generate a key frame even though
  // a key frame was only requested for some of them.
  void TestKeyFrameRequestsOnAllStreams() {
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    frame_types[0] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    frame_types[1] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    frame_types[2] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingAllStreams() {
    // We should always encode the base layer.
    SetRates(kMinBitrates[0] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingTwoStreams() {
    // We have just enough to get only the first stream and padding for two.
    SetRates(kMinBitrates[0], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingTwoStreamsOneMaxedOut() {
    // We are just below the limit for sending the second stream, so we should
    // get the first stream maxed out (at |maxBitrate|) and padding for two.
    SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingOneStream() {
    // We have just enough to send two streams, so padding for one stream.
    SetRates(kTargetBitrates[0] + kMinBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 2);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingOneStreamTwoMaxedOut() {
    // We are just below the limit for sending the third stream, so we should
    // get the first stream's rate maxed out at |targetBitrate| and the second
    // at |maxBitrate|.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 2);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestSendAllStreams() {
    // We have just enough to send all streams.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 3);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestDisablingStreams() {
    // We should get three media streams.
    SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 3);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get two streams and padding for one.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get the first stream and padding for two.
    SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We don't have enough bitrate for the thumbnail stream, but we should get
    // it anyway with current configuration.
    SetRates(kTargetBitrates[0] - 1, 30);
    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get two streams and padding for one.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
    // We get a key frame because a new stream is being enabled.
    ExpectStreams(kVideoFrameKey, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should get all three streams.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
    // We get a key frame because a new stream is being enabled.
    ExpectStreams(kVideoFrameKey, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void SwitchingToOneStream(int width, int height) {
    // Disable all streams except the last and set the bitrate of the last to
    // 100 kbps. This verifies the way GTP switches to screenshare mode.
    settings_.VP8()->numberOfTemporalLayers = 1;
    settings_.maxBitrate = 100;
    settings_.startBitrate = 100;
    settings_.width = width;
    settings_.height = height;
    for (int i = 0; i < settings_.numberOfSimulcastStreams - 1; ++i) {
      settings_.simulcastStream[i].maxBitrate = 0;
      settings_.simulcastStream[i].width = settings_.width;
      settings_.simulcastStream[i].height = settings_.height;
    }
    // Setting input image to new resolution.
    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
    input_buffer_->InitializeData();

    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

    // The for loop above did not set the bitrate of the highest layer.
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
        .maxBitrate = 0;
    // The highest layer has to correspond to the non-simulcast resolution.
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
        settings_.width;
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
        settings_.height;
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));

    // Encode one frame and verify.
    SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    EXPECT_CALL(
        encoder_callback_,
        OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
                             Field(&EncodedImage::_encodedWidth, width),
                             Field(&EncodedImage::_encodedHeight, height)),
                       _, _))
        .Times(1)
        .WillRepeatedly(Return(
            EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // Switch back.
    DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
    // Start at the lowest bitrate for enabling base stream.
    settings_.startBitrate = kMinBitrates[0];
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    SetRates(settings_.startBitrate, 30);
    ExpectStreams(kVideoFrameKey, 1);
    // Resize |input_frame_| to the new resolution.
    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
    input_buffer_->InitializeData();
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }

  void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); }

  void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); }

  // Test the layer pattern and sync flag for various spatial-temporal patterns.
  // 3-3-3 pattern: 3 temporal layers for all spatial streams, so the same
  // temporal_layer id and layer_sync are expected for all streams.
  void TestSpatioTemporalLayers333PatternEncoder() {
    Vp8TestEncodedImageCallback encoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.

    int expected_temporal_idx[3] = {-1, -1, -1};
    bool expected_layer_sync[3] = {false, false, false};

    // First frame: #0.
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #1.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #2.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #3.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #4.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #5.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
  }

  // Test the layer pattern and sync flag for various spatial-temporal patterns.
  // 3-2-1 pattern: 3 temporal layers for lowest resolution, 2 for middle, and
  // 1 temporal layer for highest resolution.
  // For this profile, we expect the temporal index pattern to be:
  // 1st stream: 0, 2, 1, 2, ....
  // 2nd stream: 0, 1, 0, 1, ...
  // 3rd stream: -1, -1, -1, -1, ....
  // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer
  // should always have temporal layer idx set to kNoTemporalIdx = -1.
  // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255.
  // TODO(marpan): Although this seems safe for now, we should fix this.
  void TestSpatioTemporalLayers321PatternEncoder() {
    int temporal_layer_profile[3] = {3, 2, 1};
    SetUpCodec(temporal_layer_profile);
    Vp8TestEncodedImageCallback encoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.

    int expected_temporal_idx[3] = {-1, -1, -1};
    bool expected_layer_sync[3] = {false, false, false};

    // First frame: #0.
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #1.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #2.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #3.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #4.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #5.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
  }

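  // Encodes two solid-color frames using an input buffer with non-trivial
  // strides (stride > width, and a luma stride that is not exactly twice the
  // chroma stride), then decodes the stored base-layer key frame and delta
  // frame and checks the decoded colors and frame count.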
  void TestStrideEncodeDecode() {
    Vp8TestEncodedImageCallback encoder_callback;
    Vp8TestDecodedImageCallback decoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    decoder_->RegisterDecodeCompleteCallback(&decoder_callback);

    SetRates(kMaxBitrates[2], 30);  // To get all three streams.
    // Setting two (possibly) problematic use cases for stride:
    // 1. stride > width 2. stride_y != stride_uv * 2
    int stride_y = kDefaultWidth + 20;
    int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
                                       stride_uv, stride_uv);
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

    // Set color.
    int plane_offset[kNumOfPlanes];
    plane_offset[kYPlane] = kColorY;
    plane_offset[kUPlane] = kColorU;
    plane_offset[kVPlane] = kColorV;
    CreateImage(input_buffer_, plane_offset);

    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

    // Change color.
    plane_offset[kYPlane] += 1;
    plane_offset[kUPlane] += 1;
    plane_offset[kVPlane] += 1;
    CreateImage(input_buffer_, plane_offset);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

    EncodedImage encoded_frame;
    // Only encoding one frame - so will be a key frame.
    encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
    EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL));
    encoder_callback.GetLastEncodedFrame(&encoded_frame);
    decoder_->Decode(encoded_frame, false, NULL);
    EXPECT_EQ(2, decoder_callback.DecodedFrames());
  }

  std::unique_ptr<VP8Encoder> encoder_;
  MockEncodedImageCallback encoder_callback_;
  std::unique_ptr<VP8Decoder> decoder_;
  MockDecodedImageCallback decoder_callback_;
  VideoCodec settings_;
  rtc::scoped_refptr<I420Buffer> input_buffer_;
  std::unique_ptr<VideoFrame> input_frame_;
  std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
};

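// Example of how a concrete test might use this fixture (an illustrative
// sketch only; the fixture name and the use of VP8Encoder::Create() /
// VP8Decoder::Create() are assumptions, not something this header provides):
//
//   class TestVp8SimulcastImpl : public TestVp8Simulcast {
//    protected:
//     VP8Encoder* CreateEncoder() override { return VP8Encoder::Create(); }
//     VP8Decoder* CreateDecoder() override { return VP8Decoder::Create(); }
//   };
//
//   TEST_F(TestVp8SimulcastImpl, TestKeyFrameRequestsOnAllStreams) {
//     TestKeyFrameRequestsOnAllStreams();
//   }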
}  // namespace testing
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_