/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_

#include <algorithm>
#include <map>
#include <memory>
#include <vector>

#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/api/video/video_frame.h"
#include "webrtc/common_video/include/video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/test/gtest.h"

using ::testing::_;
using ::testing::AllOf;
using ::testing::Field;
using ::testing::Return;

namespace webrtc {
namespace testing {

const int kDefaultWidth = 1280;
const int kDefaultHeight = 720;
const int kNumberOfSimulcastStreams = 3;
const int kColorY = 66;
const int kColorU = 22;
const int kColorV = 33;
const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200};
const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600};
const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000};
const int kDefaultTemporalLayerProfile[3] = {3, 3, 3};
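
// The padding/disabling tests below appear to rely on the following
// thresholds, derived from the constants above: a higher stream is encoded
// once the total allocation covers the lower streams' target bitrates plus
// that stream's minimum, e.g.
//   stream 1: kTargetBitrates[0] + kMinBitrates[1] = 100 + 150 = 250 kbps,
//   stream 2: kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2]
//             = 100 + 450 + 600 = 1150 kbps,
// while the base stream is encoded even below kMinBitrates[0].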

template <typename T>
void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) {
  expected_values[0] = value0;
  expected_values[1] = value1;
  expected_values[2] = value2;
}

enum PlaneType {
  kYPlane = 0,
  kUPlane = 1,
  kVPlane = 2,
  kNumOfPlanes = 3,
};

class Vp8TestEncodedImageCallback : public EncodedImageCallback {
 public:
  Vp8TestEncodedImageCallback() : picture_id_(-1) {
    memset(temporal_layer_, -1, sizeof(temporal_layer_));
    memset(layer_sync_, false, sizeof(layer_sync_));
  }

  ~Vp8TestEncodedImageCallback() {
    delete[] encoded_key_frame_._buffer;
    delete[] encoded_frame_._buffer;
  }

  virtual Result OnEncodedImage(const EncodedImage& encoded_image,
                                const CodecSpecificInfo* codec_specific_info,
                                const RTPFragmentationHeader* fragmentation) {
    // Only store the base layer.
    if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
      if (encoded_image._frameType == kVideoFrameKey) {
        delete[] encoded_key_frame_._buffer;
        encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_key_frame_._size = encoded_image._size;
        encoded_key_frame_._length = encoded_image._length;
        encoded_key_frame_._frameType = kVideoFrameKey;
        encoded_key_frame_._completeFrame = encoded_image._completeFrame;
        memcpy(encoded_key_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      } else {
        delete[] encoded_frame_._buffer;
        encoded_frame_._buffer = new uint8_t[encoded_image._size];
        encoded_frame_._size = encoded_image._size;
        encoded_frame_._length = encoded_image._length;
        memcpy(encoded_frame_._buffer, encoded_image._buffer,
               encoded_image._length);
      }
    }
    picture_id_ = codec_specific_info->codecSpecific.VP8.pictureId;
    layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.layerSync;
    temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
        codec_specific_info->codecSpecific.VP8.temporalIdx;
    return Result(Result::OK, encoded_image._timeStamp);
  }
  void GetLastEncodedFrameInfo(int* picture_id,
                               int* temporal_layer,
                               bool* layer_sync,
                               int stream) {
    *picture_id = picture_id_;
    *temporal_layer = temporal_layer_[stream];
    *layer_sync = layer_sync_[stream];
  }
  void GetLastEncodedKeyFrame(EncodedImage* encoded_key_frame) {
    *encoded_key_frame = encoded_key_frame_;
  }
  void GetLastEncodedFrame(EncodedImage* encoded_frame) {
    *encoded_frame = encoded_frame_;
  }

 private:
  EncodedImage encoded_key_frame_;
  EncodedImage encoded_frame_;
  int picture_id_;
  int temporal_layer_[kNumberOfSimulcastStreams];
  bool layer_sync_[kNumberOfSimulcastStreams];
};

class Vp8TestDecodedImageCallback : public DecodedImageCallback {
 public:
  Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
  int32_t Decoded(VideoFrame& decoded_image) override {
    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
        decoded_image.video_frame_buffer()->ToI420();
    for (int i = 0; i < decoded_image.width(); ++i) {
      EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1);
    }

    // TODO(mikhal): Verify the difference between U,V and the original.
    for (int i = 0; i < i420_buffer->ChromaWidth(); ++i) {
      EXPECT_NEAR(kColorU, i420_buffer->DataU()[i], 4);
      EXPECT_NEAR(kColorV, i420_buffer->DataV()[i], 4);
    }
    decoded_frames_++;
    return 0;
  }
  int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return -1;
  }
  void Decoded(VideoFrame& decoded_image,
               rtc::Optional<int32_t> decode_time_ms,
               rtc::Optional<uint8_t> qp) override {
    Decoded(decoded_image);
  }
  int DecodedFrames() { return decoded_frames_; }

 private:
  int decoded_frames_;
};

class TestVp8Simulcast : public ::testing::Test {
 public:
  static void SetPlane(uint8_t* data,
                       uint8_t value,
                       int width,
                       int height,
                       int stride) {
    for (int i = 0; i < height; i++, data += stride) {
      // Set the image area to |value| and zero out the rest of the allocated
      // row (the stride padding). This makes it easier to distinguish between
      // the image size and the full frame size (which accounts for stride).
      memset(data, value, width);
      memset(data + width, 0, stride - width);
    }
  }

  // Fills in an I420Buffer from |plane_colors|.
  static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
                          int plane_colors[kNumOfPlanes]) {
    SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(),
             buffer->height(), buffer->StrideY());

    SetPlane(buffer->MutableDataU(), plane_colors[1], buffer->ChromaWidth(),
             buffer->ChromaHeight(), buffer->StrideU());

    SetPlane(buffer->MutableDataV(), plane_colors[2], buffer->ChromaWidth(),
             buffer->ChromaHeight(), buffer->StrideV());
  }

  static void DefaultSettings(VideoCodec* settings,
                              const int* temporal_layer_profile) {
    RTC_CHECK(settings);
    memset(settings, 0, sizeof(VideoCodec));
    strncpy(settings->plName, "VP8", 4);
    settings->codecType = kVideoCodecVP8;
    // 96 to 127 dynamic payload types for video codecs.
    settings->plType = 120;
    settings->startBitrate = 300;
    settings->minBitrate = 30;
    settings->maxBitrate = 0;
    settings->maxFramerate = 30;
    settings->width = kDefaultWidth;
    settings->height = kDefaultHeight;
    settings->numberOfSimulcastStreams = kNumberOfSimulcastStreams;
    ASSERT_EQ(3, kNumberOfSimulcastStreams);
    settings->timing_frame_thresholds = {kDefaultTimingFramesDelayMs,
                                         kDefaultOutlierFrameSizePercent};
    ConfigureStream(kDefaultWidth / 4, kDefaultHeight / 4, kMaxBitrates[0],
                    kMinBitrates[0], kTargetBitrates[0],
                    &settings->simulcastStream[0], temporal_layer_profile[0]);
    ConfigureStream(kDefaultWidth / 2, kDefaultHeight / 2, kMaxBitrates[1],
                    kMinBitrates[1], kTargetBitrates[1],
                    &settings->simulcastStream[1], temporal_layer_profile[1]);
    ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2],
                    kMinBitrates[2], kTargetBitrates[2],
                    &settings->simulcastStream[2], temporal_layer_profile[2]);
    settings->VP8()->resilience = kResilientStream;
    settings->VP8()->denoisingOn = true;
    settings->VP8()->errorConcealmentOn = false;
    settings->VP8()->automaticResizeOn = false;
    settings->VP8()->frameDroppingOn = true;
    settings->VP8()->keyFrameInterval = 3000;
  }

  static void ConfigureStream(int width,
                              int height,
                              int max_bitrate,
                              int min_bitrate,
                              int target_bitrate,
                              SimulcastStream* stream,
                              int num_temporal_layers) {
    assert(stream);
    stream->width = width;
    stream->height = height;
    stream->maxBitrate = max_bitrate;
    stream->minBitrate = min_bitrate;
    stream->targetBitrate = target_bitrate;
    stream->numberOfTemporalLayers = num_temporal_layers;
    stream->qpMax = 45;
  }

 protected:
  virtual VP8Encoder* CreateEncoder() = 0;
  virtual VP8Decoder* CreateDecoder() = 0;
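
  // A minimal sketch of how a concrete test might specialize this fixture.
  // The class name TestVp8Impl is hypothetical; actual test files define
  // their own subclasses and forward to the Test*() helpers below, using the
  // VP8Encoder::Create()/VP8Decoder::Create() factories declared in vp8.h:
  //
  //   class TestVp8Impl : public TestVp8Simulcast {
  //    protected:
  //     VP8Encoder* CreateEncoder() override { return VP8Encoder::Create(); }
  //     VP8Decoder* CreateDecoder() override { return VP8Decoder::Create(); }
  //   };
  //
  //   TEST_F(TestVp8Impl, TestKeyFrameRequestsOnAllStreams) {
  //     TestVp8Simulcast::TestKeyFrameRequestsOnAllStreams();
  //   }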

  void SetUp() override {
    encoder_.reset(CreateEncoder());
    decoder_.reset(CreateDecoder());
    SetUpCodec(kDefaultTemporalLayerProfile);
  }

  void TearDown() override {
    encoder_->Release();
    decoder_->Release();
    encoder_.reset();
    decoder_.reset();
  }

  void SetUpCodec(const int* temporal_layer_profile) {
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
    decoder_->RegisterDecodeCompleteCallback(&decoder_callback_);
    DefaultSettings(&settings_, temporal_layer_profile);
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight);
    input_buffer_->InitializeData();
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
  }

  void SetUpRateAllocator() {
    TemporalLayersFactory* tl_factory = new TemporalLayersFactory();
    rate_allocator_.reset(new SimulcastRateAllocator(
        settings_, std::unique_ptr<TemporalLayersFactory>(tl_factory)));
    settings_.VP8()->tl_factory = tl_factory;
  }

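  // Converts |bitrate_kbps| to a bitrate allocation via the rate allocator
  // and hands it to the encoder. For example, SetRates(kMaxBitrates[2], 30)
  // requests 1200 kbps at 30 fps, which with the default settings should be
  // enough to enable all three simulcast streams.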
| 280 | void SetRates(uint32_t bitrate_kbps, uint32_t fps) { |
| 281 | encoder_->SetRateAllocation( |
| 282 | rate_allocator_->GetAllocation(bitrate_kbps * 1000, fps), fps); |
pbos@webrtc.org | 866b22b | 2014-12-09 10:36:40 | [diff] [blame] | 283 | } |

  void ExpectStreams(FrameType frame_type, int expected_video_streams) {
    ASSERT_GE(expected_video_streams, 0);
    ASSERT_LE(expected_video_streams, kNumberOfSimulcastStreams);
    if (expected_video_streams >= 1) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 2) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
    if (expected_video_streams >= 3) {
      EXPECT_CALL(
          encoder_callback_,
          OnEncodedImage(
              AllOf(Field(&EncodedImage::_frameType, frame_type),
                    Field(&EncodedImage::_encodedWidth, kDefaultWidth),
                    Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
              _, _))
          .Times(1)
          .WillRepeatedly(Return(EncodedImageCallback::Result(
              EncodedImageCallback::Result::OK, 0)));
    }
  }

  void VerifyTemporalIdxAndSyncForAllSpatialLayers(
      Vp8TestEncodedImageCallback* encoder_callback,
      const int* expected_temporal_idx,
      const bool* expected_layer_sync,
      int num_spatial_layers) {
    int picture_id = -1;
    int temporal_layer = -1;
    bool layer_sync = false;
    for (int i = 0; i < num_spatial_layers; i++) {
      encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
                                                &layer_sync, i);
      EXPECT_EQ(expected_temporal_idx[i], temporal_layer);
      EXPECT_EQ(expected_layer_sync[i], layer_sync);
    }
  }

  // We currently expect all active streams to generate a key frame even though
  // a key frame was only requested for some of them.
  void TestKeyFrameRequestsOnAllStreams() {
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    frame_types[0] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    frame_types[1] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    frame_types[2] = kVideoFrameKey;
    ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
    ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingAllStreams() {
    // We should always encode the base layer.
    SetRates(kMinBitrates[0] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingTwoStreams() {
    // We have just enough to get only the first stream and padding for two.
    SetRates(kMinBitrates[0], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingTwoStreamsOneMaxedOut() {
    // We are just below the limit for sending the second stream, so we should
    // get the first stream maxed out (at |maxBitrate|) and padding for two.
    SetRates(kTargetBitrates[0] + kMinBitrates[1] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 1);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingOneStream() {
    // We have just enough to send two streams, so padding for one stream.
    SetRates(kTargetBitrates[0] + kMinBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 2);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestPaddingOneStreamTwoMaxedOut() {
    // We are just below the limit for sending the third stream, so we should
    // get the first stream maxed out at |targetBitrate| and the second at
    // |maxBitrate|.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] - 1, 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 2);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestSendAllStreams() {
    // We have just enough to send all streams.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 3);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestDisablingStreams() {
    // We should get three media streams.
    SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    ExpectStreams(kVideoFrameKey, 3);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    ExpectStreams(kVideoFrameDelta, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get two streams and padding for one.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
    ExpectStreams(kVideoFrameDelta, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get the first stream and padding for two.
    SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We don't have enough bitrate for the thumbnail stream, but we should get
    // it anyway with current configuration.
    SetRates(kTargetBitrates[0] - 1, 30);
    ExpectStreams(kVideoFrameDelta, 1);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should only get two streams and padding for one.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
    // We get a key frame because a new stream is being enabled.
    ExpectStreams(kVideoFrameKey, 2);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // We should get all three streams.
    SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
    // We get a key frame because a new stream is being enabled.
    ExpectStreams(kVideoFrameKey, 3);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void SwitchingToOneStream(int width, int height) {
    // Disable all streams except the last and set the bitrate of the last to
    // 100 kbps. This verifies the way GTP switches to screenshare mode.
    settings_.VP8()->numberOfTemporalLayers = 1;
    settings_.maxBitrate = 100;
    settings_.startBitrate = 100;
    settings_.width = width;
    settings_.height = height;
    for (int i = 0; i < settings_.numberOfSimulcastStreams - 1; ++i) {
      settings_.simulcastStream[i].maxBitrate = 0;
      settings_.simulcastStream[i].width = settings_.width;
      settings_.simulcastStream[i].height = settings_.height;
    }
    // Setting input image to new resolution.
    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
    input_buffer_->InitializeData();

    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

    // The for loop above did not set the bitrate of the highest layer.
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
        .maxBitrate = 0;
    // The highest layer has to correspond to the non-simulcast resolution.
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].width =
        settings_.width;
    settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1].height =
        settings_.height;
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));

    // Encode one frame and verify.
    SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
    std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
                                       kVideoFrameDelta);
    EXPECT_CALL(
        encoder_callback_,
        OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
                             Field(&EncodedImage::_encodedWidth, width),
                             Field(&EncodedImage::_encodedHeight, height)),
                       _, _))
        .Times(1)
        .WillRepeatedly(Return(
            EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

    // Switch back.
    DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
    // Start at the lowest bitrate for enabling base stream.
    settings_.startBitrate = kMinBitrates[0];
    SetUpRateAllocator();
    EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
    SetRates(settings_.startBitrate, 30);
    ExpectStreams(kVideoFrameKey, 1);
    // Resize |input_frame_| to the new resolution.
    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
    input_buffer_->InitializeData();
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
  }

  void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }

  void TestSwitchingToOneOddStream() { SwitchingToOneStream(1023, 769); }

  void TestSwitchingToOneSmallStream() { SwitchingToOneStream(4, 4); }

  // Test the layer pattern and sync flag for various spatial-temporal patterns.
  // 3-3-3 pattern: 3 temporal layers for all spatial streams, so the same
  // temporal_layer id and layer_sync are expected for all streams.
  void TestSaptioTemporalLayers333PatternEncoder() {
    Vp8TestEncodedImageCallback encoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.

    int expected_temporal_idx[3] = {-1, -1, -1};
    bool expected_layer_sync[3] = {false, false, false};

    // First frame: #0.
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #1.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #2.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #3.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #4.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #5.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
  }

  // Test the layer pattern and sync flag for various spatial-temporal patterns.
  // 3-2-1 pattern: 3 temporal layers for lowest resolution, 2 for middle, and
  // 1 temporal layer for highest resolution.
  // For this profile, we expect the temporal index pattern to be:
  // 1st stream: 0, 2, 1, 2, ....
  // 2nd stream: 0, 1, 0, 1, ...
  // 3rd stream: -1, -1, -1, -1, ....
  // Regarding the 3rd stream, note that a stream/encoder with 1 temporal layer
  // should always have temporal layer idx set to kNoTemporalIdx = -1.
  // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255.
  // TODO(marpan): Although this seems safe for now, we should fix this.
  void TestSpatioTemporalLayers321PatternEncoder() {
    int temporal_layer_profile[3] = {3, 2, 1};
    SetUpCodec(temporal_layer_profile);
    Vp8TestEncodedImageCallback encoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    SetRates(kMaxBitrates[2], 30);  // To get all three streams.

    int expected_temporal_idx[3] = {-1, -1, -1};
    bool expected_layer_sync[3] = {false, false, false};

    // First frame: #0.
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #1.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #2.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #3.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #4.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

    // Next frame: #5.
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
    SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
    SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
    VerifyTemporalIdxAndSyncForAllSpatialLayers(
        &encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
  }

  void TestStrideEncodeDecode() {
    Vp8TestEncodedImageCallback encoder_callback;
    Vp8TestDecodedImageCallback decoder_callback;
    encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
    decoder_->RegisterDecodeCompleteCallback(&decoder_callback);

    SetRates(kMaxBitrates[2], 30);  // To get all three streams.
    // Set up two (possibly) problematic use cases for stride:
    // 1. stride > width, 2. stride_y != stride_uv / 2.
    int stride_y = kDefaultWidth + 20;
    int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
                                       stride_uv, stride_uv);
    input_frame_.reset(
        new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

    // Set color.
    int plane_offset[kNumOfPlanes];
    plane_offset[kYPlane] = kColorY;
    plane_offset[kUPlane] = kColorU;
    plane_offset[kVPlane] = kColorV;
    CreateImage(input_buffer_, plane_offset);

    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

    // Change color.
    plane_offset[kYPlane] += 1;
    plane_offset[kUPlane] += 1;
    plane_offset[kVPlane] += 1;
    CreateImage(input_buffer_, plane_offset);
    input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
    EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

    EncodedImage encoded_frame;
    // Only encoding one frame - so will be a key frame.
    encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
    EXPECT_EQ(0, decoder_->Decode(encoded_frame, false, NULL));
    encoder_callback.GetLastEncodedFrame(&encoded_frame);
    decoder_->Decode(encoded_frame, false, NULL);
    EXPECT_EQ(2, decoder_callback.DecodedFrames());
  }

  std::unique_ptr<VP8Encoder> encoder_;
  MockEncodedImageCallback encoder_callback_;
  std::unique_ptr<VP8Decoder> decoder_;
  MockDecodedImageCallback decoder_callback_;
  VideoCodec settings_;
  rtc::scoped_refptr<I420Buffer> input_buffer_;
  std::unique_ptr<VideoFrame> input_frame_;
  std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
};

}  // namespace testing
}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_UTILITY_H_