/*
 *  Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "video/rtp_video_stream_receiver2.h"

#include <memory>
#include <utility>

#include "api/video/video_codec_type.h"
#include "api/video/video_frame_type.h"
#include "common_video/h264/h264_common.h"
#include "media/base/media_constants.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_format_vp9.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/rtp_frame_reference_finder.h"
#include "rtc_base/byte_buffer.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/field_trial.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_frame_transformer.h"
#include "test/time_controller/simulated_task_queue.h"

using ::testing::_;
using ::testing::ElementsAre;
using ::testing::Invoke;
using ::testing::SizeIs;
using ::testing::Values;

namespace webrtc {

namespace {

const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01};

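// Returns the absolute capture timestamps of the packets that make up |frame|,
// in packet order. Packets without the extension are skipped.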
std::vector<uint64_t> GetAbsoluteCaptureTimestamps(
    const video_coding::EncodedFrame* frame) {
  std::vector<uint64_t> result;
  for (const auto& packet_info : frame->PacketInfos()) {
    if (packet_info.absolute_capture_time()) {
      result.push_back(
          packet_info.absolute_capture_time()->absolute_capture_timestamp);
    }
  }
  return result;
}

RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
  RTPVideoHeader video_header;
  video_header.is_first_packet_in_frame = true;
  video_header.is_last_packet_in_frame = true;
  video_header.codec = kVideoCodecGeneric;
  video_header.frame_type = frame_type;
  return video_header;
}

class MockTransport : public Transport {
 public:
  MOCK_METHOD(bool,
              SendRtp,
              (const uint8_t*, size_t length, const PacketOptions& options),
              (override));
  MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};

class MockNackSender : public NackSender {
 public:
  MOCK_METHOD(void,
              SendNack,
              (const std::vector<uint16_t>& sequence_numbers,
               bool buffering_allowed),
              (override));
};

class MockKeyFrameRequestSender : public KeyFrameRequestSender {
 public:
  MOCK_METHOD(void, RequestKeyFrame, (), (override));
};

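// Mock callback that, in addition to recording the call, checks that the
// bitstream of every complete frame matches the bytes registered through
// AppendExpectedBitstream().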
class MockOnCompleteFrameCallback
    : public video_coding::OnCompleteFrameCallback {
 public:
  MOCK_METHOD(void, DoOnCompleteFrame, (video_coding::EncodedFrame*), ());
  MOCK_METHOD(void,
              DoOnCompleteFrameFailNullptr,
              (video_coding::EncodedFrame*),
              ());
  MOCK_METHOD(void,
              DoOnCompleteFrameFailLength,
              (video_coding::EncodedFrame*),
              ());
  MOCK_METHOD(void,
              DoOnCompleteFrameFailBitstream,
              (video_coding::EncodedFrame*),
              ());
  void OnCompleteFrame(
      std::unique_ptr<video_coding::EncodedFrame> frame) override {
    if (!frame) {
      DoOnCompleteFrameFailNullptr(nullptr);
      return;
    }
    EXPECT_EQ(buffer_.Length(), frame->size());
    if (buffer_.Length() != frame->size()) {
      DoOnCompleteFrameFailLength(frame.get());
      return;
    }
    if (frame->size() != buffer_.Length() ||
        memcmp(buffer_.Data(), frame->data(), buffer_.Length()) != 0) {
      DoOnCompleteFrameFailBitstream(frame.get());
      return;
    }
    DoOnCompleteFrame(frame.get());
  }

  void ClearExpectedBitstream() { buffer_.Clear(); }

  void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) {
    // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*.
    buffer_.WriteBytes(reinterpret_cast<const char*>(data), size_in_bytes);
  }
  rtc::ByteBufferWriter buffer_;
};

class MockRtpPacketSink : public RtpPacketSinkInterface {
 public:
  MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};

constexpr uint32_t kSsrc = 111;
constexpr int kPayloadType = 100;
constexpr int kRedPayloadType = 125;

std::unique_ptr<RtpPacketReceived> CreateRtpPacketReceived() {
  constexpr uint16_t kSequenceNumber = 222;
  auto packet = std::make_unique<RtpPacketReceived>();
  packet->SetSsrc(kSsrc);
  packet->SetSequenceNumber(kSequenceNumber);
  packet->SetPayloadType(kPayloadType);
  return packet;
}

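// Matches RtpPacketReceived instances by SSRC and sequence number.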
MATCHER_P(SamePacketAs, other, "") {
  return arg.Ssrc() == other.Ssrc() &&
         arg.SequenceNumber() == other.SequenceNumber();
}

}  // namespace

class RtpVideoStreamReceiver2Test : public ::testing::Test,
                                    public RtpPacketSinkInterface {
 public:
  RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {}
  explicit RtpVideoStreamReceiver2Test(std::string field_trials)
      : override_field_trials_(field_trials),
        config_(CreateConfig()),
        process_thread_(ProcessThread::Create("TestThread")) {
    rtp_receive_statistics_ =
        ReceiveStatistics::Create(Clock::GetRealTimeClock());
    rtp_video_stream_receiver_ = std::make_unique<RtpVideoStreamReceiver2>(
        TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
        nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
        nullptr, process_thread_.get(), &mock_nack_sender_,
        &mock_key_frame_request_sender_, &mock_on_complete_frame_callback_,
        nullptr, nullptr);
    VideoCodec codec;
    codec.codecType = kVideoCodecGeneric;
    rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {},
                                                /*raw_payload=*/false);
  }

  RTPVideoHeader GetDefaultH264VideoHeader() {
    RTPVideoHeader video_header;
    video_header.codec = kVideoCodecH264;
    video_header.video_type_header.emplace<RTPVideoHeaderH264>();
    return video_header;
  }

  // TODO(Johan): refactor h264_sps_pps_tracker_unittests.cc to avoid duplicate
  // code.
  void AddSps(RTPVideoHeader* video_header,
              uint8_t sps_id,
              rtc::CopyOnWriteBuffer* data) {
    NaluInfo info;
    info.type = H264::NaluType::kSps;
    info.sps_id = sps_id;
    info.pps_id = -1;
    data->AppendData<uint8_t, 2>({H264::NaluType::kSps, sps_id});
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

  void AddPps(RTPVideoHeader* video_header,
              uint8_t sps_id,
              uint8_t pps_id,
              rtc::CopyOnWriteBuffer* data) {
    NaluInfo info;
    info.type = H264::NaluType::kPps;
    info.sps_id = sps_id;
    info.pps_id = pps_id;
    data->AppendData<uint8_t, 2>({H264::NaluType::kPps, pps_id});
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

  void AddIdr(RTPVideoHeader* video_header, int pps_id) {
    NaluInfo info;
    info.type = H264::NaluType::kIdr;
    info.sps_id = -1;
    info.pps_id = pps_id;
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

  void OnRtpPacket(const RtpPacketReceived& packet) override {
    if (test_packet_sink_)
      test_packet_sink_->OnRtpPacket(packet);
  }

 protected:
  VideoReceiveStream::Config CreateConfig() {
    VideoReceiveStream::Config config(nullptr);
    config.rtp.remote_ssrc = 1111;
    config.rtp.local_ssrc = 2222;
    config.rtp.red_payload_type = kRedPayloadType;
    config.rtp.packet_sink_ = this;
    return config;
  }

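  // Installs a current task queue for the duration of the test so that
  // TaskQueueBase::Current(), which is handed to RtpVideoStreamReceiver2 in
  // the constructor above, returns a valid queue.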
  TokenTaskQueue task_queue_;
  TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_{&task_queue_};

  const webrtc::test::ScopedFieldTrials override_field_trials_;
  VideoReceiveStream::Config config_;
  MockNackSender mock_nack_sender_;
  MockKeyFrameRequestSender mock_key_frame_request_sender_;
  MockTransport mock_transport_;
  MockOnCompleteFrameCallback mock_on_complete_frame_callback_;
  std::unique_ptr<ProcessThread> process_thread_;
  std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
  std::unique_ptr<RtpVideoStreamReceiver2> rtp_video_stream_receiver_;
  RtpPacketSinkInterface* test_packet_sink_ = nullptr;
};

TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) {
  // Test that color space is cached from the last packet of a key frame and
  // that it's not reset by padding packets without color space.
  constexpr int kVp9PayloadType = 99;
  const ColorSpace kColorSpace(
      ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12,
      ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull);
  const std::vector<uint8_t> kKeyFramePayload = {0, 1, 2, 3, 4, 5,
                                                 6, 7, 8, 9, 10};
  const std::vector<uint8_t> kDeltaFramePayload = {0, 1, 2, 3, 4};

  // Anonymous helper class that generates received packets.
  class {
   public:
    void SetPayload(const std::vector<uint8_t>& payload,
                    VideoFrameType video_frame_type) {
      video_frame_type_ = video_frame_type;
      RtpPacketizer::PayloadSizeLimits pay_load_size_limits;
      // Reduce max payload length to make sure the key frame generates two
      // packets.
      pay_load_size_limits.max_payload_len = 8;
      RTPVideoHeaderVP9 rtp_video_header_vp9;
      rtp_video_header_vp9.InitRTPVideoHeaderVP9();
      rtp_video_header_vp9.inter_pic_predicted =
          (video_frame_type == VideoFrameType::kVideoFrameDelta);
      rtp_packetizer_ = std::make_unique<RtpPacketizerVp9>(
          payload, pay_load_size_limits, rtp_video_header_vp9);
    }

    size_t NumPackets() { return rtp_packetizer_->NumPackets(); }
    void SetColorSpace(const ColorSpace& color_space) {
      color_space_ = color_space;
    }

    RtpPacketReceived NextPacket() {
      RtpHeaderExtensionMap extension_map;
      extension_map.Register<ColorSpaceExtension>(1);
      RtpPacketToSend packet_to_send(&extension_map);
      packet_to_send.SetSequenceNumber(sequence_number_++);
      packet_to_send.SetSsrc(kSsrc);
      packet_to_send.SetPayloadType(kVp9PayloadType);
      bool include_color_space =
          (rtp_packetizer_->NumPackets() == 1u &&
           video_frame_type_ == VideoFrameType::kVideoFrameKey);
      if (include_color_space) {
        EXPECT_TRUE(
            packet_to_send.SetExtension<ColorSpaceExtension>(color_space_));
      }
      rtp_packetizer_->NextPacket(&packet_to_send);

      RtpPacketReceived received_packet(&extension_map);
      received_packet.Parse(packet_to_send.data(), packet_to_send.size());
      return received_packet;
    }

   private:
    uint16_t sequence_number_ = 0;
    VideoFrameType video_frame_type_;
    ColorSpace color_space_;
    std::unique_ptr<RtpPacketizer> rtp_packetizer_;
  } received_packet_generator;
  received_packet_generator.SetColorSpace(kColorSpace);

  // Prepare the receiver for VP9.
  VideoCodec codec;
  codec.codecType = kVideoCodecVP9;
  std::map<std::string, std::string> codec_params;
  rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, codec,
                                              codec_params,
                                              /*raw_payload=*/false);

  // Generate key frame packets.
  received_packet_generator.SetPayload(kKeyFramePayload,
                                       VideoFrameType::kVideoFrameKey);
  EXPECT_EQ(received_packet_generator.NumPackets(), 2u);
  RtpPacketReceived key_frame_packet1 = received_packet_generator.NextPacket();
  RtpPacketReceived key_frame_packet2 = received_packet_generator.NextPacket();

  // Generate delta frame packet.
  received_packet_generator.SetPayload(kDeltaFramePayload,
                                       VideoFrameType::kVideoFrameDelta);
  EXPECT_EQ(received_packet_generator.NumPackets(), 1u);
  RtpPacketReceived delta_frame_packet = received_packet_generator.NextPacket();

  rtp_video_stream_receiver_->StartReceive();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kKeyFramePayload.data(), kKeyFramePayload.size());

  // Send the key frame and expect a callback with color space information.
  EXPECT_FALSE(key_frame_packet1.GetExtension<ColorSpaceExtension>());
  EXPECT_TRUE(key_frame_packet2.GetExtension<ColorSpaceExtension>());
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
        ASSERT_TRUE(frame->EncodedImage().ColorSpace());
        EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
      }));
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet2);
  // Resend the first key frame packet to simulate, for example, padding.
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);

  mock_on_complete_frame_callback_.ClearExpectedBitstream();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kDeltaFramePayload.data(), kDeltaFramePayload.size());

  // Expect the delta frame to have the color space set even though no color
  // space is included in its RTP packet.
  EXPECT_FALSE(delta_frame_packet.GetExtension<ColorSpaceExtension>());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([kColorSpace](video_coding::EncodedFrame* frame) {
        ASSERT_TRUE(frame->EncodedImage().ColorSpace());
        EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
      }));
  rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet);
}

TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) {
  RtpPacketReceived rtp_packet;
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) {
  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
  constexpr int kId0 = 1;

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
  RtpPacketReceived rtp_packet(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetSequenceNumber(1);
  rtp_packet.SetTimestamp(1);
  rtp_packet.SetSsrc(kSsrc);
  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
                          /*estimated_capture_clock_offset=*/absl::nullopt});

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke(
          [kAbsoluteCaptureTimestamp](video_coding::EncodedFrame* frame) {
            EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame),
                        ElementsAre(kAbsoluteCaptureTimestamp));
          }));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test,
       MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) {
  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
  constexpr int kId0 = 1;

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
  RtpPacketReceived rtp_packet(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);

  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  uint16_t sequence_number = 1;
  uint32_t rtp_timestamp = 1;
  rtp_packet.SetSequenceNumber(sequence_number);
  rtp_packet.SetTimestamp(rtp_timestamp);
  rtp_packet.SetSsrc(kSsrc);
  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
                          /*estimated_capture_clock_offset=*/absl::nullopt});

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  // RTP packet without the absolute capture time extension.
  rtp_packet = RtpPacketReceived(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(++sequence_number);
  rtp_packet.SetTimestamp(++rtp_timestamp);
  rtp_packet.SetSsrc(kSsrc);

  // Since the second packet carries no absolute capture time, the rtp video
  // stream receiver is expected to extrapolate one for the resulting video
  // frame from the absolute capture time of the previous packet.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([](video_coding::EncodedFrame* frame) {
        EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1));
      }));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test,
       NoInfiniteRecursionOnEncapsulatedRedPacket) {
  const std::vector<uint8_t> data({
      0x80,              // RTP version.
      kRedPayloadType,   // Payload type.
      0, 0, 0, 0, 0, 0,  // Don't care.
      0, 0, 0x4, 0x57,   // SSRC
      kRedPayloadType,   // RED header.
      0, 0, 0, 0, 0      // Don't care.
  });
  RtpPacketReceived packet;
  EXPECT_TRUE(packet.Parse(data.data(), data.size()));
  rtp_video_stream_receiver_->StartReceive();
  rtp_video_stream_receiver_->OnRtpPacket(packet);
}

TEST_F(RtpVideoStreamReceiver2Test,
       DropsPacketWithRedPayloadTypeAndEmptyPayload) {
  const uint8_t kRedPayloadType = 125;
  config_.rtp.red_payload_type = kRedPayloadType;
  SetUp();  // Re-create rtp_video_stream_receiver with the RED payload type.
  // clang-format off
  const uint8_t data[] = {
      0x80,              // RTP version.
      kRedPayloadType,   // Payload type.
      0, 0, 0, 0, 0, 0,  // Don't care.
      0, 0, 0x4, 0x57,   // SSRC
      // Empty rtp payload.
  };
  // clang-format on
  RtpPacketReceived packet;
  // Manually convert to CopyOnWriteBuffer to be sure capacity == size, so that
  // the asan bot can catch reads past the end of the buffer.
  EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data)));
  rtp_video_stream_receiver_->StartReceive();
  rtp_video_stream_receiver_->OnRtpPacket(packet);
  // Expect that asan doesn't find anything.
}

TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) {
  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  constexpr uint8_t expected_bitstream[] = {1, 2, 3, 0xff};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      expected_bitstream, sizeof(expected_bitstream));
  EXPECT_CALL(mock_on_complete_frame_callback_,
              DoOnCompleteFrameFailBitstream(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

class RtpVideoStreamReceiver2TestH264
    : public RtpVideoStreamReceiver2Test,
      public ::testing::WithParamInterface<std::string> {
 protected:
  RtpVideoStreamReceiver2TestH264() : RtpVideoStreamReceiver2Test(GetParam()) {}
};

INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe,
                         RtpVideoStreamReceiver2TestH264,
                         Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));

TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) {
  rtc::CopyOnWriteBuffer sps_data;
  RtpPacketReceived rtp_packet;
  RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
  AddSps(&sps_video_header, 0, &sps_data);
  rtp_packet.SetSequenceNumber(0);
  rtp_packet.SetPayloadType(kPayloadType);
  sps_video_header.is_first_packet_in_frame = true;
  sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
                                                           sps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
                                                    sps_video_header);

  rtc::CopyOnWriteBuffer pps_data;
  RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
  AddPps(&pps_video_header, 0, 1, &pps_data);
  rtp_packet.SetSequenceNumber(1);
  pps_video_header.is_first_packet_in_frame = true;
  pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
                                                           pps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
                                                    pps_video_header);

  rtc::CopyOnWriteBuffer idr_data;
  RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
  AddIdr(&idr_video_header, 1);
  rtp_packet.SetSequenceNumber(2);
  idr_video_header.is_first_packet_in_frame = true;
  idr_video_header.is_last_packet_in_frame = true;
  idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
  const uint8_t idr[] = {0x65, 1, 2, 3};
  idr_data.AppendData(idr);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
}

TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) {
  constexpr int kPayloadType = 99;
  VideoCodec codec;
  std::map<std::string, std::string> codec_params;
  // Example parameter sets from
  // https://tools.ietf.org/html/rfc3984#section-8.2.
  codec_params.insert(
      {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="});
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params,
                                              /*raw_payload=*/false);
  const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96,
                                0x53, 0x05, 0x89, 0x88};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_sps,
                                                           sizeof(binary_sps));
  const uint8_t binary_pps[] = {0x68, 0xc9, 0x63, 0x88};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_pps,
                                                           sizeof(binary_pps));

  RtpPacketReceived rtp_packet;
  RTPVideoHeader video_header = GetDefaultH264VideoHeader();
  AddIdr(&video_header, 0);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(2);
  video_header.is_first_packet_in_frame = true;
  video_header.is_last_packet_in_frame = true;
  video_header.codec = kVideoCodecH264;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  rtc::CopyOnWriteBuffer data({1, 2, 3});
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) {
  constexpr int kPayloadType = 99;
  VideoCodec codec;
  std::map<std::string, std::string> codec_params;
  // Forcing can be done either with a field trial or with codec_params.
  if (GetParam() == "") {
    codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""});
  }
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params,
                                              /*raw_payload=*/false);
  rtc::CopyOnWriteBuffer sps_data;
  RtpPacketReceived rtp_packet;
  RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
  AddSps(&sps_video_header, 0, &sps_data);
  rtp_packet.SetSequenceNumber(0);
  rtp_packet.SetPayloadType(kPayloadType);
  sps_video_header.is_first_packet_in_frame = true;
  sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
                                                           sps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
                                                    sps_video_header);

  rtc::CopyOnWriteBuffer pps_data;
  RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
  AddPps(&pps_video_header, 0, 1, &pps_data);
  rtp_packet.SetSequenceNumber(1);
  pps_video_header.is_first_packet_in_frame = true;
  pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
                                                           pps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
                                                    pps_video_header);

  rtc::CopyOnWriteBuffer idr_data;
  RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
  AddIdr(&idr_video_header, 1);
  rtp_packet.SetSequenceNumber(2);
  idr_video_header.is_first_packet_in_frame = true;
  idr_video_header.is_last_packet_in_frame = true;
  idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
  const uint8_t idr[] = {0x65, 1, 2, 3};
  idr_data.AppendData(idr);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        EXPECT_TRUE(frame->is_keyframe());
      });
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
  mock_on_complete_frame_callback_.ClearExpectedBitstream();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  rtp_packet.SetSequenceNumber(3);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        EXPECT_FALSE(frame->is_keyframe());
      });
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
}

TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) {
  RtpPacketReceived rtp_packet;
  RTPVideoHeader video_header = GetDefaultH264VideoHeader();
  rtc::CopyOnWriteBuffer data({1, 2, 3});
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(2);
  video_header.is_first_packet_in_frame = true;
  video_header.is_last_packet_in_frame = true;
  video_header.codec = kVideoCodecGeneric;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  rtp_packet.SetSequenceNumber(3);
  rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
                                                    video_header);

  rtp_packet.SetSequenceNumber(4);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  rtp_packet.SetSequenceNumber(6);
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_packet.SetSequenceNumber(5);
  rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) {
  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) {
  constexpr int kPacketBufferMaxSize = 2048;

  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
  // Incomplete frames so that the packet buffer is filling up.
  video_header.is_last_packet_in_frame = false;
  uint16_t start_sequence_number = 1234;
  rtp_packet.SetSequenceNumber(start_sequence_number);
  while (rtp_packet.SequenceNumber() - start_sequence_number <
         kPacketBufferMaxSize) {
    rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                      video_header);
    rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2);
  }

  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}

TEST_F(RtpVideoStreamReceiver2Test, SinkGetsRtpNotifications) {
  rtp_video_stream_receiver_->StartReceive();

  MockRtpPacketSink test_sink;
  test_packet_sink_ = &test_sink;

  auto rtp_packet = CreateRtpPacketReceived();
  EXPECT_CALL(test_sink, OnRtpPacket(SamePacketAs(*rtp_packet)));

  rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);

  // Test tear-down.
  rtp_video_stream_receiver_->StopReceive();
  test_packet_sink_ = nullptr;
}

TEST_F(RtpVideoStreamReceiver2Test, NonStartedStreamGetsNoRtpCallbacks) {
  // Explicitly showing that the stream is not in the |started| state,
  // regardless of whether streams start out |started| or |stopped|.
  rtp_video_stream_receiver_->StopReceive();

  MockRtpPacketSink test_sink;
  test_packet_sink_ = &test_sink;

  auto rtp_packet = CreateRtpPacketReceived();
  EXPECT_CALL(test_sink, OnRtpPacket(_)).Times(0);

  rtp_video_stream_receiver_->OnRtpPacket(*rtp_packet);

  test_packet_sink_ = nullptr;
}

TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorOnePacket) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kSpatialIndex = 1;

  rtp_video_stream_receiver_->StartReceive();

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived rtp_packet(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);

  RtpGenericFrameDescriptor generic_descriptor;
  generic_descriptor.SetFirstPacketInSubFrame(true);
  generic_descriptor.SetLastPacketInSubFrame(true);
  generic_descriptor.SetFrameId(100);
  generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
  generic_descriptor.AddFrameDependencyDiff(90);
  generic_descriptor.AddFrameDependencyDiff(80);
  ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      generic_descriptor));

  uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
  memcpy(payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  rtp_packet.SetMarker(true);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->num_references, 2U);
        EXPECT_EQ(frame->references[0], frame->Id() - 90);
        EXPECT_EQ(frame->references[1], frame->Id() - 80);
        EXPECT_EQ(frame->SpatialIndex(), kSpatialIndex);
        EXPECT_THAT(frame->PacketInfos(), SizeIs(1));
      }));

  rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
}

TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorTwoPackets) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kSpatialIndex = 1;

  rtp_video_stream_receiver_->StartReceive();

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived first_packet(&extension_map);

  RtpGenericFrameDescriptor first_packet_descriptor;
  first_packet_descriptor.SetFirstPacketInSubFrame(true);
  first_packet_descriptor.SetLastPacketInSubFrame(false);
  first_packet_descriptor.SetFrameId(100);
  first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
  first_packet_descriptor.SetResolution(480, 360);
  ASSERT_TRUE(first_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      first_packet_descriptor));

  uint8_t* first_packet_payload = first_packet.SetPayloadSize(data.size());
  memcpy(first_packet_payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  first_packet.SetPayloadType(kPayloadType);
  first_packet.SetSequenceNumber(1);
  rtp_video_stream_receiver_->OnRtpPacket(first_packet);

  RtpPacketReceived second_packet(&extension_map);
  RtpGenericFrameDescriptor second_packet_descriptor;
  second_packet_descriptor.SetFirstPacketInSubFrame(false);
  second_packet_descriptor.SetLastPacketInSubFrame(true);
  ASSERT_TRUE(second_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      second_packet_descriptor));

  second_packet.SetMarker(true);
  second_packet.SetPayloadType(kPayloadType);
  second_packet.SetSequenceNumber(2);

  uint8_t* second_packet_payload = second_packet.SetPayloadSize(data.size());
  memcpy(second_packet_payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(Invoke([kSpatialIndex](video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->num_references, 0U);
        EXPECT_EQ(frame->SpatialIndex(), kSpatialIndex);
        EXPECT_EQ(frame->EncodedImage()._encodedWidth, 480u);
        EXPECT_EQ(frame->EncodedImage()._encodedHeight, 360u);
        EXPECT_THAT(frame->PacketInfos(), SizeIs(2));
      }));

  rtp_video_stream_receiver_->OnRtpPacket(second_packet);
}

TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorRawPayload) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kRawPayloadType = 123;

  VideoCodec codec;
  rtp_video_stream_receiver_->AddReceiveCodec(kRawPayloadType, codec, {},
                                              /*raw_payload=*/true);
  rtp_video_stream_receiver_->StartReceive();

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived rtp_packet(&extension_map);

  RtpGenericFrameDescriptor generic_descriptor;
  generic_descriptor.SetFirstPacketInSubFrame(true);
  generic_descriptor.SetLastPacketInSubFrame(true);
  ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      generic_descriptor));

  uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
  memcpy(payload, data.data(), data.size());
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());

  rtp_packet.SetMarker(true);
  rtp_packet.SetPayloadType(kRawPayloadType);
  rtp_packet.SetSequenceNumber(1);

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
}

TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kPayloadType = 123;

  VideoCodec codec;
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {},
                                              /*raw_payload=*/true);
  rtp_video_stream_receiver_->StartReceive();
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);

  uint16_t rtp_sequence_number = 1;
  auto inject_packet = [&](uint16_t wrapped_frame_id) {
    RtpPacketReceived rtp_packet(&extension_map);

    RtpGenericFrameDescriptor generic_descriptor;
    generic_descriptor.SetFirstPacketInSubFrame(true);
    generic_descriptor.SetLastPacketInSubFrame(true);
    generic_descriptor.SetFrameId(wrapped_frame_id);
    ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
        generic_descriptor));

    uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
    ASSERT_TRUE(payload);
    memcpy(payload, data.data(), data.size());
    mock_on_complete_frame_callback_.ClearExpectedBitstream();
    mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                             data.size());
    rtp_packet.SetMarker(true);
    rtp_packet.SetPayloadType(kPayloadType);
    rtp_packet.SetSequenceNumber(++rtp_sequence_number);
    rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
  };

  int64_t first_picture_id;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        first_picture_id = frame->Id();
      });
  inject_packet(/*wrapped_frame_id=*/0xffff);

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->Id() - first_picture_id, 3);
      });
  inject_packet(/*wrapped_frame_id=*/0x0002);
}

class RtpVideoStreamReceiver2DependencyDescriptorTest
    : public RtpVideoStreamReceiver2Test {
 public:
  RtpVideoStreamReceiver2DependencyDescriptorTest() {
    VideoCodec codec;
    rtp_video_stream_receiver_->AddReceiveCodec(payload_type_, codec, {},
                                                /*raw_payload=*/true);
    extension_map_.Register<RtpDependencyDescriptorExtension>(7);
    rtp_video_stream_receiver_->StartReceive();
  }

  // Returns some valid structure for the DependencyDescriptors.
  // The first template of that structure always fits a key frame.
  static FrameDependencyStructure CreateStreamStructure() {
    FrameDependencyStructure stream_structure;
    stream_structure.num_decode_targets = 1;
    stream_structure.templates = {
        FrameDependencyTemplate().Dtis("S"),
        FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
    };
    return stream_structure;
  }

  void InjectPacketWith(const FrameDependencyStructure& stream_structure,
                        const DependencyDescriptor& dependency_descriptor) {
    const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
    RtpPacketReceived rtp_packet(&extension_map_);
    ASSERT_TRUE(rtp_packet.SetExtension<RtpDependencyDescriptorExtension>(
        stream_structure, dependency_descriptor));
    uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
    ASSERT_TRUE(payload);
    memcpy(payload, data.data(), data.size());
    mock_on_complete_frame_callback_.ClearExpectedBitstream();
    mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                             data.size());
    rtp_packet.SetMarker(true);
    rtp_packet.SetPayloadType(payload_type_);
    rtp_packet.SetSequenceNumber(++rtp_sequence_number_);
    rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
  }

 private:
  const int payload_type_ = 123;
  RtpHeaderExtensionMap extension_map_;
  uint16_t rtp_sequence_number_ = 321;
};

TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, UnwrapsFrameId) {
  FrameDependencyStructure stream_structure = CreateStreamStructure();

  DependencyDescriptor keyframe_descriptor;
  keyframe_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure);
  keyframe_descriptor.frame_dependencies = stream_structure.templates[0];
  keyframe_descriptor.frame_number = 0xfff0;
  // DependencyDescriptor doesn't support reordering a delta frame before the
  // keyframe. Thus feed a key frame first, then test reordered delta frames.
  int64_t first_picture_id;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        first_picture_id = frame->Id();
      });
  InjectPacketWith(stream_structure, keyframe_descriptor);

  DependencyDescriptor deltaframe1_descriptor;
  deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1];
  deltaframe1_descriptor.frame_number = 0xfffe;

  DependencyDescriptor deltaframe2_descriptor;
  deltaframe2_descriptor.frame_dependencies = stream_structure.templates[1];
  deltaframe2_descriptor.frame_number = 0x0002;

  // The parser should unwrap frame ids correctly even if packets were
  // reordered by the network.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        // 0x0002 - 0xfff0
        EXPECT_EQ(frame->Id() - first_picture_id, 18);
      })
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        // 0xfffe - 0xfff0
        EXPECT_EQ(frame->Id() - first_picture_id, 14);
      });
  InjectPacketWith(stream_structure, deltaframe2_descriptor);
  InjectPacketWith(stream_structure, deltaframe1_descriptor);
}

TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
       DropsLateDeltaFramePacketWithDependencyDescriptorExtension) {
  FrameDependencyStructure stream_structure1 = CreateStreamStructure();
  FrameDependencyStructure stream_structure2 = CreateStreamStructure();
  // Make sure template ids for these two structures do not collide:
  // adjust structure_id (that is also used as template id offset).
  stream_structure1.structure_id = 13;
  stream_structure2.structure_id =
      stream_structure1.structure_id + stream_structure1.templates.size();

  DependencyDescriptor keyframe1_descriptor;
  keyframe1_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure1);
  keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
  keyframe1_descriptor.frame_number = 1;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  InjectPacketWith(stream_structure1, keyframe1_descriptor);

  // Pass in 2nd key frame with different structure.
  DependencyDescriptor keyframe2_descriptor;
  keyframe2_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure2);
  keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
  keyframe2_descriptor.frame_number = 3;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  InjectPacketWith(stream_structure2, keyframe2_descriptor);

  // Pass in late delta frame that uses structure of the 1st key frame.
  DependencyDescriptor deltaframe_descriptor;
  deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0];
  deltaframe_descriptor.frame_number = 2;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0);
  InjectPacketWith(stream_structure1, deltaframe_descriptor);
}

TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest,
       DropsLateKeyFramePacketWithDependencyDescriptorExtension) {
  FrameDependencyStructure stream_structure1 = CreateStreamStructure();
  FrameDependencyStructure stream_structure2 = CreateStreamStructure();
  // Make sure template ids for these two structures do not collide:
  // adjust structure_id (that is also used as template id offset).
  stream_structure1.structure_id = 13;
  stream_structure2.structure_id =
      stream_structure1.structure_id + stream_structure1.templates.size();

  DependencyDescriptor keyframe1_descriptor;
  keyframe1_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure1);
  keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
  keyframe1_descriptor.frame_number = 1;

  DependencyDescriptor keyframe2_descriptor;
  keyframe2_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure2);
  keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
  keyframe2_descriptor.frame_number = 3;

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->Id() & 0xFFFF, 3);
      });
  InjectPacketWith(stream_structure2, keyframe2_descriptor);
  InjectPacketWith(stream_structure1, keyframe1_descriptor);

  // Pass in delta frame that uses structure of the 2nd key frame. Late key
  // frame shouldn't block it.
  DependencyDescriptor deltaframe_descriptor;
  deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0];
  deltaframe_descriptor.frame_number = 4;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->Id() & 0xFFFF, 4);
      });
  InjectPacketWith(stream_structure2, deltaframe_descriptor);
}

TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) {
  rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
  EXPECT_CALL(*mock_frame_transformer,
              RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
  auto receiver = std::make_unique<RtpVideoStreamReceiver2>(
      TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_,
      nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr,
      nullptr, process_thread_.get(), &mock_nack_sender_, nullptr,
      &mock_on_complete_frame_callback_, nullptr, mock_frame_transformer);
  VideoCodec video_codec;
  video_codec.codecType = kVideoCodecGeneric;
  receiver->AddReceiveCodec(kPayloadType, video_codec, {},
                            /*raw_payload=*/false);

  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(*mock_frame_transformer, Transform(_));
  receiver->OnReceivedPayloadData(data, rtp_packet, video_header);

  EXPECT_CALL(*mock_frame_transformer,
              UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc));
  receiver = nullptr;
}

// Test default behavior and when playout delay is overridden by field trial.
const VideoPlayoutDelay kTransmittedPlayoutDelay = {100, 200};
const VideoPlayoutDelay kForcedPlayoutDelay = {70, 90};
struct PlayoutDelayOptions {
  std::string field_trial;
  VideoPlayoutDelay expected_delay;
};
const PlayoutDelayOptions kDefaultBehavior = {
    /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay};
const PlayoutDelayOptions kOverridePlayoutDelay = {
    /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/",
    /*expected_delay=*/kForcedPlayoutDelay};

class RtpVideoStreamReceiver2TestPlayoutDelay
    : public RtpVideoStreamReceiver2Test,
      public ::testing::WithParamInterface<PlayoutDelayOptions> {
 protected:
  RtpVideoStreamReceiver2TestPlayoutDelay()
      : RtpVideoStreamReceiver2Test(GetParam().field_trial) {}
};

INSTANTIATE_TEST_SUITE_P(PlayoutDelay,
                         RtpVideoStreamReceiver2TestPlayoutDelay,
                         Values(kDefaultBehavior, kOverridePlayoutDelay));

TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) {
  rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4});
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<PlayoutDelayLimits>(1);
  RtpPacketToSend packet_to_send(&extension_map);
  packet_to_send.SetPayloadType(kPayloadType);
  packet_to_send.SetSequenceNumber(1);

  // Set playout delay on outgoing packet.
  EXPECT_TRUE(packet_to_send.SetExtension<PlayoutDelayLimits>(
      kTransmittedPlayoutDelay));
  uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size());
  memcpy(payload, payload_data.data(), payload_data.size());

  RtpPacketReceived received_packet(&extension_map);
  received_packet.Parse(packet_to_send.data(), packet_to_send.size());

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(),
                                                           payload_data.size());
  // Expect the playout delay of the encoded frame to be the same as the
  // transmitted playout delay, unless it was overridden by a field trial.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([expected_playout_delay = GetParam().expected_delay](
                           video_coding::EncodedFrame* frame) {
        EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay);
      }));
  rtp_video_stream_receiver_->OnReceivedPayloadData(
      received_packet.PayloadBuffer(), received_packet, video_header);
}

}  // namespace webrtc