/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/rtp_rtcp/source/rtp_sender_video.h"

#include <stdlib.h>
#include <string.h>

#include <algorithm>
#include <limits>
#include <memory>
#include <string>
#include <utility>

#include "absl/memory/memory.h"
#include "absl/strings/match.h"
#include "api/crypto/frame_encryptor_interface.h"
#include "api/transport/rtp/dependency_descriptor.h"
#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_sender.h"
#include "modules/rtp_rtcp/source/byte_io.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "modules/rtp_rtcp/source/transformable_encoded_frame.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/trace_event.h"

namespace webrtc {

namespace {
constexpr size_t kRedForFecHeaderLength = 1;
constexpr int64_t kMaxUnretransmittableFrameIntervalMs = 33 * 4;

// This is an experimental field trial to exclude the transport sequence
// number from FEC packets; it should only be used in conjunction with
// datagram transport. Datagram transport removes transport sequence numbers
// from RTP packets and uses a datagram feedback loop to re-generate RTCP
// feedback packets, but FEC control packets are calculated before the
// sequence number is removed. As a result, recovered packets would be
// corrupt unless we also remove the transport sequence number during FEC
// calculation.
//
// TODO(sukhanov): We need to find a better way to implement FEC with datagram
// transport, probably moving FEC to the datagram integration layer. We should
// also remove this special field trial once we switch the datagram path from
// RTCConfiguration flags to a field trial and use the same field trial for
// the FEC workaround.
const char kExcludeTransportSequenceNumberFromFecFieldTrial[] =
    "WebRTC-ExcludeTransportSequenceNumberFromFec";

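// Encapsulates a media packet in RED (RFC 2198) with a single block: the
// one-byte RED header carries the F bit (0, meaning this is the final block)
// and the 7-bit payload type of the original media packet, followed by the
// unmodified media payload.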
void BuildRedPayload(const RtpPacketToSend& media_packet,
                     RtpPacketToSend* red_packet) {
  uint8_t* red_payload = red_packet->AllocatePayload(
      kRedForFecHeaderLength + media_packet.payload_size());
  RTC_DCHECK(red_payload);
  red_payload[0] = media_packet.PayloadType();

  auto media_payload = media_packet.payload();
  memcpy(&red_payload[kRedForFecHeaderLength], media_payload.data(),
         media_payload.size());
}

bool MinimizeDescriptor(RTPVideoHeader* video_header) {
  if (auto* vp8 =
          absl::get_if<RTPVideoHeaderVP8>(&video_header->video_type_header)) {
    // Set the minimum fields the RtpPacketizer needs to create vp8 packets.
    // nonReference is the only field that doesn't require extra space.
    bool non_reference = vp8->nonReference;
    vp8->InitRTPVideoHeaderVP8();
    vp8->nonReference = non_reference;
    return true;
  }
  // TODO(danilchap): Reduce vp9 codec specific descriptor too.
  return false;
}

bool IsBaseLayer(const RTPVideoHeader& video_header) {
  switch (video_header.codec) {
    case kVideoCodecVP8: {
      const auto& vp8 =
          absl::get<RTPVideoHeaderVP8>(video_header.video_type_header);
      return (vp8.temporalIdx == 0 || vp8.temporalIdx == kNoTemporalIdx);
    }
    case kVideoCodecVP9: {
      const auto& vp9 =
          absl::get<RTPVideoHeaderVP9>(video_header.video_type_header);
      return (vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx);
    }
    case kVideoCodecH264:
      // TODO(kron): Implement logic for H264 once WebRTC supports temporal
      // layers for H264.
      break;
    default:
      break;
  }
  return true;
}

#if RTC_TRACE_EVENTS_ENABLED
const char* FrameTypeToString(VideoFrameType frame_type) {
  switch (frame_type) {
    case VideoFrameType::kEmptyFrame:
      return "empty";
    case VideoFrameType::kVideoFrameKey:
      return "video_key";
    case VideoFrameType::kVideoFrameDelta:
      return "video_delta";
    default:
      RTC_NOTREACHED();
      return "";
  }
}
#endif

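// A playout delay of {-1, -1} is the "no-op" value: the sender has no delay
// preference to signal, so no PlayoutDelayLimits extension needs to be sent.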
bool IsNoopDelay(const PlayoutDelay& delay) {
  return delay.min_ms == -1 && delay.max_ms == -1;
}

}  // namespace

RTPSenderVideo::RTPSenderVideo(const Config& config)
    : rtp_sender_(config.rtp_sender),
      clock_(config.clock),
      retransmission_settings_(
          config.enable_retransmit_all_layers
              ? kRetransmitAllLayers
              : (kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers)),
      last_rotation_(kVideoRotation_0),
      transmit_color_space_next_frame_(false),
      current_playout_delay_{-1, -1},
      playout_delay_pending_(false),
      red_payload_type_(config.red_payload_type),
      fec_generator_(config.fec_generator),
      video_bitrate_(1000, RateStatistics::kBpsScale),
      packetization_overhead_bitrate_(1000, RateStatistics::kBpsScale),
      frame_encryptor_(config.frame_encryptor),
      require_frame_encryption_(config.require_frame_encryption),
      generic_descriptor_auth_experiment_(!absl::StartsWith(
          config.field_trials->Lookup("WebRTC-GenericDescriptorAuth"),
          "Disabled")),
      exclude_transport_sequence_number_from_fec_experiment_(absl::StartsWith(
          config.field_trials->Lookup(
              kExcludeTransportSequenceNumberFromFecFieldTrial),
          "Enabled")),
      absolute_capture_time_sender_(config.clock),
      frame_transformer_delegate_(
          config.frame_transformer
              ? new rtc::RefCountedObject<
                    RTPSenderVideoFrameTransformerDelegate>(
                    this, std::move(config.frame_transformer))
              : nullptr) {
  if (frame_transformer_delegate_)
    frame_transformer_delegate_->Init();
}

RTPSenderVideo::~RTPSenderVideo() {
  if (frame_transformer_delegate_)
    frame_transformer_delegate_->Reset();
}

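// Updates the send-side statistics (video bitrate and packetization
// overhead) for the media packets of a frame and hands all packets off to
// |rtp_sender_| for pacing and sending.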
void RTPSenderVideo::LogAndSendToNetwork(
    std::vector<std::unique_ptr<RtpPacketToSend>> packets,
    size_t unpacketized_payload_size) {
  int64_t now_ms = clock_->TimeInMilliseconds();
#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
  for (const auto& packet : packets) {
    if (packet->packet_type() == RtpPacketMediaType::kForwardErrorCorrection) {
      const uint32_t ssrc = packet->Ssrc();
      BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoFecBitrate_kbps", now_ms,
                                      FecOverheadRate() / 1000, ssrc);
    }
  }
#endif

  {
    rtc::CritScope cs(&stats_crit_);
    size_t packetized_payload_size = 0;
    for (const auto& packet : packets) {
      if (*packet->packet_type() == RtpPacketMediaType::kVideo) {
        video_bitrate_.Update(packet->size(), now_ms);
        packetized_payload_size += packet->payload_size();
      }
    }
    // The AV1 packetizer may produce fewer packetized bytes than
    // unpacketized.
    if (packetized_payload_size >= unpacketized_payload_size) {
      packetization_overhead_bitrate_.Update(
          packetized_payload_size - unpacketized_payload_size,
          clock_->TimeInMilliseconds());
    }
  }

  rtp_sender_->EnqueuePackets(std::move(packets));
}

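// Returns the worst-case per-packet overhead, in bytes, that must be
// reserved for FEC (and RED, when enabled) when computing the maximum
// payload size of a media packet.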
size_t RTPSenderVideo::FecPacketOverhead() const {
  size_t overhead = fec_generator_ ? fec_generator_->MaxPacketOverhead() : 0u;
  if (red_enabled()) {
    // The RED overhead is due to a small header.
    overhead += kRedForFecHeaderLength;

    // TODO(bugs.webrtc.org/11340): Move this into UlpfecGenerator.
    if (fec_generator_ &&
        fec_generator_->GetFecType() == VideoFecGenerator::FecType::kUlpFec) {
      // For ULPFEC, the overhead is the FEC headers plus the RED-for-FEC
      // header (see above) plus anything in the RTP header beyond the 12-byte
      // base header (CSRC list, extensions...).
      // The reason the header extensions are included here is that, from an
      // FEC viewpoint, they are part of the payload to be protected.
      // (The base RTP header is already protected by the FEC header.)
      overhead += rtp_sender_->RtpHeaderLength() - kRtpHeaderSize;
    }
  }
  return overhead;
}

void RTPSenderVideo::SetFecParameters(const FecProtectionParams& delta_params,
                                      const FecProtectionParams& key_params) {
  if (fec_generator_) {
    fec_generator_->SetProtectionParameters(delta_params, key_params);
  }
}

void RTPSenderVideo::SetVideoStructure(
    const FrameDependencyStructure* video_structure) {
  if (frame_transformer_delegate_) {
    frame_transformer_delegate_->SetVideoStructureUnderLock(video_structure);
    return;
  }
  // Lock is being held by the SetVideoStructure() caller.
  SetVideoStructureUnderLock(video_structure);
}

void RTPSenderVideo::SetVideoStructureUnderLock(
    const FrameDependencyStructure* video_structure) {
  RTC_DCHECK_RUNS_SERIALIZED(&send_checker_);
  if (video_structure == nullptr) {
    video_structure_ = nullptr;
    return;
  }
  // Simple sanity checks that the video structure is set up correctly.
  RTC_DCHECK_GT(video_structure->num_decode_targets, 0);
  RTC_DCHECK_GT(video_structure->templates.size(), 0);

  int structure_id = 0;
  if (video_structure_) {
    if (*video_structure_ == *video_structure) {
      // Same structure (just a new key frame), no update required.
      return;
    }
    // When setting a different video structure, make sure structure_id is
    // updated so that templates from different structures do not collide.
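    // Note: template IDs in the dependency descriptor are 6-bit values
    // (0-63), which is why kMaxTemplates below is 64 and structure IDs wrap
    // modulo that value.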
    static constexpr int kMaxTemplates = 64;
    structure_id =
        (video_structure_->structure_id + video_structure_->templates.size()) %
        kMaxTemplates;
  }

  video_structure_ =
      std::make_unique<FrameDependencyStructure>(*video_structure);
  video_structure_->structure_id = structure_id;
  // TODO(bugs.webrtc.org/10342): Support chains.
  video_structure_->num_chains = 0;
}

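// Adds the RTP header extensions for one packet of a frame. |first_packet|
// and |last_packet| describe the packet's position within the frame; several
// of the extensions (e.g. color space, video rotation, timing) are only
// written on the first or last packet of the frame.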
void RTPSenderVideo::AddRtpHeaderExtensions(
    const RTPVideoHeader& video_header,
    const absl::optional<AbsoluteCaptureTime>& absolute_capture_time,
    bool first_packet,
    bool last_packet,
    RtpPacketToSend* packet) const {
  // Send color space when changed or if the frame is a key frame. Keep
  // sending color space information until the first base layer frame to
  // guarantee that the information is retrieved by the receiver.
  bool set_color_space =
      video_header.color_space != last_color_space_ ||
      video_header.frame_type == VideoFrameType::kVideoFrameKey ||
      transmit_color_space_next_frame_;
  // Color space requires two-byte header extensions if HDR metadata is
  // included. Therefore, it's best to add this extension first so that the
  // other extensions in the same packet are written as two-byte headers at
  // once.
  if (last_packet && set_color_space && video_header.color_space)
    packet->SetExtension<ColorSpaceExtension>(video_header.color_space.value());

  // According to
  // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
  // ts_126114v120700p.pdf Section 7.4.5:
  // The MTSI client shall add the payload bytes as defined in this clause
  // onto the last RTP packet in each group of packets which make up a key
  // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265
  // (HEVC)). The MTSI client may also add the payload bytes onto the last RTP
  // packet in each group of packets which make up another type of frame
  // (e.g. a P-Frame) only if the current value is different from the previous
  // value sent.
  // Set rotation when key frame or when changed (to follow standard).
  // Or when different from 0 (to follow current receiver implementation).
  bool set_video_rotation =
      video_header.frame_type == VideoFrameType::kVideoFrameKey ||
      video_header.rotation != last_rotation_ ||
      video_header.rotation != kVideoRotation_0;
  if (last_packet && set_video_rotation)
    packet->SetExtension<VideoOrientation>(video_header.rotation);

  // Report content type only for key frames.
  if (last_packet &&
      video_header.frame_type == VideoFrameType::kVideoFrameKey &&
      video_header.content_type != VideoContentType::UNSPECIFIED)
    packet->SetExtension<VideoContentTypeExtension>(video_header.content_type);

  if (last_packet &&
      video_header.video_timing.flags != VideoSendTiming::kInvalid)
    packet->SetExtension<VideoTimingExtension>(video_header.video_timing);

  // If transmitted, add to all packets; ack logic depends on this.
  if (playout_delay_pending_) {
    packet->SetExtension<PlayoutDelayLimits>(current_playout_delay_);
  }

  if (first_packet && absolute_capture_time) {
    packet->SetExtension<AbsoluteCaptureTimeExtension>(*absolute_capture_time);
  }

  if (video_header.codec == kVideoCodecH264 &&
      video_header.frame_marking.temporal_id != kNoTemporalIdx) {
    FrameMarking frame_marking = video_header.frame_marking;
    frame_marking.start_of_frame = first_packet;
    frame_marking.end_of_frame = last_packet;
    packet->SetExtension<FrameMarkingExtension>(frame_marking);
  }

  if (video_header.generic) {
    bool extension_is_set = false;
    if (video_structure_ != nullptr) {
      DependencyDescriptor descriptor;
      descriptor.first_packet_in_frame = first_packet;
      descriptor.last_packet_in_frame = last_packet;
      descriptor.frame_number = video_header.generic->frame_id & 0xFFFF;
      descriptor.frame_dependencies.spatial_id =
          video_header.generic->spatial_index;
      descriptor.frame_dependencies.temporal_id =
          video_header.generic->temporal_index;
      for (int64_t dep : video_header.generic->dependencies) {
        descriptor.frame_dependencies.frame_diffs.push_back(
            video_header.generic->frame_id - dep);
      }
      descriptor.frame_dependencies.decode_target_indications =
          video_header.generic->decode_target_indications;
      RTC_DCHECK_EQ(
          descriptor.frame_dependencies.decode_target_indications.size(),
          video_structure_->num_decode_targets);

      // To avoid an extra structure copy, temporarily share ownership of
      // video_structure_ with the dependency descriptor.
      if (video_header.frame_type == VideoFrameType::kVideoFrameKey &&
          first_packet) {
        descriptor.attached_structure =
            absl::WrapUnique(video_structure_.get());
      }
      extension_is_set = packet->SetExtension<RtpDependencyDescriptorExtension>(
          *video_structure_, descriptor);

      // Remove the temporary shared ownership.
      descriptor.attached_structure.release();
    }

    // Do not use the v0/v1 generic frame descriptor when v2 is stored.
    if (!extension_is_set) {
      RtpGenericFrameDescriptor generic_descriptor;
      generic_descriptor.SetFirstPacketInSubFrame(first_packet);
      generic_descriptor.SetLastPacketInSubFrame(last_packet);
      generic_descriptor.SetDiscardable(video_header.generic->discardable);

      if (first_packet) {
        generic_descriptor.SetFrameId(
            static_cast<uint16_t>(video_header.generic->frame_id));
        for (int64_t dep : video_header.generic->dependencies) {
          generic_descriptor.AddFrameDependencyDiff(
              video_header.generic->frame_id - dep);
        }

        uint8_t spatial_bimask = 1 << video_header.generic->spatial_index;
        generic_descriptor.SetSpatialLayersBitmask(spatial_bimask);

        generic_descriptor.SetTemporalLayer(
            video_header.generic->temporal_index);

        if (video_header.frame_type == VideoFrameType::kVideoFrameKey) {
          generic_descriptor.SetResolution(video_header.width,
                                           video_header.height);
        }
      }

      packet->SetExtension<RtpGenericFrameDescriptorExtension00>(
          generic_descriptor);
    }
  }
}

bool RTPSenderVideo::SendVideo(
    int payload_type,
    absl::optional<VideoCodecType> codec_type,
    uint32_t rtp_timestamp,
    int64_t capture_time_ms,
    rtc::ArrayView<const uint8_t> payload,
    const RTPFragmentationHeader* fragmentation,
    RTPVideoHeader video_header,
    absl::optional<int64_t> expected_retransmission_time_ms) {
#if RTC_TRACE_EVENTS_ENABLED
  TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type",
                          FrameTypeToString(video_header.frame_type));
#endif
  RTC_CHECK_RUNS_SERIALIZED(&send_checker_);

  if (video_header.frame_type == VideoFrameType::kEmptyFrame)
    return true;

  if (payload.empty())
    return false;

  int32_t retransmission_settings = retransmission_settings_;
  if (codec_type == VideoCodecType::kVideoCodecH264) {
    // Backward compatibility for older receivers without temporal layer logic.
    retransmission_settings = kRetransmitBaseLayer | kRetransmitHigherLayers;
  }

  MaybeUpdateCurrentPlayoutDelay(video_header);
  if (video_header.frame_type == VideoFrameType::kVideoFrameKey &&
      !IsNoopDelay(current_playout_delay_)) {
    // Force the playout delay on key frames, if set.
    playout_delay_pending_ = true;
  }

  // Maximum size of a packet including RTP headers, with extra space left in
  // case the packet will be resent using FEC or RTX.
  int packet_capacity = rtp_sender_->MaxRtpPacketSize() - FecPacketOverhead() -
                        (rtp_sender_->RtxStatus() ? kRtxHeaderSize : 0);

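  // Construct one template packet for each position a packet can have within
  // a frame (only packet, first, middle, last). Setting the header extensions
  // on these templates below lets us measure the per-position header sizes
  // and derive the packetizer's payload size limits.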
  std::unique_ptr<RtpPacketToSend> single_packet =
      rtp_sender_->AllocatePacket();
  RTC_DCHECK_LE(packet_capacity, single_packet->capacity());
  single_packet->SetPayloadType(payload_type);
  single_packet->SetTimestamp(rtp_timestamp);
  single_packet->set_capture_time_ms(capture_time_ms);

  const absl::optional<AbsoluteCaptureTime> absolute_capture_time =
      absolute_capture_time_sender_.OnSendPacket(
          AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(),
                                               single_packet->Csrcs()),
          single_packet->Timestamp(), kVideoPayloadTypeFrequency,
          Int64MsToUQ32x32(single_packet->capture_time_ms() + NtpOffsetMs()),
          /*estimated_capture_clock_offset=*/absl::nullopt);

  auto first_packet = std::make_unique<RtpPacketToSend>(*single_packet);
  auto middle_packet = std::make_unique<RtpPacketToSend>(*single_packet);
  auto last_packet = std::make_unique<RtpPacketToSend>(*single_packet);
  // The simplest way to estimate how much space the extensions will occupy is
  // to set them.
  AddRtpHeaderExtensions(video_header, absolute_capture_time,
                         /*first_packet=*/true, /*last_packet=*/true,
                         single_packet.get());
  AddRtpHeaderExtensions(video_header, absolute_capture_time,
                         /*first_packet=*/true, /*last_packet=*/false,
                         first_packet.get());
  AddRtpHeaderExtensions(video_header, absolute_capture_time,
                         /*first_packet=*/false, /*last_packet=*/false,
                         middle_packet.get());
  AddRtpHeaderExtensions(video_header, absolute_capture_time,
                         /*first_packet=*/false, /*last_packet=*/true,
                         last_packet.get());

  RTC_DCHECK_GT(packet_capacity, single_packet->headers_size());
  RTC_DCHECK_GT(packet_capacity, first_packet->headers_size());
  RTC_DCHECK_GT(packet_capacity, middle_packet->headers_size());
  RTC_DCHECK_GT(packet_capacity, last_packet->headers_size());
  RtpPacketizer::PayloadSizeLimits limits;
  limits.max_payload_len = packet_capacity - middle_packet->headers_size();

  RTC_DCHECK_GE(single_packet->headers_size(), middle_packet->headers_size());
  limits.single_packet_reduction_len =
      single_packet->headers_size() - middle_packet->headers_size();

  RTC_DCHECK_GE(first_packet->headers_size(), middle_packet->headers_size());
  limits.first_packet_reduction_len =
      first_packet->headers_size() - middle_packet->headers_size();

  RTC_DCHECK_GE(last_packet->headers_size(), middle_packet->headers_size());
  limits.last_packet_reduction_len =
      last_packet->headers_size() - middle_packet->headers_size();

  bool has_generic_descriptor =
      first_packet->HasExtension<RtpGenericFrameDescriptorExtension00>() ||
      first_packet->HasExtension<RtpDependencyDescriptorExtension>();

  // Minimization of the vp8 descriptor may erase temporal_id, so save it.
  const uint8_t temporal_id = GetTemporalId(video_header);
  if (has_generic_descriptor) {
    MinimizeDescriptor(&video_header);
  }

  // TODO(benwright@webrtc.org) - Allocate enough to always encrypt inline.
  rtc::Buffer encrypted_video_payload;
  if (frame_encryptor_ != nullptr) {
    if (!has_generic_descriptor) {
      return false;
    }

    const size_t max_ciphertext_size =
        frame_encryptor_->GetMaxCiphertextByteSize(cricket::MEDIA_TYPE_VIDEO,
                                                   payload.size());
    encrypted_video_payload.SetSize(max_ciphertext_size);

    size_t bytes_written = 0;

    // Enable header authentication if the field trial isn't disabled.
    std::vector<uint8_t> additional_data;
    if (generic_descriptor_auth_experiment_) {
      additional_data = RtpDescriptorAuthentication(video_header);
    }

    if (frame_encryptor_->Encrypt(
            cricket::MEDIA_TYPE_VIDEO, first_packet->Ssrc(), additional_data,
            payload, encrypted_video_payload, &bytes_written) != 0) {
      return false;
    }

    encrypted_video_payload.SetSize(bytes_written);
    payload = encrypted_video_payload;
  } else if (require_frame_encryption_) {
    RTC_LOG(LS_WARNING)
        << "No FrameEncryptor is attached to this video sending stream but "
           "one is required since require_frame_encryption is set";
  }

  std::unique_ptr<RtpPacketizer> packetizer = RtpPacketizer::Create(
      codec_type, payload, limits, video_header, fragmentation);

  // TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be
  // replaced by expected_retransmission_time_ms.has_value(). For now, though,
  // only VP8 with an injected frame buffer controller actually controls it.
  const bool allow_retransmission =
      expected_retransmission_time_ms.has_value()
          ? AllowRetransmission(temporal_id, retransmission_settings,
                                expected_retransmission_time_ms.value())
          : false;
  const size_t num_packets = packetizer->NumPackets();

  size_t unpacketized_payload_size;
  if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
    unpacketized_payload_size = 0;
    for (uint16_t i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
      unpacketized_payload_size += fragmentation->fragmentationLength[i];
    }
  } else {
    unpacketized_payload_size = payload.size();
  }

  if (num_packets == 0)
    return false;

  bool first_frame = first_frame_sent_();
  std::vector<std::unique_ptr<RtpPacketToSend>> rtp_packets;
  for (size_t i = 0; i < num_packets; ++i) {
    std::unique_ptr<RtpPacketToSend> packet;
    int expected_payload_capacity;
    // Choose the right packet template:
    if (num_packets == 1) {
      packet = std::move(single_packet);
      expected_payload_capacity =
          limits.max_payload_len - limits.single_packet_reduction_len;
    } else if (i == 0) {
      packet = std::move(first_packet);
      expected_payload_capacity =
          limits.max_payload_len - limits.first_packet_reduction_len;
    } else if (i == num_packets - 1) {
      packet = std::move(last_packet);
      expected_payload_capacity =
          limits.max_payload_len - limits.last_packet_reduction_len;
    } else {
      packet = std::make_unique<RtpPacketToSend>(*middle_packet);
      expected_payload_capacity = limits.max_payload_len;
    }

    packet->set_first_packet_of_frame(i == 0);

    if (!packetizer->NextPacket(packet.get()))
      return false;
    RTC_DCHECK_LE(packet->payload_size(), expected_payload_capacity);
    if (!rtp_sender_->AssignSequenceNumber(packet.get()))
      return false;

    // No FEC protection for upper temporal layers, if used.
    bool protect_packet = temporal_id == 0 || temporal_id == kNoTemporalIdx;

    packet->set_allow_retransmission(allow_retransmission);

    // Put the packetization finish timestamp into the extension.
    if (packet->HasExtension<VideoTimingExtension>()) {
      packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds());
    }

    if (protect_packet && fec_generator_) {
      if (red_enabled() &&
          exclude_transport_sequence_number_from_fec_experiment_) {
        // See the comments at the top of the file for why the
        // "WebRTC-ExcludeTransportSequenceNumberFromFec" experiment is needed
        // in conjunction with datagram transport.
        // TODO(sukhanov): We may also need to implement it for flexfec_sender
        // if we decide to keep this approach in the future.
        uint16_t transport_sequence_number;
        if (packet->GetExtension<webrtc::TransportSequenceNumber>(
                &transport_sequence_number)) {
          if (!packet->RemoveExtension(webrtc::TransportSequenceNumber::kId)) {
            RTC_NOTREACHED()
                << "Failed to remove transport sequence number, packet="
                << packet->ToString();
          }
        }
      }

      fec_generator_->AddPacketAndGenerateFec(*packet);
    }

    if (red_enabled()) {
      std::unique_ptr<RtpPacketToSend> red_packet(new RtpPacketToSend(*packet));
      BuildRedPayload(*packet, red_packet.get());
      red_packet->SetPayloadType(*red_payload_type_);

      // Send |red_packet| instead of |packet| for the allocated sequence
      // number.
      red_packet->set_packet_type(RtpPacketMediaType::kVideo);
      red_packet->set_allow_retransmission(packet->allow_retransmission());
      rtp_packets.emplace_back(std::move(red_packet));
    } else {
      packet->set_packet_type(RtpPacketMediaType::kVideo);
      rtp_packets.emplace_back(std::move(packet));
    }

    if (first_frame) {
      if (i == 0) {
        RTC_LOG(LS_INFO)
            << "Sent first RTP packet of the first video frame (pre-pacer)";
      }
      if (i == num_packets - 1) {
        RTC_LOG(LS_INFO)
            << "Sent last RTP packet of the first video frame (pre-pacer)";
      }
    }
  }

  if (fec_generator_) {
    // Fetch any FEC packets generated from the media frame and add them to
    // the list of packets to send.
    auto fec_packets = fec_generator_->GetFecPackets();

    // TODO(bugs.webrtc.org/11340): Move sequence number assignment into
    // UlpfecGenerator.
    const bool generate_sequence_numbers = !fec_generator_->FecSsrc();
    for (auto& fec_packet : fec_packets) {
      if (generate_sequence_numbers) {
        rtp_sender_->AssignSequenceNumber(fec_packet.get());
      }
      rtp_packets.emplace_back(std::move(fec_packet));
    }
  }

  LogAndSendToNetwork(std::move(rtp_packets), unpacketized_payload_size);

  // Update details about the last sent frame.
  last_rotation_ = video_header.rotation;

  if (video_header.color_space != last_color_space_) {
    last_color_space_ = video_header.color_space;
    transmit_color_space_next_frame_ = !IsBaseLayer(video_header);
  } else {
    transmit_color_space_next_frame_ =
        transmit_color_space_next_frame_ ? !IsBaseLayer(video_header) : false;
  }

  if (video_header.frame_type == VideoFrameType::kVideoFrameKey ||
      (IsBaseLayer(video_header) &&
       !(video_header.generic.has_value() ? video_header.generic->discardable
                                          : false))) {
    // This frame has guaranteed delivery, no need to populate playout
    // delay extensions until it changes again.
    playout_delay_pending_ = false;
  }

  TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp",
                         rtp_timestamp);
  return true;
}

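// Entry point for sending an encoded frame: if a frame transformer is
// configured, the frame is handed to the transformer delegate and sent
// asynchronously once transformed; otherwise it is packetized and sent
// immediately via SendVideo().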
bool RTPSenderVideo::SendEncodedImage(
    int payload_type,
    absl::optional<VideoCodecType> codec_type,
    uint32_t rtp_timestamp,
    const EncodedImage& encoded_image,
    const RTPFragmentationHeader* fragmentation,
    RTPVideoHeader video_header,
    absl::optional<int64_t> expected_retransmission_time_ms) {
  if (frame_transformer_delegate_) {
    // The frame will be sent async once transformed.
    return frame_transformer_delegate_->TransformFrame(
        payload_type, codec_type, rtp_timestamp, encoded_image, fragmentation,
        video_header, expected_retransmission_time_ms, rtp_sender_->SSRC());
  }
  return SendVideo(payload_type, codec_type, rtp_timestamp,
                   encoded_image.capture_time_ms_, encoded_image, fragmentation,
                   video_header, expected_retransmission_time_ms);
}

uint32_t RTPSenderVideo::VideoBitrateSent() const {
  rtc::CritScope cs(&stats_crit_);
  return video_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0);
}

uint32_t RTPSenderVideo::FecOverheadRate() const {
  return fec_generator_ ? fec_generator_->CurrentFecRate().bps<uint32_t>() : 0u;
}

uint32_t RTPSenderVideo::PacketizationOverheadBps() const {
  rtc::CritScope cs(&stats_crit_);
  return packetization_overhead_bitrate_.Rate(clock_->TimeInMilliseconds())
      .value_or(0);
}

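// Decides whether the packets of a frame in the given temporal layer may be
// stored for retransmission (NACK protection), based on the configured
// retransmission settings and, for higher layers, on whether a
// retransmission could arrive before the next lower-layer frame.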
bool RTPSenderVideo::AllowRetransmission(
    uint8_t temporal_id,
    int32_t retransmission_settings,
    int64_t expected_retransmission_time_ms) {
  if (retransmission_settings == kRetransmitOff)
    return false;

  rtc::CritScope cs(&stats_crit_);
  // Media packet storage.
  if ((retransmission_settings & kConditionallyRetransmitHigherLayers) &&
      UpdateConditionalRetransmit(temporal_id,
                                  expected_retransmission_time_ms)) {
    retransmission_settings |= kRetransmitHigherLayers;
  }

  if (temporal_id == kNoTemporalIdx)
    return true;

  if ((retransmission_settings & kRetransmitBaseLayer) && temporal_id == 0)
    return true;

  if ((retransmission_settings & kRetransmitHigherLayers) && temporal_id > 0)
    return true;

  return false;
}

uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) {
  struct TemporalIdGetter {
    uint8_t operator()(const RTPVideoHeaderVP8& vp8) { return vp8.temporalIdx; }
    uint8_t operator()(const RTPVideoHeaderVP9& vp9) {
      return vp9.temporal_idx;
    }
    uint8_t operator()(const RTPVideoHeaderH264&) { return kNoTemporalIdx; }
    uint8_t operator()(const RTPVideoHeaderLegacyGeneric&) {
      return kNoTemporalIdx;
    }
    uint8_t operator()(const absl::monostate&) { return kNoTemporalIdx; }
  };
  switch (header.codec) {
    case kVideoCodecH264:
      return header.frame_marking.temporal_id;
    default:
      return absl::visit(TemporalIdGetter(), header.video_type_header);
  }
}

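// Returns true if packets in |temporal_id| should be NACK-protected, i.e. if
// it has been too long since a retransmittable frame in this layer, or if no
// frame from a lower layer is expected to arrive before a retransmission of
// this one could.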
bool RTPSenderVideo::UpdateConditionalRetransmit(
    uint8_t temporal_id,
    int64_t expected_retransmission_time_ms) {
  int64_t now_ms = clock_->TimeInMilliseconds();
  // Update stats for any temporal layer.
  TemporalLayerStats* current_layer_stats =
      &frame_stats_by_temporal_layer_[temporal_id];
  current_layer_stats->frame_rate_fp1000s.Update(1, now_ms);
  int64_t tl_frame_interval = now_ms - current_layer_stats->last_frame_time_ms;
  current_layer_stats->last_frame_time_ms = now_ms;

  // Conditional retransmit only applies to upper layers.
  if (temporal_id != kNoTemporalIdx && temporal_id > 0) {
    if (tl_frame_interval >= kMaxUnretransmittableFrameIntervalMs) {
      // Too long since a retransmittable frame in this layer, enable NACK
      // protection.
      return true;
    } else {
      // Estimate when the next frame of any lower layer will be sent.
      const int64_t kUndefined = std::numeric_limits<int64_t>::max();
      int64_t expected_next_frame_time = kUndefined;
      for (int i = temporal_id - 1; i >= 0; --i) {
        TemporalLayerStats* stats = &frame_stats_by_temporal_layer_[i];
        absl::optional<uint32_t> rate = stats->frame_rate_fp1000s.Rate(now_ms);
        if (rate) {
          int64_t tl_next = stats->last_frame_time_ms + 1000000 / *rate;
          if (tl_next - now_ms > -expected_retransmission_time_ms &&
              tl_next < expected_next_frame_time) {
            expected_next_frame_time = tl_next;
          }
        }
      }

      if (expected_next_frame_time == kUndefined ||
          expected_next_frame_time - now_ms > expected_retransmission_time_ms) {
        // The next frame in a lower layer is expected at a later time (or we
        // are unable to tell due to lack of data) than a retransmission is
        // estimated to be able to arrive, so allow this packet to be nacked.
        return true;
      }
    }
  }

  return false;
}

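// Merges a (possibly partially specified) playout delay requested by the
// encoder into |current_playout_delay_| and, if anything changed, marks the
// PlayoutDelayLimits extension as pending so it gets attached to outgoing
// packets.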
void RTPSenderVideo::MaybeUpdateCurrentPlayoutDelay(
    const RTPVideoHeader& header) {
  if (IsNoopDelay(header.playout_delay)) {
    return;
  }

  PlayoutDelay requested_delay = header.playout_delay;

  if (requested_delay.min_ms > PlayoutDelayLimits::kMaxMs ||
      requested_delay.max_ms > PlayoutDelayLimits::kMaxMs) {
    RTC_DLOG(LS_ERROR)
        << "Requested playout delay values out of range, ignored";
    return;
  }
  if (requested_delay.max_ms != -1 &&
      requested_delay.min_ms > requested_delay.max_ms) {
    RTC_DLOG(LS_ERROR) << "Requested playout delay values out of order";
    return;
  }

  if (!playout_delay_pending_) {
    current_playout_delay_ = requested_delay;
    playout_delay_pending_ = true;
    return;
  }

  if ((requested_delay.min_ms == -1 ||
       requested_delay.min_ms == current_playout_delay_.min_ms) &&
      (requested_delay.max_ms == -1 ||
       requested_delay.max_ms == current_playout_delay_.max_ms)) {
    // No change, ignore.
    return;
  }

  if (requested_delay.min_ms == -1) {
    RTC_DCHECK_GE(requested_delay.max_ms, 0);
    requested_delay.min_ms =
        std::min(current_playout_delay_.min_ms, requested_delay.max_ms);
  }
  if (requested_delay.max_ms == -1) {
    requested_delay.max_ms =
        std::max(current_playout_delay_.max_ms, requested_delay.min_ms);
  }

  current_playout_delay_ = requested_delay;
  playout_delay_pending_ = true;
}

}  // namespace webrtc