/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include "video/video_send_stream_impl.h"

#include <algorithm>
#include <string>
#include <utility>

#include "call/rtp_transport_controller_send_interface.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/file.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {
namespace internal {
namespace {
static const int kMinSendSidePacketHistorySize = 600;

// Assume an average video stream has around 3 packets per frame (1 Mbps / 30
// fps / 1400B). A sequence number set of size 5500 can then store the packet
// sequence numbers for at least the last 60 seconds.
static const int kSendSideSeqNumSetMaxSize = 5500;

// We don't do MTU discovery, so assume that we have the standard ethernet MTU.
const size_t kPathMTU = 1500;

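// Creates one RtpRtcp module per media SSRC in |config|. All modules share the
// same transport, pacer, and observer wiring; the FlexFEC sender is attached
// only to the SSRCs listed as protected. The modules are returned with sending
// disabled, so the caller decides when transmission actually starts.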
std::vector<RtpRtcp*> CreateRtpRtcpModules(
    const VideoSendStream::Config& config,
    RtcpIntraFrameObserver* intra_frame_callback,
    RtcpBandwidthObserver* bandwidth_callback,
    RtpTransportControllerSendInterface* transport,
    RtcpRttStats* rtt_stats,
    FlexfecSender* flexfec_sender,
    SendStatisticsProxy* stats_proxy,
    SendDelayStats* send_delay_stats,
    RtcEventLog* event_log,
    RateLimiter* retransmission_rate_limiter,
    OverheadObserver* overhead_observer,
    RtpKeepAliveConfig keepalive_config) {
  RTC_DCHECK_GT(config.rtp.ssrcs.size(), 0);
  RtpRtcp::Configuration configuration;
  configuration.audio = false;
  configuration.receiver_only = false;
  configuration.outgoing_transport = config.send_transport;
  configuration.intra_frame_callback = intra_frame_callback;
  configuration.bandwidth_callback = bandwidth_callback;
  configuration.transport_feedback_callback =
      transport->transport_feedback_observer();
  configuration.rtt_stats = rtt_stats;
  configuration.rtcp_packet_type_counter_observer = stats_proxy;
  configuration.paced_sender = transport->packet_sender();
  configuration.transport_sequence_number_allocator =
      transport->packet_router();
  configuration.send_bitrate_observer = stats_proxy;
  configuration.send_frame_count_observer = stats_proxy;
  configuration.send_side_delay_observer = stats_proxy;
  configuration.send_packet_observer = send_delay_stats;
  configuration.event_log = event_log;
  configuration.retransmission_rate_limiter = retransmission_rate_limiter;
  configuration.overhead_observer = overhead_observer;
  configuration.keepalive_config = keepalive_config;
  configuration.rtcp_interval_config.video_interval_ms =
      config.rtcp.video_report_interval_ms;
  configuration.rtcp_interval_config.audio_interval_ms =
      config.rtcp.audio_report_interval_ms;
  std::vector<RtpRtcp*> modules;
  const std::vector<uint32_t>& flexfec_protected_ssrcs =
      config.rtp.flexfec.protected_media_ssrcs;
  for (uint32_t ssrc : config.rtp.ssrcs) {
    bool enable_flexfec = flexfec_sender != nullptr &&
                          std::find(flexfec_protected_ssrcs.begin(),
                                    flexfec_protected_ssrcs.end(),
                                    ssrc) != flexfec_protected_ssrcs.end();
    configuration.flexfec_sender = enable_flexfec ? flexfec_sender : nullptr;
    RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
    rtp_rtcp->SetSendingStatus(false);
    rtp_rtcp->SetSendingMediaStatus(false);
    rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
    modules.push_back(rtp_rtcp);
  }
  return modules;
}

// TODO(brandtr): Update this function when we support multistream protection.
std::unique_ptr<FlexfecSender> MaybeCreateFlexfecSender(
    const VideoSendStream::Config& config,
    const std::map<uint32_t, RtpState>& suspended_ssrcs) {
  if (config.rtp.flexfec.payload_type < 0) {
    return nullptr;
  }
  RTC_DCHECK_GE(config.rtp.flexfec.payload_type, 0);
  RTC_DCHECK_LE(config.rtp.flexfec.payload_type, 127);
  if (config.rtp.flexfec.ssrc == 0) {
    RTC_LOG(LS_WARNING) << "FlexFEC is enabled, but no FlexFEC SSRC given. "
                           "Therefore disabling FlexFEC.";
    return nullptr;
  }
  if (config.rtp.flexfec.protected_media_ssrcs.empty()) {
    RTC_LOG(LS_WARNING)
        << "FlexFEC is enabled, but no protected media SSRC given. "
           "Therefore disabling FlexFEC.";
    return nullptr;
  }

  if (config.rtp.flexfec.protected_media_ssrcs.size() > 1) {
    RTC_LOG(LS_WARNING)
        << "The supplied FlexfecConfig contained multiple protected "
           "media streams, but our implementation currently only "
           "supports protecting a single media stream. "
           "To avoid confusion, disabling FlexFEC completely.";
    return nullptr;
  }

  const RtpState* rtp_state = nullptr;
  auto it = suspended_ssrcs.find(config.rtp.flexfec.ssrc);
  if (it != suspended_ssrcs.end()) {
    rtp_state = &it->second;
  }

  RTC_DCHECK_EQ(1U, config.rtp.flexfec.protected_media_ssrcs.size());
  return rtc::MakeUnique<FlexfecSender>(
      config.rtp.flexfec.payload_type, config.rtp.flexfec.ssrc,
      config.rtp.flexfec.protected_media_ssrcs[0], config.rtp.mid,
      config.rtp.extensions, RTPSender::FecExtensionSizes(), rtp_state,
      Clock::GetRealTimeClock());
}

bool TransportSeqNumExtensionConfigured(const VideoSendStream::Config& config) {
  const std::vector<RtpExtension>& extensions = config.rtp.extensions;
  return std::find_if(
             extensions.begin(), extensions.end(), [](const RtpExtension& ext) {
               return ext.uri == RtpExtension::kTransportSequenceNumberUri;
             }) != extensions.end();
}

const char kForcedFallbackFieldTrial[] =
    "WebRTC-VP8-Forced-Fallback-Encoder-v2";

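// Parses the forced-fallback field trial. The group string is expected to be
// of the form "Enabled-<min_pixels>,<max_pixels>,<min_bps>"; only the last
// value is used here, as a minimum encoder bitrate in bps.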
absl::optional<int> GetFallbackMinBpsFromFieldTrial() {
  if (!webrtc::field_trial::IsEnabled(kForcedFallbackFieldTrial))
    return absl::nullopt;

  std::string group =
      webrtc::field_trial::FindFullName(kForcedFallbackFieldTrial);
  if (group.empty())
    return absl::nullopt;

  int min_pixels;
  int max_pixels;
  int min_bps;
  if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &min_pixels, &max_pixels,
             &min_bps) != 3) {
    return absl::nullopt;
  }

  if (min_bps <= 0)
    return absl::nullopt;

  return min_bps;
}

int GetEncoderMinBitrateBps() {
  const int kDefaultEncoderMinBitrateBps = 30000;
  return GetFallbackMinBpsFromFieldTrial().value_or(
      kDefaultEncoderMinBitrateBps);
}

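// VP8 and VP9 carry a picture ID, so a receiver can tell that a frame is
// complete without waiting for (or retransmitting) the FEC packets that
// protect it. Other payload types cannot, which matters when combining NACK
// with ULPFEC (see ConfigureProtection below).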
bool PayloadTypeSupportsSkippingFecPackets(const std::string& payload_name) {
  const VideoCodecType codecType = PayloadStringToCodecType(payload_name);
  if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) {
    return true;
  }
  return false;
}

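// Computes the bitrate that padding may be sent at when the encoder does not
// produce enough media. For simulcast, this is the sum of the lower layers'
// target bitrates plus the top layer's minimum bitrate, so the top layer can
// be restarted as soon as real traffic resumes. Illustrative example (values
// are hypothetical): with three layers whose target bitrates are 150/500/1200
// kbps and a top-layer minimum of 800 kbps, padding is sent at up to
// 150 + 500 + 800 = 1450 kbps (or min_transmit_bitrate_bps if that is larger).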
int CalculateMaxPadBitrateBps(std::vector<VideoStream> streams,
                              int min_transmit_bitrate_bps,
                              bool pad_to_min_bitrate) {
  int pad_up_to_bitrate_bps = 0;
  // Calculate max padding bitrate for a multi layer codec.
  if (streams.size() > 1) {
    // Pad to min bitrate of the highest layer.
    pad_up_to_bitrate_bps = streams[streams.size() - 1].min_bitrate_bps;
    // Add target_bitrate_bps of the lower layers.
    for (size_t i = 0; i < streams.size() - 1; ++i)
      pad_up_to_bitrate_bps += streams[i].target_bitrate_bps;
  } else if (pad_to_min_bitrate) {
    pad_up_to_bitrate_bps = streams[0].min_bitrate_bps;
  }

  pad_up_to_bitrate_bps =
      std::max(pad_up_to_bitrate_bps, min_transmit_bitrate_bps);

  return pad_up_to_bitrate_bps;
}

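// The overhead rate is simply overhead_bytes_per_packet * 8 bits *
// packets_per_second, capped at max_overhead_bps. The callers pass the rate
// that the overhead will later be subtracted from as the cap, so the
// subtraction cannot produce a negative payload rate.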
uint32_t CalculateOverheadRateBps(int packets_per_second,
                                  size_t overhead_bytes_per_packet,
                                  uint32_t max_overhead_bps) {
  uint32_t overhead_bps = static_cast<uint32_t>(
      8 * overhead_bytes_per_packet * packets_per_second);
  return std::min(overhead_bps, max_overhead_bps);
}

int CalculatePacketRate(uint32_t bitrate_bps, size_t packet_size_bytes) {
  size_t packet_size_bits = 8 * packet_size_bytes;
  // Ceil for int value of bitrate_bps / packet_size_bits.
  return static_cast<int>((bitrate_bps + packet_size_bits - 1) /
                          packet_size_bits);
}

}  // namespace

// CheckEncoderActivityTask is used for tracking when the encoder last produced
// an encoded video frame. If the encoder has not produced anything in the last
// kEncoderTimeOutMs, we also want to stop sending padding.
class VideoSendStreamImpl::CheckEncoderActivityTask : public rtc::QueuedTask {
 public:
  static const int kEncoderTimeOutMs = 2000;
  explicit CheckEncoderActivityTask(
      const rtc::WeakPtr<VideoSendStreamImpl>& send_stream)
      : activity_(0), send_stream_(std::move(send_stream)), timed_out_(false) {}

  void Stop() {
    RTC_CHECK(task_checker_.CalledSequentially());
    send_stream_.reset();
  }

  void UpdateEncoderActivity() {
    // UpdateEncoderActivity is called from VideoSendStreamImpl::Encoded on
    // whatever thread the real encoder implementation runs on. In the case of
    // hardware encoders, there might be several encoders
    // running in parallel on different threads.
    rtc::AtomicOps::ReleaseStore(&activity_, 1);
  }

 private:
  bool Run() override {
    RTC_CHECK(task_checker_.CalledSequentially());
    if (!send_stream_)
      return true;
    if (!rtc::AtomicOps::AcquireLoad(&activity_)) {
      if (!timed_out_) {
        send_stream_->SignalEncoderTimedOut();
      }
      timed_out_ = true;
    } else if (timed_out_) {
      send_stream_->SignalEncoderActive();
      timed_out_ = false;
    }
    rtc::AtomicOps::ReleaseStore(&activity_, 0);

    rtc::TaskQueue::Current()->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(this), kEncoderTimeOutMs);
    // Return false to prevent this task from being deleted. Ownership has been
    // transferred to the task queue when PostDelayedTask was called.
    return false;
  }
  volatile int activity_;

  rtc::SequencedTaskChecker task_checker_;
  rtc::WeakPtr<VideoSendStreamImpl> send_stream_;
  bool timed_out_;
};

VideoSendStreamImpl::VideoSendStreamImpl(
    SendStatisticsProxy* stats_proxy,
    rtc::TaskQueue* worker_queue,
    CallStats* call_stats,
    RtpTransportControllerSendInterface* transport,
    BitrateAllocatorInterface* bitrate_allocator,
    SendDelayStats* send_delay_stats,
    VideoStreamEncoderInterface* video_stream_encoder,
    RtcEventLog* event_log,
    const VideoSendStream::Config* config,
    int initial_encoder_max_bitrate,
    double initial_encoder_bitrate_priority,
    std::map<uint32_t, RtpState> suspended_ssrcs,
    std::map<uint32_t, RtpPayloadState> suspended_payload_states,
    VideoEncoderConfig::ContentType content_type,
    std::unique_ptr<FecController> fec_controller,
    RateLimiter* retransmission_limiter)
    : send_side_bwe_with_overhead_(
          webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
      stats_proxy_(stats_proxy),
      config_(config),
      suspended_ssrcs_(std::move(suspended_ssrcs)),
      fec_controller_(std::move(fec_controller)),
      module_process_thread_(nullptr),
      worker_queue_(worker_queue),
      check_encoder_activity_task_(nullptr),
      call_stats_(call_stats),
      transport_(transport),
      bitrate_allocator_(bitrate_allocator),
      flexfec_sender_(MaybeCreateFlexfecSender(*config_, suspended_ssrcs_)),
      max_padding_bitrate_(0),
      encoder_min_bitrate_bps_(0),
      encoder_target_rate_bps_(0),
      encoder_bitrate_priority_(initial_encoder_bitrate_priority),
      has_packet_feedback_(false),
      video_stream_encoder_(video_stream_encoder),
      encoder_feedback_(Clock::GetRealTimeClock(),
                        config_->rtp.ssrcs,
                        video_stream_encoder),
      bandwidth_observer_(transport->GetBandwidthObserver()),
      rtp_rtcp_modules_(CreateRtpRtcpModules(*config_,
                                             &encoder_feedback_,
                                             bandwidth_observer_,
                                             transport,
                                             call_stats,
                                             flexfec_sender_.get(),
                                             stats_proxy_,
                                             send_delay_stats,
                                             event_log,
                                             retransmission_limiter,
                                             this,
                                             transport->keepalive_config())),
      payload_router_(rtp_rtcp_modules_,
                      config_->rtp.ssrcs,
                      config_->rtp.payload_type,
                      suspended_payload_states),
      weak_ptr_factory_(this),
      overhead_bytes_per_packet_(0),
      transport_overhead_bytes_per_packet_(0) {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_LOG(LS_INFO) << "VideoSendStreamInternal: " << config_->ToString();
  weak_ptr_ = weak_ptr_factory_.GetWeakPtr();
  module_process_thread_checker_.DetachFromThread();

  RTC_DCHECK(!config_->rtp.ssrcs.empty());
  RTC_DCHECK(call_stats_);
  RTC_DCHECK(transport_);
  RTC_DCHECK_NE(initial_encoder_max_bitrate, 0);

  if (initial_encoder_max_bitrate > 0) {
    encoder_max_bitrate_bps_ =
        rtc::dchecked_cast<uint32_t>(initial_encoder_max_bitrate);
  } else {
    // TODO(srte): Make sure max bitrate is not set to negative values. We
    // don't have any way to handle unset values in downstream code, such as
    // the bitrate allocator. Previously -1 was implicitly cast to UINT32_MAX,
    // a behavior that is not safe. Converting to 10 Mbps should be safe for
    // reasonable use cases as it allows adding the max of multiple streams
    // without wrapping around.
    const int kFallbackMaxBitrateBps = 10000000;
    RTC_DLOG(LS_ERROR) << "ERROR: Initial encoder max bitrate = "
                       << initial_encoder_max_bitrate << " which is <= 0!";
    RTC_DLOG(LS_INFO) << "Using default encoder max bitrate = 10 Mbps";
    encoder_max_bitrate_bps_ = kFallbackMaxBitrateBps;
  }

  RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled());
  // If send-side BWE is enabled, check if we should apply updated probing and
  // pacing settings.
  if (TransportSeqNumExtensionConfigured(*config_)) {
    has_packet_feedback_ = true;

    absl::optional<AlrExperimentSettings> alr_settings;
    if (content_type == VideoEncoderConfig::ContentType::kScreen) {
      alr_settings = AlrExperimentSettings::CreateFromFieldTrial(
          AlrExperimentSettings::kScreenshareProbingBweExperimentName);
    } else {
      alr_settings = AlrExperimentSettings::CreateFromFieldTrial(
          AlrExperimentSettings::kStrictPacingAndProbingExperimentName);
    }
    if (alr_settings) {
      transport->EnablePeriodicAlrProbing(true);
      transport->SetPacingFactor(alr_settings->pacing_factor);
      configured_pacing_factor_ = alr_settings->pacing_factor;
      transport->SetQueueTimeLimit(alr_settings->max_paced_queue_time);
    } else {
      transport->EnablePeriodicAlrProbing(false);
      transport->SetPacingFactor(PacedSender::kDefaultPaceMultiplier);
      configured_pacing_factor_ = PacedSender::kDefaultPaceMultiplier;
      transport->SetQueueTimeLimit(PacedSender::kMaxQueueLengthMs);
    }
  }

  if (config_->periodic_alr_bandwidth_probing) {
    transport->EnablePeriodicAlrProbing(true);
  }

  // RTP/RTCP initialization.

  // We add the highest spatial layer first to ensure it'll be prioritized
  // when sending padding, with the hope that the packet rate will be smaller,
  // and that it's more important to protect than the lower layers.
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    constexpr bool remb_candidate = true;
    transport->packet_router()->AddSendRtpModule(rtp_rtcp, remb_candidate);
  }

  for (size_t i = 0; i < config_->rtp.extensions.size(); ++i) {
    const std::string& extension = config_->rtp.extensions[i].uri;
    int id = config_->rtp.extensions[i].id;
    // One-byte-extension local identifiers are in the range 1-14 inclusive.
    RTC_DCHECK_GE(id, 1);
    RTC_DCHECK_LE(id, 14);
    RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension));
    for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
      RTC_CHECK_EQ(0, rtp_rtcp->RegisterSendRtpHeaderExtension(
                          StringToRtpExtensionType(extension), id));
    }
  }

  ConfigureProtection();
  ConfigureSsrcs();

  if (!config_->rtp.mid.empty()) {
    for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
      rtp_rtcp->SetMid(config_->rtp.mid);
    }
  }

  // TODO(pbos): Should we set CNAME on all RTP modules?
  rtp_rtcp_modules_.front()->SetCNAME(config_->rtp.c_name.c_str());

  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    rtp_rtcp->RegisterRtcpStatisticsCallback(stats_proxy_);
    rtp_rtcp->RegisterSendChannelRtpStatisticsCallback(stats_proxy_);
    rtp_rtcp->SetMaxRtpPacketSize(config_->rtp.max_packet_size);
    rtp_rtcp->RegisterVideoSendPayload(config_->rtp.payload_type,
                                       config_->rtp.payload_name.c_str());
  }

  fec_controller_->SetProtectionCallback(this);
  // Signal congestion controller this object is ready for OnPacket* callbacks.
  if (fec_controller_->UseLossVectorMask()) {
    transport_->RegisterPacketFeedbackObserver(this);
  }

  RTC_DCHECK_GE(config_->rtp.payload_type, 0);
  RTC_DCHECK_LE(config_->rtp.payload_type, 127);

  video_stream_encoder_->SetStartBitrate(
      bitrate_allocator_->GetStartBitrate(this));

  // Only request rotation at the source when we positively know that the
  // remote side doesn't support the rotation extension. This allows us to
  // prepare the encoder in the expectation that rotation is supported - which
  // is the common case.
  bool rotation_applied =
      std::find_if(config_->rtp.extensions.begin(),
                   config_->rtp.extensions.end(),
                   [](const RtpExtension& extension) {
                     return extension.uri == RtpExtension::kVideoRotationUri;
                   }) == config_->rtp.extensions.end();

  video_stream_encoder_->SetSink(this, rotation_applied);
}

void VideoSendStreamImpl::RegisterProcessThread(
    ProcessThread* module_process_thread) {
  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
  RTC_DCHECK(!module_process_thread_);
  module_process_thread_ = module_process_thread;

  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
    module_process_thread_->RegisterModule(rtp_rtcp, RTC_FROM_HERE);
}

void VideoSendStreamImpl::DeRegisterProcessThread() {
  RTC_DCHECK_RUN_ON(&module_process_thread_checker_);
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
    module_process_thread_->DeRegisterModule(rtp_rtcp);
}

VideoSendStreamImpl::~VideoSendStreamImpl() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_DCHECK(!payload_router_.IsActive())
      << "VideoSendStreamImpl::Stop not called";
  RTC_LOG(LS_INFO) << "~VideoSendStreamInternal: " << config_->ToString();
  if (fec_controller_->UseLossVectorMask()) {
    transport_->DeRegisterPacketFeedbackObserver(this);
  }
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    transport_->packet_router()->RemoveSendRtpModule(rtp_rtcp);
    delete rtp_rtcp;
  }
}

bool VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) {
  // Runs on a network thread.
  RTC_DCHECK(!worker_queue_->IsCurrent());
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
    rtp_rtcp->IncomingRtcpPacket(packet, length);
  return true;
}

void VideoSendStreamImpl::UpdateActiveSimulcastLayers(
    const std::vector<bool> active_layers) {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_DCHECK_EQ(rtp_rtcp_modules_.size(), active_layers.size());
  RTC_LOG(LS_INFO) << "VideoSendStream::UpdateActiveSimulcastLayers";
  bool previously_active = payload_router_.IsActive();
  payload_router_.SetActiveModules(active_layers);
  if (!payload_router_.IsActive() && previously_active) {
    // Payload router switched from active to inactive.
    StopVideoSendStream();
  } else if (payload_router_.IsActive() && !previously_active) {
    // Payload router switched from inactive to active.
    StartupVideoSendStream();
  }
}

void VideoSendStreamImpl::Start() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_LOG(LS_INFO) << "VideoSendStream::Start";
  if (payload_router_.IsActive())
    return;
  TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start");
  payload_router_.SetActive(true);
  StartupVideoSendStream();
}

void VideoSendStreamImpl::StartupVideoSendStream() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  bitrate_allocator_->AddObserver(
      this,
      MediaStreamAllocationConfig{
          static_cast<uint32_t>(encoder_min_bitrate_bps_),
          encoder_max_bitrate_bps_, static_cast<uint32_t>(max_padding_bitrate_),
          !config_->suspend_below_min_bitrate, config_->track_id,
          encoder_bitrate_priority_, has_packet_feedback_});
  // Start monitoring encoder activity.
  {
    rtc::CritScope lock(&encoder_activity_crit_sect_);
    RTC_DCHECK(!check_encoder_activity_task_);
    check_encoder_activity_task_ = new CheckEncoderActivityTask(weak_ptr_);
    worker_queue_->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(check_encoder_activity_task_),
        CheckEncoderActivityTask::kEncoderTimeOutMs);
  }

  video_stream_encoder_->SendKeyFrame();
}

void VideoSendStreamImpl::Stop() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_LOG(LS_INFO) << "VideoSendStream::Stop";
  if (!payload_router_.IsActive())
    return;
  TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop");
  payload_router_.SetActive(false);
  StopVideoSendStream();
}

void VideoSendStreamImpl::StopVideoSendStream() {
  bitrate_allocator_->RemoveObserver(this);
  {
    rtc::CritScope lock(&encoder_activity_crit_sect_);
    check_encoder_activity_task_->Stop();
    check_encoder_activity_task_ = nullptr;
  }
  video_stream_encoder_->OnBitrateUpdated(0, 0, 0);
  stats_proxy_->OnSetEncoderTargetRate(0);
}

void VideoSendStreamImpl::SignalEncoderTimedOut() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  // If the encoder has not produced anything in the last kEncoderTimeOutMs
  // even though it is supposed to, deregister as a BitrateAllocatorObserver.
  // This can happen if a camera stops producing frames.
  if (encoder_target_rate_bps_ > 0) {
    RTC_LOG(LS_INFO) << "SignalEncoderTimedOut, Encoder timed out.";
    bitrate_allocator_->RemoveObserver(this);
  }
}

void VideoSendStreamImpl::OnBitrateAllocationUpdated(
    const VideoBitrateAllocation& allocation) {
  payload_router_.OnBitrateAllocationUpdated(allocation);
}

void VideoSendStreamImpl::SignalEncoderActive() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_LOG(LS_INFO) << "SignalEncoderActive, Encoder is active.";
  bitrate_allocator_->AddObserver(
      this,
      MediaStreamAllocationConfig{
          static_cast<uint32_t>(encoder_min_bitrate_bps_),
          encoder_max_bitrate_bps_, static_cast<uint32_t>(max_padding_bitrate_),
          !config_->suspend_below_min_bitrate, config_->track_id,
          encoder_bitrate_priority_, has_packet_feedback_});
}

void VideoSendStreamImpl::OnEncoderConfigurationChanged(
    std::vector<VideoStream> streams,
    int min_transmit_bitrate_bps) {
  if (!worker_queue_->IsCurrent()) {
    rtc::WeakPtr<VideoSendStreamImpl> send_stream = weak_ptr_;
    worker_queue_->PostTask([send_stream, streams, min_transmit_bitrate_bps]() {
      if (send_stream)
        send_stream->OnEncoderConfigurationChanged(std::move(streams),
                                                   min_transmit_bitrate_bps);
    });
    return;
  }
  RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size());
  TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged");
  RTC_DCHECK_RUN_ON(worker_queue_);

  encoder_min_bitrate_bps_ =
      std::max(streams[0].min_bitrate_bps, GetEncoderMinBitrateBps());
  encoder_max_bitrate_bps_ = 0;
  double stream_bitrate_priority_sum = 0;
  for (const auto& stream : streams) {
    // Don't allocate bitrate to streams that are not active.
    encoder_max_bitrate_bps_ += stream.active ? stream.max_bitrate_bps : 0;
    if (stream.bitrate_priority) {
      RTC_DCHECK_GT(*stream.bitrate_priority, 0);
      stream_bitrate_priority_sum += *stream.bitrate_priority;
    }
  }
  RTC_DCHECK_GT(stream_bitrate_priority_sum, 0);
  encoder_bitrate_priority_ = stream_bitrate_priority_sum;
  encoder_max_bitrate_bps_ =
      std::max(static_cast<uint32_t>(encoder_min_bitrate_bps_),
               encoder_max_bitrate_bps_);

  const VideoCodecType codec_type =
      PayloadStringToCodecType(config_->rtp.payload_name);
  if (codec_type == kVideoCodecVP9) {
    max_padding_bitrate_ = streams[0].target_bitrate_bps;
  } else {
    max_padding_bitrate_ = CalculateMaxPadBitrateBps(
        streams, min_transmit_bitrate_bps, config_->suspend_below_min_bitrate);
  }

  // Clear stats for disabled layers.
  for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) {
    stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]);
  }

  const size_t num_temporal_layers =
      streams.back().num_temporal_layers.value_or(1);
  fec_controller_->SetEncodingData(streams[0].width, streams[0].height,
                                   num_temporal_layers,
                                   config_->rtp.max_packet_size);

  if (payload_router_.IsActive()) {
    // The send stream is started already. Update the allocator with new
    // bitrate limits.
    bitrate_allocator_->AddObserver(
        this, MediaStreamAllocationConfig{
                  static_cast<uint32_t>(encoder_min_bitrate_bps_),
                  encoder_max_bitrate_bps_,
                  static_cast<uint32_t>(max_padding_bitrate_),
                  !config_->suspend_below_min_bitrate, config_->track_id,
                  encoder_bitrate_priority_, has_packet_feedback_});
  }
}

EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info,
    const RTPFragmentationHeader* fragmentation) {
  // Encoded is called on whatever thread the real encoder implementation runs
  // on. In the case of hardware encoders, there might be several encoders
  // running in parallel on different threads.
  RTC_DCHECK(codec_specific_info);
  size_t simulcast_idx = 0;
  if (codec_specific_info->codecType == kVideoCodecVP8) {
    simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx;
  }
  if (config_->post_encode_callback) {
    config_->post_encode_callback->EncodedFrameCallback(EncodedFrame(
        encoded_image._buffer, encoded_image._length, encoded_image._frameType,
        simulcast_idx, encoded_image._timeStamp));
  }
  {
    rtc::CritScope lock(&encoder_activity_crit_sect_);
    if (check_encoder_activity_task_)
      check_encoder_activity_task_->UpdateEncoderActivity();
  }

  fec_controller_->UpdateWithEncodedData(encoded_image._length,
                                         encoded_image._frameType);
  EncodedImageCallback::Result result = payload_router_.OnEncodedImage(
      encoded_image, codec_specific_info, fragmentation);

  int layer = codec_specific_info->codecType == kVideoCodecVP8
                  ? codec_specific_info->codecSpecific.VP8.simulcastIdx
                  : 0;
  {
    rtc::CritScope lock(&ivf_writers_crit_);
    if (file_writers_[layer].get()) {
      bool ok = file_writers_[layer]->WriteFrame(
          encoded_image, codec_specific_info->codecType);
      RTC_DCHECK(ok);
    }
  }

  return result;
}

void VideoSendStreamImpl::ConfigureProtection() {
  RTC_DCHECK_RUN_ON(worker_queue_);

  // Consistency of FlexFEC parameters is checked in MaybeCreateFlexfecSender.
  const bool flexfec_enabled = (flexfec_sender_ != nullptr);

  // Consistency of NACK and RED+ULPFEC parameters is checked in this function.
  const bool nack_enabled = config_->rtp.nack.rtp_history_ms > 0;
  int red_payload_type = config_->rtp.ulpfec.red_payload_type;
  int ulpfec_payload_type = config_->rtp.ulpfec.ulpfec_payload_type;

  // Shorthands.
  auto IsRedEnabled = [&]() { return red_payload_type >= 0; };
  auto DisableRed = [&]() { red_payload_type = -1; };
  auto IsUlpfecEnabled = [&]() { return ulpfec_payload_type >= 0; };
  auto DisableUlpfec = [&]() { ulpfec_payload_type = -1; };

  if (webrtc::field_trial::IsEnabled("WebRTC-DisableUlpFecExperiment")) {
    RTC_LOG(LS_INFO) << "Experiment to disable sending ULPFEC is enabled.";
    DisableUlpfec();
  }

  // If enabled, FlexFEC takes priority over RED+ULPFEC.
  if (flexfec_enabled) {
    // We can safely disable RED here, because if the remote supports FlexFEC,
    // we know that it has a receiver without the RED/RTX workaround.
    // See http://crbug.com/webrtc/6650 for more information.
    if (IsRedEnabled()) {
      RTC_LOG(LS_INFO) << "Both FlexFEC and RED are configured. Disabling RED.";
      DisableRed();
    }
    if (IsUlpfecEnabled()) {
      RTC_LOG(LS_INFO)
          << "Both FlexFEC and ULPFEC are configured. Disabling ULPFEC.";
      DisableUlpfec();
    }
  }

  // For payload types without a picture ID, the receiver cannot determine that
  // a stream is complete without the FEC being retransmitted, so using
  // ULPFEC + NACK for H.264 (for instance) is a waste of bandwidth since FEC
  // packets still have to be transmitted. Note that this is not the case with
  // FlexFEC.
  if (nack_enabled && IsUlpfecEnabled() &&
      !PayloadTypeSupportsSkippingFecPackets(config_->rtp.payload_name)) {
    RTC_LOG(LS_WARNING)
        << "Transmitting payload type without picture ID using "
           "NACK+ULPFEC is a waste of bandwidth since ULPFEC packets "
           "also have to be retransmitted. Disabling ULPFEC.";
    DisableUlpfec();
  }

  // Verify payload types.
  //
  // Due to how old receivers work, we need to always send RED if it has been
  // negotiated. This is a remnant of an old RED/RTX workaround, see
  // https://codereview.webrtc.org/2469093003.
  // TODO(brandtr): This change went into M56, so we can remove it in ~M59.
  // At that time, we can disable RED whenever ULPFEC is disabled, as there is
  // no point in using RED without ULPFEC.
  if (IsRedEnabled()) {
    RTC_DCHECK_GE(red_payload_type, 0);
    RTC_DCHECK_LE(red_payload_type, 127);
  }
  if (IsUlpfecEnabled()) {
    RTC_DCHECK_GE(ulpfec_payload_type, 0);
    RTC_DCHECK_LE(ulpfec_payload_type, 127);
    if (!IsRedEnabled()) {
      RTC_LOG(LS_WARNING)
          << "ULPFEC is enabled but RED is disabled. Disabling ULPFEC.";
      DisableUlpfec();
    }
  }

  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    // Set NACK.
    rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize);
    // Set RED/ULPFEC information.
    rtp_rtcp->SetUlpfecConfig(red_payload_type, ulpfec_payload_type);
  }

  // Currently, both ULPFEC and FlexFEC use the same FEC rate calculation logic,
  // so enable that logic if either of those FEC schemes are enabled.
  fec_controller_->SetProtectionMethod(flexfec_enabled || IsUlpfecEnabled(),
                                       nack_enabled);
}

void VideoSendStreamImpl::ConfigureSsrcs() {
  RTC_DCHECK_RUN_ON(worker_queue_);
  // Configure regular SSRCs.
  for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) {
    uint32_t ssrc = config_->rtp.ssrcs[i];
    RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i];
    rtp_rtcp->SetSSRC(ssrc);

    // Restore RTP state if a previous one existed.
    VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
    if (it != suspended_ssrcs_.end())
      rtp_rtcp->SetRtpState(it->second);
  }

  // Set up RTX if available.
  if (config_->rtp.rtx.ssrcs.empty())
    return;

  // Configure RTX SSRCs.
  RTC_DCHECK_EQ(config_->rtp.rtx.ssrcs.size(), config_->rtp.ssrcs.size());
  for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = config_->rtp.rtx.ssrcs[i];
    RtpRtcp* const rtp_rtcp = rtp_rtcp_modules_[i];
    rtp_rtcp->SetRtxSsrc(ssrc);
    VideoSendStream::RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
    if (it != suspended_ssrcs_.end())
      rtp_rtcp->SetRtxState(it->second);
  }

  // Configure RTX payload types.
  RTC_DCHECK_GE(config_->rtp.rtx.payload_type, 0);
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    rtp_rtcp->SetRtxSendPayloadType(config_->rtp.rtx.payload_type,
                                    config_->rtp.payload_type);
    rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads);
  }
  if (config_->rtp.ulpfec.red_payload_type != -1 &&
      config_->rtp.ulpfec.red_rtx_payload_type != -1) {
    for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
      rtp_rtcp->SetRtxSendPayloadType(config_->rtp.ulpfec.red_rtx_payload_type,
                                      config_->rtp.ulpfec.red_payload_type);
    }
  }
}

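// Snapshots the current RTP state for every media, RTX and FlexFEC SSRC so
// that it can be handed back as |suspended_ssrcs| if the stream is later
// recreated (see the state restoration in ConfigureSsrcs and
// MaybeCreateFlexfecSender above).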
std::map<uint32_t, RtpState> VideoSendStreamImpl::GetRtpStates() const {
  RTC_DCHECK_RUN_ON(worker_queue_);
  std::map<uint32_t, RtpState> rtp_states;

  for (size_t i = 0; i < config_->rtp.ssrcs.size(); ++i) {
    uint32_t ssrc = config_->rtp.ssrcs[i];
    RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC());
    rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState();
  }

  for (size_t i = 0; i < config_->rtp.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = config_->rtp.rtx.ssrcs[i];
    rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState();
  }

  if (flexfec_sender_) {
    uint32_t ssrc = config_->rtp.flexfec.ssrc;
    rtp_states[ssrc] = flexfec_sender_->GetRtpState();
  }

  return rtp_states;
}

std::map<uint32_t, RtpPayloadState> VideoSendStreamImpl::GetRtpPayloadStates()
    const {
  RTC_DCHECK_RUN_ON(worker_queue_);
  return payload_router_.GetRtpPayloadStates();
}

void VideoSendStreamImpl::SignalNetworkState(NetworkState state) {
  RTC_DCHECK_RUN_ON(worker_queue_);
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_->rtp.rtcp_mode
                                                : RtcpMode::kOff);
  }
}

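// Called by the bitrate allocator with the rate granted to this stream. When
// the "WebRTC-SendSideBwe-WithOverhead" trial is enabled, packetization and
// transport overhead is first subtracted from the allocation; the FEC
// controller then reserves protection bitrate, and the remainder is handed to
// the encoder. The value returned to the allocator is the bitrate spent on
// protection (which includes overhead under that trial).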
uint32_t VideoSendStreamImpl::OnBitrateUpdated(uint32_t bitrate_bps,
                                               uint8_t fraction_loss,
                                               int64_t rtt,
                                               int64_t probing_interval_ms) {
  RTC_DCHECK_RUN_ON(worker_queue_);
  RTC_DCHECK(payload_router_.IsActive())
      << "VideoSendStream::Start has not been called.";

  // Subtract overhead from bitrate.
  rtc::CritScope lock(&overhead_bytes_per_packet_crit_);
  uint32_t payload_bitrate_bps = bitrate_bps;
  if (send_side_bwe_with_overhead_) {
    payload_bitrate_bps -= CalculateOverheadRateBps(
        CalculatePacketRate(bitrate_bps,
                            config_->rtp.max_packet_size +
                                transport_overhead_bytes_per_packet_),
        overhead_bytes_per_packet_ + transport_overhead_bytes_per_packet_,
        bitrate_bps);
  }

  // Get the encoder target rate. It is the estimated network rate -
  // protection overhead.
  encoder_target_rate_bps_ = fec_controller_->UpdateFecRates(
      payload_bitrate_bps, stats_proxy_->GetSendFrameRate(), fraction_loss,
      loss_mask_vector_, rtt);
  loss_mask_vector_.clear();

  uint32_t encoder_overhead_rate_bps =
      send_side_bwe_with_overhead_
          ? CalculateOverheadRateBps(
                CalculatePacketRate(encoder_target_rate_bps_,
                                    config_->rtp.max_packet_size +
                                        transport_overhead_bytes_per_packet_ -
                                        overhead_bytes_per_packet_),
                overhead_bytes_per_packet_ +
                    transport_overhead_bytes_per_packet_,
                bitrate_bps - encoder_target_rate_bps_)
          : 0;

  // When the field trial "WebRTC-SendSideBwe-WithOverhead" is enabled
  // protection_bitrate includes overhead.
  uint32_t protection_bitrate =
      bitrate_bps - (encoder_target_rate_bps_ + encoder_overhead_rate_bps);

  encoder_target_rate_bps_ =
      std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_);
  video_stream_encoder_->OnBitrateUpdated(encoder_target_rate_bps_,
                                          fraction_loss, rtt);
  stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_);
  return protection_bitrate;
}

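// Attaches one IVF file writer per simulcast layer (up to
// kMaxSimulcastStreams) and requests a key frame so the recording starts with
// a decodable frame. Passing an empty |files| vector closes any writers that
// are currently open.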
void VideoSendStreamImpl::EnableEncodedFrameRecording(
    const std::vector<rtc::PlatformFile>& files,
    size_t byte_limit) {
  {
    rtc::CritScope lock(&ivf_writers_crit_);
    for (unsigned int i = 0; i < kMaxSimulcastStreams; ++i) {
      if (i < files.size()) {
        file_writers_[i] = IvfFileWriter::Wrap(rtc::File(files[i]), byte_limit);
      } else {
        file_writers_[i].reset();
      }
    }
  }

  if (!files.empty()) {
    // Make a keyframe appear as early as possible in the logs, to give
    // actually decodable output.
    video_stream_encoder_->SendKeyFrame();
  }
}

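// FecController callback: pushes the chosen FEC parameters to every RTP module
// and reports back the aggregate video, NACK and FEC send rates across all
// simulcast streams.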
int VideoSendStreamImpl::ProtectionRequest(
    const FecProtectionParams* delta_params,
    const FecProtectionParams* key_params,
    uint32_t* sent_video_rate_bps,
    uint32_t* sent_nack_rate_bps,
    uint32_t* sent_fec_rate_bps) {
  RTC_DCHECK_RUN_ON(worker_queue_);
  *sent_video_rate_bps = 0;
  *sent_nack_rate_bps = 0;
  *sent_fec_rate_bps = 0;
  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    uint32_t not_used = 0;
    uint32_t module_video_rate = 0;
    uint32_t module_fec_rate = 0;
    uint32_t module_nack_rate = 0;
    rtp_rtcp->SetFecParameters(*delta_params, *key_params);
    rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
                          &module_nack_rate);
    *sent_video_rate_bps += module_video_rate;
    *sent_nack_rate_bps += module_nack_rate;
    *sent_fec_rate_bps += module_fec_rate;
  }
  return 0;
}

void VideoSendStreamImpl::OnOverheadChanged(size_t overhead_bytes_per_packet) {
  rtc::CritScope lock(&overhead_bytes_per_packet_crit_);
  overhead_bytes_per_packet_ = overhead_bytes_per_packet;
}

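// Updates the per-packet transport overhead (e.g. IP/UDP headers) and shrinks
// the maximum RTP packet size so that a full packet plus overhead still fits
// within the assumed 1500-byte path MTU.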
void VideoSendStreamImpl::SetTransportOverhead(
    size_t transport_overhead_bytes_per_packet) {
  if (transport_overhead_bytes_per_packet >= kPathMTU) {
    RTC_LOG(LS_ERROR) << "Transport overhead exceeds size of ethernet frame";
    return;
  }

  transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet;

  size_t rtp_packet_size =
      std::min(config_->rtp.max_packet_size,
               kPathMTU - transport_overhead_bytes_per_packet_);

  for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
    rtp_rtcp->SetMaxRtpPacketSize(rtp_packet_size);
  }
}

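// OnPacketAdded records the sequence numbers of packets sent on this stream's
// media SSRCs; when feedback arrives, OnPacketFeedbackVector (below) turns
// them into the loss mask that the FEC controller consumes in
// OnBitrateUpdated. These callbacks are registered via
// RegisterPacketFeedbackObserver in the constructor when the FecController
// uses the loss vector mask.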
void VideoSendStreamImpl::OnPacketAdded(uint32_t ssrc, uint16_t seq_num) {
  if (!worker_queue_->IsCurrent()) {
    auto ptr = weak_ptr_;
    worker_queue_->PostTask([=] {
      if (!ptr.get())
        return;
      ptr->OnPacketAdded(ssrc, seq_num);
    });
    return;
  }
  const auto& ssrcs = config_->rtp.ssrcs;
  if (std::find(ssrcs.begin(), ssrcs.end(), ssrc) != ssrcs.end()) {
    feedback_packet_seq_num_set_.insert(seq_num);
    if (feedback_packet_seq_num_set_.size() > kSendSideSeqNumSetMaxSize) {
      RTC_LOG(LS_WARNING) << "Feedback packet sequence number set exceeds its "
                             "max size; it will get reset.";
      feedback_packet_seq_num_set_.clear();
    }
  }
}

void VideoSendStreamImpl::OnPacketFeedbackVector(
    const std::vector<PacketFeedback>& packet_feedback_vector) {
  if (!worker_queue_->IsCurrent()) {
    auto ptr = weak_ptr_;
    worker_queue_->PostTask([=] {
      if (!ptr.get())
        return;
      ptr->OnPacketFeedbackVector(packet_feedback_vector);
    });
    return;
  }
  // Lost feedback is not treated as packet loss.
  for (const PacketFeedback& packet : packet_feedback_vector) {
    auto it = feedback_packet_seq_num_set_.find(packet.sequence_number);
    if (it != feedback_packet_seq_num_set_.end()) {
      const bool lost = packet.arrival_time_ms == PacketFeedback::kNotReceived;
      loss_mask_vector_.push_back(lost);
      feedback_packet_seq_num_set_.erase(it);
    }
  }
}
}  // namespace internal
}  // namespace webrtc