/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "call/rtp_payload_params.h"

#include <stddef.h>

#include <algorithm>

#include "absl/container/inlined_vector.h"
#include "absl/strings/match.h"
#include "absl/types/variant.h"
#include "api/video/video_timing.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/interface/common_constants.h"
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "modules/video_coding/frame_dependencies_calculator.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/random.h"
#include "rtc_base/time_utils.h"

namespace webrtc {
namespace {

constexpr int kMaxSimulatedSpatialLayers = 3;

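// Copies the codec-specific fields of `info` into the codec-dependent part of
// the RTP video header. For codecs without a dedicated header struct
// (generic, multiplex) only the codec type is recorded.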
void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info,
                                   absl::optional<int> spatial_index,
                                   RTPVideoHeader* rtp) {
  rtp->codec = info.codecType;
  rtp->is_last_frame_in_picture = info.end_of_picture;
  switch (info.codecType) {
    case kVideoCodecVP8: {
      auto& vp8_header = rtp->video_type_header.emplace<RTPVideoHeaderVP8>();
      vp8_header.InitRTPVideoHeaderVP8();
      vp8_header.nonReference = info.codecSpecific.VP8.nonReference;
      vp8_header.temporalIdx = info.codecSpecific.VP8.temporalIdx;
      vp8_header.layerSync = info.codecSpecific.VP8.layerSync;
      vp8_header.keyIdx = info.codecSpecific.VP8.keyIdx;
      return;
    }
    case kVideoCodecVP9: {
      auto& vp9_header = rtp->video_type_header.emplace<RTPVideoHeaderVP9>();
      vp9_header.InitRTPVideoHeaderVP9();
      vp9_header.inter_pic_predicted =
          info.codecSpecific.VP9.inter_pic_predicted;
      vp9_header.flexible_mode = info.codecSpecific.VP9.flexible_mode;
      vp9_header.ss_data_available = info.codecSpecific.VP9.ss_data_available;
      vp9_header.non_ref_for_inter_layer_pred =
          info.codecSpecific.VP9.non_ref_for_inter_layer_pred;
      vp9_header.temporal_idx = info.codecSpecific.VP9.temporal_idx;
      vp9_header.temporal_up_switch = info.codecSpecific.VP9.temporal_up_switch;
      vp9_header.inter_layer_predicted =
          info.codecSpecific.VP9.inter_layer_predicted;
      vp9_header.gof_idx = info.codecSpecific.VP9.gof_idx;
      vp9_header.num_spatial_layers = info.codecSpecific.VP9.num_spatial_layers;
      vp9_header.first_active_layer = info.codecSpecific.VP9.first_active_layer;
      if (vp9_header.num_spatial_layers > 1) {
        vp9_header.spatial_idx = spatial_index.value_or(kNoSpatialIdx);
      } else {
        vp9_header.spatial_idx = kNoSpatialIdx;
      }
      if (info.codecSpecific.VP9.ss_data_available) {
        vp9_header.spatial_layer_resolution_present =
            info.codecSpecific.VP9.spatial_layer_resolution_present;
        if (info.codecSpecific.VP9.spatial_layer_resolution_present) {
          for (size_t i = 0; i < info.codecSpecific.VP9.num_spatial_layers;
               ++i) {
            vp9_header.width[i] = info.codecSpecific.VP9.width[i];
            vp9_header.height[i] = info.codecSpecific.VP9.height[i];
          }
        }
        vp9_header.gof.CopyGofInfoVP9(info.codecSpecific.VP9.gof);
      }

      vp9_header.num_ref_pics = info.codecSpecific.VP9.num_ref_pics;
      for (int i = 0; i < info.codecSpecific.VP9.num_ref_pics; ++i) {
        vp9_header.pid_diff[i] = info.codecSpecific.VP9.p_diff[i];
      }
      vp9_header.end_of_picture = info.end_of_picture;
      return;
    }
    case kVideoCodecH264: {
      auto& h264_header = rtp->video_type_header.emplace<RTPVideoHeaderH264>();
      h264_header.packetization_mode =
          info.codecSpecific.H264.packetization_mode;
      return;
    }
    case kVideoCodecMultiplex:
    case kVideoCodecGeneric:
      rtp->codec = kVideoCodecGeneric;
      return;
    default:
      return;
  }
}

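// Converts the capture-relative timestamps collected by the encoder into the
// delta representation used by the video timing header. Deltas that are only
// known at packetization or send time are left as zero here.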
void SetVideoTiming(const EncodedImage& image, VideoSendTiming* timing) {
  if (image.timing_.flags == VideoSendTiming::TimingFrameFlags::kInvalid ||
      image.timing_.flags == VideoSendTiming::TimingFrameFlags::kNotTriggered) {
    timing->flags = VideoSendTiming::TimingFrameFlags::kInvalid;
    return;
  }

  timing->encode_start_delta_ms = VideoSendTiming::GetDeltaCappedMs(
      image.capture_time_ms_, image.timing_.encode_start_ms);
  timing->encode_finish_delta_ms = VideoSendTiming::GetDeltaCappedMs(
      image.capture_time_ms_, image.timing_.encode_finish_ms);
  timing->packetization_finish_delta_ms = 0;
  timing->pacer_exit_delta_ms = 0;
  timing->network_timestamp_delta_ms = 0;
  timing->network2_timestamp_delta_ms = 0;
  timing->flags = image.timing_.flags;
}

// Returns a structure that aligns with the simulated generic info. The
// templates allow producing a valid dependency descriptor for any stream where
// `num_spatial_layers` * `num_temporal_layers` <= 32 (limited by
// https://aomediacodec.github.io/av1-rtp-spec/#a82-syntax, see
// template_fdiffs()). The set of templates is not tuned for any particular
// structure, so the dependency descriptor will use more bytes on the wire than
// it would with tuned templates.
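// As a rough illustration: for 2 spatial and 2 temporal layers, 4 templates
// are generated, one per (spatial_id, temporal_id) pair; each marks the decode
// targets with spatial and temporal ids greater than or equal to its own as
// kSwitch and the rest as kNotPresent, with one chain per spatial layer.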
FrameDependencyStructure MinimalisticStructure(int num_spatial_layers,
                                               int num_temporal_layers) {
  RTC_DCHECK_LE(num_spatial_layers, DependencyDescriptor::kMaxSpatialIds);
  RTC_DCHECK_LE(num_temporal_layers, DependencyDescriptor::kMaxTemporalIds);
  RTC_DCHECK_LE(num_spatial_layers * num_temporal_layers, 32);
  FrameDependencyStructure structure;
  structure.num_decode_targets = num_spatial_layers * num_temporal_layers;
  structure.num_chains = num_spatial_layers;
  structure.templates.reserve(num_spatial_layers * num_temporal_layers);
  for (int sid = 0; sid < num_spatial_layers; ++sid) {
    for (int tid = 0; tid < num_temporal_layers; ++tid) {
      FrameDependencyTemplate a_template;
      a_template.spatial_id = sid;
      a_template.temporal_id = tid;
      for (int s = 0; s < num_spatial_layers; ++s) {
        for (int t = 0; t < num_temporal_layers; ++t) {
          // Prefer the kSwitch indication for frames that are part of the
          // decode target, because the dependency descriptor information
          // generated in this class uses kSwitch indications more often than
          // kRequired, increasing the chance of a good (or complete) template
          // match.
          a_template.decode_target_indications.push_back(
              sid <= s && tid <= t ? DecodeTargetIndication::kSwitch
                                   : DecodeTargetIndication::kNotPresent);
        }
      }
      a_template.frame_diffs.push_back(tid == 0 ? num_spatial_layers *
                                                      num_temporal_layers
                                                : num_spatial_layers);
      a_template.chain_diffs.assign(structure.num_chains, 1);
      structure.templates.push_back(a_template);

      structure.decode_target_protected_by_chain.push_back(sid);
    }
  }
  return structure;
}
}  // namespace

RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc,
                                   const RtpPayloadState* state,
                                   const FieldTrialsView& trials)
    : ssrc_(ssrc),
      generic_picture_id_experiment_(
          absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"),
                           "Enabled")),
      simulate_generic_structure_(absl::StartsWith(
          trials.Lookup("WebRTC-GenericCodecDependencyDescriptor"),
          "Enabled")) {
  for (auto& spatial_layer : last_shared_frame_id_)
    spatial_layer.fill(-1);

  chain_last_frame_id_.fill(-1);
  buffer_id_to_frame_id_.fill(-1);

  Random random(rtc::TimeMicros());
  state_.picture_id =
      state ? state->picture_id : (random.Rand<int16_t>() & 0x7FFF);
  state_.tl0_pic_idx = state ? state->tl0_pic_idx : (random.Rand<uint8_t>());
}

RtpPayloadParams::RtpPayloadParams(const RtpPayloadParams& other) = default;

RtpPayloadParams::~RtpPayloadParams() {}

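// Builds the complete RTP video header for one encoded image: codec-specific
// fields, frame metadata (rotation, content type, resolution, color space),
// video timing, and the generic frame descriptor information.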
RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader(
    const EncodedImage& image,
    const CodecSpecificInfo* codec_specific_info,
    int64_t shared_frame_id) {
  RTPVideoHeader rtp_video_header;
  if (codec_specific_info) {
    PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(),
                                  &rtp_video_header);
  }
  rtp_video_header.simulcastIdx = image.SimulcastIndex().value_or(0);
  rtp_video_header.frame_type = image._frameType;
  rtp_video_header.rotation = image.rotation_;
  rtp_video_header.content_type = image.content_type_;
  rtp_video_header.playout_delay = image.playout_delay_;
  rtp_video_header.width = image._encodedWidth;
  rtp_video_header.height = image._encodedHeight;
  rtp_video_header.color_space = image.ColorSpace()
                                     ? absl::make_optional(*image.ColorSpace())
                                     : absl::nullopt;
  rtp_video_header.video_frame_tracking_id = image.VideoFrameTrackingId();
  SetVideoTiming(image, &rtp_video_header.video_timing);

  const bool is_keyframe = image._frameType == VideoFrameType::kVideoFrameKey;
  const bool first_frame_in_picture =
      (codec_specific_info && codec_specific_info->codecType == kVideoCodecVP9)
          ? codec_specific_info->codecSpecific.VP9.first_frame_in_picture
          : true;

  SetCodecSpecific(&rtp_video_header, first_frame_in_picture);

  SetGeneric(codec_specific_info, shared_frame_id, is_keyframe,
             &rtp_video_header);

  return rtp_video_header;
}

uint32_t RtpPayloadParams::ssrc() const {
  return ssrc_;
}

RtpPayloadState RtpPayloadParams::state() const {
  return state_;
}

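// Maintains the VP8/VP9 picture_id and tl0_pic_idx state and writes it into
// the codec-specific part of the header. Also sets the legacy generic
// picture_id when the corresponding field trial is enabled.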
void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header,
                                        bool first_frame_in_picture) {
  // Always set picture id. Set tl0_pic_idx iff temporal index is set.
  if (first_frame_in_picture) {
    state_.picture_id = (static_cast<uint16_t>(state_.picture_id) + 1) & 0x7FFF;
  }
  if (rtp_video_header->codec == kVideoCodecVP8) {
    auto& vp8_header =
        absl::get<RTPVideoHeaderVP8>(rtp_video_header->video_type_header);
    vp8_header.pictureId = state_.picture_id;

    if (vp8_header.temporalIdx != kNoTemporalIdx) {
      if (vp8_header.temporalIdx == 0) {
        ++state_.tl0_pic_idx;
      }
      vp8_header.tl0PicIdx = state_.tl0_pic_idx;
    }
  }
  if (rtp_video_header->codec == kVideoCodecVP9) {
    auto& vp9_header =
        absl::get<RTPVideoHeaderVP9>(rtp_video_header->video_type_header);
    vp9_header.picture_id = state_.picture_id;

    // Note that in the case that we have no temporal layers but we do have
    // spatial layers, packets will carry layering info with a temporal_idx of
    // zero, and we then have to set and increment tl0_pic_idx.
    if (vp9_header.temporal_idx != kNoTemporalIdx ||
        vp9_header.spatial_idx != kNoSpatialIdx) {
      if (first_frame_in_picture &&
          (vp9_header.temporal_idx == 0 ||
           vp9_header.temporal_idx == kNoTemporalIdx)) {
        ++state_.tl0_pic_idx;
      }
      vp9_header.tl0_pic_idx = state_.tl0_pic_idx;
    }
  }
  if (generic_picture_id_experiment_ &&
      rtp_video_header->codec == kVideoCodecGeneric) {
    rtp_video_header->video_type_header.emplace<RTPVideoHeaderLegacyGeneric>()
        .picture_id = state_.picture_id;
  }
}

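// Translates encoder-provided GenericFrameInfo into the generic descriptor:
// frame dependencies and chain diffs are derived from the reported encoder
// buffer usage and chain membership.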
RTPVideoHeader::GenericDescriptorInfo
RtpPayloadParams::GenericDescriptorFromFrameInfo(
    const GenericFrameInfo& frame_info,
    int64_t frame_id) {
  RTPVideoHeader::GenericDescriptorInfo generic;
  generic.frame_id = frame_id;
  generic.dependencies = dependencies_calculator_.FromBuffersUsage(
      frame_id, frame_info.encoder_buffers);
  generic.chain_diffs =
      chains_calculator_.From(frame_id, frame_info.part_of_chain);
  generic.spatial_index = frame_info.spatial_id;
  generic.temporal_index = frame_info.temporal_id;
  generic.decode_target_indications = frame_info.decode_target_indications;
  generic.active_decode_targets = frame_info.active_decode_targets;
  return generic;
}

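// Fills `rtp_video_header->generic`. If the encoder supplied
// generic_frame_info it is used directly; otherwise the descriptor is
// simulated from the codec-specific information, per codec.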
void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info,
                                  int64_t frame_id,
                                  bool is_keyframe,
                                  RTPVideoHeader* rtp_video_header) {
  if (codec_specific_info && codec_specific_info->generic_frame_info &&
      !codec_specific_info->generic_frame_info->encoder_buffers.empty()) {
    if (is_keyframe) {
      // Key frame resets all chains it is in.
      chains_calculator_.Reset(
          codec_specific_info->generic_frame_info->part_of_chain);
    }
    rtp_video_header->generic = GenericDescriptorFromFrameInfo(
        *codec_specific_info->generic_frame_info, frame_id);
    return;
  }

  switch (rtp_video_header->codec) {
    case VideoCodecType::kVideoCodecGeneric:
      GenericToGeneric(frame_id, is_keyframe, rtp_video_header);
      return;
    case VideoCodecType::kVideoCodecVP8:
      if (codec_specific_info) {
        Vp8ToGeneric(codec_specific_info->codecSpecific.VP8, frame_id,
                     is_keyframe, rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecVP9:
      if (codec_specific_info != nullptr) {
        Vp9ToGeneric(codec_specific_info->codecSpecific.VP9, frame_id,
                     *rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecAV1:
      // TODO(philipel): Implement AV1 to generic descriptor.
      return;
    case VideoCodecType::kVideoCodecH264:
      if (codec_specific_info) {
        H264ToGeneric(codec_specific_info->codecSpecific.H264, frame_id,
                      is_keyframe, rtp_video_header);
      }
      return;
    case VideoCodecType::kVideoCodecMultiplex:
      return;
  }
  RTC_DCHECK_NOTREACHED() << "Unsupported codec.";
}

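// Returns the FrameDependencyStructure to use with the dependency descriptor
// when the encoder does not provide a template structure itself. VP8, VP9 and
// (behind a field trial) the generic codec get a simulated minimalistic
// structure matching the per-frame info produced by Vp8ToGeneric, Vp9ToGeneric
// and GenericToGeneric; other codecs get absl::nullopt.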
absl::optional<FrameDependencyStructure> RtpPayloadParams::GenericStructure(
    const CodecSpecificInfo* codec_specific_info) {
  if (codec_specific_info == nullptr) {
    return absl::nullopt;
  }
  // This helper shouldn't be used when the template structure is specified
  // explicitly.
  RTC_DCHECK(!codec_specific_info->template_structure.has_value());
  switch (codec_specific_info->codecType) {
    case VideoCodecType::kVideoCodecGeneric:
      if (simulate_generic_structure_) {
        return MinimalisticStructure(/*num_spatial_layers=*/1,
                                     /*num_temporal_layers=*/1);
      }
      return absl::nullopt;
    case VideoCodecType::kVideoCodecVP8:
      return MinimalisticStructure(/*num_spatial_layers=*/1,
                                   /*num_temporal_layers=*/kMaxTemporalStreams);
    case VideoCodecType::kVideoCodecVP9: {
      absl::optional<FrameDependencyStructure> structure =
          MinimalisticStructure(
              /*num_spatial_layers=*/kMaxSimulatedSpatialLayers,
              /*num_temporal_layers=*/kMaxTemporalStreams);
      const CodecSpecificInfoVP9& vp9 = codec_specific_info->codecSpecific.VP9;
      if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) {
        RenderResolution first_valid;
        RenderResolution last_valid;
        for (size_t i = 0; i < vp9.num_spatial_layers; ++i) {
          RenderResolution r(vp9.width[i], vp9.height[i]);
          if (r.Valid()) {
            if (!first_valid.Valid()) {
              first_valid = r;
            }
            last_valid = r;
          }
          structure->resolutions.push_back(r);
        }
        if (!last_valid.Valid()) {
          // No valid resolution found. Do not send resolutions.
          structure->resolutions.clear();
        } else {
          structure->resolutions.resize(kMaxSimulatedSpatialLayers, last_valid);
          // The VP9 encoder wrapper may disable the first few spatial layers
          // by setting an invalid resolution (0,0). `structure->resolutions`
          // doesn't support invalid resolutions, so reset them to something
          // valid.
          for (RenderResolution& r : structure->resolutions) {
            if (!r.Valid()) {
              r = first_valid;
            }
          }
        }
      }
      return structure;
    }
    case VideoCodecType::kVideoCodecAV1:
    case VideoCodecType::kVideoCodecH264:
    case VideoCodecType::kVideoCodecMultiplex:
      return absl::nullopt;
  }
  RTC_DCHECK_NOTREACHED() << "Unsupported codec.";
}

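// Generic codec: single layer, single decode target, single chain. Every
// delta frame depends only on the previous frame.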
void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id,
                                        bool is_keyframe,
                                        RTPVideoHeader* rtp_video_header) {
  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;
  generic.decode_target_indications.push_back(DecodeTargetIndication::kSwitch);

  if (is_keyframe) {
    generic.chain_diffs.push_back(0);
    last_shared_frame_id_[0].fill(-1);
  } else {
    int64_t frame_id = last_shared_frame_id_[0][0];
    RTC_DCHECK_NE(frame_id, -1);
    RTC_DCHECK_LT(frame_id, shared_frame_id);
    generic.chain_diffs.push_back(shared_frame_id - frame_id);
    generic.dependencies.push_back(frame_id);
  }

  last_shared_frame_id_[0][0] = shared_frame_id;
}

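// H264: simulates a generic descriptor for temporally layered streams. A
// base-layer-sync frame depends only on the last temporal-layer-0 frame, while
// other delta frames depend on the last frame of each lower or equal temporal
// layer.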
void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info,
                                     int64_t shared_frame_id,
                                     bool is_keyframe,
                                     RTPVideoHeader* rtp_video_header) {
  const int temporal_index =
      h264_info.temporal_idx != kNoTemporalIdx ? h264_info.temporal_idx : 0;

  if (temporal_index >= RtpGenericFrameDescriptor::kMaxTemporalLayers) {
    RTC_LOG(LS_WARNING) << "Temporal and/or spatial index is too high to be "
                           "used with generic frame descriptor.";
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;
  generic.temporal_index = temporal_index;

  if (is_keyframe) {
    RTC_DCHECK_EQ(temporal_index, 0);
    last_shared_frame_id_[/*spatial index*/ 0].fill(-1);
    last_shared_frame_id_[/*spatial index*/ 0][temporal_index] =
        shared_frame_id;
    return;
  }

  if (h264_info.base_layer_sync) {
    int64_t tl0_frame_id = last_shared_frame_id_[/*spatial index*/ 0][0];

    for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
      if (last_shared_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) {
        last_shared_frame_id_[/*spatial index*/ 0][i] = -1;
      }
    }

    RTC_DCHECK_GE(tl0_frame_id, 0);
    RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
    generic.dependencies.push_back(tl0_frame_id);
  } else {
    for (int i = 0; i <= temporal_index; ++i) {
      int64_t frame_id = last_shared_frame_id_[/*spatial index*/ 0][i];

      if (frame_id != -1) {
        RTC_DCHECK_LT(frame_id, shared_frame_id);
        generic.dependencies.push_back(frame_id);
      }
    }
  }

  last_shared_frame_id_[/*spatial_index*/ 0][temporal_index] = shared_frame_id;
}

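// VP8: simulates a generic descriptor with one spatial layer, one decode
// target per temporal layer (up to kMaxTemporalStreams), a single chain of
// temporal-layer-0 frames, and dependencies derived either from explicit
// encoder buffer usage or from the legacy layer-sync heuristic.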
void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info,
                                    int64_t shared_frame_id,
                                    bool is_keyframe,
                                    RTPVideoHeader* rtp_video_header) {
  const auto& vp8_header =
      absl::get<RTPVideoHeaderVP8>(rtp_video_header->video_type_header);
  const int spatial_index = 0;
  const int temporal_index =
      vp8_header.temporalIdx != kNoTemporalIdx ? vp8_header.temporalIdx : 0;

  if (temporal_index >= RtpGenericFrameDescriptor::kMaxTemporalLayers ||
      spatial_index >= RtpGenericFrameDescriptor::kMaxSpatialLayers) {
    RTC_LOG(LS_WARNING) << "Temporal and/or spatial index is too high to be "
                           "used with generic frame descriptor.";
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& generic =
      rtp_video_header->generic.emplace();

  generic.frame_id = shared_frame_id;
  generic.spatial_index = spatial_index;
  generic.temporal_index = temporal_index;

  // Generate decode target indications.
  RTC_DCHECK_LT(temporal_index, kMaxTemporalStreams);
  generic.decode_target_indications.resize(kMaxTemporalStreams);
  auto it = std::fill_n(generic.decode_target_indications.begin(),
                        temporal_index, DecodeTargetIndication::kNotPresent);
  std::fill(it, generic.decode_target_indications.end(),
            DecodeTargetIndication::kSwitch);

  // Frame dependencies.
  if (vp8_info.useExplicitDependencies) {
    SetDependenciesVp8New(vp8_info, shared_frame_id, is_keyframe,
                          vp8_header.layerSync, &generic);
  } else {
    SetDependenciesVp8Deprecated(vp8_info, shared_frame_id, is_keyframe,
                                 spatial_index, temporal_index,
                                 vp8_header.layerSync, &generic);
  }

  // Calculate chains.
  generic.chain_diffs = {
      (is_keyframe || chain_last_frame_id_[0] < 0)
          ? 0
          : static_cast<int>(shared_frame_id - chain_last_frame_id_[0])};
  if (temporal_index == 0) {
    chain_last_frame_id_[0] = shared_frame_id;
  }
}

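// VP9: simulates a generic descriptor that matches MinimalisticStructure().
// Decode target indications, frame dependencies (flexible mode only, via
// picture_id diffs), active decode targets and one chain per spatial layer are
// derived from the VP9 RTP header.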
void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info,
                                    int64_t shared_frame_id,
                                    RTPVideoHeader& rtp_video_header) {
  const auto& vp9_header =
      absl::get<RTPVideoHeaderVP9>(rtp_video_header.video_type_header);
  const int num_spatial_layers = kMaxSimulatedSpatialLayers;
  const int num_active_spatial_layers = vp9_header.num_spatial_layers;
  const int num_temporal_layers = kMaxTemporalStreams;
  static_assert(num_spatial_layers <=
                RtpGenericFrameDescriptor::kMaxSpatialLayers);
  static_assert(num_temporal_layers <=
                RtpGenericFrameDescriptor::kMaxTemporalLayers);
  static_assert(num_spatial_layers <= DependencyDescriptor::kMaxSpatialIds);
  static_assert(num_temporal_layers <= DependencyDescriptor::kMaxTemporalIds);

  int spatial_index =
      vp9_header.spatial_idx != kNoSpatialIdx ? vp9_header.spatial_idx : 0;
  int temporal_index =
      vp9_header.temporal_idx != kNoTemporalIdx ? vp9_header.temporal_idx : 0;

  if (spatial_index >= num_spatial_layers ||
      temporal_index >= num_temporal_layers ||
      num_active_spatial_layers > num_spatial_layers) {
    // Prefer to generate no generic layering than an inconsistent one.
    return;
  }

  RTPVideoHeader::GenericDescriptorInfo& result =
      rtp_video_header.generic.emplace();

  result.frame_id = shared_frame_id;
  result.spatial_index = spatial_index;
  result.temporal_index = temporal_index;

  result.decode_target_indications.reserve(num_spatial_layers *
                                           num_temporal_layers);
  for (int sid = 0; sid < num_spatial_layers; ++sid) {
    for (int tid = 0; tid < num_temporal_layers; ++tid) {
      DecodeTargetIndication dti;
      if (sid < spatial_index || tid < temporal_index) {
        dti = DecodeTargetIndication::kNotPresent;
      } else if (spatial_index != sid &&
                 vp9_header.non_ref_for_inter_layer_pred) {
        dti = DecodeTargetIndication::kNotPresent;
      } else if (sid == spatial_index && tid == temporal_index) {
        // Assume that if a frame is decodable, all of its own layer is
        // decodable.
        dti = DecodeTargetIndication::kSwitch;
      } else if (sid == spatial_index && vp9_header.temporal_up_switch) {
        dti = DecodeTargetIndication::kSwitch;
      } else if (!vp9_header.inter_pic_predicted) {
        // Key frame or spatial upswitch.
        dti = DecodeTargetIndication::kSwitch;
      } else {
        // Make no other assumptions. That should be safe, though suboptimal.
        // To provide a more accurate dti, the encoder wrapper should fill in
        // CodecSpecificInfo::generic_frame_info.
        dti = DecodeTargetIndication::kRequired;
      }
      result.decode_target_indications.push_back(dti);
    }
  }

  // Calculate frame dependencies.
  static constexpr int kPictureDiffLimit = 128;
  if (last_vp9_frame_id_.empty()) {
    // Create the array only if it is ever used.
    last_vp9_frame_id_.resize(kPictureDiffLimit);
  }

  if (vp9_header.flexible_mode) {
    if (vp9_header.inter_layer_predicted && spatial_index > 0) {
      result.dependencies.push_back(
          last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit]
                            [spatial_index - 1]);
    }
    if (vp9_header.inter_pic_predicted) {
      for (size_t i = 0; i < vp9_header.num_ref_pics; ++i) {
        // picture_id is a 15-bit number that wraps around. Though underflow
        // may produce a picture_id that exceeds 2^15, that is ok because only
        // the last 7 bits of the picture_id are used in this code block.
        uint16_t depend_on = vp9_header.picture_id - vp9_header.pid_diff[i];
        result.dependencies.push_back(
            last_vp9_frame_id_[depend_on % kPictureDiffLimit][spatial_index]);
      }
    }
    last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit]
                      [spatial_index] = shared_frame_id;
  } else {
    // Implementing general conversion logic for non-flexible mode requires
    // some work and we will almost certainly never need it, so for now
    // support only non-layered streams.
    if (spatial_index > 0 || temporal_index > 0) {
      // Prefer to generate no generic layering than an inconsistent one.
      rtp_video_header.generic.reset();
      return;
    }

    if (vp9_header.inter_pic_predicted) {
      // Since we only support non-scalable streams, we only need to save the
      // last frame id.
      result.dependencies.push_back(last_vp9_frame_id_[0][0]);
    }
    last_vp9_frame_id_[0][0] = shared_frame_id;
  }

  result.active_decode_targets =
      ((uint32_t{1} << num_temporal_layers * num_active_spatial_layers) - 1);

  // Calculate chains, assuming a chain includes all frames with
  // temporal_id = 0.
  if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) {
    // Assume frames without dependencies also reset chains.
    for (int sid = spatial_index; sid < num_spatial_layers; ++sid) {
      chain_last_frame_id_[sid] = -1;
    }
  }
  result.chain_diffs.resize(num_spatial_layers, 0);
  for (int sid = 0; sid < num_active_spatial_layers; ++sid) {
    if (chain_last_frame_id_[sid] == -1) {
      result.chain_diffs[sid] = 0;
      continue;
    }
    int64_t chain_diff = shared_frame_id - chain_last_frame_id_[sid];
    if (chain_diff >= 256) {
      RTC_LOG(LS_ERROR)
          << "Too many frames since last VP9 T0 frame for spatial layer #"
          << sid << " at frame#" << shared_frame_id;
      chain_last_frame_id_[sid] = -1;
      chain_diff = 0;
    }
    result.chain_diffs[sid] = chain_diff;
  }

  if (temporal_index == 0) {
    chain_last_frame_id_[spatial_index] = shared_frame_id;
    if (!vp9_header.non_ref_for_inter_layer_pred) {
      for (int sid = spatial_index + 1; sid < num_spatial_layers; ++sid) {
        chain_last_frame_id_[sid] = shared_frame_id;
      }
    }
  }
}

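// Legacy VP8 dependency inference (no explicit encoder buffer info): a
// layer-sync frame depends only on the last temporal-layer-0 frame, while
// other delta frames depend on the last frame of each lower or equal temporal
// layer.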
void RtpPayloadParams::SetDependenciesVp8Deprecated(
    const CodecSpecificInfoVP8& vp8_info,
    int64_t shared_frame_id,
    bool is_keyframe,
    int spatial_index,
    int temporal_index,
    bool layer_sync,
    RTPVideoHeader::GenericDescriptorInfo* generic) {
  RTC_DCHECK(!vp8_info.useExplicitDependencies);
  RTC_DCHECK(!new_version_used_.has_value() || !new_version_used_.value());
  new_version_used_ = false;

  if (is_keyframe) {
    RTC_DCHECK_EQ(temporal_index, 0);
    last_shared_frame_id_[spatial_index].fill(-1);
    last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
    return;
  }

  if (layer_sync) {
    int64_t tl0_frame_id = last_shared_frame_id_[spatial_index][0];

    for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) {
      if (last_shared_frame_id_[spatial_index][i] < tl0_frame_id) {
        last_shared_frame_id_[spatial_index][i] = -1;
      }
    }

    RTC_DCHECK_GE(tl0_frame_id, 0);
    RTC_DCHECK_LT(tl0_frame_id, shared_frame_id);
    generic->dependencies.push_back(tl0_frame_id);
  } else {
    for (int i = 0; i <= temporal_index; ++i) {
      int64_t frame_id = last_shared_frame_id_[spatial_index][i];

      if (frame_id != -1) {
        RTC_DCHECK_LT(frame_id, shared_frame_id);
        generic->dependencies.push_back(frame_id);
      }
    }
  }

  last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id;
}

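// VP8 dependency inference from explicit encoder buffer usage: each frame
// depends on the frames currently stored in the buffers it references, and
// the mapping is updated for the buffers the frame refreshes.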
void RtpPayloadParams::SetDependenciesVp8New(
    const CodecSpecificInfoVP8& vp8_info,
    int64_t shared_frame_id,
    bool is_keyframe,
    bool layer_sync,
    RTPVideoHeader::GenericDescriptorInfo* generic) {
  RTC_DCHECK(vp8_info.useExplicitDependencies);
  RTC_DCHECK(!new_version_used_.has_value() || new_version_used_.value());
  new_version_used_ = true;

  if (is_keyframe) {
    RTC_DCHECK_EQ(vp8_info.referencedBuffersCount, 0u);
    buffer_id_to_frame_id_.fill(shared_frame_id);
    return;
  }

  constexpr size_t kBuffersCountVp8 = CodecSpecificInfoVP8::kBuffersCount;

  RTC_DCHECK_GT(vp8_info.referencedBuffersCount, 0u);
  RTC_DCHECK_LE(vp8_info.referencedBuffersCount,
                arraysize(vp8_info.referencedBuffers));

  for (size_t i = 0; i < vp8_info.referencedBuffersCount; ++i) {
    const size_t referenced_buffer = vp8_info.referencedBuffers[i];
    RTC_DCHECK_LT(referenced_buffer, kBuffersCountVp8);
    RTC_DCHECK_LT(referenced_buffer, buffer_id_to_frame_id_.size());

    const int64_t dependency_frame_id =
        buffer_id_to_frame_id_[referenced_buffer];
    RTC_DCHECK_GE(dependency_frame_id, 0);
    RTC_DCHECK_LT(dependency_frame_id, shared_frame_id);

    const bool is_new_dependency =
        std::find(generic->dependencies.begin(), generic->dependencies.end(),
                  dependency_frame_id) == generic->dependencies.end();
    if (is_new_dependency) {
      generic->dependencies.push_back(dependency_frame_id);
    }
  }

  RTC_DCHECK_LE(vp8_info.updatedBuffersCount, kBuffersCountVp8);
  for (size_t i = 0; i < vp8_info.updatedBuffersCount; ++i) {
    const size_t updated_id = vp8_info.updatedBuffers[i];
    buffer_id_to_frame_id_[updated_id] = shared_frame_id;
  }

  RTC_DCHECK_LE(buffer_id_to_frame_id_.size(), kBuffersCountVp8);
}

}  // namespace webrtc