Remove deprecated functions from EncodedImageCallback and RtpRtcp
Removed EncodedImageCallback::Encoded() and RtpRtcp::SendOutgoingData().
These methods are no longer used anywhere, so it is safe to remove them.
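
A minimal before/after sketch of the caller-side migration (the member names
callback_ and drop_next_input_frame_ follow the pattern used in this CL and are
illustrative):

    // Before: Encoded() returned an int32_t; a value > 0 asked the encoder to
    // drop its next input frame.
    int32_t status = callback_->Encoded(image, &codec_specific, &frag_header);
    if (status > 0)
      drop_next_input_frame_ = true;

    // After: OnEncodedImage() returns an EncodedImageCallback::Result.
    webrtc::EncodedImageCallback::Result result =
        callback_->OnEncodedImage(image, &codec_specific, &frag_header);
    if (result.error != webrtc::EncodedImageCallback::Result::OK) {
      // Handle or log the failure as appropriate for the encoder.
    }
    if (result.drop_next_frame)
      drop_next_input_frame_ = true;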
BUG=chromium:621691
Review-Url: https://codereview.webrtc.org/2405173006
Cr-Original-Commit-Position: refs/heads/master@{#14902}
Cr-Mirrored-From: https://chromium.googlesource.com/external/webrtc
Cr-Mirrored-Commit: fa565842718ad178a7562721b25d916fbabc2b92
diff --git a/api/android/jni/androidmediaencoder_jni.cc b/api/android/jni/androidmediaencoder_jni.cc
index 0e6e96a..92b5aae 100644
--- a/api/android/jni/androidmediaencoder_jni.cc
+++ b/api/android/jni/androidmediaencoder_jni.cc
@@ -263,8 +263,8 @@
// |input_frame_infos_|.
// Frame size in bytes fed to MediaCodec.
int yuv_size_;
- // True only when between a callback_->Encoded() call return a positive value
- // and the next Encode() call being ignored.
+ // True only between a callback_->OnEncodedImage() call requesting that the
+ // next frame be dropped and the next Encode() call, which is then ignored.
bool drop_next_input_frame_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
@@ -1063,7 +1063,8 @@
}
// Callback - return encoded frame.
- int32_t callback_status = 0;
+ webrtc::EncodedImageCallback::Result callback_result(
+     webrtc::EncodedImageCallback::Result::OK);
if (callback_) {
std::unique_ptr<webrtc::EncodedImage> image(
new webrtc::EncodedImage(payload, payload_size, payload_size));
@@ -1174,7 +1175,7 @@
}
}
- callback_status = callback_->Encoded(*image, &info, &header);
+ callback_result = callback_->OnEncodedImage(*image, &info, &header);
}
// Return output buffer back to the encoder.
@@ -1208,11 +1209,9 @@
current_encoding_time_ms_ += frame_encoding_time_ms;
LogStatistics(false);
- if (callback_status > 0) {
+ // Errors in callback_result are currently ignored.
+ if (callback_result.drop_next_frame)
drop_next_input_frame_ = true;
- // Theoretically could handle callback_status<0 here, but unclear what
- // that would mean for us.
- }
}
return true;
}
diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h
index 9fa3959..0c36117 100644
--- a/modules/rtp_rtcp/include/rtp_rtcp.h
+++ b/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -227,7 +227,6 @@
// as layers or RED
// |transport_frame_id_out| - set to RTP timestamp.
// Returns true on success.
-
virtual bool SendOutgoingData(FrameType frame_type,
int8_t payload_type,
uint32_t timestamp,
@@ -238,24 +237,6 @@
const RTPVideoHeader* rtp_video_header,
uint32_t* transport_frame_id_out) = 0;
- // Deprecated version of the method above.
- int32_t SendOutgoingData(
- FrameType frame_type,
- int8_t payload_type,
- uint32_t timestamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader* fragmentation = nullptr,
- const RTPVideoHeader* rtp_video_header = nullptr) {
- return SendOutgoingData(frame_type, payload_type, timestamp,
- capture_time_ms, payload_data, payload_size,
- fragmentation, rtp_video_header,
- /*frame_id_out=*/nullptr)
- ? 0
- : -1;
- }
-
virtual bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index e32a2ca..5c0aa1b 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -377,8 +377,8 @@
// Deliver encoded image.
CodecSpecificInfo codec_specific;
codec_specific.codecType = kVideoCodecH264;
- encoded_image_callback_->Encoded(encoded_image_, &codec_specific,
- &frag_header);
+ encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific,
+ &frag_header);
// Parse and report QP.
h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer,
diff --git a/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.mm b/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.mm
index 8276448..538734b 100644
--- a/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.mm
+++ b/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.mm
@@ -650,9 +650,10 @@
quality_scaler_.ReportQP(qp);
}
- int result = callback_->Encoded(frame, &codec_specific_info, header.get());
- if (result != 0) {
- LOG(LS_ERROR) << "Encode callback failed: " << result;
+ EncodedImageCallback::Result result =
+ callback_->OnEncodedImage(frame, &codec_specific_info, header.get());
+ if (result.error != EncodedImageCallback::Result::OK) {
+ LOG(LS_ERROR) << "Encode callback failed: " << result.error;
return;
}
bitrate_adjuster_.Update(frame._size);
diff --git a/modules/video_coding/codecs/i420/i420.cc b/modules/video_coding/codecs/i420/i420.cc
index d0c8d0c..ad4a8a1 100644
--- a/modules/video_coding/codecs/i420/i420.cc
+++ b/modules/video_coding/codecs/i420/i420.cc
@@ -116,7 +116,8 @@
return WEBRTC_VIDEO_CODEC_MEMORY;
_encodedImage._length = ret_length + kI420HeaderSize;
- _encodedCompleteCallback->Encoded(_encodedImage, NULL, NULL);
+ _encodedCompleteCallback->OnEncodedImage(_encodedImage, nullptr, nullptr);
+
return WEBRTC_VIDEO_CODEC_OK;
}
diff --git a/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index 8bc3b48..9a06c0a 100644
--- a/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -152,9 +152,9 @@
EncodedImage image;
image._encodedWidth = width;
image._encodedHeight = height;
- CodecSpecificInfo codecSpecificInfo;
- memset(&codecSpecificInfo, 0, sizeof(codecSpecificInfo));
- callback_->Encoded(image, &codecSpecificInfo, NULL);
+ CodecSpecificInfo codec_specific_info;
+ memset(&codec_specific_info, 0, sizeof(codec_specific_info));
+ callback_->OnEncodedImage(image, &codec_specific_info, NULL);
}
void set_supports_native_handle(bool enabled) {
diff --git a/modules/video_coding/codecs/vp8/vp8_impl.cc b/modules/video_coding/codecs/vp8/vp8_impl.cc
index 523c19c..1767ec4 100644
--- a/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -1024,8 +1024,8 @@
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER,
&qp_128);
encoded_images_[encoder_idx].qp_ = qp_128;
- encoded_complete_callback_->Encoded(encoded_images_[encoder_idx],
- &codec_specific, &frag_info);
+ encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
+ &codec_specific, &frag_info);
} else if (codec_.mode == kScreensharing) {
result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
}
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index 3a4efb3..8c798db 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -705,8 +705,8 @@
int qp = -1;
vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
encoded_image_.qp_ = qp;
- encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
- &frag_info);
+ encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific,
+ &frag_info);
}
return WEBRTC_VIDEO_CODEC_OK;
}
diff --git a/modules/video_coding/video_receiver.cc b/modules/video_coding/video_receiver.cc
index 8b61524..475f686 100644
--- a/modules/video_coding/video_receiver.cc
+++ b/modules/video_coding/video_receiver.cc
@@ -270,8 +270,8 @@
if (qp_parser_.GetQp(*frame, &qp)) {
encoded_image.qp_ = qp;
}
- pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
- nullptr);
+ pre_decode_image_callback_->OnEncodedImage(encoded_image,
+ frame->CodecSpecific(), nullptr);
}
rtc::CritScope cs(&receive_crit_);
diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc
index 9cb0c87..905b69a 100644
--- a/test/configurable_frame_size_encoder.cc
+++ b/test/configurable_frame_size_encoder.cc
@@ -52,7 +52,7 @@
RTPFragmentationHeader* fragmentation = NULL;
CodecSpecificInfo specific;
memset(&specific, 0, sizeof(specific));
- callback_->Encoded(encodedImage, &specific, fragmentation);
+ callback_->OnEncodedImage(encodedImage, &specific, fragmentation);
return WEBRTC_VIDEO_CODEC_OK;
}
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index 065b529..f518ce3 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -112,8 +112,10 @@
encoded.rotation_ = input_image.rotation();
RTC_DCHECK(callback_ != NULL);
specifics.codec_name = ImplementationName();
- if (callback_->Encoded(encoded, &specifics, NULL) != 0)
+ if (callback_->OnEncodedImage(encoded, &specifics, NULL).error !=
+ EncodedImageCallback::Result::OK) {
return -1;
+ }
bits_available -= std::min(encoded._length * 8, bits_available);
}
return 0;
diff --git a/video/payload_router_unittest.cc b/video/payload_router_unittest.cc
index 5b87554..fa5c35d 100644
--- a/video/payload_router_unittest.cc
+++ b/video/payload_router_unittest.cc
@@ -45,7 +45,9 @@
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(0);
- EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
+ EXPECT_NE(
+     EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.set_active(true);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -53,7 +55,9 @@
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1);
- EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
+ EXPECT_EQ(
+     EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.set_active(false);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -61,7 +65,9 @@
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(0);
- EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
+ EXPECT_NE(
+     EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
payload_router.set_active(true);
EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
@@ -69,7 +75,9 @@
encoded_image.capture_time_ms_, &payload,
encoded_image._length, nullptr, _, _))
.Times(1);
- EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
+ EXPECT_EQ(
+     EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, nullptr, nullptr).error);
}
TEST(PayloadRouterTest, SendSimulcast) {
@@ -103,7 +111,9 @@
encoded_image._length, nullptr, _, _))
.Times(1);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _)).Times(0);
- EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
+ EXPECT_EQ(EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
+         .error);
CodecSpecificInfo codec_info_2;
memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
@@ -117,7 +127,9 @@
.Times(1);
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _, _))
.Times(0);
- EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
+ EXPECT_EQ(EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
+         .error);
// Inactive.
payload_router.set_active(false);
@@ -125,8 +137,12 @@
.Times(0);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _, _))
.Times(0);
- EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
- EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
+ EXPECT_NE(EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, &codec_info_1, nullptr)
+         .error);
+ EXPECT_NE(EncodedImageCallback::Result::OK,
+     payload_router.OnEncodedImage(encoded_image, &codec_info_2, nullptr)
+         .error);
}
TEST(PayloadRouterTest, MaxPayloadLength) {
diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc
index d703fc8..eafdefa 100644
--- a/video/video_send_stream_tests.cc
+++ b/video/video_send_stream_tests.cc
@@ -2419,8 +2419,10 @@
encoded._encodedWidth = kEncodedResolution[i].width;
encoded._encodedHeight = kEncodedResolution[i].height;
RTC_DCHECK(callback_);
- if (callback_->Encoded(encoded, &specifics, nullptr) != 0)
+ if (callback_->OnEncodedImage(encoded, &specifics, nullptr).error !=
+ EncodedImageCallback::Result::OK) {
return -1;
+ }
}
observation_complete_.Set();
diff --git a/video/vie_encoder_unittest.cc b/video/vie_encoder_unittest.cc
index 8951097..9f4fdc4 100644
--- a/video/vie_encoder_unittest.cc
+++ b/video/vie_encoder_unittest.cc
@@ -208,14 +208,15 @@
}
private:
- int32_t Encoded(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) override {
+ Result OnEncodedImage(
+     const EncodedImage& encoded_image,
+     const CodecSpecificInfo* codec_specific_info,
+     const RTPFragmentationHeader* fragmentation) override {
rtc::CritScope lock(&crit_);
EXPECT_TRUE(expect_frames_);
timestamp_ = encoded_image._timeStamp;
encoded_frame_event_.Set();
- return 0;
+ return Result(Result::OK, timestamp_);
}
void OnEncoderConfigurationChanged(std::vector<VideoStream> streams,
diff --git a/video_encoder.h b/video_encoder.h
index d8b7921..8bfa72f 100644
--- a/video_encoder.h
+++ b/video_encoder.h
@@ -54,23 +54,10 @@
};
// Callback function which is called when an image has been encoded.
- virtual Result OnEncodedImage(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) {
- return (Encoded(encoded_image, codec_specific_info, fragmentation) == 0)
- ? Result(Result::OK, 0)
- : Result(Result::ERROR_SEND_FAILED);
- }
-
- // DEPRECATED.
- // TODO(sergeyu): Remove this method.
- virtual int32_t Encoded(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info,
- const RTPFragmentationHeader* fragmentation) {
- Result result =
- OnEncodedImage(encoded_image, codec_specific_info, fragmentation);
- return (result.error != Result::OK) ? -1 : (result.drop_next_frame ? 1 : 0);
- }
+ virtual Result OnEncodedImage(
+     const EncodedImage& encoded_image,
+     const CodecSpecificInfo* codec_specific_info,
+     const RTPFragmentationHeader* fragmentation) = 0;
};
class VideoEncoder {