New interface EncodedImageBufferInterface, replacing use of CopyOnWriteBuffer

Bug: webrtc:9378
Change-Id: I62b7adbd9dd539c545b5b1b1520721482a4623c4
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/138820
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28317}
diff --git a/api/test/loopback_media_transport_unittest.cc b/api/test/loopback_media_transport_unittest.cc
index 8e510e6..afd544f 100644
--- a/api/test/loopback_media_transport_unittest.cc
+++ b/api/test/loopback_media_transport_unittest.cc
@@ -105,9 +105,8 @@
   ::testing::StrictMock<MockMediaTransportVideoSinkInterface> sink;
   constexpr uint8_t encoded_data[] = {1, 2, 3};
   EncodedImage encoded_image;
-  encoded_image.Allocate(sizeof(encoded_data));
-  memcpy(encoded_image.data(), encoded_data, sizeof(encoded_data));
-  encoded_image.set_size(sizeof(encoded_data));
+  encoded_image.SetEncodedData(
+      EncodedImageBuffer::Create(encoded_data, sizeof(encoded_data)));
 
   EXPECT_CALL(sink, OnData(1, ::testing::Property(
                                   &MediaTransportEncodedVideoFrame::frame_id,
@@ -115,12 +114,8 @@
       .WillOnce(::testing::Invoke(
           [&encoded_image](int frame_id,
                            const MediaTransportEncodedVideoFrame& frame) {
-            EXPECT_NE(frame.encoded_image().data(), encoded_image.data());
+            EXPECT_EQ(frame.encoded_image().data(), encoded_image.data());
             EXPECT_EQ(frame.encoded_image().size(), encoded_image.size());
-            EXPECT_EQ(0,
-                      memcmp(frame.encoded_image().data(), encoded_image.data(),
-                             std::min(frame.encoded_image().size(),
-                                      encoded_image.size())));
           }));
 
   transport_pair.second()->SetReceiveVideoSink(&sink);
diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn
index 557e8da..757acce 100644
--- a/api/video/BUILD.gn
+++ b/api/video/BUILD.gn
@@ -121,7 +121,9 @@
     ":video_frame",
     ":video_frame_type",
     ":video_rtp_headers",
+    "..:refcountedbase",
     "..:rtp_packet_info",
+    "..:scoped_refptr",
     "../..:webrtc_common",
     "../../rtc_base:checks",
     "../../rtc_base:rtc_base_approved",
diff --git a/api/video/DEPS b/api/video/DEPS
index 6cecc23..3f5df95 100644
--- a/api/video/DEPS
+++ b/api/video/DEPS
@@ -6,9 +6,8 @@
     "+modules/video_coding/encoded_frame.h",
   ],
 
-  # Used for a private member variable.
-  "encoded_image\.h": [
-    "+rtc_base/copy_on_write_buffer.h",
+  "encoded_image\.h": [
+    "+rtc_base/ref_count.h",
   ],
 
   "i010_buffer\.h": [
diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc
index 53687b4..d2cc751 100644
--- a/api/video/encoded_image.cc
+++ b/api/video/encoded_image.cc
@@ -10,10 +10,52 @@
 
 #include "api/video/encoded_image.h"
 
+#include <stdlib.h>
 #include <string.h>
 
+#include "rtc_base/ref_counted_object.h"
+
 namespace webrtc {
 
+EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) {
+  buffer_ = static_cast<uint8_t*>(malloc(size));
+}
+
+EncodedImageBuffer::EncodedImageBuffer(const uint8_t* data, size_t size)
+    : EncodedImageBuffer(size) {
+  memcpy(buffer_, data, size);
+}
+
+EncodedImageBuffer::~EncodedImageBuffer() {
+  free(buffer_);
+}
+
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
+  return new rtc::RefCountedObject<EncodedImageBuffer>(size);
+}
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
+    const uint8_t* data,
+    size_t size) {
+  return new rtc::RefCountedObject<EncodedImageBuffer>(data, size);
+}
+
+const uint8_t* EncodedImageBuffer::data() const {
+  return buffer_;
+}
+uint8_t* EncodedImageBuffer::data() {
+  return buffer_;
+}
+size_t EncodedImageBuffer::size() const {
+  return size_;
+}
+
+void EncodedImageBuffer::Realloc(size_t size) {
+  buffer_ = static_cast<uint8_t*>(realloc(buffer_, size));
+  size_ = size;
+}
+
 EncodedImage::EncodedImage() : EncodedImage(nullptr, 0, 0) {}
 
 EncodedImage::EncodedImage(EncodedImage&&) = default;
@@ -29,11 +71,20 @@
 
 void EncodedImage::Retain() {
   if (buffer_) {
-    encoded_data_.SetData(buffer_, size_);
+    encoded_data_ = EncodedImageBuffer::Create(buffer_, size_);
     buffer_ = nullptr;
   }
 }
 
+void EncodedImage::Allocate(size_t capacity) {
+  if (encoded_data_) {
+    encoded_data_->Realloc(capacity);
+  } else {
+    encoded_data_ = EncodedImageBuffer::Create(capacity);
+  }
+  buffer_ = nullptr;
+}
+
 void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
                                  int64_t encode_finish_ms) {
   timing_.encode_start_ms = encode_start_ms;
diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h
index 6bd1527..9091c02 100644
--- a/api/video/encoded_image.h
+++ b/api/video/encoded_image.h
@@ -15,6 +15,7 @@
 #include <map>
 
 #include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
 #include "api/video/color_space.h"
 #include "api/video/video_codec_constants.h"
 #include "api/video/video_codec_type.h"
@@ -24,11 +25,48 @@
 #include "api/video/video_timing.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "rtc_base/checks.h"
-#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/ref_count.h"
 #include "rtc_base/system/rtc_export.h"
 
 namespace webrtc {
 
+// Abstract interface for buffer storage. Intended to support buffers owned by
+// external encoders with special release requirements, e.g., java encoders with
+// releaseOutputBuffer.
+class EncodedImageBufferInterface : public rtc::RefCountInterface {
+ public:
+  virtual const uint8_t* data() const = 0;
+  // TODO(bugs.webrtc.org/9378): Make interface essentially read-only, delete
+  // this non-const data method.
+  virtual uint8_t* data() = 0;
+  virtual size_t size() const = 0;
+  // TODO(bugs.webrtc.org/9378): Delete from this interface, together with
+  // EncodedImage::Allocate. Implemented properly only by the below concrete
+  // class.
+  virtual void Realloc(size_t size) { RTC_NOTREACHED(); }
+};
+
+// Basic implementation of EncodedImageBufferInterface.
+class EncodedImageBuffer : public EncodedImageBufferInterface {
+ public:
+  static rtc::scoped_refptr<EncodedImageBuffer> Create(size_t size);
+  static rtc::scoped_refptr<EncodedImageBuffer> Create(const uint8_t* data,
+                                                       size_t size);
+
+  const uint8_t* data() const override;
+  uint8_t* data() override;
+  size_t size() const override;
+  void Realloc(size_t t) override;
+
+ protected:
+  explicit EncodedImageBuffer(size_t size);
+  EncodedImageBuffer(const uint8_t* data, size_t size);
+  ~EncodedImageBuffer();
+
+  size_t size_;
+  uint8_t* buffer_;
+};
+
 // TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being
 // cleaned up. Direct use of its members is strongly discouraged.
 class RTC_EXPORT EncodedImage {
@@ -83,30 +121,39 @@
 
   size_t size() const { return size_; }
   void set_size(size_t new_size) {
-    RTC_DCHECK_LE(new_size, capacity());
+    // Allow set_size(0) even if we have no buffer.
+    RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity());
     size_ = new_size;
   }
-  size_t capacity() const { return buffer_ ? capacity_ : encoded_data_.size(); }
+  // TODO(nisse): Delete, provide only read-only access to the buffer.
+  size_t capacity() const {
+    return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0);
+  }
 
   void set_buffer(uint8_t* buffer, size_t capacity) {
     buffer_ = buffer;
     capacity_ = capacity;
   }
 
-  void Allocate(size_t capacity) {
-    encoded_data_.SetSize(capacity);
-    buffer_ = nullptr;
-  }
+  // TODO(bugs.webrtc.org/9378): Delete; this method implies realloc, which
+  // should not be generally supported by the EncodedImageBufferInterface.
+  void Allocate(size_t capacity);
 
-  void SetEncodedData(const rtc::CopyOnWriteBuffer& encoded_data) {
+  void SetEncodedData(
+      rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data) {
     encoded_data_ = encoded_data;
-    size_ = encoded_data.size();
+    size_ = encoded_data->size();
     buffer_ = nullptr;
   }
 
-  uint8_t* data() { return buffer_ ? buffer_ : encoded_data_.data(); }
+  // TODO(nisse): Delete, provide only read-only access to the buffer.
+  uint8_t* data() {
+    return buffer_ ? buffer_
+                   : (encoded_data_ ? encoded_data_->data() : nullptr);
+  }
   const uint8_t* data() const {
-    return buffer_ ? buffer_ : encoded_data_.cdata();
+    return buffer_ ? buffer_
+                   : (encoded_data_ ? encoded_data_->data() : nullptr);
   }
   // TODO(nisse): At some places, code accepts a const ref EncodedImage, but
   // still writes to it, to clear padding at the end of the encoded data.
@@ -153,7 +200,7 @@
  private:
   // TODO(bugs.webrtc.org/9378): We're transitioning to always owning the
   // encoded data.
-  rtc::CopyOnWriteBuffer encoded_data_;
+  rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
   size_t size_;      // Size of encoded frame data.
   // Non-null when used with an un-owned buffer.
   uint8_t* buffer_;
diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc
index 016c259..5e01576b 100644
--- a/call/rtp_video_sender_unittest.cc
+++ b/call/rtp_video_sender_unittest.cc
@@ -186,9 +186,7 @@
   encoded_image.SetTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
-  encoded_image.Allocate(1);
-  encoded_image.data()[0] = kPayload;
-  encoded_image.set_size(1);
+  encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));
 
   RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {});
   EXPECT_NE(
@@ -217,9 +215,7 @@
   encoded_image_1.SetTimestamp(1);
   encoded_image_1.capture_time_ms_ = 2;
   encoded_image_1._frameType = VideoFrameType::kVideoFrameKey;
-  encoded_image_1.Allocate(1);
-  encoded_image_1.data()[0] = kPayload;
-  encoded_image_1.set_size(1);
+  encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));
 
   RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2},
                                  kPayloadType, {});
@@ -262,9 +258,7 @@
   encoded_image_1.SetTimestamp(1);
   encoded_image_1.capture_time_ms_ = 2;
   encoded_image_1._frameType = VideoFrameType::kVideoFrameKey;
-  encoded_image_1.Allocate(1);
-  encoded_image_1.data()[0] = kPayload;
-  encoded_image_1.set_size(1);
+  encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));
 
   EncodedImage encoded_image_2(encoded_image_1);
   encoded_image_2.SetSpatialIndex(1);
@@ -355,9 +349,7 @@
   encoded_image.SetTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
-  encoded_image.Allocate(1);
-  encoded_image.data()[0] = kPayload;
-  encoded_image.set_size(1);
+  encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));
 
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
 
@@ -408,9 +400,7 @@
   encoded_image.SetTimestamp(1);
   encoded_image.capture_time_ms_ = 2;
   encoded_image._frameType = VideoFrameType::kVideoFrameKey;
-  encoded_image.Allocate(1);
-  encoded_image.data()[0] = kPayload;
-  encoded_image.set_size(1);
+  encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1));
 
   // Send two tiny images, mapping to two RTP packets. Capture sequence numbers.
   rtc::Event event;
diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc
index 8c62495..8f246ec 100644
--- a/common_video/h264/sps_vui_rewriter.cc
+++ b/common_video/h264/sps_vui_rewriter.cc
@@ -215,7 +215,7 @@
     const size_t* nalu_offsets,
     const size_t* nalu_lengths,
     const webrtc::ColorSpace* color_space,
-    rtc::CopyOnWriteBuffer* output_buffer,
+    rtc::Buffer* output_buffer,
     size_t* output_nalu_offsets,
     size_t* output_nalu_lengths) {
   // Allocate some extra space for potentially adding a missing VUI.
diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h
index 0590b5a..4cd4cb9 100644
--- a/common_video/h264/sps_vui_rewriter.h
+++ b/common_video/h264/sps_vui_rewriter.h
@@ -19,7 +19,6 @@
 #include "api/video/color_space.h"
 #include "common_video/h264/sps_parser.h"
 #include "rtc_base/buffer.h"
-#include "rtc_base/copy_on_write_buffer.h"
 
 namespace webrtc {
 
@@ -62,7 +61,7 @@
       const size_t* nalu_offsets,
       const size_t* nalu_lengths,
       const ColorSpace* color_space,
-      rtc::CopyOnWriteBuffer* output_buffer,
+      rtc::Buffer* output_buffer,
       size_t* output_nalu_offsets,
       size_t* output_nalu_lengths);
 
diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc
index 870981f..823a58c 100644
--- a/common_video/h264/sps_vui_rewriter_unittest.cc
+++ b/common_video/h264/sps_vui_rewriter_unittest.cc
@@ -407,7 +407,7 @@
   nalu_lengths[1] = sizeof(kIdr1);
   buffer.AppendData(kIdr1);
 
-  rtc::CopyOnWriteBuffer modified_buffer;
+  rtc::Buffer modified_buffer;
   size_t modified_nalu_offsets[kNumNalus];
   size_t modified_nalu_lengths[kNumNalus];
 
@@ -471,7 +471,7 @@
   expected_nalu_lengths[2] = sizeof(kIdr2);
   expected_buffer.AppendData(kIdr2);
 
-  rtc::CopyOnWriteBuffer modified_buffer;
+  rtc::Buffer modified_buffer;
   size_t modified_nalu_offsets[kNumNalus];
   size_t modified_nalu_lengths[kNumNalus];
 
diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 1cf9dad..13565bd 100644
--- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -115,23 +115,8 @@
       required_capacity += layerInfo.pNalLengthInByte[nal];
     }
   }
-  if (encoded_image->capacity() < required_capacity) {
-    // Increase buffer size. Allocate enough to hold an unencoded image, this
-    // should be more than enough to hold any encoded data of future frames of
-    // the same size (avoiding possible future reallocation due to variations in
-    // required size).
-    size_t new_capacity = CalcBufferSize(VideoType::kI420, frame_buffer.width(),
-                                         frame_buffer.height());
-    if (new_capacity < required_capacity) {
-      // Encoded data > unencoded data. Allocate required bytes.
-      RTC_LOG(LS_WARNING)
-          << "Encoding produced more bytes than the original image "
-          << "data! Original bytes: " << new_capacity
-          << ", encoded bytes: " << required_capacity << ".";
-      new_capacity = required_capacity;
-    }
-    encoded_image->Allocate(new_capacity);
-  }
+  // TODO(nisse): Use a cache or buffer pool to avoid allocation?
+  encoded_image->SetEncodedData(EncodedImageBuffer::Create(required_capacity));
 
   // Iterate layers and NAL units, note each NAL unit as a fragment and copy
   // the data to |encoded_image->_buffer|.
@@ -300,7 +285,7 @@
     const size_t new_capacity =
         CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width,
                        codec_.simulcastStream[idx].height);
-    encoded_images_[i].Allocate(new_capacity);
+    encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity));
     encoded_images_[i]._completeFrame = true;
     encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
     encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
index b36ac32..38f16d7 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
@@ -189,8 +189,8 @@
     frame_headers.push_back(frame_header);
   }
 
-  combined_image.Allocate(bitstream_offset);
-  combined_image.set_size(bitstream_offset);
+  auto buffer = EncodedImageBuffer::Create(bitstream_offset);
+  combined_image.SetEncodedData(buffer);
 
   // header
   header_offset = PackHeader(combined_image.data(), header);
@@ -199,8 +199,8 @@
 
   // Frame Header
   for (size_t i = 0; i < images.size(); i++) {
-    int relative_offset = PackFrameHeader(combined_image.data() + header_offset,
-                                          frame_headers[i]);
+    int relative_offset =
+        PackFrameHeader(buffer->data() + header_offset, frame_headers[i]);
     RTC_DCHECK_EQ(relative_offset, kMultiplexImageComponentHeaderSize);
 
     header_offset = frame_headers[i].next_component_header_offset;
@@ -213,16 +213,15 @@
 
   // Augmenting Data
   if (multiplex_image.augmenting_data_size != 0) {
-    memcpy(combined_image.data() + header.augmenting_data_offset,
+    memcpy(buffer->data() + header.augmenting_data_offset,
            multiplex_image.augmenting_data.get(),
            multiplex_image.augmenting_data_size);
   }
 
   // Bitstreams
   for (size_t i = 0; i < images.size(); i++) {
-    PackBitstream(combined_image.data() + frame_headers[i].bitstream_offset,
+    PackBitstream(buffer->data() + frame_headers[i].bitstream_offset,
                   images[i]);
-    delete[] images[i].encoded_image.buffer();
   }
 
   return combined_image;
@@ -263,11 +262,9 @@
     EncodedImage encoded_image = combined_image;
     encoded_image.SetTimestamp(combined_image.Timestamp());
     encoded_image._frameType = frame_headers[i].frame_type;
-    encoded_image.Allocate(frame_headers[i].bitstream_length);
-    encoded_image.set_size(frame_headers[i].bitstream_length);
-    memcpy(encoded_image.data(),
-           combined_image.data() + frame_headers[i].bitstream_offset,
-           frame_headers[i].bitstream_length);
+    encoded_image.SetEncodedData(EncodedImageBuffer::Create(
+        combined_image.data() + frame_headers[i].bitstream_offset,
+        frame_headers[i].bitstream_length));
 
     image_component.encoded_image = encoded_image;
 
diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc
index d0db139..f93d539 100644
--- a/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/modules/video_coding/codecs/test/videoprocessor.cc
@@ -568,7 +568,7 @@
   const size_t payload_size_bytes = base_image.size() + encoded_image.size();
 
   EncodedImage copied_image = encoded_image;
-  copied_image.Allocate(payload_size_bytes);
+  copied_image.SetEncodedData(EncodedImageBuffer::Create(payload_size_bytes));
   if (base_image.size()) {
     RTC_CHECK(base_image.data());
     memcpy(copied_image.data(), base_image.data(), base_image.size());
diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
index c3257e4..9324723 100644
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc
@@ -533,7 +533,8 @@
     // allocate memory for encoded image
     size_t frame_capacity =
         CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
-    encoded_images_[i].Allocate(frame_capacity);
+    encoded_images_[i].SetEncodedData(
+        EncodedImageBuffer::Create(frame_capacity));
     encoded_images_[i]._completeFrame = true;
   }
   // populate encoder configuration with default values
diff --git a/modules/video_coding/codecs/vp9/vp9_impl.cc b/modules/video_coding/codecs/vp9/vp9_impl.cc
index d81c615..bc1d618 100644
--- a/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -458,10 +458,6 @@
 
   is_svc_ = (num_spatial_layers_ > 1 || num_temporal_layers_ > 1);
 
-  // Allocate memory for encoded image
-  size_t frame_capacity =
-      CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
-  encoded_image_.Allocate(frame_capacity);
   encoded_image_._completeFrame = true;
   // Populate encoder configuration with default values.
   if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
@@ -1417,11 +1413,10 @@
     DeliverBufferedFrame(end_of_picture);
   }
 
-  if (pkt->data.frame.sz > encoded_image_.capacity()) {
-    encoded_image_.Allocate(pkt->data.frame.sz);
-  }
-  memcpy(encoded_image_.data(), pkt->data.frame.buf, pkt->data.frame.sz);
-  encoded_image_.set_size(pkt->data.frame.sz);
+  // TODO(nisse): Introduce some buffer cache or buffer pool, to reduce
+  // allocations and/or copy operations.
+  encoded_image_.SetEncodedData(EncodedImageBuffer::Create(
+      static_cast<const uint8_t*>(pkt->data.frame.buf), pkt->data.frame.sz));
 
   const bool is_key_frame =
       (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false;
diff --git a/modules/video_coding/encoded_frame.cc b/modules/video_coding/encoded_frame.cc
index a53f88d..7a204c1 100644
--- a/modules/video_coding/encoded_frame.cc
+++ b/modules/video_coding/encoded_frame.cc
@@ -162,9 +162,9 @@
 void VCMEncodedFrame::VerifyAndAllocate(size_t minimumSize) {
   size_t old_capacity = capacity();
   if (minimumSize > old_capacity) {
-    // TODO(nisse): EncodedImage::Allocate is implemented as
-    // std::vector::resize, which means that old contents is kept. Find out if
-    // any code depends on that behavior.
+    // TODO(nisse): EncodedImage::Allocate is implemented as a realloc
+    // operation, and is deprecated. Refactor to use EncodedImageBuffer::Realloc
+    // instead.
     Allocate(minimumSize);
   }
 }
diff --git a/modules/video_coding/encoded_frame.h b/modules/video_coding/encoded_frame.h
index eced69b..2ebef31 100644
--- a/modules/video_coding/encoded_frame.h
+++ b/modules/video_coding/encoded_frame.h
@@ -56,6 +56,7 @@
   using EncodedImage::data;
   using EncodedImage::set_size;
   using EncodedImage::SetColorSpace;
+  using EncodedImage::SetEncodedData;
   using EncodedImage::SetSpatialIndex;
   using EncodedImage::SetSpatialLayerFrameSize;
   using EncodedImage::SetTimestamp;
diff --git a/modules/video_coding/frame_object.cc b/modules/video_coding/frame_object.cc
index ccac4bb..fab6066 100644
--- a/modules/video_coding/frame_object.cc
+++ b/modules/video_coding/frame_object.cc
@@ -54,7 +54,8 @@
   // as of the first packet's.
   SetPlayoutDelay(first_packet->video_header.playout_delay);
 
-  AllocateBitstreamBuffer(frame_size);
+  // TODO(nisse): Change GetBitstream to return the buffer?
+  SetEncodedData(EncodedImageBuffer::Create(frame_size));
   bool bitstream_copied = packet_buffer_->GetBitstream(*this, data());
   RTC_DCHECK(bitstream_copied);
   _encodedWidth = first_packet->width();
@@ -166,13 +167,5 @@
   return packet->video_header.frame_marking;
 }
 
-void RtpFrameObject::AllocateBitstreamBuffer(size_t frame_size) {
-  if (capacity() < frame_size) {
-    Allocate(frame_size);
-  }
-
-  set_size(frame_size);
-}
-
 }  // namespace video_coding
 }  // namespace webrtc
diff --git a/modules/video_coding/frame_object.h b/modules/video_coding/frame_object.h
index c5c8934..1ba99cb 100644
--- a/modules/video_coding/frame_object.h
+++ b/modules/video_coding/frame_object.h
@@ -45,8 +45,6 @@
   absl::optional<FrameMarking> GetFrameMarking() const;
 
  private:
-  void AllocateBitstreamBuffer(size_t frame_size);
-
   rtc::scoped_refptr<PacketBuffer> packet_buffer_;
   VideoFrameType frame_type_;
   VideoCodecType codec_type_;
diff --git a/modules/video_coding/utility/ivf_file_writer_unittest.cc b/modules/video_coding/utility/ivf_file_writer_unittest.cc
index 365fc4a..12d1d11 100644
--- a/modules/video_coding/utility/ivf_file_writer_unittest.cc
+++ b/modules/video_coding/utility/ivf_file_writer_unittest.cc
@@ -41,8 +41,8 @@
                             int num_frames,
                             bool use_capture_tims_ms) {
     EncodedImage frame;
-    frame.Allocate(sizeof(dummy_payload));
-    memcpy(frame.data(), dummy_payload, sizeof(dummy_payload));
+    frame.SetEncodedData(
+        EncodedImageBuffer::Create(dummy_payload, sizeof(dummy_payload)));
     frame._encodedWidth = width;
     frame._encodedHeight = height;
     for (int i = 1; i <= num_frames; ++i) {
diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
index eadb5a2..d63e67f 100644
--- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc
+++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc
@@ -82,14 +82,16 @@
     if (encoded_image.SpatialIndex().value_or(0) == 0) {
       if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) {
         // TODO(nisse): Why not size() ?
-        encoded_key_frame_.Allocate(encoded_image.capacity());
+        encoded_key_frame_.SetEncodedData(
+            EncodedImageBuffer::Create(encoded_image.capacity()));
         encoded_key_frame_.set_size(encoded_image.size());
         encoded_key_frame_._frameType = VideoFrameType::kVideoFrameKey;
         encoded_key_frame_._completeFrame = encoded_image._completeFrame;
         memcpy(encoded_key_frame_.data(), encoded_image.data(),
                encoded_image.size());
       } else {
-        encoded_frame_.Allocate(encoded_image.capacity());
+        encoded_frame_.SetEncodedData(
+            EncodedImageBuffer::Create(encoded_image.capacity()));
         encoded_frame_.set_size(encoded_image.size());
         memcpy(encoded_frame_.data(), encoded_image.data(),
                encoded_image.size());
@@ -866,7 +868,8 @@
 
             size_t index = encoded_image.SpatialIndex().value_or(0);
             // TODO(nisse): Why not size()
-            encoded_frame[index].Allocate(encoded_image.capacity());
+            encoded_frame[index].SetEncodedData(
+                EncodedImageBuffer::Create(encoded_image.capacity()));
             encoded_frame[index].set_size(encoded_image.size());
             encoded_frame[index]._frameType = encoded_image._frameType;
             encoded_frame[index]._completeFrame = encoded_image._completeFrame;
diff --git a/sdk/android/src/jni/android_media_encoder.cc b/sdk/android/src/jni/android_media_encoder.cc
index 5a5f90b..94496ad 100644
--- a/sdk/android/src/jni/android_media_encoder.cc
+++ b/sdk/android/src/jni/android_media_encoder.cc
@@ -989,8 +989,11 @@
     EncodedImageCallback::Result callback_result(
         EncodedImageCallback::Result::OK);
     if (callback_) {
-      std::unique_ptr<EncodedImage> image(
-          new EncodedImage(payload, payload_size, payload_size));
+      auto image = absl::make_unique<EncodedImage>();
+      // The corresponding (and deprecated) java classes are not prepared for
+      // late calls to releaseOutputBuffer, so to keep things simple, make a
+      // copy here, and call releaseOutputBuffer before returning.
+      image->SetEncodedData(EncodedImageBuffer::Create(payload, payload_size));
       image->_encodedWidth = width_;
       image->_encodedHeight = height_;
       image->SetTimestamp(output_timestamp_);
diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
index 53a5c5c..f1df13e 100644
--- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
+++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm
@@ -14,15 +14,17 @@
 
 #include "rtc_base/numerics/safe_conversions.h"
 
-// A simple wrapper around rtc::CopyOnWriteBuffer to make it usable with
-// associated objects.
-@interface RTCWrappedCOWBuffer : NSObject
-@property(nonatomic) rtc::CopyOnWriteBuffer buffer;
-- (instancetype)initWithCOWBuffer:(rtc::CopyOnWriteBuffer)buffer;
+// A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated
+// objects.
+@interface RTCWrappedEncodedImageBuffer : NSObject
+@property(nonatomic) rtc::scoped_refptr<webrtc::EncodedImageBufferInterface> buffer;
+- (instancetype)initWithEncodedImageBuffer:
+    (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer;
 @end
-@implementation RTCWrappedCOWBuffer
+@implementation RTCWrappedEncodedImageBuffer
 @synthesize buffer = _buffer;
-- (instancetype)initWithCOWBuffer:(rtc::CopyOnWriteBuffer)buffer {
+- (instancetype)initWithEncodedImageBuffer:
+    (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
   self = [super init];
   if (self) {
     _buffer = buffer;
@@ -33,16 +35,18 @@
 
 @implementation RTCEncodedImage (Private)
 
-- (rtc::CopyOnWriteBuffer)encodedData {
-  RTCWrappedCOWBuffer *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData));
+- (rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)encodedData {
+  RTCWrappedEncodedImageBuffer *wrappedBuffer =
+      objc_getAssociatedObject(self, @selector(encodedData));
   return wrappedBuffer.buffer;
 }
 
-- (void)setEncodedData:(rtc::CopyOnWriteBuffer)buffer {
-  return objc_setAssociatedObject(self,
-                                  @selector(encodedData),
-                                  [[RTCWrappedCOWBuffer alloc] initWithCOWBuffer:buffer],
-                                  OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+- (void)setEncodedData:(rtc::scoped_refptr<webrtc::EncodedImageBufferInterface>)buffer {
+  return objc_setAssociatedObject(
+      self,
+      @selector(encodedData),
+      [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer],
+      OBJC_ASSOCIATION_RETAIN_NONATOMIC);
 }
 
 - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage {
diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc
index 23b6094..6bb076e 100644
--- a/test/fake_encoder.cc
+++ b/test/fake_encoder.cc
@@ -117,8 +117,8 @@
     }
 
     EncodedImage encoded;
-    encoded.Allocate(frame_info.layers[i].size);
-    encoded.set_size(frame_info.layers[i].size);
+    encoded.SetEncodedData(
+        EncodedImageBuffer::Create(frame_info.layers[i].size));
 
     // Fill the buffer with arbitrary data. Write someting to make Asan happy.
     memset(encoded.data(), 9, frame_info.layers[i].size);
diff --git a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc
index 7127ef2..2634e6e 100644
--- a/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc
+++ b/test/pc/e2e/analyzer/video/default_encoded_image_data_injector.cc
@@ -42,8 +42,8 @@
     const EncodedImage& source,
     int /*coding_entity_id*/) {
   EncodedImage out = source;
-  out.Allocate(source.size() + kEncodedImageBufferExpansion);
-  out.set_size(source.size() + kEncodedImageBufferExpansion);
+  out.SetEncodedData(
+      EncodedImageBuffer::Create(source.size() + kEncodedImageBufferExpansion));
   memcpy(out.data(), source.data(), source.size());
   size_t insertion_pos = source.size();
   out.data()[insertion_pos] = id & 0x00ff;
@@ -64,7 +64,7 @@
     const EncodedImage& source,
     int /*coding_entity_id*/) {
   EncodedImage out = source;
-  out.Allocate(source.size());
+  out.SetEncodedData(EncodedImageBuffer::Create(source.size()));
 
   size_t source_pos = source.size() - 1;
   absl::optional<uint16_t> id = absl::nullopt;
diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc
index 950e714..d2cbf7d 100644
--- a/video/frame_encode_metadata_writer.cc
+++ b/video/frame_encode_metadata_writer.cc
@@ -11,19 +11,36 @@
 #include "video/frame_encode_metadata_writer.h"
 
 #include <algorithm>
+#include <utility>
 
 #include "absl/memory/memory.h"
 #include "common_video/h264/sps_vui_rewriter.h"
 #include "modules/include/module_common_types_public.h"
 #include "modules/video_coding/include/video_coding_defines.h"
-#include "rtc_base/copy_on_write_buffer.h"
 #include "rtc_base/logging.h"
+#include "rtc_base/ref_counted_object.h"
 #include "rtc_base/time_utils.h"
 
 namespace webrtc {
 namespace {
 const int kMessagesThrottlingThreshold = 2;
 const int kThrottleRatio = 100000;
+
+class EncodedImageBufferWrapper : public EncodedImageBufferInterface {
+ public:
+  explicit EncodedImageBufferWrapper(rtc::Buffer&& buffer)
+      : buffer_(std::move(buffer)) {}
+
+  const uint8_t* data() const override { return buffer_.data(); }
+  uint8_t* data() override { return buffer_.data(); }
+  size_t size() const override { return buffer_.size(); }
+
+  void Realloc(size_t t) override { RTC_NOTREACHED(); }
+
+ private:
+  rtc::Buffer buffer_;
+};
+
 }  // namespace
 
 FrameEncodeMetadataWriter::TimingFramesLayerInfo::TimingFramesLayerInfo() =
@@ -197,7 +214,7 @@
     return nullptr;
   }
 
-  rtc::CopyOnWriteBuffer modified_buffer;
+  rtc::Buffer modified_buffer;
   std::unique_ptr<RTPFragmentationHeader> modified_fragmentation =
       absl::make_unique<RTPFragmentationHeader>();
   modified_fragmentation->CopyFrom(*fragmentation);
@@ -211,7 +228,9 @@
       modified_fragmentation->fragmentationOffset,
       modified_fragmentation->fragmentationLength);
 
-  encoded_image->SetEncodedData(modified_buffer);
+  encoded_image->SetEncodedData(
+      new rtc::RefCountedObject<EncodedImageBufferWrapper>(
+          std::move(modified_buffer)));
 
   return modified_fragmentation;
 }
diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc
index 373b911..96df534 100644
--- a/video/frame_encode_metadata_writer_unittest.cc
+++ b/video/frame_encode_metadata_writer_unittest.cc
@@ -103,7 +103,7 @@
       bool dropped = i % (5 + si) == 0;
 
       EncodedImage image;
-      image.Allocate(max_frame_size);
+      image.SetEncodedData(EncodedImageBuffer::Create(max_frame_size));
       image.set_size(FrameSize(min_frame_size, max_frame_size, si, i));
       image.capture_time_ms_ = current_timestamp;
       image.SetTimestamp(static_cast<uint32_t>(current_timestamp * 90));
@@ -197,8 +197,7 @@
   int64_t timestamp = 1;
   constexpr size_t kFrameSize = 500;
   EncodedImage image;
-  image.Allocate(kFrameSize);
-  image.set_size(kFrameSize);
+  image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize));
   image.capture_time_ms_ = timestamp;
   image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
 
@@ -238,8 +237,7 @@
 
   int64_t timestamp = 1;
   EncodedImage image;
-  image.Allocate(kFrameSize);
-  image.set_size(kFrameSize);
+  image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize));
   image.capture_time_ms_ = timestamp;
   image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
 
diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc
index 7214a64..44c126b 100644
--- a/video/video_stream_encoder_unittest.cc
+++ b/video/video_stream_encoder_unittest.cc
@@ -3936,7 +3936,8 @@
 
   int64_t timestamp = 1;
   EncodedImage image;
-  image.Allocate(kTargetBitrateBps / kDefaultFramerate / 8);
+  image.SetEncodedData(
+      EncodedImageBuffer::Create(kTargetBitrateBps / kDefaultFramerate / 8));
   image.capture_time_ms_ = ++timestamp;
   image.SetTimestamp(static_cast<uint32_t>(timestamp * 90));
   const int64_t kEncodeFinishDelayMs = 10;