Reland "Add unit tests covering MultiplexImageComponent"

This is a reland of 4dc891f5e3a4bcad4db31e1af0ad45b6c471eef2.

Original change's description:
> Add unit tests covering MultiplexImageComponent
>
> This CL changes some types in MultiplexImage and MultiplexImageComponent. It
> also adds unit test coverage in TestMultiplexAdapter for these structs.
>
> Bug: webrtc:7671
> Change-Id: I832d0466dc67d3b6b7fa0d3fb76f02c0190e474f
> Reviewed-on: https://webrtc-review.googlesource.com/44081
> Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> Reviewed-by: Qiang Chen <qiangchen@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#21770}

TBR=qiangchen@chromium.org

Bug: webrtc:7671
Change-Id: Ibc5e6fd0bf3db22838ca45c39f17c72bd5ca2a12
Reviewed-on: https://webrtc-review.googlesource.com/45880
Reviewed-by: Emircan Uysaler <emircan@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21810}
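
For reference, a minimal sketch (not part of this CL) of how the narrowed
fields can be read back after unpacking, mirroring the new tests.
InspectMultiplexFrame is a hypothetical helper; the encoded_frame is assumed
to come from a MultiplexEncoderAdapter, as in the test fixture.

  #include "modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h"
  #include "rtc_base/logging.h"

  void InspectMultiplexFrame(const webrtc::EncodedImage& encoded_frame) {
    // Unpack the combined bitstream back into its per-component images.
    const webrtc::MultiplexImage& image =
        webrtc::MultiplexEncodedImagePacker::Unpack(encoded_frame);
    // image_index is now uint16_t; component_count and component_index are
    // uint8_t after this change.
    RTC_LOG(LS_INFO) << "image_index=" << image.image_index << " components="
                     << static_cast<int>(image.component_count);
    for (const webrtc::MultiplexImageComponent& component :
         image.image_components) {
      RTC_LOG(LS_INFO) << " component_index="
                       << static_cast<int>(component.component_index)
                       << " bytes=" << component.encoded_image._length;
    }
  }

The static_cast<int> is only so the uint8_t fields print as numbers rather
than characters when logged.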
diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h
index a84c52d..e7f6677 100644
--- a/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h
+++ b/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h
@@ -71,7 +71,7 @@
 
   // Identifies which component this frame represents, i.e. YUV frame vs Alpha
   // frame.
-  int component_index;
+  uint8_t component_index;
 
   // Stores the actual frame data of the encoded image.
   EncodedImage encoded_image;
@@ -79,11 +79,11 @@
 
 // Struct holding the whole frame bundle of components of an image.
 struct MultiplexImage {
-  int image_index;
-  int component_count;
+  uint16_t image_index;
+  uint8_t component_count;
   std::vector<MultiplexImageComponent> image_components;
 
-  MultiplexImage(int picture_index, int frame_count);
+  MultiplexImage(uint16_t picture_index, uint8_t component_count);
 };
 
 // A utility class providing conversion between two representations of a
diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
index 6e02a65..83be3a7 100644
--- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
+++ b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc
@@ -113,7 +113,7 @@
   memcpy(buffer, image.encoded_image._buffer, image.encoded_image._length);
 }
 
-MultiplexImage::MultiplexImage(int picture_index, int frame_count)
+MultiplexImage::MultiplexImage(uint16_t picture_index, uint8_t frame_count)
     : image_index(picture_index), component_count(frame_count) {}
 
 EncodedImage MultiplexEncodedImagePacker::PackAndRelease(
@@ -195,9 +195,7 @@
   const MultiplexImageHeader& header = UnpackHeader(combined_image._buffer);
 
   MultiplexImage multiplex_image(header.image_index, header.component_count);
-
   std::vector<MultiplexImageComponentHeader> frame_headers;
-
   int header_offset = header.first_component_header_offset;
 
   while (header_offset > 0) {
@@ -213,6 +211,7 @@
     image_component.codec_type = frame_headers[i].codec_type;
 
     EncodedImage encoded_image = combined_image;
+    encoded_image._timeStamp = combined_image._timeStamp;
     encoded_image._frameType = frame_headers[i].frame_type;
     encoded_image._length = encoded_image._size =
         static_cast<size_t>(frame_headers[i].bitstream_length);
diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
index d5e1fb6..6726b9c 100644
--- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
+++ b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc
@@ -15,6 +15,7 @@
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "media/base/mediaconstants.h"
 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
+#include "modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h"
 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
 #include "modules/video_coding/codecs/test/video_codec_test.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
@@ -65,7 +66,21 @@
         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
         yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
     return rtc::WrapUnique<VideoFrame>(
-        new VideoFrame(yuva_buffer, kVideoRotation_0, 0));
+        new VideoFrame(yuva_buffer, 123 /* timestamp_us */,
+                       345 /* render_time_ms */, kVideoRotation_0));
+  }
+
+  std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& yuva_frame) {
+    const I420ABufferInterface* yuva_buffer =
+        yuva_frame.video_frame_buffer()->GetI420A();
+    rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
+        yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
+        yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
+        yuva_buffer->DataV(), yuva_buffer->StrideV(),
+        rtc::KeepRefUntilDone(yuva_frame.video_frame_buffer()));
+    return rtc::WrapUnique<VideoFrame>(
+        new VideoFrame(axx_buffer, 123 /* timestamp_us */,
+                       345 /* render_time_ms */, kVideoRotation_0));
   }
 
  private:
@@ -110,7 +125,6 @@
   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-
   EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
 
   EXPECT_EQ(
@@ -130,7 +144,6 @@
   EncodedImage encoded_frame;
   CodecSpecificInfo codec_specific_info;
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
-
   EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
 
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@@ -140,6 +153,72 @@
   ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
   ASSERT_TRUE(decoded_frame);
   EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36);
+
+  // Find PSNR for AXX bits.
+  std::unique_ptr<VideoFrame> input_axx_frame = ExtractAXXFrame(*yuva_frame);
+  std::unique_ptr<VideoFrame> output_axx_frame =
+      ExtractAXXFrame(*decoded_frame);
+  EXPECT_GT(I420PSNR(input_axx_frame.get(), output_axx_frame.get()), 47);
+}
+
+TEST_F(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*input_frame_, nullptr, nullptr));
+  EncodedImage encoded_frame;
+  CodecSpecificInfo codec_specific_info;
+  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+  EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
+  EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
+
+  const MultiplexImage& unpacked_frame =
+      MultiplexEncodedImagePacker::Unpack(encoded_frame);
+  EXPECT_EQ(0, unpacked_frame.image_index);
+  EXPECT_EQ(1, unpacked_frame.component_count);
+  const MultiplexImageComponent& component = unpacked_frame.image_components[0];
+  EXPECT_EQ(0, component.component_index);
+  EXPECT_NE(nullptr, component.encoded_image._buffer);
+  EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType);
+}
+
+TEST_F(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
+  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
+  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+            encoder_->Encode(*yuva_frame, nullptr, nullptr));
+  EncodedImage encoded_frame;
+  CodecSpecificInfo codec_specific_info;
+  ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+  EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType);
+  EXPECT_EQ(0, codec_specific_info.codecSpecific.generic.simulcast_idx);
+
+  const MultiplexImage& unpacked_frame =
+      MultiplexEncodedImagePacker::Unpack(encoded_frame);
+  EXPECT_EQ(0, unpacked_frame.image_index);
+  EXPECT_EQ(2, unpacked_frame.component_count);
+  EXPECT_EQ(unpacked_frame.image_components.size(),
+            unpacked_frame.component_count);
+  for (int i = 0; i < unpacked_frame.component_count; ++i) {
+    const MultiplexImageComponent& component =
+        unpacked_frame.image_components[i];
+    EXPECT_EQ(i, component.component_index);
+    EXPECT_NE(nullptr, component.encoded_image._buffer);
+    EXPECT_EQ(kVideoFrameKey, component.encoded_image._frameType);
+  }
+}
+
+TEST_F(TestMultiplexAdapter, ImageIndexIncreases) {
+  std::unique_ptr<VideoFrame> yuva_frame = CreateI420AInputFrame();
+  const size_t expected_num_encoded_frames = 3;
+  for (size_t i = 0; i < expected_num_encoded_frames; ++i) {
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+              encoder_->Encode(*yuva_frame, nullptr, nullptr));
+    EncodedImage encoded_frame;
+    CodecSpecificInfo codec_specific_info;
+    ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
+    const MultiplexImage& unpacked_frame =
+        MultiplexEncodedImagePacker::Unpack(encoded_frame);
+    EXPECT_EQ(i, unpacked_frame.image_index);
+    EXPECT_EQ(i ? kVideoFrameDelta : kVideoFrameKey, encoded_frame._frameType);
+  }
 }
 
 }  // namespace webrtc