Reland #2 of Issue 2434073003: Extract bitrate allocation ...

This is yet another reland of https://codereview.webrtc.org/2434073003/
including two fixes:

1. SimulcastRateAllocator did not handle the screenshare settings properly for numSimulcastStreams = 1. An additional test case was added to cover this.
2. In VideoSender, when the rate allocation is updated after a new VideoCodec config is set, only the state of the EncoderParameters is updated; SetRateAllocation is no longer run on the encoder itself, since the eager call caused some problems upstream (see the sketch below).
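A minimal sketch of the intent behind fix 2. All names here are hypothetical and simplified; the real VideoSender code differs:

  #include <cstdint>

  struct EncoderParameters {
    uint32_t target_bitrate_bps = 0;
    uint32_t framerate_fps = 0;
  };

  class VideoSenderSketch {
   public:
    // Called when a new VideoCodec config is set: only cache the parameters.
    // Deliberately no SetRateAllocation call on the encoder here; the eager
    // call is what caused the upstream problems mentioned above.
    void OnCodecConfigured(const EncoderParameters& params) {
      cached_params_ = params;
    }

    // Called on the regular rate-update path: this is where the encoder is
    // actually told about the (possibly updated) allocation.
    void OnRateUpdate() { ApplyToEncoder(cached_params_); }

   private:
    void ApplyToEncoder(const EncoderParameters& params) {
      (void)params;  // Encoder call elided in this sketch.
    }
    EncoderParameters cached_params_;
  };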

Please review only the changes after patch set 1.

Original description:

Extract bitrate allocation of spatial/temporal layers out of codec impl.

This CL makes a number of interwoven changes:

* Adds a BitrateAllocation struct that contains a codec-independent view
  of how the target bitrate is distributed over spatial and temporal
  layers.

* Adds the BitrateAllocator interface, which takes a bitrate and frame
  rate and produces a BitrateAllocation (a sketch of both types follows
  this list).

* A default (non-layered) implementation is added, and
  SimulcastRateAllocator is extended to fully handle VP8 allocation.
  This includes capturing TemporalLayer instances created by the
  encoder.

* ViEEncoder now owns both the bitrate allocator and the temporal layer
  factories for VP8. This allows allocation to happen fully outside of
  the encoder implementation.
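
A rough sketch of the shape of the two new types, for orientation only:
get_sum_kbps() is visible in the diff below, while the remaining names and
constants are illustrative:

  #include <cstddef>
  #include <cstdint>

  class BitrateAllocation {
   public:
    static const size_t kMaxSpatialLayers = 5;
    static const size_t kMaxTemporalStreams = 4;

    // Per-layer setter; keeps a running sum so the totals are O(1).
    void SetBitrate(size_t spatial, size_t temporal, uint32_t bitrate_bps) {
      sum_ += bitrate_bps - bitrates_[spatial][temporal];
      bitrates_[spatial][temporal] = bitrate_bps;
    }
    uint32_t GetBitrate(size_t spatial, size_t temporal) const {
      return bitrates_[spatial][temporal];
    }
    uint32_t get_sum_bps() const { return sum_; }
    uint32_t get_sum_kbps() const { return sum_ / 1000; }

   private:
    uint32_t bitrates_[kMaxSpatialLayers][kMaxTemporalStreams] = {};
    uint32_t sum_ = 0;
  };

  // Codec-agnostic input (total bitrate, frame rate), codec-aware output
  // (the per spatial/temporal layer split).
  class BitrateAllocator {
   public:
    virtual ~BitrateAllocator() = default;
    virtual BitrateAllocation GetAllocation(uint32_t total_bitrate_bps,
                                            uint32_t framerate_fps) = 0;
  };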

This refactoring will make it possible for ViEEncoder to signal the
full picture of target bitrates to the RTCP module.

BUG=webrtc:6301
R=stefan@webrtc.org

Review URL: https://codereview.webrtc.org/2510583002 .

Cr-Commit-Position: refs/heads/master@{#15105}
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index f6112f9..d84ae73 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -1327,7 +1327,8 @@
     ChangingTransportOverheadTest()
         : EndToEndTest(test::CallTest::kDefaultTimeoutMs),
           call_(nullptr),
-          packets_sent_(0) {}
+          packets_sent_(0),
+          transport_overhead_(0) {}
 
     void OnCallsCreated(Call* sender_call, Call* receiver_call) override {
       call_ = sender_call;
@@ -1478,7 +1479,8 @@
           return;
         }
       }
-      init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs);
+      EXPECT_TRUE(
+          init_encode_called_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
       {
         rtc::CritScope lock(&crit_);
         EXPECT_EQ(width, last_initialized_frame_width_);
@@ -1494,9 +1496,7 @@
       last_initialized_frame_width_ = config->width;
       last_initialized_frame_height_ = config->height;
       ++number_of_initializations_;
-      // First time InitEncode is called, the frame size is unknown.
-      if (number_of_initializations_ > 1)
-        init_encode_called_.Set();
+      init_encode_called_.Set();
       return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
     }
 
@@ -1615,41 +1615,50 @@
     StartStopBitrateObserver()
         : FakeEncoder(Clock::GetRealTimeClock()),
           encoder_init_(false, false),
-          bitrate_changed_(false, false),
-          bitrate_kbps_(0) {}
+          bitrate_changed_(false, false) {}
     int32_t InitEncode(const VideoCodec* config,
                        int32_t number_of_cores,
                        size_t max_payload_size) override {
       rtc::CritScope lock(&crit_);
-      bitrate_kbps_ = config->startBitrate;
       encoder_init_.Set();
       return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
     }
 
-    int32_t SetRates(uint32_t new_target_bitrate, uint32_t framerate) override {
+    int32_t SetRateAllocation(const BitrateAllocation& bitrate,
+                              uint32_t framerate) override {
       rtc::CritScope lock(&crit_);
-      bitrate_kbps_ = new_target_bitrate;
+      bitrate_kbps_ = rtc::Optional<int>(bitrate.get_sum_kbps());
       bitrate_changed_.Set();
-      return FakeEncoder::SetRates(new_target_bitrate, framerate);
-    }
-
-    int GetBitrateKbps() const {
-      rtc::CritScope lock(&crit_);
-      return bitrate_kbps_;
+      return FakeEncoder::SetRateAllocation(bitrate, framerate);
     }
 
     bool WaitForEncoderInit() {
       return encoder_init_.Wait(VideoSendStreamTest::kDefaultTimeoutMs);
     }
-    bool WaitBitrateChanged() {
-      return bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeoutMs);
+
+    bool WaitBitrateChanged(bool non_zero) {
+      do {
+        rtc::Optional<int> bitrate_kbps;
+        {
+          rtc::CritScope lock(&crit_);
+          bitrate_kbps = bitrate_kbps_;
+        }
+        if (!bitrate_kbps)
+          continue;
+
+        if ((non_zero && *bitrate_kbps > 0) ||
+            (!non_zero && *bitrate_kbps == 0)) {
+          return true;
+        }
+      } while (bitrate_changed_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
+      return false;
     }
 
    private:
     rtc::CriticalSection crit_;
     rtc::Event encoder_init_;
     rtc::Event bitrate_changed_;
-    int bitrate_kbps_ GUARDED_BY(crit_);
+    rtc::Optional<int> bitrate_kbps_ GUARDED_BY(crit_);
   };
 
   CreateSenderCall(Call::Config(&event_log_));
@@ -1664,16 +1673,15 @@
   CreateVideoStreams();
 
   EXPECT_TRUE(encoder.WaitForEncoderInit());
-  EXPECT_GT(encoder.GetBitrateKbps(), 0);
+
   video_send_stream_->Start();
-  EXPECT_TRUE(encoder.WaitBitrateChanged());
-  EXPECT_GT(encoder.GetBitrateKbps(), 0);
+  EXPECT_TRUE(encoder.WaitBitrateChanged(true));
+
   video_send_stream_->Stop();
-  EXPECT_TRUE(encoder.WaitBitrateChanged());
-  EXPECT_EQ(0, encoder.GetBitrateKbps());
+  EXPECT_TRUE(encoder.WaitBitrateChanged(false));
+
   video_send_stream_->Start();
-  EXPECT_TRUE(encoder.WaitBitrateChanged());
-  EXPECT_GT(encoder.GetBitrateKbps(), 0);
+  EXPECT_TRUE(encoder.WaitBitrateChanged(true));
 
   DestroyStreams();
 }
@@ -2085,10 +2093,12 @@
   }
 
   // Set expected temporal layers as they should have been set when
-  // reconfiguring the encoder and not match the set config.
+  // reconfiguring the encoder and not match the set config. Also copy the
+  // TemporalLayersFactory pointer that has been injected by ViEEncoder.
   VideoCodecVP8 encoder_settings = encoder_settings_;
   encoder_settings.numberOfTemporalLayers =
       kVideoCodecConfigObserverNumberOfTemporalLayers;
+  encoder_settings.tl_factory = config.VP8().tl_factory;
   EXPECT_EQ(
       0, memcmp(&config.VP8(), &encoder_settings, sizeof(encoder_settings_)));
 }
@@ -2310,16 +2320,17 @@
                                      maxPayloadSize);
     }
 
-    int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
+    int32_t SetRateAllocation(const BitrateAllocation& bitrate,
+                              uint32_t frameRate) override {
       {
         rtc::CritScope lock(&crit_);
-        if (target_bitrate_ == newBitRate) {
-          return FakeEncoder::SetRates(newBitRate, frameRate);
+        if (target_bitrate_ == bitrate.get_sum_kbps()) {
+          return FakeEncoder::SetRateAllocation(bitrate, frameRate);
         }
-        target_bitrate_ = newBitRate;
+        target_bitrate_ = bitrate.get_sum_kbps();
       }
       bitrate_changed_event_.Set();
-      return FakeEncoder::SetRates(newBitRate, frameRate);
+      return FakeEncoder::SetRateAllocation(bitrate, frameRate);
     }
 
     void WaitForSetRates(uint32_t expected_bitrate) {