Activate ACM test for Android in modules_tests.

TEST=local on Nexus 7.
R=tina.legrand@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/6589004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5364 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/modules/audio_coding/main/test/TestFEC.cc b/webrtc/modules/audio_coding/main/test/TestFEC.cc
index 032579c..8a1f27e 100644
--- a/webrtc/modules/audio_coding/main/test/TestFEC.cc
+++ b/webrtc/modules/audio_coding/main/test/TestFEC.cc
@@ -63,11 +63,11 @@
   return;
 #endif
   char nameG722[] = "G722";
-  EXPECT_EQ(0, RegisterSendCodec('A', nameG722, 16000));
+  RegisterSendCodec('A', nameG722, 16000);
   char nameCN[] = "CN";
-  EXPECT_EQ(0, RegisterSendCodec('A', nameCN, 16000));
+  RegisterSendCodec('A', nameCN, 16000);
   char nameRED[] = "RED";
-  EXPECT_EQ(0, RegisterSendCodec('A', nameRED));
+  RegisterSendCodec('A', nameRED);
   OpenOutFile(_testCntr);
   EXPECT_EQ(0, SetVAD(true, true, VADAggr));
   EXPECT_EQ(0, _acmA->SetFECStatus(false));
@@ -81,6 +81,9 @@
   Run();
   _outFileB.Close();
 
+  // FEC for iSAC works differently than for other codecs; therefore, we
+  // expect iSAC to be enabled for this test. The following is common to both
+  // the floating-point and fixed-point implementations.
   char nameISAC[] = "iSAC";
   RegisterSendCodec('A', nameISAC, 16000);
   OpenOutFile(_testCntr);
@@ -96,6 +99,8 @@
   Run();
   _outFileB.Close();
 
+#if (defined(WEBRTC_CODEC_ISAC))
+  // Only for floating-point implementation, where super-wideband is supported.
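+  // (WEBRTC_CODEC_ISAC is defined only for the floating-point iSAC build; the
+  // fixed-point build defines WEBRTC_CODEC_ISACFX instead.)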
   RegisterSendCodec('A', nameISAC, 32000);
   OpenOutFile(_testCntr);
   EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
@@ -129,11 +134,26 @@
   EXPECT_TRUE(_acmA->FECStatus());
   Run();
   _outFileB.Close();
+#else
+  // For fixed-point implementation.
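+  // Run once with FEC disabled and once with FEC enabled.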
+  OpenOutFile(_testCntr);
+  EXPECT_EQ(0, SetVAD(false, false, VADVeryAggr));
+  EXPECT_EQ(0, _acmA->SetFECStatus(false));
+  EXPECT_FALSE(_acmA->FECStatus());
+  Run();
+  _outFileB.Close();
+
+  EXPECT_EQ(0, _acmA->SetFECStatus(true));
+  EXPECT_TRUE(_acmA->FECStatus());
+  OpenOutFile(_testCntr);
+  Run();
+  _outFileB.Close();
+#endif
 
   _channelA2B->SetFECTestWithPacketLoss(true);
 
-  EXPECT_EQ(0, RegisterSendCodec('A', nameG722));
-  EXPECT_EQ(0, RegisterSendCodec('A', nameCN, 16000));
+  RegisterSendCodec('A', nameG722);
+  RegisterSendCodec('A', nameCN, 16000);
   OpenOutFile(_testCntr);
   EXPECT_EQ(0, SetVAD(true, true, VADAggr));
   EXPECT_EQ(0, _acmA->SetFECStatus(false));
@@ -161,6 +181,8 @@
   Run();
   _outFileB.Close();
 
+#if (defined(WEBRTC_CODEC_ISAC))
+  // Only for floating-point implementation, where super-wideband is supported.
   RegisterSendCodec('A', nameISAC, 32000);
   OpenOutFile(_testCntr);
   EXPECT_EQ(0, SetVAD(true, true, VADVeryAggr));
@@ -194,16 +216,31 @@
   EXPECT_TRUE(_acmA->FECStatus());
   Run();
   _outFileB.Close();
+#else
+  // For fixed-point implementation.
+  OpenOutFile(_testCntr);
+  EXPECT_EQ(0, SetVAD(false, false, VADVeryAggr));
+  EXPECT_EQ(0, _acmA->SetFECStatus(false));
+  EXPECT_FALSE(_acmA->FECStatus());
+  Run();
+  _outFileB.Close();
+
+  EXPECT_EQ(0, _acmA->SetFECStatus(true));
+  EXPECT_TRUE(_acmA->FECStatus());
+  OpenOutFile(_testCntr);
+  Run();
+  _outFileB.Close();
+#endif
 }
 
 int32_t TestFEC::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode) {
   return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
 }
 
-int16_t TestFEC::RegisterSendCodec(char side, char* codecName,
+void TestFEC::RegisterSendCodec(char side, char* codecName,
                                    int32_t samplingFreqHz) {
   std::cout << std::flush;
-  AudioCodingModule* myACM;
+  AudioCodingModule* myACM = NULL;
   switch (side) {
     case 'A': {
       myACM = _acmA.get();
@@ -214,20 +251,15 @@
       break;
     }
     default:
-      return -1;
+      ASSERT_TRUE(false);
   }
 
-  if (myACM == NULL) {
-    assert(false);
-    return -1;
-  }
+  ASSERT_TRUE(myACM != NULL);
+
   CodecInst myCodecParam;
-  EXPECT_GT(AudioCodingModule::Codec(codecName, &myCodecParam,
+  ASSERT_GT(AudioCodingModule::Codec(codecName, &myCodecParam,
                                      samplingFreqHz, 1), -1);
-  EXPECT_GT(myACM->RegisterSendCodec(myCodecParam), -1);
-
-  // Initialization was successful.
-  return 0;
+  ASSERT_GT(myACM->RegisterSendCodec(myCodecParam), -1);
 }
 
 void TestFEC::Run() {
diff --git a/webrtc/modules/audio_coding/main/test/TestFEC.h b/webrtc/modules/audio_coding/main/test/TestFEC.h
index af3cdd7..f61e868 100644
--- a/webrtc/modules/audio_coding/main/test/TestFEC.h
+++ b/webrtc/modules/audio_coding/main/test/TestFEC.h
@@ -30,8 +30,8 @@
   // The default value of '-1' indicates that the registration is based only on
   // codec name and a sampling frequency matching is not required. This is
   // useful for codecs which support several sampling frequency.
-  int16_t RegisterSendCodec(char side, char* codecName,
-                            int32_t sampFreqHz = -1);
+  void RegisterSendCodec(char side, char* codecName,
+                         int32_t sampFreqHz = -1);
   void Run();
   void OpenOutFile(int16_t testNumber);
   int32_t SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
diff --git a/webrtc/modules/audio_coding/main/test/TestStereo.cc b/webrtc/modules/audio_coding/main/test/TestStereo.cc
index b26334c..88cf963 100644
--- a/webrtc/modules/audio_coding/main/test/TestStereo.cc
+++ b/webrtc/modules/audio_coding/main/test/TestStereo.cc
@@ -809,7 +809,14 @@
   channel->reset_payload_size();
   int error_count = 0;
 
-  while (1) {
+#ifdef WEBRTC_ARCH_ARM
+  const int kMaxNumProcessedFrames = 100;  // Limit to 1 second of audio.
+#else
+  const int kMaxNumProcessedFrames = 3000;  // Limit to 30 seconds of audio.
+#endif
+
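+  // Each loop iteration below processes 10 ms of audio, so the limits above
+  // correspond to 1 second and 30 seconds of audio, respectively.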
+  int num_frames = 0;
+  while (num_frames < kMaxNumProcessedFrames) {
     // Simulate packet loss by setting |packet_loss_| to "true" in
     // |percent_loss| percent of the loops.
     if (percent_loss > 0) {
@@ -863,16 +870,15 @@
     out_file_.Write10MsData(
         audio_frame.data_,
         audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+
+    ++num_frames;
   }
 
   EXPECT_EQ(0, error_count);
 
-  if (in_file_mono_->EndOfFile()) {
-    in_file_mono_->Rewind();
-  }
-  if (in_file_stereo_->EndOfFile()) {
-    in_file_stereo_->Rewind();
-  }
+  in_file_mono_->Rewind();
+  in_file_stereo_->Rewind();
+
   // Reset in case we ended with a lost packet
   channel->set_lost_packet(false);
 }
diff --git a/webrtc/modules/audio_coding/main/test/Tester.cc b/webrtc/modules/audio_coding/main/test/Tester.cc
index 31f7317..e089679 100644
--- a/webrtc/modules/audio_coding/main/test/Tester.cc
+++ b/webrtc/modules/audio_coding/main/test/Tester.cc
@@ -50,7 +50,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestEncodeDecode)) {
+TEST(AudioCodingModuleTest, TestEncodeDecode) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_encodedecode_trace.txt").c_str());
@@ -65,7 +65,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestFEC)) {
+TEST(AudioCodingModuleTest, TestFEC) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_fec_trace.txt").c_str());
@@ -80,7 +80,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestIsac)) {
+TEST(AudioCodingModuleTest, TestIsac) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_isac_trace.txt").c_str());
@@ -95,7 +95,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TwoWayCommunication)) {
+TEST(AudioCodingModuleTest, TwoWayCommunication) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_twowaycom_trace.txt").c_str());
@@ -110,7 +110,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestStereo)) {
+TEST(AudioCodingModuleTest, TestStereo) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_stereo_trace.txt").c_str());
@@ -125,7 +125,7 @@
   Trace::ReturnTrace();
 }
 
-TEST(AudioCodingModuleTest, DISABLED_ON_ANDROID(TestVADDTX)) {
+TEST(AudioCodingModuleTest, TestVADDTX) {
   Trace::CreateTrace();
   Trace::SetTraceFile((webrtc::test::OutputPath() +
       "acm_vaddtx_trace.txt").c_str());
diff --git a/webrtc/modules/audio_coding/main/test/iSACTest.cc b/webrtc/modules/audio_coding/main/test/iSACTest.cc
index f7fef4a..eb86a4f 100644
--- a/webrtc/modules/audio_coding/main/test/iSACTest.cc
+++ b/webrtc/modules/audio_coding/main/test/iSACTest.cc
@@ -94,6 +94,24 @@
 
 ISACTest::~ISACTest() {}
 
+void ISACTest::Run10ms() {
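+  // Read a 10 ms frame from the input file, feed it to both ACMs, encode on
+  // both sides, and write each side's 32 kHz playout to its output file.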
+  AudioFrame audioFrame;
+  EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
+  EXPECT_EQ(0, _acmA->Add10MsData(audioFrame));
+  EXPECT_EQ(0, _acmB->Add10MsData(audioFrame));
+  EXPECT_GT(_acmA->Process(), -1);
+  EXPECT_GT(_acmB->Process(), -1);
+  EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
+  _outFileA.Write10MsData(audioFrame);
+  EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
+  _outFileB.Write10MsData(audioFrame);
+}
+
+#if (defined(WEBRTC_CODEC_ISAC))
+// The following implementations differ depending on whether floating-point
+// iSAC is activated.
+
 void ISACTest::Setup() {
   int codecCntr;
   CodecInst codecParam;
@@ -244,19 +262,6 @@
   }
 }
 
-void ISACTest::Run10ms() {
-  AudioFrame audioFrame;
-  EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
-  EXPECT_EQ(0, _acmA->Add10MsData(audioFrame));
-  EXPECT_EQ(0, _acmB->Add10MsData(audioFrame));
-  EXPECT_GT(_acmA->Process(), -1);
-  EXPECT_GT(_acmB->Process(), -1);
-  EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
-  _outFileA.Write10MsData(audioFrame);
-  EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
-  _outFileB.Write10MsData(audioFrame);
-}
-
 void ISACTest::EncodeDecode(int testNr, ACMTestISACConfig& wbISACConfig,
                             ACMTestISACConfig& swbISACConfig) {
   // Files in Side A and B
@@ -317,9 +322,6 @@
     _channel_B2A->PrintStats(_paramISAC16kHz);
   }
 
-  _channel_A2B->ResetStats();
-  _channel_B2A->ResetStats();
-
   _outFileA.Close();
   _outFileB.Close();
   _inFileA.Close();
@@ -392,5 +394,210 @@
   _inFileA.Close();
   _inFileB.Close();
 }
+#else  // Only iSAC fixed-point is defined.
+
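+// Converts a payload of the given size (bytes), carrying a frame of the given
+// duration (ms), to the corresponding instantaneous rate in bits per second.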
+static int PayloadSizeToInstantaneousRate(int payload_size_bytes,
+                                          int frame_size_ms) {
+  return payload_size_bytes * 8 * 1000 / frame_size_ms;
+}
+
+void ISACTest::Setup() {
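+  // Look up the 16 kHz iSAC entry in the codec database, register it as both
+  // receive and send codec on sides A and B, and hook up the A<->B channels.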
+  CodecInst codec_param;
+  codec_param.plfreq = 0;  // Invalid value.
+  for (int n = 0; n < AudioCodingModule::NumberOfCodecs(); ++n) {
+    EXPECT_EQ(0, AudioCodingModule::Codec(n, &codec_param));
+    if (!STR_CASE_CMP(codec_param.plname, "ISAC")) {
+      ASSERT_EQ(16000, codec_param.plfreq);
+      memcpy(&_paramISAC16kHz, &codec_param, sizeof(codec_param));
+      _idISAC16kHz = n;
+      break;
+    }
+  }
+  EXPECT_GT(codec_param.plfreq, 0);
+
+  EXPECT_EQ(0, _acmA->RegisterReceiveCodec(_paramISAC16kHz));
+  EXPECT_EQ(0, _acmB->RegisterReceiveCodec(_paramISAC16kHz));
+
+  //--- Set A-to-B channel
+  _channel_A2B.reset(new Channel);
+  EXPECT_EQ(0, _acmA->RegisterTransportCallback(_channel_A2B.get()));
+  _channel_A2B->RegisterReceiverACM(_acmB.get());
+
+  //--- Set B-to-A channel
+  _channel_B2A.reset(new Channel);
+  EXPECT_EQ(0, _acmB->RegisterTransportCallback(_channel_B2A.get()));
+  _channel_B2A->RegisterReceiverACM(_acmA.get());
+
+  file_name_swb_ = webrtc::test::ResourcePath("audio_coding/testfile32kHz",
+                                              "pcm");
+
+  EXPECT_EQ(0, _acmB->RegisterSendCodec(_paramISAC16kHz));
+  EXPECT_EQ(0, _acmA->RegisterSendCodec(_paramISAC16kHz));
+}
+
+void ISACTest::EncodeDecode(int test_number, ACMTestISACConfig& isac_config_a,
+                            ACMTestISACConfig& isac_config_b) {
+  // Files in Side A and B
+  _inFileA.Open(file_name_swb_, 32000, "rb", true);
+  _inFileB.Open(file_name_swb_, 32000, "rb", true);
+
+  std::string file_name_out;
+  std::stringstream file_stream_a;
+  std::stringstream file_stream_b;
+  file_stream_a << webrtc::test::OutputPath();
+  file_stream_b << webrtc::test::OutputPath();
+  file_stream_a << "out_iSACTest_A_" << test_number << ".pcm";
+  file_stream_b << "out_iSACTest_B_" << test_number << ".pcm";
+  file_name_out = file_stream_a.str();
+  _outFileA.Open(file_name_out, 32000, "wb");
+  file_name_out = file_stream_b.str();
+  _outFileB.Open(file_name_out, 32000, "wb");
+
+  CodecInst codec;
+  EXPECT_EQ(0, _acmA->SendCodec(&codec));
+  EXPECT_EQ(0, _acmB->SendCodec(&codec));
+
+  // Set the configurations.
+  SetISAConfig(isac_config_a, _acmA.get(), _testMode);
+  SetISAConfig(isac_config_b, _acmB.get(), _testMode);
+
+  bool adaptiveMode = false;
+  if (isac_config_a.currentRateBitPerSec == -1 ||
+      isac_config_b.currentRateBitPerSec == -1) {
+    adaptiveMode = true;
+  }
+  _channel_A2B->ResetStats();
+  _channel_B2A->ResetStats();
+
+  EventWrapper* myEvent = EventWrapper::Create();
+  EXPECT_TRUE(myEvent->StartTimer(true, 10));
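+  // Feed the input file through in 10 ms steps until it ends (or wraps
+  // around); in interactive channel-adaptive modes, pace the loop on the
+  // 10 ms timer.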
+  while (!(_inFileA.EndOfFile() || _inFileA.Rewinded())) {
+    Run10ms();
+    if (adaptiveMode && _testMode != 0) {
+      myEvent->Wait(5000);
+    }
+  }
+
+  if (_testMode != 0) {
+    printf("\n\nSide A statistics\n\n");
+    _channel_A2B->PrintStats(_paramISAC16kHz);
+
+    printf("\n\nSide B statistics\n\n");
+    _channel_B2A->PrintStats(_paramISAC16kHz);
+  }
+
+  _outFileA.Close();
+  _outFileB.Close();
+  _inFileA.Close();
+  _inFileB.Close();
+}
+
+void ISACTest::Perform() {
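+  // Four sub-tests: instantaneous (fixed) rate, channel-adaptive rate, a
+  // maximum payload-size constraint, and a maximum rate constraint.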
+  Setup();
+
+  int16_t test_number = 0;
+  ACMTestISACConfig isac_config_a;
+  ACMTestISACConfig isac_config_b;
+
+  SetISACConfigDefault(isac_config_a);
+  SetISACConfigDefault(isac_config_b);
+
+  // Instantaneous mode.
+  isac_config_a.currentRateBitPerSec = 32000;
+  isac_config_b.currentRateBitPerSec = 12000;
+  EncodeDecode(test_number, isac_config_a, isac_config_b);
+  test_number++;
+
+  SetISACConfigDefault(isac_config_a);
+  SetISACConfigDefault(isac_config_b);
+
+  // Channel adaptive.
+  isac_config_a.currentRateBitPerSec = -1;
+  isac_config_b.currentRateBitPerSec = -1;
+  isac_config_a.initRateBitPerSec = 13000;
+  isac_config_a.initFrameSizeInMsec = 60;
+  isac_config_a.enforceFrameSize = true;
+  isac_config_a.currentFrameSizeMsec = 60;
+  isac_config_b.initRateBitPerSec = 20000;
+  isac_config_b.initFrameSizeInMsec = 30;
+  EncodeDecode(test_number, isac_config_a, isac_config_b);
+  test_number++;
+
+  SetISACConfigDefault(isac_config_a);
+  SetISACConfigDefault(isac_config_b);
+  isac_config_a.currentRateBitPerSec = 32000;
+  isac_config_b.currentRateBitPerSec = 32000;
+  isac_config_a.currentFrameSizeMsec = 30;
+  isac_config_b.currentFrameSizeMsec = 60;
+
+  int user_input;
+  const int kMaxPayloadLenBytes30MSec = 110;
+  const int kMaxPayloadLenBytes60MSec = 160;
+  if ((_testMode == 0) || (_testMode == 1)) {
+    isac_config_a.maxPayloadSizeByte =
+        static_cast<uint16_t>(kMaxPayloadLenBytes30MSec);
+    isac_config_b.maxPayloadSizeByte =
+        static_cast<uint16_t>(kMaxPayloadLenBytes60MSec);
+  } else {
+    printf("Enter the max payload-size for side A: ");
+    CHECK_ERROR(scanf("%d", &user_input));
+    isac_config_a.maxPayloadSizeByte = static_cast<uint16_t>(user_input);
+    printf("Enter the max payload-size for side B: ");
+    CHECK_ERROR(scanf("%d", &user_input));
+    isac_config_b.maxPayloadSizeByte = static_cast<uint16_t>(user_input);
+  }
+  EncodeDecode(test_number, isac_config_a, isac_config_b);
+  test_number++;
+
+  ACMTestPayloadStats payload_stats;
+  _channel_A2B->Stats(_paramISAC16kHz, payload_stats);
+  EXPECT_GT(payload_stats.frameSizeStats[0].maxPayloadLen, 0);
+  EXPECT_LE(payload_stats.frameSizeStats[0].maxPayloadLen,
+            static_cast<int>(isac_config_a.maxPayloadSizeByte));
+  _channel_B2A->Stats(_paramISAC16kHz, payload_stats);
+  EXPECT_GT(payload_stats.frameSizeStats[0].maxPayloadLen, 0);
+  EXPECT_LE(payload_stats.frameSizeStats[0].maxPayloadLen,
+            static_cast<int>(isac_config_b.maxPayloadSizeByte));
+
+  _acmA->ResetEncoder();
+  _acmB->ResetEncoder();
+  SetISACConfigDefault(isac_config_a);
+  SetISACConfigDefault(isac_config_b);
+  isac_config_a.currentRateBitPerSec = 32000;
+  isac_config_b.currentRateBitPerSec = 32000;
+  isac_config_a.currentFrameSizeMsec = 30;
+  isac_config_b.currentFrameSizeMsec = 60;
+
+  const int kMaxEncodingRateBitsPerSec = 32000;
+  if ((_testMode == 0) || (_testMode == 1)) {
+    isac_config_a.maxRateBitPerSec =
+        static_cast<uint32_t>(kMaxEncodingRateBitsPerSec);
+    isac_config_b.maxRateBitPerSec =
+        static_cast<uint32_t>(kMaxEncodingRateBitsPerSec);
+  } else {
+    printf("Enter the max rate for side A: ");
+    CHECK_ERROR(scanf("%d", &user_input));
+    isac_config_a.maxRateBitPerSec = static_cast<uint32_t>(user_input);
+    printf("Enter the max rate for side B: ");
+    CHECK_ERROR(scanf("%d", &user_input));
+    isac_config_b.maxRateBitPerSec = static_cast<uint32_t>(user_input);
+  }
+  EncodeDecode(test_number, isac_config_a, isac_config_b);
+
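+  // The largest payload observed in each direction, expressed as an
+  // instantaneous rate, must not exceed the configured maximum rate (e.g.,
+  // 120 bytes over a 30 ms frame is 120 * 8 * 1000 / 30 = 32000 bits/sec).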
+  _channel_A2B->Stats(_paramISAC16kHz, payload_stats);
+  EXPECT_GT(payload_stats.frameSizeStats[0].maxPayloadLen, 0);
+  EXPECT_LE(PayloadSizeToInstantaneousRate(
+      payload_stats.frameSizeStats[0].maxPayloadLen,
+      isac_config_a.currentFrameSizeMsec),
+      static_cast<int>(isac_config_a.maxRateBitPerSec));
+
+  _channel_B2A->Stats(_paramISAC16kHz, payload_stats);
+  EXPECT_GT(payload_stats.frameSizeStats[0].maxPayloadLen, 0);
+  EXPECT_LE(PayloadSizeToInstantaneousRate(
+      payload_stats.frameSizeStats[0].maxPayloadLen,
+      isac_config_b.currentFrameSizeMsec),
+      static_cast<int>(isac_config_b.maxRateBitPerSec));
+}
+#endif  // WEBRTC_CODEC_ISAC
 
 }  // namespace webrtc