Store video_quality_loopback_test perf results in Chart JSON format.

Adds a flag to store the perf results in a JSON file using the Chart
JSON format [1].

[1] https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md
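
For reference, the Chart JSON layout from [1] looks roughly like the sketch
below (a hand-written illustration, not actual tool output; the benchmark,
chart, and trace names are placeholders):

    import json

    # Minimal Chart JSON sketch, following the catapult data-format docs.
    chartjson = {
        'format_version': '1.0',
        'benchmark_name': 'video_quality_loopback_test',
        'charts': {
            'PSNR': {
                'some_label': {
                    'type': 'list_of_scalar_values',
                    'values': [35.1, 34.8, 36.0],
                    'units': 'dB',
                },
            },
        },
    }
    print(json.dumps(chartjson, indent=2))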

TBR=phoglund@webrtc.org

Bug: chromium:755660
Change-Id: I6a896654a4a558df217ddefa4e8a52a487cdbebd
Reviewed-on: https://webrtc-review.googlesource.com/43180
Commit-Queue: Edward Lemur <ehmaldonado@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21809}
diff --git a/examples/androidtests/video_quality_loopback_test.py b/examples/androidtests/video_quality_loopback_test.py
index bd4e94e..a47fbe7 100755
--- a/examples/androidtests/video_quality_loopback_test.py
+++ b/examples/androidtests/video_quality_loopback_test.py
@@ -86,6 +86,8 @@
   parser.add_argument('--temp_dir',
       help='A temporary directory to put the output.')
   parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
+  parser.add_argument('--chartjson-result-file',
+      help='Where to store perf results in chartjson format.', default=None)
 
   args = parser.parse_args()
   return args
@@ -148,7 +150,8 @@
       '8089']))
 
 
-def RunTest(android_device, adb_path, build_dir, temp_dir):
+def RunTest(android_device, adb_path, build_dir, temp_dir,
+            chartjson_result_file):
   ffmpeg_path = os.path.join(TOOLCHAIN_DIR, 'ffmpeg')
   def ConvertVideo(input_video, output_video):
     _RunCommand([ffmpeg_path, '-y', '-i', input_video, output_video])
@@ -181,8 +184,7 @@
   stats_file_ref = os.path.join(temp_dir, 'stats_ref.txt')
   stats_file_test = os.path.join(temp_dir, 'stats_test.txt')
 
-  _RunCommand([
-      sys.executable, compare_script,
+  args = [
       '--ref_video', reference_video_yuv,
       '--test_video', test_video_yuv,
       '--yuv_frame_width', '640',
@@ -191,7 +193,12 @@
       '--stats_file_test', stats_file_test,
       '--frame_analyzer', frame_analyzer,
       '--ffmpeg_path', ffmpeg_path,
-      '--zxing_path', zxing_path])
+      '--zxing_path', zxing_path,
+  ]
+  if chartjson_result_file:
+    args.extend(['--chartjson_result_file', chartjson_result_file])
+
+  _RunCommand([sys.executable, compare_script] + args)
 
 
 def main():
@@ -208,7 +215,8 @@
   try:
     android_device = SelectAndroidDevice(adb_path)
     SetUpTools(android_device, temp_dir, processes)
-    RunTest(android_device, adb_path, build_dir, temp_dir)
+    RunTest(android_device, adb_path, build_dir, temp_dir,
+            args.chartjson_result_file)
   finally:
     for process in processes:
       if process:
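
The new flag is threaded from the loopback script down to frame_analyzer. A
hedged invocation sketch (the paths are placeholders and the script's other
required flags are elided):

    import subprocess
    import sys

    # Hypothetical paths; --chartjson-result-file is the flag added above.
    subprocess.check_call([
        sys.executable,
        'examples/androidtests/video_quality_loopback_test.py',
        '--adb-path', '/path/to/adb',
        '--temp_dir', '/tmp/loopback',
        '--chartjson-result-file', '/tmp/loopback/perf_results.json',
    ])
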
diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn
index 0b1c631..976fa06 100644
--- a/rtc_tools/BUILD.gn
+++ b/rtc_tools/BUILD.gn
@@ -66,6 +66,7 @@
   ]
   deps = [
     "../common_video",
+    "../test:perf_test",
     "//third_party/libyuv",
   ]
 }
@@ -79,6 +80,7 @@
   deps = [
     ":command_line_parser",
     ":video_quality_analysis",
+    "../test:perf_test",
     "//build/win:default_exe_manifest",
   ]
 }
diff --git a/rtc_tools/compare_videos.py b/rtc_tools/compare_videos.py
index ce5bff3..40a2aab 100755
--- a/rtc_tools/compare_videos.py
+++ b/rtc_tools/compare_videos.py
@@ -62,6 +62,8 @@
                     help='Width of the YUV file\'s frames. Default: %default')
   parser.add_option('--yuv_frame_height', type='int', default=480,
                     help='Height of the YUV file\'s frames. Default: %default')
+  parser.add_option('--chartjson_result_file', type='string', default=None,
+                    help='Where to store perf results in chartjson format.')
   options, _ = parser.parse_args()
 
   if options.stats_file:
@@ -161,6 +163,8 @@
     '--width=%d' % options.yuv_frame_width,
     '--height=%d' % options.yuv_frame_height,
   ]
+  if options.chartjson_result_file:
+    cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file)
   frame_analyzer = subprocess.Popen(cmd, stdin=_DevNull(),
                                     stdout=sys.stdout, stderr=sys.stderr)
   frame_analyzer.wait()
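
compare_videos.py only forwards --chartjson_result_file to frame_analyzer
when it is set, so existing callers are unaffected. The script can also be
driven directly; a sketch under the same placeholder-path caveat (remaining
options keep their defaults):

    import subprocess
    import sys

    subprocess.check_call([
        sys.executable, 'rtc_tools/compare_videos.py',
        '--ref_video', 'ref.yuv',
        '--test_video', 'test.yuv',
        '--frame_analyzer', 'out/Default/frame_analyzer',
        '--chartjson_result_file', '/tmp/perf_results.json',
    ])
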
diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc
index d7851e6..b3d9b94 100644
--- a/rtc_tools/frame_analyzer/frame_analyzer.cc
+++ b/rtc_tools/frame_analyzer/frame_analyzer.cc
@@ -17,6 +17,7 @@
 
 #include "rtc_tools/frame_analyzer/video_quality_analysis.h"
 #include "rtc_tools/simple_command_line_parser.h"
+#include "test/testsupport/perf_test.h"
 
 /*
  * A command line tool running PSNR and SSIM on a reference video and a test
@@ -62,7 +63,10 @@
       "  - reference_file(string): The reference YUV file to compare against."
       " Default: ref.yuv\n"
       "  - test_file(string): The test YUV file to run the analysis for."
-      " Default: test_file.yuv\n";
+      " Default: test_file.yuv\n"
+      "  - chartjson_result_file: Where to store perf result in chartjson"
+      " format. If not present, no perf result will be stored."
+      " Default: None\n";
 
   webrtc::test::CommandLineParser parser;
 
@@ -77,6 +81,7 @@
   parser.SetFlag("stats_file_test", "stats_test.txt");
   parser.SetFlag("reference_file", "ref.yuv");
   parser.SetFlag("test_file", "test.yuv");
+  parser.SetFlag("chartjson_result_file", "");
   parser.SetFlag("help", "false");
 
   parser.ProcessFlags();
@@ -101,11 +106,16 @@
                             parser.GetFlag("stats_file_ref").c_str(),
                             parser.GetFlag("stats_file_test").c_str(), width,
                             height, &results);
+  webrtc::test::GetMaxRepeatedAndSkippedFrames(
+      parser.GetFlag("stats_file_ref"), parser.GetFlag("stats_file_test"),
+      &results);
 
-  std::string label = parser.GetFlag("label");
-  webrtc::test::PrintAnalysisResults(label, &results);
-  webrtc::test::PrintMaxRepeatedAndSkippedFrames(
-      label, parser.GetFlag("stats_file_ref"),
-      parser.GetFlag("stats_file_test"));
+  webrtc::test::PrintAnalysisResults(parser.GetFlag("label"), &results);
+
+  std::string chartjson_result_file = parser.GetFlag("chartjson_result_file");
+  if (!chartjson_result_file.empty()) {
+    webrtc::test::WritePerfResults(chartjson_result_file);
+  }
+
   return 0;
 }
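
With the flag set, frame_analyzer fills a ResultsContainer via
GetMaxRepeatedAndSkippedFrames, prints the human-readable results, and
finally serializes everything with WritePerfResults. Reading the file back
might look like this (a sketch assuming the Chart JSON nesting from [1];
the exact chart/trace keys depend on the --label passed to the tool):

    import json

    with open('/tmp/perf_results.json') as f:
        results = json.load(f)

    # 'PSNR' matches the graph name used by PrintResultList; the trace key
    # is the label. Both keys here are assumptions for illustration.
    psnr_values = results['charts']['PSNR']['some_label']['values']
    print('mean PSNR: %.2f dB' % (sum(psnr_values) / len(psnr_values)))
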
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.cc b/rtc_tools/frame_analyzer/video_quality_analysis.cc
index 7dce734..502ac82 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.cc
@@ -18,6 +18,8 @@
 #include <map>
 #include <utility>
 
+#include "test/testsupport/perf_test.h"
+
 #define STATS_LINE_LENGTH 32
 #define Y4M_FILE_HEADER_MAX_SIZE 200
 #define Y4M_FRAME_DELIMITER "FRAME"
@@ -318,13 +320,6 @@
   delete[] reference_frame;
 }
 
-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
-  PrintMaxRepeatedAndSkippedFrames(stdout, label, stats_file_ref_name,
-                                   stats_file_test_name);
-}
-
 std::vector<std::pair<int, int> > CalculateFrameClusters(
     FILE* file,
     int* num_decode_errors) {
@@ -359,10 +354,9 @@
   return frame_cnt;
 }
 
-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results) {
   FILE* stats_file_ref = fopen(stats_file_ref_name.c_str(), "r");
   FILE* stats_file_test = fopen(stats_file_test_name.c_str(), "r");
   if (stats_file_ref == NULL) {
@@ -460,22 +454,17 @@
       }
       continue;
     }
-    fprintf(output,
+    fprintf(stdout,
             "Found barcode %d in test video, which is not in reference video\n",
             it_test->first);
     break;
   }
 
-  fprintf(output, "RESULT Max_repeated: %s= %d\n", label.c_str(),
-          max_repeated_frames);
-  fprintf(output, "RESULT Max_skipped: %s= %d\n", label.c_str(),
-          max_skipped_frames);
-  fprintf(output, "RESULT Total_skipped: %s= %d\n", label.c_str(),
-          total_skipped_frames);
-  fprintf(output, "RESULT Decode_errors_reference: %s= %d\n", label.c_str(),
-          decode_errors_ref);
-  fprintf(output, "RESULT Decode_errors_test: %s= %d\n", label.c_str(),
-          decode_errors_test);
+  results->max_repeated_frames = max_repeated_frames;
+  results->max_skipped_frames = max_skipped_frames;
+  results->total_skipped_frames = total_skipped_frames;
+  results->decode_errors_ref = decode_errors_ref;
+  results->decode_errors_test = decode_errors_test;
 }
 
 void PrintAnalysisResults(const std::string& label, ResultsContainer* results) {
@@ -484,26 +473,32 @@
 
 void PrintAnalysisResults(FILE* output, const std::string& label,
                           ResultsContainer* results) {
-  std::vector<AnalysisResult>::iterator iter;
-
-  fprintf(output, "RESULT Unique_frames_count: %s= %u score\n", label.c_str(),
-          static_cast<unsigned int>(results->frames.size()));
+  SetPerfResultsOutput(output);
 
   if (results->frames.size() > 0u) {
-    fprintf(output, "RESULT PSNR: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->psnr_value);
-    }
-    fprintf(output, "%f] dB\n", iter->psnr_value);
+    PrintResult("Unique_frames_count", "", label, results->frames.size(),
+                "score", false);
 
-    fprintf(output, "RESULT SSIM: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->ssim_value);
+    std::vector<double> psnr_values;
+    std::vector<double> ssim_values;
+    for (const auto& frame : results->frames) {
+      psnr_values.push_back(frame.psnr_value);
+      ssim_values.push_back(frame.ssim_value);
     }
-    fprintf(output, "%f] score\n", iter->ssim_value);
+
+    PrintResultList("PSNR", "", label, psnr_values, "dB", false);
+    PrintResultList("SSIM", "", label, ssim_values, "score", false);
   }
+
+  PrintResult("Max_repeated", "", label, results->max_repeated_frames, "",
+              false);
+  PrintResult("Max_skipped", "", label, results->max_skipped_frames, "", false);
+  PrintResult("Total_skipped", "", label, results->total_skipped_frames, "",
+              false);
+  PrintResult("Decode_errors_reference", "", label, results->decode_errors_ref,
+              "", false);
+  PrintResult("Decode_errors_test", "", label, results->decode_errors_test, "",
+              false);
 }
 
 }  // namespace test
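
PrintResult and PrintResultList keep emitting the Chromium-style RESULT
lines on stdout, which is why the test expectations below only gain a
trailing space (the units field is empty for the frame counters). The line
format, inferred from those expectations (format_result is a hypothetical
helper, not part of the tree):

    def format_result(measurement, trace, values, units):
        # 'RESULT <measurement>: <trace>= <value> <units>' -- note the
        # trailing space when units is empty.
        if isinstance(values, list):
            value_str = '[%s]' % ','.join('%f' % v for v in values)
        else:
            value_str = str(values)
        return 'RESULT %s: %s= %s %s' % (measurement, trace, value_str,
                                         units)

    print(format_result('Max_repeated', 'NormalStatsFile', 2, ''))
    # -> 'RESULT Max_repeated: NormalStatsFile= 2 '
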
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.h b/rtc_tools/frame_analyzer/video_quality_analysis.h
index f6651f6..92228fc 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.h
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.h
@@ -37,6 +37,11 @@
   ~ResultsContainer();
 
   std::vector<AnalysisResult> frames;
+  int max_repeated_frames = 0;
+  int max_skipped_frames = 0;
+  int total_skipped_frames = 0;
+  int decode_errors_ref = 0;
+  int decode_errors_test = 0;
 };
 
 enum VideoAnalysisMetricsType {kPSNR, kSSIM};
@@ -102,15 +107,9 @@
 
-// Calculates max repeated and skipped frames and prints them to stdout in a
-// format that is compatible with Chromium performance numbers.
+// Calculates max repeated and skipped frames and stores them in the given
+// ResultsContainer.
-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
-
-// Similar to the above, but will print to the specified file handle.
-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results);
 
 // Gets the next line from an open stats file.
 bool GetNextStatsLine(FILE* stats_file, char* line);
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
index 0b1258a..6143c31 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
@@ -86,24 +86,25 @@
   PrintAnalysisResults(logfile_, "ThreeFrames", &result);
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesInvalidFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesInvalidFile) {
+  ResultsContainer result;
   remove(stats_filename_.c_str());
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NonExistingStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
 TEST_F(VideoQualityAnalysisTest,
-       PrintMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+       GetMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+  ResultsContainer result;
   std::ofstream stats_file;
   stats_file.open(stats_filename_ref_.c_str());
   stats_file.close();
   stats_file.open(stats_filename_.c_str());
   stats_file.close();
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "EmptyStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesNormalFile) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   stats_file.open(stats_filename_ref_.c_str());
@@ -123,8 +124,7 @@
   stats_file << "frame_0004 0106\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }
 
 namespace {
@@ -143,6 +143,7 @@
 
 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesSkippedFrames) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   std::string log_filename =
@@ -171,21 +172,22 @@
   stats_file << "frame_0006 0112\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));
 
   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 2",
-      "RESULT Max_skipped: NormalStatsFile= 2",
-      "RESULT Total_skipped: NormalStatsFile= 3",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 0"};
+      "RESULT Max_repeated: NormalStatsFile= 2 ",
+      "RESULT Max_skipped: NormalStatsFile= 2 ",
+      "RESULT Total_skipped: NormalStatsFile= 3 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 0 "};
   VerifyLogOutput(log_filename, expected_out);
 }
 
 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesDecodeErrorInTest) {
+  ResultsContainer result;
   std::ofstream stats_file;
 
   std::string log_filename =
@@ -214,16 +216,16 @@
   stats_file << "frame_0006 0110\n";
   stats_file.close();
 
-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));
 
   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 1",
-      "RESULT Max_skipped: NormalStatsFile= 0",
-      "RESULT Total_skipped: NormalStatsFile= 0",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 3"};
+      "RESULT Max_repeated: NormalStatsFile= 1 ",
+      "RESULT Max_skipped: NormalStatsFile= 0 ",
+      "RESULT Total_skipped: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 3 "};
   VerifyLogOutput(log_filename, expected_out);
 }
 
diff --git a/test/BUILD.gn b/test/BUILD.gn
index 5f6150e..bfc521d 100644
--- a/test/BUILD.gn
+++ b/test/BUILD.gn
@@ -122,7 +122,6 @@
 
 rtc_source_set("perf_test") {
   visibility = [ "*" ]
-  testonly = true
   sources = [
     "testsupport/perf_test.cc",
     "testsupport/perf_test.h",