Reformat the WebRTC code base
Running clang-format with chromium's style guide.
The goal is n-fold:
* providing consistency and readability (that's what code guidelines are for)
* preventing formatting noise in future diffs, enforced via presubmit checks and `git cl format`
* building on the previous point: making it possible to fix format issues automatically
* you name it
Please consider using git-hyper-blame to ignore this commit.
Bug: webrtc:9340
Change-Id: I694567c4cdf8cee2860958cfe82bfaf25848bb87
Reviewed-on: https://webrtc-review.googlesource.com/81185
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23660}
diff --git a/rtc_tools/agc/activity_metric.cc b/rtc_tools/agc/activity_metric.cc
index ed48543..b4ed3fa 100644
--- a/rtc_tools/agc/activity_metric.cc
+++ b/rtc_tools/agc/activity_metric.cc
@@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
@@ -32,21 +31,30 @@
static const float kDefaultActivityThreshold = 0.3f;
DEFINE_bool(standalone_vad, true, "enable stand-alone VAD");
-DEFINE_string(true_vad, "", "name of a file containing true VAD in 'int'"
+DEFINE_string(true_vad,
+ "",
+ "name of a file containing true VAD in 'int'"
" format");
-DEFINE_string(video_vad, "", "name of a file containing video VAD (activity"
+DEFINE_string(video_vad,
+ "",
+ "name of a file containing video VAD (activity"
" probabilities) in double format. One activity per 10ms is"
" required. If no file is given the video information is not"
" incorporated. Negative activity is interpreted as video is"
" not adapted and the statistics are not computed during"
" the learning phase. Note that the negative video activities"
" are ONLY allowed at the beginning.");
-DEFINE_string(result, "", "name of a file to write the results. The results"
+DEFINE_string(result,
+ "",
+ "name of a file to write the results. The results"
" will be appended to the end of the file. This is optional.");
-DEFINE_string(audio_content, "", "name of a file where audio content is written"
+DEFINE_string(audio_content,
+ "",
+ "name of a file where audio content is written"
" to, in double format.");
-DEFINE_float(activity_threshold, kDefaultActivityThreshold,
- "Activity threshold");
+DEFINE_float(activity_threshold,
+ kDefaultActivityThreshold,
+ "Activity threshold");
DEFINE_bool(help, false, "prints this message");
namespace webrtc {
@@ -61,8 +69,8 @@
static void DitherSilence(AudioFrame* frame) {
ASSERT_EQ(1u, frame->num_channels_);
const double kRmsSilence = 5;
- const double sum_squared_silence = kRmsSilence * kRmsSilence *
- frame->samples_per_channel_;
+ const double sum_squared_silence =
+ kRmsSilence * kRmsSilence * frame->samples_per_channel_;
double sum_squared = 0;
int16_t* frame_data = frame->mutable_data();
for (size_t n = 0; n < frame->samples_per_channel_; n++)
@@ -97,21 +105,18 @@
audio_content_fid_ = audio_content_fid;
}
- int AddAudio(const AudioFrame& frame, double p_video,
- int* combined_vad) {
+ int AddAudio(const AudioFrame& frame, double p_video, int* combined_vad) {
if (frame.num_channels_ != 1 ||
- frame.samples_per_channel_ !=
- kSampleRateHz / 100 ||
- frame.sample_rate_hz_ != kSampleRateHz)
+ frame.samples_per_channel_ != kSampleRateHz / 100 ||
+ frame.sample_rate_hz_ != kSampleRateHz)
return -1;
video_vad_[video_index_++] = p_video;
AudioFeatures features;
const int16_t* frame_data = frame.data();
- audio_processing_->ExtractFeatures(
- frame_data, frame.samples_per_channel_, &features);
+ audio_processing_->ExtractFeatures(frame_data, frame.samples_per_channel_,
+ &features);
if (FLAG_standalone_vad) {
- standalone_vad_->AddAudio(frame_data,
- frame.samples_per_channel_);
+ standalone_vad_->AddAudio(frame_data, frame.samples_per_channel_);
}
if (features.num_frames > 0) {
double p[kMaxNumFrames] = {0.5, 0.5, 0.5, 0.5};
@@ -145,9 +150,7 @@
return static_cast<int>(features.num_frames);
}
- void Reset() {
- audio_content_->Reset();
- }
+ void Reset() { audio_content_->Reset(); }
void SetActivityThreshold(double activity_threshold) {
activity_threshold_ = activity_threshold;
@@ -165,7 +168,6 @@
FILE* audio_content_fid_;
};
-
void void_main(int argc, char* argv[]) {
webrtc::AgcStat agc_stat;
@@ -178,10 +180,10 @@
FILE* true_vad_fid = NULL;
ASSERT_GT(strlen(FLAG_true_vad), 0u) << "Specify the file containing true "
- "VADs using --true_vad flag.";
+ "VADs using --true_vad flag.";
true_vad_fid = fopen(FLAG_true_vad, "rb");
- ASSERT_TRUE(true_vad_fid != NULL) << "Cannot open the active list " <<
- FLAG_true_vad;
+ ASSERT_TRUE(true_vad_fid != NULL)
+ << "Cannot open the active list " << FLAG_true_vad;
FILE* results_fid = NULL;
if (strlen(FLAG_result) > 0) {
@@ -199,11 +201,12 @@
}
// Open in append mode.
results_fid = fopen(FLAG_result, "a");
- ASSERT_TRUE(results_fid != NULL) << "Cannot open the file, " <<
- FLAG_result << ", to write the results.";
+ ASSERT_TRUE(results_fid != NULL)
+ << "Cannot open the file, " << FLAG_result << ", to write the results.";
// Write the header if required.
if (write_header) {
- fprintf(results_fid, "%% Total Active, Misdetection, "
+ fprintf(results_fid,
+ "%% Total Active, Misdetection, "
"Total inactive, False Positive, On-sets, Missed segments, "
"Average response\n");
}
@@ -212,8 +215,9 @@
FILE* video_vad_fid = NULL;
if (strlen(FLAG_video_vad) > 0) {
video_vad_fid = fopen(FLAG_video_vad, "rb");
- ASSERT_TRUE(video_vad_fid != NULL) << "Cannot open the file, " <<
- FLAG_video_vad << " to read video-based VAD decisions.\n";
+ ASSERT_TRUE(video_vad_fid != NULL)
+ << "Cannot open the file, " << FLAG_video_vad
+ << " to read video-based VAD decisions.\n";
}
// AgsStat will be the owner of this file and will close it at its
@@ -221,8 +225,9 @@
FILE* audio_content_fid = NULL;
if (strlen(FLAG_audio_content) > 0) {
audio_content_fid = fopen(FLAG_audio_content, "wb");
- ASSERT_TRUE(audio_content_fid != NULL) << "Cannot open file, " <<
- FLAG_audio_content << " to write audio-content.\n";
+ ASSERT_TRUE(audio_content_fid != NULL)
+ << "Cannot open file, " << FLAG_audio_content
+ << " to write audio-content.\n";
agc_stat.set_audio_content_file(audio_content_fid);
}
@@ -230,8 +235,8 @@
frame.num_channels_ = 1;
frame.sample_rate_hz_ = 16000;
frame.samples_per_channel_ = frame.sample_rate_hz_ / 100;
- const size_t kSamplesToRead = frame.num_channels_ *
- frame.samples_per_channel_;
+ const size_t kSamplesToRead =
+ frame.num_channels_ * frame.samples_per_channel_;
agc_stat.SetActivityThreshold(FLAG_activity_threshold);
@@ -260,16 +265,17 @@
true_vad_fid))
<< "Size mismatch between True-VAD and the PCM file.\n";
if (video_vad_fid != NULL) {
- ASSERT_EQ(1u, fread(&p_video, sizeof(p_video), 1, video_vad_fid)) <<
- "Not enough video-based VAD probabilities.";
+ ASSERT_EQ(1u, fread(&p_video, sizeof(p_video), 1, video_vad_fid))
+ << "Not enough video-based VAD probabilities.";
}
// Negative video activity indicates that the video-based VAD is not yet
// adapted. Disregards the learning phase in statistics.
if (p_video < 0) {
if (video_adapted) {
- fprintf(stderr, "Negative video probabilities ONLY allowed at the "
- "beginning of the sequence, not in the middle.\n");
+ fprintf(stderr,
+ "Negative video probabilities ONLY allowed at the "
+ "beginning of the sequence, not in the middle.\n");
exit(1);
}
continue;
@@ -337,23 +343,15 @@
if (results_fid != NULL) {
fprintf(results_fid, "%4d %4d %4d %4d %4d %4d %4.0f %4.0f\n",
- total_active,
- total_missed_detection,
- total_passive,
- total_false_positive,
- num_onsets,
- num_not_adapted,
+ total_active, total_missed_detection, total_passive,
+ total_false_positive, num_onsets, num_not_adapted,
static_cast<float>(onset_adaptation) / (num_onsets + 1e-12),
static_cast<float>(total_false_positive_duration) /
- (total_passive + 1e-12));
+ (total_passive + 1e-12));
}
- fprintf(stdout, "%4d %4d %4d %4d %4d %4d %4.0f %4.0f\n",
- total_active,
- total_missed_detection,
- total_passive,
- total_false_positive,
- num_onsets,
- num_not_adapted,
+ fprintf(stdout, "%4d %4d %4d %4d %4d %4d %4.0f %4.0f\n", total_active,
+ total_missed_detection, total_passive, total_false_positive,
+ num_onsets, num_not_adapted,
static_cast<float>(onset_adaptation) / (num_onsets + 1e-12),
static_cast<float>(total_false_positive_duration) /
(total_passive + 1e-12));
@@ -373,16 +371,18 @@
int main(int argc, char* argv[]) {
if (argc == 1) {
// Print usage information.
- std::cout <<
- "\nCompute the number of misdetected and false-positive frames. Not\n"
- " that for each frame of audio (10 ms) there should be one true\n"
- " activity. If any video-based activity is given, there should also be\n"
- " one probability per frame.\n"
- "Run with --help for more details on available flags.\n"
- "\nUsage:\n\n"
- "activity_metric input_pcm [options]\n"
- "where 'input_pcm' is the input audio sampled at 16 kHz in 16 bits "
- "format.\n\n";
+ std::cout
+ << "\nCompute the number of misdetected and false-positive frames. "
+ "Not\n"
+ " that for each frame of audio (10 ms) there should be one true\n"
+ " activity. If any video-based activity is given, there should also "
+ "be\n"
+ " one probability per frame.\n"
+ "Run with --help for more details on available flags.\n"
+ "\nUsage:\n\n"
+ "activity_metric input_pcm [options]\n"
+ "where 'input_pcm' is the input audio sampled at 16 kHz in 16 bits "
+ "format.\n\n";
return 0;
}
rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
diff --git a/rtc_tools/converter/converter.cc b/rtc_tools/converter/converter.cc
index ee1316e..e36d405 100644
--- a/rtc_tools/converter/converter.cc
+++ b/rtc_tools/converter/converter.cc
@@ -27,10 +27,7 @@
namespace webrtc {
namespace test {
-Converter::Converter(int width, int height)
- : width_(width),
- height_(height) {
-}
+Converter::Converter(int width, int height) : width_(width), height_(height) {}
bool Converter::ConvertRGBAToI420Video(std::string frames_dir,
std::string output_file_name,
@@ -53,7 +50,7 @@
int v_plane_size = VPlaneSize();
uint8_t* dst_v = new uint8_t[v_plane_size];
- int counter = 0; // Counter to form frame names.
+ int counter = 0; // Counter to form frame names.
bool success = false; // Is conversion successful.
while (true) {
@@ -79,16 +76,13 @@
}
// Convert to I420 frame.
- libyuv::ABGRToI420(rgba_buffer, SrcStrideFrame(),
- dst_y, DstStrideY(),
- dst_u, DstStrideU(),
- dst_v, DstStrideV(),
- width_, height_);
+ libyuv::ABGRToI420(rgba_buffer, SrcStrideFrame(), dst_y, DstStrideY(),
+ dst_u, DstStrideU(), dst_v, DstStrideV(), width_,
+ height_);
// Add the I420 frame to the YUV video file.
- success = AddYUVToFile(dst_y, y_plane_size, dst_u, u_plane_size,
- dst_v, v_plane_size, output_file);
-
+ success = AddYUVToFile(dst_y, y_plane_size, dst_u, u_plane_size, dst_v,
+ v_plane_size, output_file);
if (!success) {
fprintf(stderr, "LibYUV error during RGBA to I420 frame conversion\n");
@@ -125,14 +119,17 @@
size_t bytes_written = fwrite(yuv_plane, 1, yuv_plane_size, file);
if (bytes_written != static_cast<size_t>(yuv_plane_size)) {
- fprintf(stderr, "Number of bytes written (%d) doesn't match size of y plane"
- " (%d)\n", static_cast<int>(bytes_written), yuv_plane_size);
+ fprintf(stderr,
+ "Number of bytes written (%d) doesn't match size of y plane"
+ " (%d)\n",
+ static_cast<int>(bytes_written), yuv_plane_size);
return false;
}
return true;
}
-bool Converter::ReadRGBAFrame(const char* input_file_name, int input_frame_size,
+bool Converter::ReadRGBAFrame(const char* input_file_name,
+ int input_frame_size,
unsigned char* buffer) {
FILE* input_file = fopen(input_file_name, "rb");
if (input_file == NULL) {
@@ -157,7 +154,7 @@
return dir_name + SEPARATOR + file_name;
}
-bool Converter:: FileExists(std::string file_name_to_check) {
+bool Converter::FileExists(std::string file_name_to_check) {
struct STAT file_info;
int result = STAT(file_name_to_check.c_str(), &file_info);
return (result == 0);
diff --git a/rtc_tools/converter/converter.h b/rtc_tools/converter/converter.h
index bd31f7e..e3b3ea3 100644
--- a/rtc_tools/converter/converter.h
+++ b/rtc_tools/converter/converter.h
@@ -27,51 +27,36 @@
// Converts RGBA to YUV video. If the delete_frames argument is true, the
// method will delete the input frames after conversion.
bool ConvertRGBAToI420Video(std::string frames_dir,
- std::string output_file_name, bool delete_frames);
+ std::string output_file_name,
+ bool delete_frames);
private:
- int width_; // Width of the video (respectively of the RGBA frames).
+ int width_; // Width of the video (respectively of the RGBA frames).
int height_; // Height of the video (respectively of the RGBA frames).
// Returns the size of the Y plane in bytes.
- int YPlaneSize() const {
- return width_*height_;
- }
+ int YPlaneSize() const { return width_ * height_; }
// Returns the size of the U plane in bytes.
- int UPlaneSize() const {
- return ((width_+1)/2)*((height_)/2);
- }
+ int UPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); }
// Returns the size of the V plane in bytes.
- int VPlaneSize() const {
- return ((width_+1)/2)*((height_)/2);
- }
+ int VPlaneSize() const { return ((width_ + 1) / 2) * ((height_) / 2); }
// Returns the number of bytes per row in the RGBA frame.
- int SrcStrideFrame() const {
- return width_*4;
- }
+ int SrcStrideFrame() const { return width_ * 4; }
// Returns the number of bytes in the Y plane.
- int DstStrideY() const {
- return width_;
- }
+ int DstStrideY() const { return width_; }
// Returns the number of bytes in the U plane.
- int DstStrideU() const {
- return (width_+1)/2;
- }
+ int DstStrideU() const { return (width_ + 1) / 2; }
// Returns the number of bytes in the V plane.
- int DstStrideV() const {
- return (width_+1)/2;
- }
+ int DstStrideV() const { return (width_ + 1) / 2; }
// Returns the size in bytes of the input RGBA frames.
- int InputFrameSize() const {
- return width_*height_*4;
- }
+ int InputFrameSize() const { return width_ * height_ * 4; }
// Writes the Y, U and V (in this order) planes to the file, thus adding a
// raw YUV frame to the file.
@@ -88,7 +73,8 @@
// Reads a RGBA frame from input_file_name with input_frame_size size in bytes
// into the buffer.
- bool ReadRGBAFrame(const char* input_file_name, int input_frame_size,
+ bool ReadRGBAFrame(const char* input_file_name,
+ int input_frame_size,
unsigned char* buffer);
// Finds the full path name of the file - concatenates the directory and file
@@ -99,7 +85,7 @@
bool FileExists(std::string file_name_to_check);
// Returns the name of the file in the form frame_<number>, where <number> is
- // 4 zero padded (i.e. frame_0000, frame_0001, etc.).
+ // 4 zero padded (i.e. frame_0000, frame_0001, etc.).
std::string FormFrameName(int width, int number);
};
diff --git a/rtc_tools/converter/rgba_to_i420_converter.cc b/rtc_tools/converter/rgba_to_i420_converter.cc
index e08dd8a..9fe5141 100644
--- a/rtc_tools/converter/rgba_to_i420_converter.cc
+++ b/rtc_tools/converter/rgba_to_i420_converter.cc
@@ -28,22 +28,25 @@
*/
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
- std::string usage = "Converts RGBA raw image files to I420 frames for YUV.\n"
- "Example usage:\n" + program_name +
- " --frames_dir=. --output_file=output.yuv --width=320 --height=240\n"
- "IMPORTANT: If you pass the --delete_frames command line parameter, the "
- "tool will delete the input frames after conversion.\n"
- "Command line flags:\n"
- " - width(int): Width in pixels of the frames in the input file."
- " Default: -1\n"
- " - height(int): Height in pixels of the frames in the input file."
- " Default: -1\n"
- " - frames_dir(string): The path to the directory where the frames reside."
- " Default: .\n"
- " - output_file(string): The output file to which frames are written."
- " Default: output.yuv\n"
- " - delete_frames(bool): Whether or not to delete the input frames after"
- " the conversion. Default: false.\n";
+ std::string usage =
+ "Converts RGBA raw image files to I420 frames for YUV.\n"
+ "Example usage:\n" +
+ program_name +
+ " --frames_dir=. --output_file=output.yuv --width=320 --height=240\n"
+ "IMPORTANT: If you pass the --delete_frames command line parameter, the "
+ "tool will delete the input frames after conversion.\n"
+ "Command line flags:\n"
+ " - width(int): Width in pixels of the frames in the input file."
+ " Default: -1\n"
+ " - height(int): Height in pixels of the frames in the input file."
+ " Default: -1\n"
+ " - frames_dir(string): The path to the directory where the frames "
+ "reside."
+ " Default: .\n"
+ " - output_file(string): The output file to which frames are written."
+ " Default: output.yuv\n"
+ " - delete_frames(bool): Whether or not to delete the input frames after"
+ " the conversion. Default: false.\n";
webrtc::test::CommandLineParser parser;
@@ -76,9 +79,8 @@
bool del_frames = (parser.GetFlag("delete_frames") == "true") ? true : false;
webrtc::test::Converter converter(width, height);
- bool success = converter.ConvertRGBAToI420Video(parser.GetFlag("frames_dir"),
- parser.GetFlag("output_file"),
- del_frames);
+ bool success = converter.ConvertRGBAToI420Video(
+ parser.GetFlag("frames_dir"), parser.GetFlag("output_file"), del_frames);
if (success) {
fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n");
diff --git a/rtc_tools/event_log_visualizer/analyzer.cc b/rtc_tools/event_log_visualizer/analyzer.cc
index a8dbc30..f20d301 100644
--- a/rtc_tools/event_log_visualizer/analyzer.cc
+++ b/rtc_tools/event_log_visualizer/analyzer.cc
@@ -483,7 +483,7 @@
}
}
RTC_LOG(LS_INFO) << "Found " << log_segments_.size()
- << " (LOG_START, LOG_END) segments in log.";
+ << " (LOG_START, LOG_END) segments in log.";
}
class BitrateObserver : public NetworkChangedObserver,
@@ -641,8 +641,7 @@
plot->SetXAxis(ToCallTimeSec(begin_time_), call_duration_s_, "Time (s)",
kLeftMargin, kRightMargin);
- plot->SetYAxis(-127, 0, "Audio level (dBov)", kBottomMargin,
- kTopMargin);
+ plot->SetYAxis(-127, 0, "Audio level (dBov)", kBottomMargin, kTopMargin);
plot->SetTitle(GetDirectionAsString(direction) + " audio level");
}
diff --git a/rtc_tools/event_log_visualizer/main.cc b/rtc_tools/event_log_visualizer/main.cc
index 802633c..c82295e 100644
--- a/rtc_tools/event_log_visualizer/main.cc
+++ b/rtc_tools/event_log_visualizer/main.cc
@@ -157,7 +157,6 @@
void SetAllPlotFlags(bool setting);
-
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
std::string usage =
@@ -381,7 +380,6 @@
return 0;
}
-
void SetAllPlotFlags(bool setting) {
FLAG_plot_incoming_packet_sizes = setting;
FLAG_plot_outgoing_packet_sizes = setting;
diff --git a/rtc_tools/event_log_visualizer/plot_protobuf.cc b/rtc_tools/event_log_visualizer/plot_protobuf.cc
index e986a74..0c4c709 100644
--- a/rtc_tools/event_log_visualizer/plot_protobuf.cc
+++ b/rtc_tools/event_log_visualizer/plot_protobuf.cc
@@ -69,8 +69,8 @@
// TODO(terelius): Ensure that there is no way to insert plots other than
// ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast
// here.
- webrtc::analytics::Chart* protobuf_representation
- = collection->add_charts();
+ webrtc::analytics::Chart* protobuf_representation =
+ collection->add_charts();
static_cast<ProtobufPlot*>(plot.get())
->ExportProtobuf(protobuf_representation);
}
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis.cc
index 4e87bc9..3d7fd7f 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis.cc
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis.cc
@@ -17,8 +17,10 @@
int main(int argc, char* argv[]) {
// This captures the freezing metrics for reference less video analysis.
std::string program_name = argv[0];
- std::string usage = "Outputs the freezing score by comparing current frame "
- "with the previous frame.\nExample usage:\n" + program_name +
+ std::string usage =
+ "Outputs the freezing score by comparing current frame "
+ "with the previous frame.\nExample usage:\n" +
+ program_name +
" --video_file=video_file.y4m\n"
"Command line flags:\n"
" - video_file(string): Path of the video "
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
index 78d10ce..9a7535b 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc
@@ -25,7 +25,9 @@
#define strtok_r strtok_s
#endif
-void get_height_width_fps(int *height, int *width, int *fps,
+void get_height_width_fps(int* height,
+ int* width,
+ int* fps,
const std::string& video_file) {
// File header looks like :
// YUV4MPEG2 W1280 H720 F25:1 Ip A0:0 C420mpeg2 XYSCSS=420MPEG2.
@@ -37,8 +39,8 @@
frame_header[bytes_read] = '\0';
std::string file_header_stats[5];
int no_of_stats = 0;
- char *save_ptr;
- char *token = strtok_r(frame_header, " ", &save_ptr);
+ char* save_ptr;
+ char* token = strtok_r(frame_header, " ", &save_ptr);
while (token != NULL) {
file_header_stats[no_of_stats++] = token;
@@ -54,27 +56,29 @@
}
bool frozen_frame(std::vector<double> psnr_per_frame,
- std::vector<double> ssim_per_frame, size_t frame) {
+ std::vector<double> ssim_per_frame,
+ size_t frame) {
if (psnr_per_frame[frame] >= PSNR_FREEZE_THRESHOLD ||
ssim_per_frame[frame] >= SSIM_FREEZE_THRESHOLD)
return true;
return false;
}
-std::vector<int> find_frame_clusters(const std::vector<double>& psnr_per_frame,
- const std::vector<double>& ssim_per_frame) {
+std::vector<int> find_frame_clusters(
+ const std::vector<double>& psnr_per_frame,
+ const std::vector<double>& ssim_per_frame) {
std::vector<int> identical_frame_clusters;
int num_frozen = 0;
size_t total_no_of_frames = psnr_per_frame.size();
for (size_t each_frame = 0; each_frame < total_no_of_frames; each_frame++) {
- if (frozen_frame(psnr_per_frame, ssim_per_frame, each_frame)) {
- num_frozen++;
- } else if (num_frozen > 0) {
- // Not frozen anymore.
- identical_frame_clusters.push_back(num_frozen);
- num_frozen = 0;
- }
+ if (frozen_frame(psnr_per_frame, ssim_per_frame, each_frame)) {
+ num_frozen++;
+ } else if (num_frozen > 0) {
+ // Not frozen anymore.
+ identical_frame_clusters.push_back(num_frozen);
+ num_frozen = 0;
+ }
}
return identical_frame_clusters;
}
@@ -106,8 +110,8 @@
*
*/
size_t total_no_of_frames = psnr_per_frame.size();
- std::vector<int> identical_frame_clusters = find_frame_clusters(
- psnr_per_frame, ssim_per_frame);
+ std::vector<int> identical_frame_clusters =
+ find_frame_clusters(psnr_per_frame, ssim_per_frame);
int total_identical_frames = std::accumulate(
identical_frame_clusters.begin(), identical_frame_clusters.end(), 0);
size_t unique_frames = total_no_of_frames - total_identical_frames;
@@ -126,7 +130,7 @@
printf("Print identical frame which appears in clusters : \n");
for (int cluster = 0;
- cluster < static_cast<int>(identical_frame_clusters.size()); cluster++)
+ cluster < static_cast<int>(identical_frame_clusters.size()); cluster++)
printf("%d ", identical_frame_clusters[cluster]);
printf("\n");
}
@@ -145,28 +149,22 @@
uint8_t* next_frame = new uint8_t[size];
while (true) {
- if (!(webrtc::test::ExtractFrameFromY4mFile (video_file_name.c_str(),
- width, height,
- no_of_frames,
- current_frame))) {
+ if (!(webrtc::test::ExtractFrameFromY4mFile(video_file_name.c_str(), width,
+ height, no_of_frames,
+ current_frame))) {
break;
}
- if (!(webrtc::test::ExtractFrameFromY4mFile (video_file_name.c_str(),
- width, height,
- no_of_frames + 1,
- next_frame))) {
+ if (!(webrtc::test::ExtractFrameFromY4mFile(video_file_name.c_str(), width,
+ height, no_of_frames + 1,
+ next_frame))) {
break;
}
- double result_psnr = webrtc::test::CalculateMetrics(webrtc::test::kPSNR,
- current_frame,
- next_frame,
- width, height);
- double result_ssim = webrtc::test::CalculateMetrics(webrtc::test::kSSIM,
- current_frame,
- next_frame,
- width, height);
+ double result_psnr = webrtc::test::CalculateMetrics(
+ webrtc::test::kPSNR, current_frame, next_frame, width, height);
+ double result_ssim = webrtc::test::CalculateMetrics(
+ webrtc::test::kSSIM, current_frame, next_frame, width, height);
psnr_per_frame->push_back(result_psnr);
ssim_per_frame->push_back(result_ssim);
@@ -178,7 +176,7 @@
}
bool check_file_extension(const std::string& video_file_name) {
- if (video_file_name.substr(video_file_name.length()-3, 3) != "y4m") {
+ if (video_file_name.substr(video_file_name.length() - 3, 3) != "y4m") {
printf("Only y4m video file format is supported. Given: %s\n",
video_file_name.c_str());
return false;
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
index 1a7b1b1..a1de03b 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h
@@ -16,13 +16,16 @@
// Parse the file header to extract height, width and fps
// for a given video file.
-void get_height_width_fps(int *height, int *width, int *fps,
+void get_height_width_fps(int* height,
+ int* width,
+ int* fps,
const std::string& video_file);
// Returns true if the frame is frozen based on psnr and ssim freezing
// threshold values.
bool frozen_frame(std::vector<double> psnr_per_frame,
- std::vector<double> ssim_per_frame, size_t frame);
+ std::vector<double> ssim_per_frame,
+ size_t frame);
// Returns the vector of identical cluster of frames that are frozen
// and appears continuously.
diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
index 13ccb85..4e20532 100644
--- a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
+++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc
@@ -7,8 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <string.h>
#include <stdio.h>
+#include <string.h>
#include <cstring>
#include <iostream>
#include <vector>
@@ -21,7 +21,7 @@
public:
void SetUp() override {
video_file =
- webrtc::test::ResourcePath("reference_less_video_test_file", "y4m");
+ webrtc::test::ResourcePath("reference_less_video_test_file", "y4m");
}
std::string video_file;
std::vector<double> psnr_per_frame;
@@ -62,7 +62,3 @@
webrtc::test::ResourcePath("video_quality_analysis_frame", "txt");
EXPECT_FALSE(check_file_extension(txt_file));
}
-
-
-
-
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.cc b/rtc_tools/frame_analyzer/video_quality_analysis.cc
index 502ac82..60a4a01 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.cc
@@ -14,8 +14,8 @@
#include <stdio.h>
#include <stdlib.h>
#include <algorithm>
-#include <string>
#include <map>
+#include <string>
#include <utility>
#include "test/testsupport/perf_test.h"
@@ -35,7 +35,7 @@
int half_width = (width + 1) >> 1;
int half_height = (height + 1) >> 1;
- int y_plane = width * height; // I420 Y plane.
+ int y_plane = width * height; // I420 Y plane.
int u_plane = half_width * half_height; // I420 U plane.
int v_plane = half_width * half_height; // I420 V plane.
@@ -88,7 +88,7 @@
line[chars] = buf;
++chars;
}
- line[chars-1] = '\0'; // Strip the trailing \n and put end of string.
+ line[chars - 1] = '\0'; // Strip the trailing \n and put end of string.
return true;
}
@@ -112,8 +112,7 @@
fseek(input_file, offset, SEEK_SET);
size_t bytes_read = fread(result_frame, 1, frame_size, input_file);
- if (bytes_read != static_cast<size_t>(frame_size) &&
- ferror(input_file)) {
+ if (bytes_read != static_cast<size_t>(frame_size) && ferror(input_file)) {
fprintf(stdout, "Error while reading frame no %d from file %s\n",
frame_number, i420_file_name);
errors = true;
@@ -144,8 +143,7 @@
size_t bytes_read =
fread(frame_header, 1, Y4M_FILE_HEADER_MAX_SIZE - 1, input_file);
if (bytes_read != static_cast<size_t>(frame_size) && ferror(input_file)) {
- fprintf(stdout, "Error while reading frame from file %s\n",
- y4m_file_name);
+ fprintf(stdout, "Error while reading frame from file %s\n", y4m_file_name);
fclose(input_file);
return false;
}
@@ -154,7 +152,7 @@
std::size_t found = header_contents.find(Y4M_FRAME_DELIMITER);
if (found == std::string::npos) {
fprintf(stdout, "Corrupted Y4M header, could not find \"FRAME\" in %s\n",
- header_contents.c_str());
+ header_contents.c_str());
fclose(input_file);
return false;
}
@@ -206,19 +204,17 @@
switch (video_metrics_type) {
case kPSNR:
// In the following: stride is determined by width.
- result = libyuv::I420Psnr(src_y_a, width, src_u_a, half_width,
- src_v_a, half_width, src_y_b, width,
- src_u_b, half_width, src_v_b, half_width,
- width, height);
+ result = libyuv::I420Psnr(src_y_a, width, src_u_a, half_width, src_v_a,
+ half_width, src_y_b, width, src_u_b, half_width,
+ src_v_b, half_width, width, height);
// LibYuv sets the max psnr value to 128, we restrict it to 48.
// In case of 0 mse in one frame, 128 can skew the results significantly.
result = (result > 48.0) ? 48.0 : result;
break;
case kSSIM:
- result = libyuv::I420Ssim(src_y_a, stride_y, src_u_a, stride_uv,
- src_v_a, stride_uv, src_y_b, stride_y,
- src_u_b, stride_uv, src_v_b, stride_uv,
- width, height);
+ result = libyuv::I420Ssim(src_y_a, stride_y, src_u_a, stride_uv, src_v_a,
+ stride_uv, src_y_b, stride_y, src_u_b,
+ stride_uv, src_v_b, stride_uv, width, height);
break;
default:
assert(false);
@@ -471,7 +467,8 @@
PrintAnalysisResults(stdout, label, results);
}
-void PrintAnalysisResults(FILE* output, const std::string& label,
+void PrintAnalysisResults(FILE* output,
+ const std::string& label,
ResultsContainer* results) {
SetPerfResultsOutput(output);
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis.h b/rtc_tools/frame_analyzer/video_quality_analysis.h
index 92228fc..dca719d 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis.h
+++ b/rtc_tools/frame_analyzer/video_quality_analysis.h
@@ -12,8 +12,8 @@
#define RTC_TOOLS_FRAME_ANALYZER_VIDEO_QUALITY_ANALYSIS_H_
#include <string>
-#include <vector>
#include <utility>
+#include <vector>
#include "third_party/libyuv/include/libyuv/compare.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -44,7 +44,7 @@
int decode_errors_test;
};
-enum VideoAnalysisMetricsType {kPSNR, kSSIM};
+enum VideoAnalysisMetricsType { kPSNR, kSSIM };
// A function to run the PSNR and SSIM analysis on the test file. The test file
// comprises the frames that were captured during the quality measurement test.
@@ -85,7 +85,8 @@
void PrintAnalysisResults(const std::string& label, ResultsContainer* results);
// Similar to the above, but will print to the specified file handle.
-void PrintAnalysisResults(FILE* output, const std::string& label,
+void PrintAnalysisResults(FILE* output,
+ const std::string& label,
ResultsContainer* results);
// The barcode number that means that the barcode could not be decoded.
diff --git a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
index 6143c31..d1edb30 100644
--- a/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
+++ b/rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc
@@ -43,10 +43,10 @@
TEST_F(VideoQualityAnalysisTest, MatchExtractedY4mFrame) {
std::string video_file =
- webrtc::test::ResourcePath("reference_less_video_test_file", "y4m");
+ webrtc::test::ResourcePath("reference_less_video_test_file", "y4m");
std::string extracted_frame_from_video_file =
- webrtc::test::ResourcePath("video_quality_analysis_frame", "txt");
+ webrtc::test::ResourcePath("video_quality_analysis_frame", "txt");
int frame_height = 720, frame_width = 1280;
int frame_number = 2;
@@ -57,8 +57,7 @@
FILE* input_file = fopen(extracted_frame_from_video_file.c_str(), "rb");
fread(expected_frame, 1, size, input_file);
- ExtractFrameFromY4mFile(video_file.c_str(),
- frame_width, frame_height,
+ ExtractFrameFromY4mFile(video_file.c_str(), frame_width, frame_height,
frame_number, result_frame);
EXPECT_EQ(*expected_frame, *result_frame);
@@ -92,8 +91,7 @@
GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
}
-TEST_F(VideoQualityAnalysisTest,
- GetMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesEmptyStatsFile) {
ResultsContainer result;
std::ofstream stats_file;
stats_file.open(stats_filename_ref_.c_str());
diff --git a/rtc_tools/frame_editing/frame_editing.cc b/rtc_tools/frame_editing/frame_editing.cc
index 67ca9cc..fc281e1 100644
--- a/rtc_tools/frame_editing/frame_editing.cc
+++ b/rtc_tools/frame_editing/frame_editing.cc
@@ -19,46 +19,51 @@
// A command-line tool to edit a YUV-video (I420 sub-sampled).
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
- std::string usage = "Deletes a series of frames in a yuv file."
- " Only I420 is supported!\n"
- "Example usage:\n" + program_name +
- " --in_path=input.yuv --width=320 --height=240 --f=60 --interval=1 --l=120"
- " --out_path=edited_clip.yuv\n"
- "Command line flags:\n"
- "--in_path(string): Path and filename to the input file\n"
- "--width(int): Width in pixels of the frames in the input file."
- " Default: -1\n"
- "--height(int): Height in pixels of the frames in the input file."
- " Default: -1\n"
- "--f(int): First frame to process. Default: -1\n"
- "--l(int): Last frame to process. Default: -1\n"
- "Frame numbering starts at 1. The set of frames to be processed includes "
- "the frame with the number <f> and <l>.\n"
- "--interval(int): Interval specifies with what ratio the number of frames "
- "should be increased or decreased with.\n"
- "If you set <interval> to a positive number, frames between <f> and <l> "
- "will be inserted <interval> times."
- " If you set <interval> to a negative number then the amount of frames "
- "between <f> and <l> will be decreased with a ratio of abs(interval)."
- " Set interval=-1 if every frame between <f> and <l> should be "
- "deleted. Set interval=-2 if every second frame should be deleted, and so "
- "on. Frame numbering between <f> and <l> starts with 1 and frames with"
- " number n where (n - 1) % interval == 0 will be kept.\n"
- "Example 1:\n"
- "If one clip has 10 frames (1 to 10) and you specify <f>=4, <l>=7 and "
- "interval=2, then you will get a clip that contains frame "
- "1, 2, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 9 and 10.\n"
- "Example 2:\n"
- "If you specify f=4, l=7 and interval=-1, then you will get a clip that"
- " contains frame 1, 2, 3, 8, 9 and 10.\n"
- "Example 3:\n"
- "If one clip has 10 frames (1 to 10), and you specify f=1, l=10 and "
- " interval=-4, then you will get a clip that contains frame "
- "1, 5 and 9.\n"
- "No interpolation is done when up-sampling."
- " Default: -1\n"
- "--out_path(string): The output file to which frames are written."
- " Default: output.yuv\n";
+ std::string usage =
+ "Deletes a series of frames in a yuv file."
+ " Only I420 is supported!\n"
+ "Example usage:\n" +
+ program_name +
+ " --in_path=input.yuv --width=320 --height=240 --f=60 --interval=1 "
+ "--l=120"
+ " --out_path=edited_clip.yuv\n"
+ "Command line flags:\n"
+ "--in_path(string): Path and filename to the input file\n"
+ "--width(int): Width in pixels of the frames in the input file."
+ " Default: -1\n"
+ "--height(int): Height in pixels of the frames in the input file."
+ " Default: -1\n"
+ "--f(int): First frame to process. Default: -1\n"
+ "--l(int): Last frame to process. Default: -1\n"
+ "Frame numbering starts at 1. The set of frames to be processed includes "
+ "the frame with the number <f> and <l>.\n"
+ "--interval(int): Interval specifies with what ratio the number of "
+ "frames "
+ "should be increased or decreased with.\n"
+ "If you set <interval> to a positive number, frames between <f> and <l> "
+ "will be inserted <interval> times."
+ " If you set <interval> to a negative number then the amount of frames "
+ "between <f> and <l> will be decreased with a ratio of abs(interval)."
+ " Set interval=-1 if every frame between <f> and <l> should be "
+ "deleted. Set interval=-2 if every second frame should be deleted, and "
+ "so "
+ "on. Frame numbering between <f> and <l> starts with 1 and frames with"
+ " number n where (n - 1) % interval == 0 will be kept.\n"
+ "Example 1:\n"
+ "If one clip has 10 frames (1 to 10) and you specify <f>=4, <l>=7 and "
+ "interval=2, then you will get a clip that contains frame "
+ "1, 2, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 9 and 10.\n"
+ "Example 2:\n"
+ "If you specify f=4, l=7 and interval=-1, then you will get a clip that"
+ " contains frame 1, 2, 3, 8, 9 and 10.\n"
+ "Example 3:\n"
+ "If one clip has 10 frames (1 to 10), and you specify f=1, l=10 and "
+ " interval=-4, then you will get a clip that contains frame "
+ "1, 5 and 9.\n"
+ "No interpolation is done when up-sampling."
+ " Default: -1\n"
+ "--out_path(string): The output file to which frames are written."
+ " Default: output.yuv\n";
webrtc::test::CommandLineParser parser;
diff --git a/rtc_tools/frame_editing/frame_editing_lib.cc b/rtc_tools/frame_editing/frame_editing_lib.cc
index 5af0184..4418214 100644
--- a/rtc_tools/frame_editing/frame_editing_lib.cc
+++ b/rtc_tools/frame_editing/frame_editing_lib.cc
@@ -20,15 +20,19 @@
namespace webrtc {
-int EditFrames(const std::string& in_path, int width, int height,
- int first_frame_to_process, int interval,
- int last_frame_to_process, const std::string& out_path) {
+int EditFrames(const std::string& in_path,
+ int width,
+ int height,
+ int first_frame_to_process,
+ int interval,
+ int last_frame_to_process,
+ const std::string& out_path) {
if (last_frame_to_process < first_frame_to_process) {
fprintf(stderr, "The set of frames to cut is empty! (l < f)\n");
return -10;
}
- FILE* in_fid = fopen(in_path.c_str() , "rb");
+ FILE* in_fid = fopen(in_path.c_str(), "rb");
if (!in_fid) {
fprintf(stderr, "Could not read input file: %s.\n", in_path.c_str());
return -11;
@@ -51,8 +55,8 @@
int num_frames_read_between = 0;
size_t num_bytes_read;
- while ((num_bytes_read = fread(temp_buffer.get(), 1, frame_length, in_fid))
- == frame_length) {
+ while ((num_bytes_read = fread(temp_buffer.get(), 1, frame_length, in_fid)) ==
+ frame_length) {
num_frames_read++;
if ((num_frames_read < first_frame_to_process) ||
(last_frame_to_process < num_frames_read)) {
diff --git a/rtc_tools/frame_editing/frame_editing_lib.h b/rtc_tools/frame_editing/frame_editing_lib.h
index a805a11..91eaa3b 100644
--- a/rtc_tools/frame_editing/frame_editing_lib.h
+++ b/rtc_tools/frame_editing/frame_editing_lib.h
@@ -31,9 +31,13 @@
// 1, 2, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 9 and 10.
// No interpolation is done when up-sampling.
-int EditFrames(const std::string& in_path, int width, int height,
- int first_frame_to_process, int interval,
- int last_frame_to_process, const std::string& out_path);
+int EditFrames(const std::string& in_path,
+ int width,
+ int height,
+ int first_frame_to_process,
+ int interval,
+ int last_frame_to_process,
+ const std::string& out_path);
} // namespace webrtc
#endif // RTC_TOOLS_FRAME_EDITING_FRAME_EDITING_LIB_H_
diff --git a/rtc_tools/frame_editing/frame_editing_unittest.cc b/rtc_tools/frame_editing/frame_editing_unittest.cc
index 2cca308..e8261b5 100644
--- a/rtc_tools/frame_editing/frame_editing_unittest.cc
+++ b/rtc_tools/frame_editing/frame_editing_unittest.cc
@@ -63,14 +63,13 @@
if (!feof(ref_video_fid)) {
EXPECT_EQ(kFrameSize, num_bytes_read_);
}
- num_bytes_read_ = fread(test_buffer->get(), 1, kFrameSize,
- test_video_fid);
+ num_bytes_read_ =
+ fread(test_buffer->get(), 1, kFrameSize, test_video_fid);
if (!feof(test_video_fid)) {
EXPECT_EQ(kFrameSize, num_bytes_read_);
}
if (!feof(test_video_fid) && !feof(test_video_fid)) {
- EXPECT_EQ(0, memcmp(ref_buffer->get(), test_buffer->get(),
- kFrameSize));
+ EXPECT_EQ(0, memcmp(ref_buffer->get(), test_buffer->get(), kFrameSize));
}
}
// There should not be anything left in either stream.
@@ -91,26 +90,26 @@
const int kInterval = -1;
const int kLastFrameToProcess = 240;
- int result = EditFrames(reference_video_, kWidth, kHeight,
- kFirstFrameToProcess, kInterval, kLastFrameToProcess,
- test_video_);
+ int result =
+ EditFrames(reference_video_, kWidth, kHeight, kFirstFrameToProcess,
+ kInterval, kLastFrameToProcess, test_video_);
EXPECT_EQ(0, result);
for (int i = 1; i < kFirstFrameToProcess; ++i) {
- num_bytes_read_ = fread(original_buffer_.get(), 1, kFrameSize,
- original_fid_);
+ num_bytes_read_ =
+ fread(original_buffer_.get(), 1, kFrameSize, original_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize, edited_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
- EXPECT_EQ(0, memcmp(original_buffer_.get(), edited_buffer_.get(),
- kFrameSize));
+ EXPECT_EQ(0,
+ memcmp(original_buffer_.get(), edited_buffer_.get(), kFrameSize));
}
// Do not compare the frames that have been cut.
for (int i = kFirstFrameToProcess; i <= kLastFrameToProcess; ++i) {
- num_bytes_read_ = fread(original_buffer_.get(), 1, kFrameSize,
- original_fid_);
+ num_bytes_read_ =
+ fread(original_buffer_.get(), 1, kFrameSize, original_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
}
CompareToTheEnd(edited_fid_, original_fid_, &original_buffer_,
@@ -122,9 +121,9 @@
const int kInterval = -1;
const int kLastFrameToProcess = 1;
- int result = EditFrames(reference_video_, kWidth, kHeight,
- kFirstFrameToProcess, kInterval, kLastFrameToProcess,
- test_video_);
+ int result =
+ EditFrames(reference_video_, kWidth, kHeight, kFirstFrameToProcess,
+ kInterval, kLastFrameToProcess, test_video_);
EXPECT_EQ(-10, result);
}
@@ -145,9 +144,9 @@
const int kInterval = -2;
const int kLastFrameToProcess = 10000;
// Set kLastFrameToProcess to a large value so that all frame are processed.
- int result = EditFrames(reference_video_, kWidth, kHeight,
- kFirstFrameToProcess, kInterval, kLastFrameToProcess,
- test_video_);
+ int result =
+ EditFrames(reference_video_, kWidth, kHeight, kFirstFrameToProcess,
+ kInterval, kLastFrameToProcess, test_video_);
EXPECT_EQ(0, result);
while (!feof(original_fid_) && !feof(edited_fid_)) {
@@ -162,14 +161,13 @@
// every second frame.
// num_frames_read_ - 1 because we have deleted frame number 2, 4 , 6 etc.
if ((num_frames_read_ - 1) % kInterval == -1) {
- num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize,
- edited_fid_);
+ num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize, edited_fid_);
if (!feof(edited_fid_)) {
EXPECT_EQ(kFrameSize, num_bytes_read_);
}
if (!feof(original_fid_) && !feof(edited_fid_)) {
- EXPECT_EQ(0, memcmp(original_buffer_.get(),
- edited_buffer_.get(), kFrameSize));
+ EXPECT_EQ(0, memcmp(original_buffer_.get(), edited_buffer_.get(),
+ kFrameSize));
}
}
}
@@ -180,33 +178,32 @@
const int kInterval = 2;
const int kLastFrameToProcess = 240;
- int result = EditFrames(reference_video_, kWidth, kHeight,
- kFirstFrameToProcess, kInterval, kLastFrameToProcess,
- test_video_);
+ int result =
+ EditFrames(reference_video_, kWidth, kHeight, kFirstFrameToProcess,
+ kInterval, kLastFrameToProcess, test_video_);
EXPECT_EQ(0, result);
for (int i = 1; i < kFirstFrameToProcess; ++i) {
- num_bytes_read_ = fread(original_buffer_.get(), 1, kFrameSize,
- original_fid_);
+ num_bytes_read_ =
+ fread(original_buffer_.get(), 1, kFrameSize, original_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize, edited_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
- EXPECT_EQ(0, memcmp(original_buffer_.get(), edited_buffer_.get(),
- kFrameSize));
+ EXPECT_EQ(0,
+ memcmp(original_buffer_.get(), edited_buffer_.get(), kFrameSize));
}
// Do not compare the frames that have been repeated.
for (int i = kFirstFrameToProcess; i <= kLastFrameToProcess; ++i) {
- num_bytes_read_ = fread(original_buffer_.get(), 1, kFrameSize,
- original_fid_);
+ num_bytes_read_ =
+ fread(original_buffer_.get(), 1, kFrameSize, original_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
for (int i = 1; i <= kInterval; ++i) {
- num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize,
- edited_fid_);
+ num_bytes_read_ = fread(edited_buffer_.get(), 1, kFrameSize, edited_fid_);
EXPECT_EQ(kFrameSize, num_bytes_read_);
- EXPECT_EQ(0, memcmp(original_buffer_.get(), edited_buffer_.get(),
- kFrameSize));
+ EXPECT_EQ(
+ 0, memcmp(original_buffer_.get(), edited_buffer_.get(), kFrameSize));
}
}
CompareToTheEnd(edited_fid_, original_fid_, &original_buffer_,
diff --git a/rtc_tools/network_tester/network_tester_unittest.cc b/rtc_tools/network_tester/network_tester_unittest.cc
index b60a712..c4c9e0c 100644
--- a/rtc_tools/network_tester/network_tester_unittest.cc
+++ b/rtc_tools/network_tester/network_tester_unittest.cc
@@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
#ifdef WEBRTC_NETWORK_TESTER_TEST_ENABLED
#include "rtc_tools/network_tester/test_controller.h"
diff --git a/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc b/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
index 0fefb57..dc63aca 100644
--- a/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
+++ b/rtc_tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
@@ -21,8 +21,11 @@
#define MAX_NUM_FRAMES_PER_FILE INT_MAX
-void CompareFiles(const char* reference_file_name, const char* test_file_name,
- const char* results_file_name, int width, int height) {
+void CompareFiles(const char* reference_file_name,
+ const char* test_file_name,
+ const char* results_file_name,
+ int width,
+ int height) {
// Check if the reference_file_name ends with "y4m".
bool y4m_mode = false;
if (std::string(reference_file_name).find("y4m") != std::string::npos) {
@@ -39,13 +42,15 @@
bool read_result = true;
for (int frame_counter = 0; frame_counter < MAX_NUM_FRAMES_PER_FILE;
- ++frame_counter) {
- read_result &= (y4m_mode) ? webrtc::test::ExtractFrameFromY4mFile(
- reference_file_name, width, height, frame_counter, ref_frame):
- webrtc::test::ExtractFrameFromYuvFile(reference_file_name, width,
- height, frame_counter, ref_frame);
- read_result &= webrtc::test::ExtractFrameFromYuvFile(test_file_name, width,
- height, frame_counter, test_frame);
+ ++frame_counter) {
+ read_result &=
+ (y4m_mode)
+ ? webrtc::test::ExtractFrameFromY4mFile(
+ reference_file_name, width, height, frame_counter, ref_frame)
+ : webrtc::test::ExtractFrameFromYuvFile(
+ reference_file_name, width, height, frame_counter, ref_frame);
+ read_result &= webrtc::test::ExtractFrameFromYuvFile(
+ test_file_name, width, height, frame_counter, test_frame);
if (!read_result)
break;
@@ -82,9 +87,12 @@
*/
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
- std::string usage = "Runs PSNR and SSIM on two I420 videos and write the"
+ std::string usage =
+ "Runs PSNR and SSIM on two I420 videos and write the"
"results in a file.\n"
- "Example usage:\n" + program_name + " --reference_file=ref.yuv "
+ "Example usage:\n" +
+ program_name +
+ " --reference_file=ref.yuv "
"--test_file=test.yuv --results_file=results.txt --width=320 "
"--height=240\n"
"Command line flags:\n"
diff --git a/rtc_tools/sanitizers_unittest.cc b/rtc_tools/sanitizers_unittest.cc
index 4cdf5fd..69c11fe 100644
--- a/rtc_tools/sanitizers_unittest.cc
+++ b/rtc_tools/sanitizers_unittest.cc
@@ -39,7 +39,7 @@
#if defined(ADDRESS_SANITIZER)
void HeapUseAfterFree() {
- char *buf = new char[2];
+ char* buf = new char[2];
delete[] buf;
buf[0] = buf[1];
}
@@ -61,8 +61,7 @@
virtual void f() {}
virtual ~Base() {}
};
-struct Derived : public Base {
-};
+struct Derived : public Base {};
void InvalidVptr() {
Base b;
@@ -71,7 +70,12 @@
}
TEST(SanitizersDeathTest, UndefinedSanitizer) {
- EXPECT_DEATH({ SignedIntegerOverflow(); InvalidVptr(); }, "runtime error");
+ EXPECT_DEATH(
+ {
+ SignedIntegerOverflow();
+ InvalidVptr();
+ },
+ "runtime error");
}
#endif
@@ -79,8 +83,7 @@
class IncrementThread : public Thread {
public:
explicit IncrementThread(int* value)
- : Thread(rtc::MakeUnique<NullSocketServer>()),
- value_(value) {}
+ : Thread(rtc::MakeUnique<NullSocketServer>()), value_(value) {}
void Run() override {
++*value_;
@@ -88,9 +91,7 @@
}
// Un-protect Thread::Join for the test.
- void Join() {
- Thread::Join();
- }
+ void Join() { Thread::Join(); }
private:
int* value_;
diff --git a/rtc_tools/simple_command_line_parser.cc b/rtc_tools/simple_command_line_parser.cc
index b20ba64..c34aa90 100644
--- a/rtc_tools/simple_command_line_parser.cc
+++ b/rtc_tools/simple_command_line_parser.cc
@@ -22,7 +22,7 @@
CommandLineParser::~CommandLineParser() {}
void CommandLineParser::Init(int argc, char** argv) {
- args_ = std::vector<std::string> (argv + 1, argv + argc);
+ args_ = std::vector<std::string>(argv + 1, argv + argc);
}
bool CommandLineParser::IsStandaloneFlag(std::string flag) {
diff --git a/rtc_tools/simple_command_line_parser_unittest.cc b/rtc_tools/simple_command_line_parser_unittest.cc
index d98868a..a94f992 100644
--- a/rtc_tools/simple_command_line_parser_unittest.cc
+++ b/rtc_tools/simple_command_line_parser_unittest.cc
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "test/gtest.h"
#include "rtc_tools/simple_command_line_parser.h"
+#include "test/gtest.h"
namespace webrtc {
namespace test {
diff --git a/rtc_tools/unpack_aecdump/unpack.cc b/rtc_tools/unpack_aecdump/unpack.cc
index 2b474bf..0224ad5b 100644
--- a/rtc_tools/unpack_aecdump/unpack.cc
+++ b/rtc_tools/unpack_aecdump/unpack.cc
@@ -30,9 +30,11 @@
// TODO(andrew): unpack more of the data.
DEFINE_string(input_file, "input", "The name of the input stream file.");
-DEFINE_string(output_file, "ref_out",
+DEFINE_string(output_file,
+ "ref_out",
"The name of the reference output stream file.");
-DEFINE_string(reverse_file, "reverse",
+DEFINE_string(reverse_file,
+ "reverse",
"The name of the reverse input stream file.");
DEFINE_string(delay_file, "delay.int32", "The name of the delay file.");
DEFINE_string(drift_file, "drift.int32", "The name of the drift file.");
@@ -42,16 +44,15 @@
"callorder",
"The name of the render/capture call order file.");
DEFINE_string(settings_file, "settings.txt", "The name of the settings file.");
-DEFINE_bool(full, false,
- "Unpack the full set of files (normally not needed).");
+DEFINE_bool(full, false, "Unpack the full set of files (normally not needed).");
DEFINE_bool(raw, false, "Write raw data instead of a WAV file.");
DEFINE_bool(text,
false,
"Write non-audio files as text files instead of binary files.");
DEFINE_bool(help, false, "Print this message.");
-#define PRINT_CONFIG(field_name) \
- if (msg.has_##field_name()) { \
+#define PRINT_CONFIG(field_name) \
+ if (msg.has_##field_name()) { \
fprintf(settings_file, " " #field_name ": %d\n", msg.field_name()); \
}
@@ -69,7 +70,9 @@
namespace {
-void WriteData(const void* data, size_t size, FILE* file,
+void WriteData(const void* data,
+ size_t size,
+ FILE* file,
const std::string& filename) {
if (fwrite(data, size, 1, file) != 1) {
printf("Error when writing to %s\n", filename.c_str());
@@ -88,11 +91,13 @@
int do_main(int argc, char* argv[]) {
std::string program_name = argv[0];
- std::string usage = "Commandline tool to unpack audioproc debug files.\n"
- "Example usage:\n" + program_name + " debug_dump.pb\n";
+ std::string usage =
+ "Commandline tool to unpack audioproc debug files.\n"
+ "Example usage:\n" +
+ program_name + " debug_dump.pb\n";
- if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) ||
- FLAG_help || argc < 2) {
+ if (rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true) || FLAG_help ||
+ argc < 2) {
printf("%s", usage.c_str());
if (FLAG_help) {
rtc::FlagList::Print(nullptr, false);
@@ -133,8 +138,8 @@
const ReverseStream msg = event_msg.reverse_stream();
if (msg.has_data()) {
if (FLAG_raw && !reverse_raw_file) {
- reverse_raw_file.reset(new RawFile(std::string(FLAG_reverse_file) +
- ".pcm"));
+ reverse_raw_file.reset(
+ new RawFile(std::string(FLAG_reverse_file) + ".pcm"));
}
// TODO(aluebs): Replace "num_reverse_channels *
// reverse_samples_per_channel" with "msg.data().size() /
@@ -142,22 +147,19 @@
// it into stable: https://webrtc-codereview.appspot.com/15299004/
WriteIntData(reinterpret_cast<const int16_t*>(msg.data().data()),
num_reverse_channels * reverse_samples_per_channel,
- reverse_wav_file.get(),
- reverse_raw_file.get());
+ reverse_wav_file.get(), reverse_raw_file.get());
} else if (msg.channel_size() > 0) {
if (FLAG_raw && !reverse_raw_file) {
- reverse_raw_file.reset(new RawFile(std::string(FLAG_reverse_file) +
- ".float"));
+ reverse_raw_file.reset(
+ new RawFile(std::string(FLAG_reverse_file) + ".float"));
}
std::unique_ptr<const float* []> data(
- new const float* [num_reverse_channels]);
+ new const float*[num_reverse_channels]);
for (size_t i = 0; i < num_reverse_channels; ++i) {
data[i] = reinterpret_cast<const float*>(msg.channel(i).data());
}
- WriteFloatData(data.get(),
- reverse_samples_per_channel,
- num_reverse_channels,
- reverse_wav_file.get(),
+ WriteFloatData(data.get(), reverse_samples_per_channel,
+ num_reverse_channels, reverse_wav_file.get(),
reverse_raw_file.get());
}
if (FLAG_full) {
@@ -174,54 +176,48 @@
const Stream msg = event_msg.stream();
if (msg.has_input_data()) {
if (FLAG_raw && !input_raw_file) {
- input_raw_file.reset(new RawFile(std::string(FLAG_input_file) +
- ".pcm"));
+ input_raw_file.reset(
+ new RawFile(std::string(FLAG_input_file) + ".pcm"));
}
WriteIntData(reinterpret_cast<const int16_t*>(msg.input_data().data()),
num_input_channels * input_samples_per_channel,
- input_wav_file.get(),
- input_raw_file.get());
+ input_wav_file.get(), input_raw_file.get());
} else if (msg.input_channel_size() > 0) {
if (FLAG_raw && !input_raw_file) {
- input_raw_file.reset(new RawFile(std::string(FLAG_input_file) +
- ".float"));
+ input_raw_file.reset(
+ new RawFile(std::string(FLAG_input_file) + ".float"));
}
std::unique_ptr<const float* []> data(
- new const float* [num_input_channels]);
+ new const float*[num_input_channels]);
for (size_t i = 0; i < num_input_channels; ++i) {
data[i] = reinterpret_cast<const float*>(msg.input_channel(i).data());
}
- WriteFloatData(data.get(),
- input_samples_per_channel,
- num_input_channels,
- input_wav_file.get(),
+ WriteFloatData(data.get(), input_samples_per_channel,
+ num_input_channels, input_wav_file.get(),
input_raw_file.get());
}
if (msg.has_output_data()) {
if (FLAG_raw && !output_raw_file) {
- output_raw_file.reset(new RawFile(std::string(FLAG_output_file) +
- ".pcm"));
+ output_raw_file.reset(
+ new RawFile(std::string(FLAG_output_file) + ".pcm"));
}
WriteIntData(reinterpret_cast<const int16_t*>(msg.output_data().data()),
num_output_channels * output_samples_per_channel,
- output_wav_file.get(),
- output_raw_file.get());
+ output_wav_file.get(), output_raw_file.get());
} else if (msg.output_channel_size() > 0) {
if (FLAG_raw && !output_raw_file) {
- output_raw_file.reset(new RawFile(std::string(FLAG_output_file) +
- ".float"));
+ output_raw_file.reset(
+ new RawFile(std::string(FLAG_output_file) + ".float"));
}
std::unique_ptr<const float* []> data(
- new const float* [num_output_channels]);
+ new const float*[num_output_channels]);
for (size_t i = 0; i < num_output_channels; ++i) {
data[i] =
reinterpret_cast<const float*>(msg.output_channel(i).data());
}
- WriteFloatData(data.get(),
- output_samples_per_channel,
- num_output_channels,
- output_wav_file.get(),
+ WriteFloatData(data.get(), output_samples_per_channel,
+ num_output_channels, output_wav_file.get(),
output_raw_file.get());
}
@@ -316,8 +312,7 @@
int output_sample_rate = msg.output_sample_rate();
fprintf(settings_file, " Output sample rate: %d\n", output_sample_rate);
int reverse_sample_rate = msg.reverse_sample_rate();
- fprintf(settings_file,
- " Reverse sample rate: %d\n",
+ fprintf(settings_file, " Reverse sample rate: %d\n",
reverse_sample_rate);
num_input_channels = msg.num_input_channels();
fprintf(settings_file, " Input channels: %" PRIuS "\n",
@@ -340,8 +335,7 @@
reverse_samples_per_channel =
static_cast<size_t>(reverse_sample_rate / 100);
- input_samples_per_channel =
- static_cast<size_t>(input_sample_rate / 100);
+ input_samples_per_channel = static_cast<size_t>(input_sample_rate / 100);
output_samples_per_channel =
static_cast<size_t>(output_sample_rate / 100);
@@ -350,19 +344,16 @@
// their sample rate or number of channels.
std::stringstream reverse_name;
reverse_name << FLAG_reverse_file << frame_count << ".wav";
- reverse_wav_file.reset(new WavWriter(reverse_name.str(),
- reverse_sample_rate,
- num_reverse_channels));
+ reverse_wav_file.reset(new WavWriter(
+ reverse_name.str(), reverse_sample_rate, num_reverse_channels));
std::stringstream input_name;
input_name << FLAG_input_file << frame_count << ".wav";
- input_wav_file.reset(new WavWriter(input_name.str(),
- input_sample_rate,
+ input_wav_file.reset(new WavWriter(input_name.str(), input_sample_rate,
num_input_channels));
std::stringstream output_name;
output_name << FLAG_output_file << frame_count << ".wav";
- output_wav_file.reset(new WavWriter(output_name.str(),
- output_sample_rate,
- num_output_channels));
+ output_wav_file.reset(new WavWriter(
+ output_name.str(), output_sample_rate, num_output_channels));
std::stringstream callorder_name;
callorder_name << FLAG_callorder_file << frame_count << ".char";