diff --git a/include/benchmark/reporter.h b/include/benchmark/reporter.h index 6aebf0c0..bb4ccdfe 100644 --- a/include/benchmark/reporter.h +++ b/include/benchmark/reporter.h @@ -109,5 +109,14 @@ private: bool first_report_; }; +class CSVReporter : public BenchmarkReporter { +public: + virtual bool ReportContext(const Context& context); + virtual void ReportRuns(const std::vector<Run>& reports); + +private: + void PrintRunData(const Run& report); +}; + } // end namespace benchmark #endif // BENCHMARK_REPORTER_H_ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 17fb42ad..40cd9ff4 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -2,8 +2,9 @@ include_directories(${PROJECT_SOURCE_DIR}/src) # Define the source files -set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc" "log.cc" - "json_reporter.cc" "reporter.cc" "sleep.cc" "string_util.cc" +set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc" + "console_reporter.cc" "csv_reporter.cc" "json_reporter.cc" + "log.cc" "reporter.cc" "sleep.cc" "string_util.cc" "sysinfo.cc" "walltime.cc") # Determine the correct regular expression engine to use if(HAVE_STD_REGEX) diff --git a/src/benchmark.cc b/src/benchmark.cc index 01ef5595..507fe510 100644 --- a/src/benchmark.cc +++ b/src/benchmark.cc @@ -58,7 +58,7 @@ DEFINE_int32(benchmark_repetitions, 1, DEFINE_string(benchmark_format, "tabular", "The format to use for console output. 
Valid values are " - "'tabular' or 'json'."); + "'tabular', 'json', or 'csv'."); DEFINE_bool(color_print, true, "Enables colorized logging."); @@ -804,9 +804,10 @@ std::unique_ptr<BenchmarkReporter> GetDefaultReporter() { typedef std::unique_ptr<BenchmarkReporter> PtrType; if (FLAGS_benchmark_format == "tabular") { return PtrType(new ConsoleReporter); - } - else if (FLAGS_benchmark_format == "json") { + } else if (FLAGS_benchmark_format == "json") { return PtrType(new JSONReporter); + } else if (FLAGS_benchmark_format == "csv") { + return PtrType(new CSVReporter); } else { std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n"; std::exit(1); @@ -871,7 +872,8 @@ void ParseCommandLineFlags(int* argc, const char** argv) { } } if (FLAGS_benchmark_format != "tabular" && - FLAGS_benchmark_format != "json") { + FLAGS_benchmark_format != "json" && + FLAGS_benchmark_format != "csv") { PrintUsageAndExit(); } } diff --git a/src/console_reporter.cc b/src/console_reporter.cc new file mode 100644 index 00000000..7a99dfb4 --- /dev/null +++ b/src/console_reporter.cc @@ -0,0 +1,118 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "benchmark/reporter.h" + +#include <cstdio> +#include <iostream> +#include <string> +#include <vector> + +#include "check.h" +#include "colorprint.h" +#include "string_util.h" +#include "walltime.h" + +namespace benchmark { + +bool ConsoleReporter::ReportContext(const Context& context) { + name_field_width_ = context.name_field_width; + + std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu + << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n"; + + std::cerr << LocalDateTimeString() << "\n"; + + if (context.cpu_scaling_enabled) { + std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark " + "real time measurements may be noisy and will incure extra " + "overhead.\n"; + } + +#ifndef NDEBUG + std::cerr << "Build Type: DEBUG\n"; +#endif + + int output_width = + fprintf(stdout, + "%-*s %10s %10s %10s\n", + static_cast<int>(name_field_width_), + "Benchmark", + "Time(ns)", "CPU(ns)", + "Iterations"); + std::cout << std::string(output_width - 1, '-') << "\n"; + + return true; +} + +void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) { + if (reports.empty()) { + return; + } + + for (Run const& run : reports) { + CHECK_EQ(reports[0].benchmark_name, run.benchmark_name); + PrintRunData(run); + } + + if (reports.size() < 2) { + // We don't report aggregated data if there was a single run. + return; + } + + Run mean_data; + Run stddev_data; + BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data); + + // Output using PrintRun. 
+ PrintRunData(mean_data); + PrintRunData(stddev_data); +} + +void ConsoleReporter::PrintRunData(const Run& result) { + // Format bytes per second + std::string rate; + if (result.bytes_per_second > 0) { + rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s"); + } + + // Format items per second + std::string items; + if (result.items_per_second > 0) { + items = StrCat(" ", HumanReadableNumber(result.items_per_second), + " items/s"); + } + + double const multiplier = 1e9; // nano second multiplier + ColorPrintf(COLOR_GREEN, "%-*s ", + name_field_width_, result.benchmark_name.c_str()); + if (result.iterations == 0) { + ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ", + result.real_accumulated_time * multiplier, + result.cpu_accumulated_time * multiplier); + } else { + ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ", + (result.real_accumulated_time * multiplier) / + (static_cast<double>(result.iterations)), + (result.cpu_accumulated_time * multiplier) / + (static_cast<double>(result.iterations))); + } + ColorPrintf(COLOR_CYAN, "%10lld", result.iterations); + ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n", + 13, rate.c_str(), + 18, items.c_str(), + result.report_label.c_str()); +} + +} // end namespace benchmark diff --git a/src/csv_reporter.cc b/src/csv_reporter.cc new file mode 100644 index 00000000..ed0f4fa0 --- /dev/null +++ b/src/csv_reporter.cc @@ -0,0 +1,93 @@ +// Copyright 2015 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "benchmark/reporter.h" + +#include <cstdint> +#include <iostream> +#include <string> +#include <vector> + +#include "string_util.h" +#include "walltime.h" + +namespace benchmark { + +bool CSVReporter::ReportContext(const Context& context) { + std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu + << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n"; + + std::cerr << LocalDateTimeString() << "\n"; + + if (context.cpu_scaling_enabled) { + std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark " + "real time measurements may be noisy and will incure extra " + "overhead.\n"; + } + +#ifndef NDEBUG + std::cerr << "Build Type: DEBUG\n"; +#endif + std::cout << "name,iterations,real_time,cpu_time,bytes_per_second," + "items_per_second,label\n"; + return true; +} + +void CSVReporter::ReportRuns(std::vector<Run> const& reports) { + if (reports.empty()) { + return; + } + + std::vector<Run> reports_cp = reports; + if (reports.size() >= 2) { + Run mean_data; + Run stddev_data; + BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data); + reports_cp.push_back(mean_data); + reports_cp.push_back(stddev_data); + } + for (auto it = reports_cp.begin(); it != reports_cp.end(); ++it) { + PrintRunData(*it); + } +} + +void CSVReporter::PrintRunData(Run const& run) { + double const multiplier = 1e9; // nano second multiplier + double cpu_time = run.cpu_accumulated_time * multiplier; + double real_time = run.real_accumulated_time * multiplier; + if (run.iterations != 0) { + real_time = real_time / static_cast<double>(run.iterations); + cpu_time = cpu_time / static_cast<double>(run.iterations); + } + + std::cout << run.benchmark_name << ","; + std::cout << run.iterations << ","; + std::cout << real_time << ","; + std::cout << cpu_time << ","; + + if (run.bytes_per_second > 0.0) { + std::cout << run.bytes_per_second; + } + std::cout << ","; + if (run.items_per_second > 0.0) { + std::cout << run.items_per_second; + } + std::cout << ","; + if (!run.report_label.empty()) { + std::cout << 
run.report_label; + } + std::cout << '\n'; +} + +} // end namespace benchmark diff --git a/src/reporter.cc b/src/reporter.cc index d701ca8c..5d6e7226 100644 --- a/src/reporter.cc +++ b/src/reporter.cc @@ -14,17 +14,11 @@ #include "benchmark/reporter.h" -#include <cstdio> #include <cstdlib> -#include <string> -#include <tuple> #include <vector> #include "check.h" -#include "colorprint.h" #include "stat.h" -#include "string_util.h" -#include "walltime.h" namespace benchmark { @@ -89,98 +83,4 @@ void BenchmarkReporter::Finalize() { BenchmarkReporter::~BenchmarkReporter() { } -bool ConsoleReporter::ReportContext(const Context& context) { - name_field_width_ = context.name_field_width; - - fprintf(stdout, - "Run on (%d X %0.0f MHz CPU%s)\n", - context.num_cpus, - context.mhz_per_cpu, - (context.num_cpus > 1) ? "s" : ""); - - std::string walltime_str = LocalDateTimeString(); - fprintf(stdout, "%s\n", walltime_str.c_str()); - - if (context.cpu_scaling_enabled) { - fprintf(stdout, "***WARNING*** CPU scaling is enabled, the benchmark " - "real time measurements may be noisy and will incure extra " - "overhead.\n"); - } - -#ifndef NDEBUG - fprintf(stdout, "Build Type: DEBUG\n"); -#endif - - int output_width = - fprintf(stdout, - "%-*s %10s %10s %10s\n", - static_cast<int>(name_field_width_), - "Benchmark", - "Time(ns)", "CPU(ns)", - "Iterations"); - fprintf(stdout, "%s\n", std::string(output_width - 1, '-').c_str()); - - return true; -} - -void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) { - if (reports.empty()) { - return; - } - - for (Run const& run : reports) { - CHECK_EQ(reports[0].benchmark_name, run.benchmark_name); - PrintRunData(run); - } - - if (reports.size() < 2) { - // We don't report aggregated data if there was a single run. - return; - } - - Run mean_data; - Run stddev_data; - BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data); - - // Output using PrintRun. 
- PrintRunData(mean_data); - PrintRunData(stddev_data); - fprintf(stdout, "\n"); -} - -void ConsoleReporter::PrintRunData(const Run& result) { - // Format bytes per second - std::string rate; - if (result.bytes_per_second > 0) { - rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s"); - } - - // Format items per second - std::string items; - if (result.items_per_second > 0) { - items = StrCat(" ", HumanReadableNumber(result.items_per_second), - " items/s"); - } - - double const multiplier = 1e9; // nano second multiplier - ColorPrintf(COLOR_GREEN, "%-*s ", - name_field_width_, result.benchmark_name.c_str()); - if (result.iterations == 0) { - ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ", - result.real_accumulated_time * multiplier, - result.cpu_accumulated_time * multiplier); - } else { - ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ", - (result.real_accumulated_time * multiplier) / - (static_cast<double>(result.iterations)), - (result.cpu_accumulated_time * multiplier) / - (static_cast<double>(result.iterations))); - } - ColorPrintf(COLOR_CYAN, "%10lld", result.iterations); - ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n", - 13, rate.c_str(), - 18, items.c_str(), - result.report_label.c_str()); -} - } // end namespace benchmark