Add --benchmark_out=<filename> and --benchmark_out_format=<format> options.

These options allow you to write the output of a benchmark run to a specified
file in a specified format. The goal of this change is to help support
tooling.
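
As an illustration (the binary and file names here are hypothetical), a run such as
`./my_benchmark --benchmark_out=run.json --benchmark_out_format=json` writes a JSON
report to run.json while the normal console output is still printed.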
commit 44128d87d2
parent f68e64c60a
Author: Eric Fiselier
Date:   2016-08-02 15:12:43 -06:00

9 changed files with 149 additions and 83 deletions

README.md

@@ -427,10 +427,10 @@ static void BM_test(benchmark::State& state) {
 ## Output Formats
 The library supports multiple output formats. Use the
-`--benchmark_format=<tabular|json|csv>` flag to set the format type. `tabular` is
-the default format.
+`--benchmark_format=<console|json|csv>` flag to set the format type. `console`
+is the default format.
-The Tabular format is intended to be a human readable format. By default
+The Console format is intended to be a human readable format. By default
 the format generates color output. Context is output on stderr and the
 tabular data on stdout. Example tabular output looks like:
 ```
@@ -493,6 +493,12 @@ name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
 "BM_SetInsert/1024/10",106365,17238.4,8421.53,4.74973e+06,1.18743e+06,
 ```
+## Output Files
+The library supports writing the output of the benchmark to a file specified
+by `--benchmark_out=<filename>`. The format of the output can be specified
+using `--benchmark_out_format={json|console|csv}`. Specifying
+`--benchmark_out` does not suppress the console output.
 ## Debug vs Release
 By default, benchmark builds as a debug library. You will see a warning in the output when this is the case. To build it as a release library instead, use:
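
Since the console and file formats are configured independently, a single run can,
for example, keep the human-readable console table while also capturing
machine-readable output: `--benchmark_format=console --benchmark_out=results.json
--benchmark_out_format=json` (file name illustrative).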

include/benchmark/benchmark_api.h

@@ -165,11 +165,16 @@ void Initialize(int* argc, char** argv);
 // of each matching benchmark. Otherwise run each matching benchmark and
 // report the results.
 //
-// The second overload reports the results using the specified 'reporter'.
+// The second and third overloads use the specified 'console_reporter' and
+// 'file_reporter' respectively. 'file_reporter' will write to the file specified
+// by '--benchmark_out'. If '--benchmark_out' is not given the
+// 'file_reporter' is ignored.
 //
 // RETURNS: The number of matching benchmarks.
 size_t RunSpecifiedBenchmarks();
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
+                              BenchmarkReporter* file_reporter);
 // If this routine is called, peak memory allocation past this point in the
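
A minimal sketch of a custom `main` driving the new third overload (the benchmark
body and reporter choices are illustrative, not part of this commit; note that this
commit's implementation exits with an error if a file reporter is supplied without
`--benchmark_out=<file>` on the command line):

```
#include "benchmark/benchmark.h"

static void BM_Noop(benchmark::State& state) {
  while (state.KeepRunning()) {
    // Intentionally empty; we only need something to report.
  }
}
BENCHMARK(BM_Noop);

int main(int argc, char** argv) {
  benchmark::Initialize(&argc, argv);
  // A colorless console reporter plus a JSON reporter for the file output.
  benchmark::ConsoleReporter console(benchmark::ConsoleReporter::OO_None);
  benchmark::JSONReporter file_reporter;
  benchmark::RunSpecifiedBenchmarks(&console, &file_reporter);
  return 0;
}
```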

include/benchmark/reporter.h

@@ -156,14 +156,23 @@ class BenchmarkReporter {
 // Simple reporter that outputs benchmark data to the console. This is the
 // default reporter used by RunSpecifiedBenchmarks().
 class ConsoleReporter : public BenchmarkReporter {
-public:
+ public:
+  enum OutputOptions {
+    OO_None,
+    OO_Color
+  };
+  explicit ConsoleReporter(OutputOptions color_output = OO_Color)
+      : color_output_(color_output == OO_Color) {}
   virtual bool ReportContext(const Context& context);
   virtual void ReportRuns(const std::vector<Run>& reports);
-protected:
+ protected:
   virtual void PrintRunData(const Run& report);
   size_t name_field_width_;
+ private:
+  bool color_output_;
 };
 class JSONReporter : public BenchmarkReporter {

src/benchmark.cc

@@ -28,6 +28,7 @@
 #include <atomic>
 #include <condition_variable>
 #include <iostream>
+#include <fstream>
 #include <memory>
 #include <thread>
@@ -69,6 +70,12 @@ DEFINE_string(benchmark_format, "console",
               "The format to use for console output. Valid values are "
               "'console', 'json', or 'csv'.");
+DEFINE_string(benchmark_out_format, "json",
+              "The format to use for file output. Valid values are "
+              "'console', 'json', or 'csv'.");
+DEFINE_string(benchmark_out, "", "The file to write additional output to");
 DEFINE_bool(color_print, true, "Enables colorized logging.");
 DEFINE_int32(v, 0, "The level of verbose logging to output");
@@ -758,14 +765,13 @@ void RunInThread(const benchmark::internal::Benchmark::Instance* b,
   timer_manager->Finalize();
 }
-void RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
-                  BenchmarkReporter* br,
-                  std::vector<BenchmarkReporter::Run>& complexity_reports)
+std::vector<BenchmarkReporter::Run>
+RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
+             std::vector<BenchmarkReporter::Run>* complexity_reports)
     EXCLUDES(GetBenchmarkLock()) {
+  std::vector<BenchmarkReporter::Run> reports;  // return value
   size_t iters = 1;
-  std::vector<BenchmarkReporter::Run> reports;
   std::vector<std::thread> pool;
   if (b.multithreaded)
     pool.resize(b.threads);
@@ -872,7 +878,7 @@ void RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
       report.complexity = b.complexity;
       report.complexity_lambda = b.complexity_lambda;
       if(report.complexity != oNone)
-        complexity_reports.push_back(report);
+        complexity_reports->push_back(report);
     }
     reports.push_back(report);
@@ -903,18 +909,18 @@ void RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
                                additional_run_stats.end());
     if((b.complexity != oNone) && b.last_benchmark_instance) {
-      additional_run_stats = ComputeBigO(complexity_reports);
+      additional_run_stats = ComputeBigO(*complexity_reports);
       reports.insert(reports.end(), additional_run_stats.begin(),
                      additional_run_stats.end());
-      complexity_reports.clear();
+      complexity_reports->clear();
     }
-  br->ReportRuns(reports);
   if (b.multithreaded) {
     for (std::thread& thread : pool)
       thread.join();
   }
+  return reports;
 }
 }  // namespace
@@ -975,8 +981,10 @@ namespace internal {
 namespace {
 void RunMatchingBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
-                           BenchmarkReporter* reporter) {
-  CHECK(reporter != nullptr);
+                           BenchmarkReporter* console_reporter,
+                           BenchmarkReporter* file_reporter) {
+  // Note the file_reporter can be null.
+  CHECK(console_reporter != nullptr);
   // Determine the width of the name field using a minimum width of 10.
   bool has_repetitions = FLAGS_benchmark_repetitions > 1;
@@ -1000,23 +1008,30 @@ void RunMatchingBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
   // Keep track of running times of all instances of current benchmark
   std::vector<BenchmarkReporter::Run> complexity_reports;
-  if (reporter->ReportContext(context)) {
+  if (console_reporter->ReportContext(context)
+      && (!file_reporter || file_reporter->ReportContext(context))) {
     for (const auto& benchmark : benchmarks) {
-      RunBenchmark(benchmark, reporter, complexity_reports);
+      std::vector<BenchmarkReporter::Run> reports =
+          RunBenchmark(benchmark, &complexity_reports);
+      console_reporter->ReportRuns(reports);
+      if (file_reporter) file_reporter->ReportRuns(reports);
     }
   }
+  console_reporter->Finalize();
+  if (file_reporter) file_reporter->Finalize();
 }
-std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
+std::unique_ptr<BenchmarkReporter>
+CreateReporter(std::string const& name, ConsoleReporter::OutputOptions allow_color) {
   typedef std::unique_ptr<BenchmarkReporter> PtrType;
-  if (FLAGS_benchmark_format == "console") {
-    return PtrType(new ConsoleReporter);
-  } else if (FLAGS_benchmark_format == "json") {
+  if (name == "console") {
+    return PtrType(new ConsoleReporter(allow_color));
+  } else if (name == "json") {
     return PtrType(new JSONReporter);
-  } else if (FLAGS_benchmark_format == "csv") {
+  } else if (name == "csv") {
     return PtrType(new CSVReporter);
   } else {
-    std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n";
+    std::cerr << "Unexpected format: '" << name << "'\n";
     std::exit(1);
   }
 }
@@ -1025,10 +1040,17 @@ std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
 }  // end namespace internal
 size_t RunSpecifiedBenchmarks() {
-  return RunSpecifiedBenchmarks(nullptr);
+  return RunSpecifiedBenchmarks(nullptr, nullptr);
 }
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter) {
+  return RunSpecifiedBenchmarks(console_reporter, nullptr);
+}
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
+                              BenchmarkReporter* file_reporter) {
   std::string spec = FLAGS_benchmark_filter;
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
@@ -1041,13 +1063,38 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
     for (auto const& benchmark : benchmarks)
       std::cout << benchmark.name << "\n";
   } else {
-    std::unique_ptr<BenchmarkReporter> default_reporter;
-    if (!reporter) {
-      default_reporter = internal::GetDefaultReporter();
-      reporter = default_reporter.get();
+    // Setup the reporters
+    std::ofstream output_file;
+    std::unique_ptr<BenchmarkReporter> default_console_reporter;
+    std::unique_ptr<BenchmarkReporter> default_file_reporter;
+    if (!console_reporter) {
+      auto output_opts = FLAGS_color_print ? ConsoleReporter::OO_Color
+                                           : ConsoleReporter::OO_None;
+      default_console_reporter = internal::CreateReporter(
+          FLAGS_benchmark_format, output_opts);
+      console_reporter = default_console_reporter.get();
     }
-    internal::RunMatchingBenchmarks(benchmarks, reporter);
-    reporter->Finalize();
+    std::string const& fname = FLAGS_benchmark_out;
+    if (fname == "" && file_reporter) {
+      std::cerr << "A custom file reporter was provided but "
+                   "--benchmark_out=<file> was not specified." << std::endl;
+      std::exit(1);
+    }
+    if (fname != "") {
+      output_file.open(fname);
+      if (!output_file.is_open()) {
+        std::cerr << "invalid file name: '" << fname << "'" << std::endl;
+        std::exit(1);
+      }
+      if (!file_reporter) {
+        default_file_reporter = internal::CreateReporter(
+            FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
+        file_reporter = default_file_reporter.get();
+      }
+      file_reporter->SetOutputStream(&output_file);
+      file_reporter->SetErrorStream(&output_file);
+    }
+    internal::RunMatchingBenchmarks(benchmarks, console_reporter, file_reporter);
   }
   return benchmarks.size();
 }
@@ -1062,6 +1109,8 @@ void PrintUsageAndExit() {
           " [--benchmark_min_time=<min_time>]\n"
           " [--benchmark_repetitions=<num_repetitions>]\n"
           " [--benchmark_format=<console|json|csv>]\n"
+          " [--benchmark_out=<filename>]\n"
+          " [--benchmark_out_format=<json|console|csv>]\n"
          " [--color_print={true|false}]\n"
          " [--v=<verbosity>]\n");
  exit(0);
@@ -1081,6 +1130,10 @@ void ParseCommandLineFlags(int* argc, char** argv) {
                         &FLAGS_benchmark_repetitions) ||
         ParseStringFlag(argv[i], "benchmark_format",
                         &FLAGS_benchmark_format) ||
+        ParseStringFlag(argv[i], "benchmark_out",
+                        &FLAGS_benchmark_out) ||
+        ParseStringFlag(argv[i], "benchmark_out_format",
+                        &FLAGS_benchmark_out_format) ||
         ParseBoolFlag(argv[i], "color_print",
                       &FLAGS_color_print) ||
         ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
@@ -1092,10 +1145,9 @@ void ParseCommandLineFlags(int* argc, char** argv) {
       PrintUsageAndExit();
     }
   }
-  if (FLAGS_benchmark_format != "console" &&
-      FLAGS_benchmark_format != "json" &&
-      FLAGS_benchmark_format != "csv") {
+  for (auto const* flag : {&FLAGS_benchmark_format,
+                           &FLAGS_benchmark_out_format})
+  if (*flag != "console" && *flag != "json" && *flag != "csv") {
     PrintUsageAndExit();
   }
 }
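
One subtlety in the checks above: the comment in include/benchmark/benchmark_api.h
says an unused 'file_reporter' is ignored, but this implementation treats a custom
'file_reporter' without `--benchmark_out=<file>` as a hard error and exits.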

src/colorprint.cc

@@ -20,7 +20,6 @@
 #include <string>
 #include <memory>
-#include "commandlineflags.h"
 #include "check.h"
 #include "internal_macros.h"
@@ -28,8 +27,6 @@
 #include <Windows.h>
 #endif
-DECLARE_bool(color_print);
 namespace benchmark {
 namespace {
 #ifdef BENCHMARK_OS_WINDOWS
@@ -120,14 +117,14 @@ std::string FormatString(const char *msg, ...) {
 void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...) {
   va_list args;
   va_start(args, fmt);
+  ColorPrintf(out, color, fmt, args);
+  va_end(args);
+}
-  if (!FLAGS_color_print) {
-    out << FormatString(fmt, args);
-    va_end(args);
-    return;
-  }
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, va_list args) {
 #ifdef BENCHMARK_OS_WINDOWS
   ((void)out);  // suppress unused warning
   const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);
   // Gets the current text color.
@@ -152,7 +149,6 @@ void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...) {
   out << FormatString(fmt, args) << "\033[m";
 #endif
-  va_end(args);
 }
 }  // end namespace benchmark
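
The change above is the standard varargs-splitting idiom: keep a `va_list` core
that other printf-style code can forward to, and reduce the `...` overload to a
thin wrapper. A self-contained sketch of the same pattern (the function names are
illustrative, not from the library):

```
#include <cstdarg>
#include <cstdio>

// Core function takes va_list so other varargs functions can forward to it.
static void LogV(const char* fmt, va_list args) {
  std::vfprintf(stderr, fmt, args);
}

// Convenience '...' overload: open the va_list, forward it, close it.
static void Log(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  LogV(fmt, args);
  va_end(args);
}

int main() {
  Log("%s: %d\n", "iterations", 106365);
  return 0;
}
```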

src/colorprint.h

@@ -20,6 +20,7 @@ enum LogColor {
 std::string FormatString(const char* msg, va_list args);
 std::string FormatString(const char* msg, ...);
+void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, va_list args);
 void ColorPrintf(std::ostream& out, LogColor color, const char* fmt, ...);
 }  // end namespace benchmark

src/console_reporter.cc

@@ -30,8 +30,6 @@
 #include "string_util.h"
 #include "walltime.h"
-DECLARE_bool(color_print);
 namespace benchmark {
 bool ConsoleReporter::ReportContext(const Context& context) {
@@ -40,10 +38,10 @@ bool ConsoleReporter::ReportContext(const Context& context) {
   PrintBasicContext(&GetErrorStream(), context);
 #ifdef BENCHMARK_OS_WINDOWS
-  if (FLAGS_color_print && &std::cout != &GetOutputStream()) {
+  if (color_output_ && &std::cout != &GetOutputStream()) {
     GetErrorStream() << "Color printing is only supported for stdout on windows."
                         " Disabling color printing\n";
-    FLAGS_color_print = false;
+    color_output_ = false;
   }
 #endif
   std::string str = FormatString("%-*s %13s %13s %10s\n",
@@ -59,18 +57,29 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
     PrintRunData(run);
 }
-void ConsoleReporter::PrintRunData(const Run& result) {
-  auto& Out = GetOutputStream();
+static void IgnoreColorPrint(std::ostream& out, LogColor,
+                             const char* fmt, ...)
+{
+  va_list args;
+  va_start(args, fmt);
+  out << FormatString(fmt, args);
+  va_end(args);
+}
+void ConsoleReporter::PrintRunData(const Run& result) {
+  typedef void(PrinterFn)(std::ostream&, LogColor, const char*, ...);
+  auto& Out = GetOutputStream();
+  PrinterFn* printer = color_output_ ? (PrinterFn*)ColorPrintf
+                                     : IgnoreColorPrint;
   auto name_color =
       (result.report_big_o || result.report_rms) ? COLOR_BLUE : COLOR_GREEN;
-  ColorPrintf(Out, name_color, "%-*s ", name_field_width_,
+  printer(Out, name_color, "%-*s ", name_field_width_,
           result.benchmark_name.c_str());
   if (result.error_occurred) {
-    ColorPrintf(Out, COLOR_RED, "ERROR OCCURRED: \'%s\'",
+    printer(Out, COLOR_RED, "ERROR OCCURRED: \'%s\'",
            result.error_message.c_str());
-    ColorPrintf(Out, COLOR_DEFAULT, "\n");
+    printer(Out, COLOR_DEFAULT, "\n");
     return;
   }
   // Format bytes per second
@@ -91,34 +100,34 @@ void ConsoleReporter::PrintRunData(const Run& result) {
   if (result.report_big_o) {
     std::string big_o = GetBigOString(result.complexity);
-    ColorPrintf(Out, COLOR_YELLOW, "%10.2f %s %10.2f %s ", real_time,
+    printer(Out, COLOR_YELLOW, "%10.2f %s %10.2f %s ", real_time,
            big_o.c_str(), cpu_time, big_o.c_str());
   } else if (result.report_rms) {
-    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %% %10.0f %% ", real_time * 100,
+    printer(Out, COLOR_YELLOW, "%10.0f %% %10.0f %% ", real_time * 100,
            cpu_time * 100);
   } else {
     const char* timeLabel = GetTimeUnitString(result.time_unit);
-    ColorPrintf(Out, COLOR_YELLOW, "%10.0f %s %10.0f %s ", real_time, timeLabel,
+    printer(Out, COLOR_YELLOW, "%10.0f %s %10.0f %s ", real_time, timeLabel,
            cpu_time, timeLabel);
   }
   if (!result.report_big_o && !result.report_rms) {
-    ColorPrintf(Out, COLOR_CYAN, "%10lld", result.iterations);
+    printer(Out, COLOR_CYAN, "%10lld", result.iterations);
   }
   if (!rate.empty()) {
-    ColorPrintf(Out, COLOR_DEFAULT, " %*s", 13, rate.c_str());
+    printer(Out, COLOR_DEFAULT, " %*s", 13, rate.c_str());
   }
   if (!items.empty()) {
-    ColorPrintf(Out, COLOR_DEFAULT, " %*s", 18, items.c_str());
+    printer(Out, COLOR_DEFAULT, " %*s", 18, items.c_str());
   }
   if (!result.report_label.empty()) {
-    ColorPrintf(Out, COLOR_DEFAULT, " %s", result.report_label.c_str());
+    printer(Out, COLOR_DEFAULT, " %s", result.report_label.c_str());
   }
-  ColorPrintf(Out, COLOR_DEFAULT, "\n");
+  printer(Out, COLOR_DEFAULT, "\n");
 }
 }  // end namespace benchmark
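
The `printer` selection in PrintRunData swaps the entire printf-style function
once, instead of branching on color at every call site. A self-contained sketch of
the technique (the enum and function names are illustrative):

```
#include <cstdarg>
#include <cstdio>

enum LogColor { COLOR_DEFAULT, COLOR_GREEN };

// Printf-like signature shared by both implementations.
typedef void(PrinterFn)(LogColor, const char*, ...);

static void ColorPrint(LogColor color, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  if (color == COLOR_GREEN) std::fputs("\033[32m", stdout);
  std::vprintf(fmt, args);
  std::fputs("\033[m", stdout);  // reset terminal attributes
  va_end(args);
}

static void PlainPrint(LogColor /*color*/, const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  std::vprintf(fmt, args);  // same arguments, color ignored
  va_end(args);
}

int main() {
  const bool color_output = false;  // e.g. driven by ConsoleReporter's option
  PrinterFn* printer = color_output ? ColorPrint : PlainPrint;
  printer(COLOR_GREEN, "%-20s %10d\n", "BM_example", 100);
  return 0;
}
```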

test/complexity_test.cc

@@ -257,14 +257,8 @@ ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
 int main(int argc, char* argv[]) {
-  // Add --color_print=false to argv since we don't want to match color codes.
-  char new_arg[64];
-  char* new_argv[64];
-  std::copy(argv, argv + argc, new_argv);
-  new_argv[argc++] = std::strcpy(new_arg, "--color_print=false");
-  benchmark::Initialize(&argc, new_argv);
-  benchmark::ConsoleReporter CR;
+  benchmark::Initialize(&argc, argv);
+  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
   benchmark::JSONReporter JR;
   benchmark::CSVReporter CSVR;
   struct ReporterTest {

test/reporter_output_test.cc

@@ -203,14 +203,8 @@ ADD_CASES(&ConsoleOutputTests, {
 int main(int argc, char* argv[]) {
-  // Add --color_print=false to argv since we don't want to match color codes.
-  char new_arg[64];
-  char* new_argv[64];
-  std::copy(argv, argv + argc, new_argv);
-  new_argv[argc++] = std::strcpy(new_arg, "--color_print=false");
-  benchmark::Initialize(&argc, new_argv);
-  benchmark::ConsoleReporter CR;
+  benchmark::Initialize(&argc, argv);
+  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
   benchmark::JSONReporter JR;
   benchmark::CSVReporter CSVR;
   struct ReporterTest {