Merge pull request #100 from google/json_reporter

Add JSON Reporter
Eric 2015-03-18 10:21:25 -04:00
commit ec0c725a33
6 changed files with 288 additions and 15 deletions

View File

@@ -150,6 +150,71 @@ static void BM_MultiThreaded(benchmark::State& state) {
BENCHMARK(BM_MultiThreaded)->Threads(2);
```
Output Formats
--------------
The library supports multiple output formats. Use the
`--benchmark_format=<tabular|json>` flag to set the format type. `tabular` is
the default format.
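For example, JSON output can be requested on the command line (the binary name `mybenchmark` is only a placeholder):
```
./mybenchmark --benchmark_format=json
```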
The tabular format is intended to be human-readable, and by default it produces
colorized output. Example tabular output looks like:
```
Run on (40 X 2801 MHz CPUs)
2015/03/17-18:35:54
Build Type: DEBUG
Benchmark             Time(ns)    CPU(ns) Iterations
----------------------------------------------------------------------
BM_SetInsert/1024/1      28928      29349      23853  133.097kB/s  33.2742k items/s
BM_SetInsert/1024/8      32065      32913      21375  949.487kB/s  237.372k items/s
BM_SetInsert/1024/10     33157      33648      21431  1.13369MB/s  290.225k items/s
```
The JSON format outputs human-readable JSON split into two top-level attributes.
The `context` attribute contains information about the run in general, including
information about the CPU and the date.
The `benchmarks` attribute contains a list of every benchmark run. Example JSON
output looks like:
```
{
  "context": {
    "date": "2015/03/17-18:40:25",
    "num_cpus": 40,
    "mhz_per_cpu": 2801,
    "cpu_scaling_enabled": false,
    "build_type": "debug"
  },
  "benchmarks": [
    {
      "name": "BM_SetInsert/1024/1",
      "iterations": 94877,
      "real_time": 29275,
      "cpu_time": 29836,
      "bytes_per_second": 134066,
      "items_per_second": 33516
    },
    {
      "name": "BM_SetInsert/1024/8",
      "iterations": 21609,
      "real_time": 32317,
      "cpu_time": 32429,
      "bytes_per_second": 986770,
      "items_per_second": 246693
    },
    {
      "name": "BM_SetInsert/1024/10",
      "iterations": 21393,
      "real_time": 32724,
      "cpu_time": 33355,
      "bytes_per_second": 1199226,
      "items_per_second": 299807
    }
  ]
}
```
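The reporter can also be chosen programmatically rather than through the flag:
`RunSpecifiedBenchmarks` accepts a `BenchmarkReporter*`, so a `JSONReporter` (or
any other `BenchmarkReporter` subclass) can be passed in directly. A minimal
sketch follows; the benchmark body is only illustrative, and it assumes a
user-written `main` that calls `benchmark::Initialize(&argc, argv)`, which is
not part of this change:
```
#include <string>

#include "benchmark/benchmark.h"
#include "benchmark/reporter.h"

static void BM_StringCreation(benchmark::State& state) {
  while (state.KeepRunning())
    std::string empty_string;
}
BENCHMARK(BM_StringCreation);

int main(int argc, const char* argv[]) {
  benchmark::Initialize(&argc, argv);
  // Always emit JSON, regardless of the --benchmark_format flag.
  benchmark::JSONReporter json_reporter;
  benchmark::RunSpecifiedBenchmarks(&json_reporter);
  return 0;
}
```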
Linking against the library
---------------------------
When using GCC, it is necessary to link against pthread to avoid runtime exceptions; this is a consequence of how GCC implements std::thread. See [issue #67](https://github.com/google/benchmark/issues/67) for more details.
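A minimal example of a manual compile line with that in mind; the file names are placeholders, and it assumes the library is linked as `-lbenchmark`:
```
g++ -std=c++11 mybenchmark.cc -lbenchmark -lpthread -o mybenchmark
```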

View File

@@ -80,6 +80,8 @@ class BenchmarkReporter {
virtual void Finalize();
virtual ~BenchmarkReporter();
protected:
static void ComputeStats(std::vector<Run> const& reports, Run* mean, Run* stddev);
};
// Simple reporter that outputs benchmark data to the console. This is the
@@ -88,11 +90,24 @@ class ConsoleReporter : public BenchmarkReporter {
public:
virtual bool ReportContext(const Context& context);
virtual void ReportRuns(const std::vector<Run>& reports);
private:
protected:
virtual void PrintRunData(const Run& report);
size_t name_field_width_;
};
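// Reporter that writes benchmark results to stdout as a single JSON object.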
class JSONReporter : public BenchmarkReporter {
public:
JSONReporter() : first_report_(true) {}
virtual bool ReportContext(const Context& context);
virtual void ReportRuns(const std::vector<Run>& reports);
virtual void Finalize();
private:
void PrintRunData(const Run& report);
bool first_report_;
};
} // end namespace benchmark
#endif // BENCHMARK_REPORTER_H_

View File

@@ -3,8 +3,8 @@ include_directories(${PROJECT_SOURCE_DIR}/src)
# Define the source files
set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc" "log.cc"
"reporter.cc" "sleep.cc" "string_util.cc" "sysinfo.cc"
"walltime.cc")
"json_reporter.cc" "reporter.cc" "sleep.cc" "string_util.cc"
"sysinfo.cc" "walltime.cc")
# Determine the correct regular expression engine to use
if(HAVE_STD_REGEX)
set(RE_FILES "re_std.cc")

View File

@@ -60,6 +60,10 @@ DEFINE_int32(benchmark_repetitions, 1,
"The number of runs of each benchmark. If greater than 1, the "
"mean and standard deviation of the runs will be reported.");
DEFINE_string(benchmark_format, "tabular",
"The format to use for console output. Valid values are "
"'tabular' or 'json'.");
DEFINE_bool(color_print, true, "Enables colorized logging.");
DEFINE_int32(v, 0, "The level of verbose logging to output");
@@ -807,19 +811,35 @@ void RunMatchingBenchmarks(const std::string& spec,
}
}
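// Constructs the reporter selected by the --benchmark_format flag.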
std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
typedef std::unique_ptr<BenchmarkReporter> PtrType;
if (FLAGS_benchmark_format == "tabular") {
return PtrType(new ConsoleReporter);
}
else if (FLAGS_benchmark_format == "json") {
return PtrType(new JSONReporter);
} else {
std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n";
std::exit(1);
}
}
} // end namespace internal
void RunSpecifiedBenchmarks() {
RunSpecifiedBenchmarks(nullptr);
}
void RunSpecifiedBenchmarks(BenchmarkReporter* provided_reporter) {
void RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
std::string spec = FLAGS_benchmark_filter;
if (spec.empty() || spec == "all")
spec = "."; // Regexp that matches all benchmarks
ConsoleReporter default_reporter;
BenchmarkReporter* reporter = provided_reporter ? provided_reporter
: &default_reporter;
std::unique_ptr<BenchmarkReporter> default_reporter;
if (!reporter) {
default_reporter = internal::GetDefaultReporter();
reporter = default_reporter.get();
}
internal::RunMatchingBenchmarks(spec, reporter);
reporter->Finalize();
}
@@ -833,6 +853,7 @@ void PrintUsageAndExit() {
" [--benchmark_iterations=<iterations>]\n"
" [--benchmark_min_time=<min_time>]\n"
" [--benchmark_repetitions=<num_repetitions>]\n"
" [--benchmark_format=<tabular|json>]\n"
" [--color_print={true|false}]\n"
" [--v=<verbosity>]\n");
exit(0);
@@ -850,6 +871,8 @@ void ParseCommandLineFlags(int* argc, const char** argv) {
&FLAGS_benchmark_min_time) ||
ParseInt32Flag(argv[i], "benchmark_repetitions",
&FLAGS_benchmark_repetitions) ||
ParseStringFlag(argv[i], "benchmark_format",
&FLAGS_benchmark_format) ||
ParseBoolFlag(argv[i], "color_print",
&FLAGS_color_print) ||
ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
@@ -861,6 +884,10 @@ void ParseCommandLineFlags(int* argc, const char** argv) {
PrintUsageAndExit();
}
}
if (FLAGS_benchmark_format != "tabular" &&
FLAGS_benchmark_format != "json") {
PrintUsageAndExit();
}
}
} // end namespace internal

src/json_reporter.cc Normal file
View File

@@ -0,0 +1,168 @@
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "benchmark/reporter.h"

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include "string_util.h"
#include "walltime.h"
namespace benchmark {
namespace {
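
// Helpers that format a single "key": value pair. Note that string values
// are written out verbatim; no JSON escaping is applied to them.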
std::string FormatKV(std::string const& key, std::string const& value) {
  return StringPrintF("\"%s\": \"%s\"", key.c_str(), value.c_str());
}

std::string FormatKV(std::string const& key, const char* value) {
  return StringPrintF("\"%s\": \"%s\"", key.c_str(), value);
}

std::string FormatKV(std::string const& key, bool value) {
  return StringPrintF("\"%s\": %s", key.c_str(), value ? "true" : "false");
}

std::string FormatKV(std::string const& key, int64_t value) {
  std::stringstream ss;
  ss << '"' << key << "\": " << value;
  return ss.str();
}

std::string FormatKV(std::string const& key, std::size_t value) {
  std::stringstream ss;
  ss << '"' << key << "\": " << value;
  return ss.str();
}

int64_t RoundDouble(double v) {
  return static_cast<int64_t>(v + 0.5);
}

} // end namespace

bool JSONReporter::ReportContext(const Context& context) {
  std::ostream& out = std::cout;

  out << "{\n";
  std::string inner_indent(2, ' ');

  // Open context block and print context information.
  out << inner_indent << "\"context\": {\n";
  std::string indent(4, ' ');

  int remainder_us;
  std::string walltime_value = walltime::Print(
      walltime::Now(), "%Y/%m/%d-%H:%M:%S",
      true,  // use local timezone
      &remainder_us);
  out << indent << FormatKV("date", walltime_value) << ",\n";

  out << indent
      << FormatKV("num_cpus", static_cast<int64_t>(context.num_cpus))
      << ",\n";
  out << indent
      << FormatKV("mhz_per_cpu", RoundDouble(context.mhz_per_cpu))
      << ",\n";
  out << indent
      << FormatKV("cpu_scaling_enabled", context.cpu_scaling_enabled)
      << ",\n";

#if defined(NDEBUG)
  const char build_type[] = "release";
#else
  const char build_type[] = "debug";
#endif
  out << indent << FormatKV("build_type", build_type) << "\n";

  // Close context block and open the list of benchmarks.
  out << inner_indent << "},\n";
  out << inner_indent << "\"benchmarks\": [\n";
  return true;
}
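
// Emits one JSON object per run. When a benchmark was repeated, the mean and
// standard deviation across the repetitions are appended as two extra entries.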
void JSONReporter::ReportRuns(std::vector<Run> const& reports) {
  if (reports.empty()) {
    return;
  }

  std::string indent(4, ' ');
  std::ostream& out = std::cout;
  if (!first_report_) {
    out << ",\n";
  }
  first_report_ = false;

  std::vector<Run> reports_cp = reports;
  if (reports.size() >= 2) {
    Run mean_data;
    Run stddev_data;
    BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
    reports_cp.push_back(mean_data);
    reports_cp.push_back(stddev_data);
  }
  for (auto it = reports_cp.begin(); it != reports_cp.end(); ++it) {
    out << indent << "{\n";
    PrintRunData(*it);
    out << indent << '}';
    auto it_cp = it;
    if (++it_cp != reports_cp.end()) {
      out << ',';
    }
  }
}

void JSONReporter::Finalize() {
  // Close the list of benchmarks and the top level object.
  std::cout << "\n ]\n}\n";
}
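
// Times are accumulated in seconds; convert them to nanoseconds and report
// per-iteration values.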
void JSONReporter::PrintRunData(Run const& run) {
  double const multiplier = 1e9;  // nano second multiplier
  double cpu_time = run.cpu_accumulated_time * multiplier;
  double real_time = run.real_accumulated_time * multiplier;
  if (run.iterations != 0) {
    real_time = real_time / static_cast<double>(run.iterations);
    cpu_time = cpu_time / static_cast<double>(run.iterations);
  }

  std::string indent(6, ' ');
  std::ostream& out = std::cout;
  out << indent
      << FormatKV("name", run.benchmark_name)
      << ",\n";
  out << indent
      << FormatKV("iterations", run.iterations)
      << ",\n";
  out << indent
      << FormatKV("real_time", RoundDouble(real_time))
      << ",\n";
  out << indent
      << FormatKV("cpu_time", RoundDouble(cpu_time));
  if (run.bytes_per_second > 0.0) {
    out << ",\n" << indent
        << FormatKV("bytes_per_second", RoundDouble(run.bytes_per_second));
  }
  if (run.items_per_second > 0.0) {
    out << ",\n" << indent
        << FormatKV("items_per_second", RoundDouble(run.items_per_second));
  }
  if (!run.report_label.empty()) {
    out << ",\n" << indent
        << FormatKV("label", run.report_label);
  }
  out << '\n';
}

} // end namespace benchmark

View File

@@ -16,6 +16,7 @@
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>
@@ -26,11 +27,10 @@
#include "walltime.h"
namespace benchmark {
namespace {
void ComputeStats(const std::vector<BenchmarkReporter::Run>& reports,
BenchmarkReporter::Run* mean_data,
BenchmarkReporter::Run* stddev_data) {
void BenchmarkReporter::ComputeStats(
const std::vector<Run>& reports,
Run* mean_data, Run* stddev_data) {
CHECK(reports.size() >= 2) << "Cannot compute stats for less than 2 reports";
// Accumulators.
Stat1_d real_accumulated_time_stat;
@@ -42,7 +42,7 @@ void ComputeStats(const std::vector<BenchmarkReporter::Run>& reports,
std::size_t const run_iterations = reports.front().iterations;
// Populate the accumulators.
for (BenchmarkReporter::Run const& run : reports) {
for (Run const& run : reports) {
CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
CHECK_EQ(run_iterations, run.iterations);
real_accumulated_time_stat +=
@@ -83,8 +83,6 @@ void ComputeStats(const std::vector<BenchmarkReporter::Run>& reports,
stddev_data->items_per_second = items_per_second_stat.StdDev();
}
} // end namespace
void BenchmarkReporter::Finalize() {
}
@@ -145,7 +143,7 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
Run mean_data;
Run stddev_data;
ComputeStats(reports, &mean_data, &stddev_data);
BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
// Output using PrintRun.
PrintRunData(mean_data);