2015-03-18 01:46:16 +08:00
|
|
|
// Copyright 2015 Google Inc. All rights reserved.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
#include "benchmark/reporter.h"
|
2016-05-27 03:16:40 +08:00
|
|
|
#include "complexity.h"
|
2015-03-18 01:46:16 +08:00
|
|
|
|
|
|
|
#include <cstdlib>
|
2016-05-28 03:34:37 +08:00
|
|
|
|
|
|
|
#include <iostream>
|
2015-03-18 01:46:16 +08:00
|
|
|
#include <vector>
|
2016-05-21 14:55:43 +08:00
|
|
|
#include <tuple>
|
2015-03-18 01:46:16 +08:00
|
|
|
|
|
|
|
#include "check.h"
|
|
|
|
#include "stat.h"
|
|
|
|
|
|
|
|
namespace benchmark {
|
|
|
|
|
2016-05-28 03:34:37 +08:00
|
|
|
// By default a reporter writes its normal output to stdout and its
// diagnostics to stderr.
BenchmarkReporter::BenchmarkReporter()
    : output_stream_(&std::cout), error_stream_(&std::cerr) {}
|
|
|
|
|
2015-03-18 12:23:43 +08:00
|
|
|
void BenchmarkReporter::ComputeStats(
|
|
|
|
const std::vector<Run>& reports,
|
|
|
|
Run* mean_data, Run* stddev_data) {
|
2015-03-18 01:46:16 +08:00
|
|
|
CHECK(reports.size() >= 2) << "Cannot compute stats for less than 2 reports";
|
|
|
|
// Accumulators.
|
|
|
|
Stat1_d real_accumulated_time_stat;
|
|
|
|
Stat1_d cpu_accumulated_time_stat;
|
|
|
|
Stat1_d bytes_per_second_stat;
|
|
|
|
Stat1_d items_per_second_stat;
|
|
|
|
// All repetitions should be run with the same number of iterations so we
|
|
|
|
// can take this information from the first benchmark.
|
2015-04-14 03:03:27 +08:00
|
|
|
int64_t const run_iterations = reports.front().iterations;
|
2015-03-18 01:46:16 +08:00
|
|
|
|
|
|
|
// Populate the accumulators.
|
2015-03-18 12:23:43 +08:00
|
|
|
for (Run const& run : reports) {
|
2015-03-18 01:46:16 +08:00
|
|
|
CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
|
|
|
|
CHECK_EQ(run_iterations, run.iterations);
|
2016-05-24 09:24:56 +08:00
|
|
|
if (run.error_occurred)
|
|
|
|
continue;
|
2015-03-18 01:46:16 +08:00
|
|
|
real_accumulated_time_stat +=
|
|
|
|
Stat1_d(run.real_accumulated_time/run.iterations, run.iterations);
|
|
|
|
cpu_accumulated_time_stat +=
|
|
|
|
Stat1_d(run.cpu_accumulated_time/run.iterations, run.iterations);
|
|
|
|
items_per_second_stat += Stat1_d(run.items_per_second, run.iterations);
|
|
|
|
bytes_per_second_stat += Stat1_d(run.bytes_per_second, run.iterations);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get the data from the accumulator to BenchmarkReporter::Run's.
|
|
|
|
mean_data->benchmark_name = reports[0].benchmark_name + "_mean";
|
|
|
|
mean_data->iterations = run_iterations;
|
|
|
|
mean_data->real_accumulated_time = real_accumulated_time_stat.Mean() *
|
|
|
|
run_iterations;
|
|
|
|
mean_data->cpu_accumulated_time = cpu_accumulated_time_stat.Mean() *
|
|
|
|
run_iterations;
|
|
|
|
mean_data->bytes_per_second = bytes_per_second_stat.Mean();
|
|
|
|
mean_data->items_per_second = items_per_second_stat.Mean();
|
|
|
|
|
|
|
|
// Only add label to mean/stddev if it is same for all runs
|
|
|
|
mean_data->report_label = reports[0].report_label;
|
|
|
|
for (std::size_t i = 1; i < reports.size(); i++) {
|
|
|
|
if (reports[i].report_label != reports[0].report_label) {
|
|
|
|
mean_data->report_label = "";
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
stddev_data->benchmark_name = reports[0].benchmark_name + "_stddev";
|
|
|
|
stddev_data->report_label = mean_data->report_label;
|
|
|
|
stddev_data->iterations = 0;
|
|
|
|
stddev_data->real_accumulated_time =
|
|
|
|
real_accumulated_time_stat.StdDev();
|
|
|
|
stddev_data->cpu_accumulated_time =
|
|
|
|
cpu_accumulated_time_stat.StdDev();
|
|
|
|
stddev_data->bytes_per_second = bytes_per_second_stat.StdDev();
|
|
|
|
stddev_data->items_per_second = items_per_second_stat.StdDev();
|
|
|
|
}
|
|
|
|
|
2016-05-19 03:25:00 +08:00
|
|
|
void BenchmarkReporter::ComputeBigO(
|
|
|
|
const std::vector<Run>& reports,
|
2016-05-24 00:51:29 +08:00
|
|
|
Run* big_o, Run* rms) {
|
2016-05-25 04:25:59 +08:00
|
|
|
CHECK(reports.size() >= 2)
|
|
|
|
<< "Cannot compute asymptotic complexity for fewer than 2 reports";
|
|
|
|
|
2016-05-19 03:25:00 +08:00
|
|
|
// Accumulators.
|
2016-05-24 02:50:35 +08:00
|
|
|
std::vector<int> n;
|
|
|
|
std::vector<double> real_time;
|
|
|
|
std::vector<double> cpu_time;
|
2016-05-19 03:25:00 +08:00
|
|
|
|
|
|
|
// Populate the accumulators.
|
2016-05-21 17:51:42 +08:00
|
|
|
for (const Run& run : reports) {
|
2016-05-25 04:25:59 +08:00
|
|
|
n.push_back(run.complexity_n);
|
2016-05-24 02:50:35 +08:00
|
|
|
real_time.push_back(run.real_accumulated_time/run.iterations);
|
|
|
|
cpu_time.push_back(run.cpu_accumulated_time/run.iterations);
|
2016-05-19 03:25:00 +08:00
|
|
|
}
|
2016-05-25 04:25:59 +08:00
|
|
|
|
2016-05-24 02:50:35 +08:00
|
|
|
LeastSq result_cpu = MinimalLeastSq(n, cpu_time, reports[0].complexity);
|
2016-05-25 04:25:59 +08:00
|
|
|
|
2016-05-24 02:50:35 +08:00
|
|
|
// result_cpu.complexity is passed as parameter to result_real because in case
|
2016-05-25 04:25:59 +08:00
|
|
|
// reports[0].complexity is oAuto, the noise on the measured data could make
|
|
|
|
// the best fit function of Cpu and Real differ. In order to solve this, we
|
|
|
|
// take the best fitting function for the Cpu, and apply it to Real data.
|
2016-05-24 02:50:35 +08:00
|
|
|
LeastSq result_real = MinimalLeastSq(n, real_time, result_cpu.complexity);
|
2016-05-19 03:25:00 +08:00
|
|
|
|
|
|
|
std::string benchmark_name = reports[0].benchmark_name.substr(0, reports[0].benchmark_name.find('/'));
|
2016-05-25 04:25:59 +08:00
|
|
|
|
2016-05-19 03:25:00 +08:00
|
|
|
// Get the data from the accumulator to BenchmarkReporter::Run's.
|
2016-05-24 00:51:29 +08:00
|
|
|
big_o->benchmark_name = benchmark_name + "_BigO";
|
|
|
|
big_o->iterations = 0;
|
2016-05-24 02:50:35 +08:00
|
|
|
big_o->real_accumulated_time = result_real.coef;
|
|
|
|
big_o->cpu_accumulated_time = result_cpu.coef;
|
2016-05-24 00:51:29 +08:00
|
|
|
big_o->report_big_o = true;
|
2016-05-24 02:50:35 +08:00
|
|
|
big_o->complexity = result_cpu.complexity;
|
2016-05-21 14:55:43 +08:00
|
|
|
|
|
|
|
double multiplier;
|
2016-05-24 02:50:35 +08:00
|
|
|
const char* time_label;
|
2016-05-25 04:25:59 +08:00
|
|
|
std::tie(time_label, multiplier) =
|
|
|
|
GetTimeUnitAndMultiplier(reports[0].time_unit);
|
2016-05-19 03:25:00 +08:00
|
|
|
|
|
|
|
// Only add label to mean/stddev if it is same for all runs
|
2016-05-24 00:51:29 +08:00
|
|
|
big_o->report_label = reports[0].report_label;
|
|
|
|
rms->benchmark_name = benchmark_name + "_RMS";
|
|
|
|
rms->report_label = big_o->report_label;
|
|
|
|
rms->iterations = 0;
|
2016-05-24 02:50:35 +08:00
|
|
|
rms->real_accumulated_time = result_real.rms / multiplier;
|
|
|
|
rms->cpu_accumulated_time = result_cpu.rms / multiplier;
|
2016-05-24 00:51:29 +08:00
|
|
|
rms->report_rms = true;
|
2016-05-24 02:50:35 +08:00
|
|
|
rms->complexity = result_cpu.complexity;
|
2016-05-21 14:55:43 +08:00
|
|
|
}
|
|
|
|
|
2016-03-29 03:32:11 +08:00
|
|
|
// Maps a TimeUnit to its printable suffix and the factor converting seconds
// into that unit. Nanoseconds ("ns", 1e9) is the fallback for kNanosecond
// and any unrecognized value.
TimeUnitMultiplier BenchmarkReporter::GetTimeUnitAndMultiplier(TimeUnit unit) {
  if (unit == kMillisecond) {
    return std::make_pair("ms", 1e3);
  }
  if (unit == kMicrosecond) {
    return std::make_pair("us", 1e6);
  }
  return std::make_pair("ns", 1e9);
}
|
|
|
|
|
2015-03-18 04:16:36 +08:00
|
|
|
// Hook called once all benchmarks have been reported; the base class has
// nothing to flush or tear down.
void BenchmarkReporter::Finalize() {}
|
2015-03-18 01:46:16 +08:00
|
|
|
|
2015-03-18 04:16:36 +08:00
|
|
|
// Out-of-line definition so the class's vtable is emitted in this
// translation unit; no resources to release.
BenchmarkReporter::~BenchmarkReporter() {}
|
2015-03-18 01:46:16 +08:00
|
|
|
|
|
|
|
} // end namespace benchmark
|