// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/benchmark.h"

#include "benchmark_api_internal.h"
#include "benchmark_runner.h"
#include "internal_macros.h"

#ifndef BENCHMARK_OS_WINDOWS
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <unistd.h>
#endif

#include <algorithm>
#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <utility>

#include "check.h"
#include "colorprint.h"
#include "commandlineflags.h"
#include "complexity.h"
#include "counter.h"
#include "internal_macros.h"
#include "log.h"
#include "mutex.h"
#include "re.h"
#include "statistics.h"
#include "string_util.h"
#include "thread_manager.h"
#include "thread_timer.h"

// Print a list of benchmarks. This option overrides all other options.
DEFINE_bool(benchmark_list_tests, false);

// A regular expression that specifies the set of benchmarks to execute. If
// this flag is empty, or if this flag is the string "all", all benchmarks
// linked into the binary are run.
DEFINE_string(benchmark_filter, ".");

// Minimum number of seconds we should run benchmark before results are
// considered significant. For cpu-time based tests, this is the lower bound
// on the total cpu time used by all threads that make up the test. For
// real-time based tests, this is the lower bound on the elapsed time of the
// benchmark execution, regardless of number of threads.
DEFINE_double(benchmark_min_time, 0.5);

// The number of runs of each benchmark. If greater than 1, the mean and
// standard deviation of the runs will be reported.
DEFINE_int32(benchmark_repetitions, 1);

// Report the result of each benchmark repetition. When 'true' is specified,
// only the mean, standard deviation, and other statistics are reported for
// repeated benchmarks. Affects all reporters.
DEFINE_bool(benchmark_report_aggregates_only, false);

// Display the result of each benchmark repetition. When 'true' is specified,
// only the mean, standard deviation, and other statistics are displayed for
// repeated benchmarks. Unlike benchmark_report_aggregates_only, this only
// affects the display reporter, but *NOT* the file reporter, which will
// still contain all the output.
DEFINE_bool(benchmark_display_aggregates_only, false);

// The format to use for console output.
// Valid values are 'console', 'json', or 'csv'.
DEFINE_string(benchmark_format, "console");

// The format to use for file output.
// Valid values are 'console', 'json', or 'csv'.
DEFINE_string(benchmark_out_format, "json");

// The file to write additional output to.
DEFINE_string(benchmark_out, "");

// Whether to use colors in the output. Valid values:
// 'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use colors if
// the output is being sent to a terminal and the TERM environment variable is
// set to a terminal type that supports colors.
DEFINE_string(benchmark_color, "auto");

// Whether to use tabular format when printing user counters to the console.
// Valid values: 'true'/'yes'/1, 'false'/'no'/0. Defaults to false.
DEFINE_bool(benchmark_counters_tabular, false);

// The level of verbose logging to output
DEFINE_int32(v, 0);
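
// Example invocation (illustrative only; "my_benchmark" and the filter value
// are placeholders, but the flags are the ones defined above):
//
//   ./my_benchmark --benchmark_filter=BM_Copy/8 \
//                  --benchmark_repetitions=10 \
//                  --benchmark_report_aggregates_only=true \
//                  --benchmark_out=results.json --benchmark_out_format=json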

namespace benchmark {

namespace internal {

// FIXME: wouldn't LTO mess this up?
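// Out-of-line, empty sink used by DoNotOptimize() on toolchains without the
// inline-asm implementation: routing a value's address through this opaque
// call keeps the optimizer from discarding the computation that produced it.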
void UseCharPointer(char const volatile*) {}

}  // namespace internal

State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
             int thread_i, int n_threads, internal::ThreadTimer* timer,
             internal::ThreadManager* manager)
    : total_iterations_(0),
      batch_leftover_(0),
      max_iterations(max_iters),
      started_(false),
      finished_(false),
      error_occurred_(false),
      range_(ranges),
      complexity_n_(0),
      counters(),
      thread_index(thread_i),
      threads(n_threads),
      timer_(timer),
      manager_(manager) {
  CHECK(max_iterations != 0) << "At least one iteration must be run";
  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";

  // Note: The use of offsetof below is technically undefined until C++17
  // because State is not a standard layout type. However, all compilers
  // currently provide well-defined behavior as an extension (which is
  // demonstrated since constexpr evaluation must diagnose all undefined
  // behavior). However, GCC and Clang also warn about this use of offsetof,
  // which must be suppressed.
#if defined(__INTEL_COMPILER)
#pragma warning push
#pragma warning(disable : 1875)
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
#endif
  // Offset tests to ensure commonly accessed data is on the first cache line.
  const int cache_line_size = 64;
  static_assert(offsetof(State, error_occurred_) <=
                    (cache_line_size - sizeof(error_occurred_)),
                "");
#if defined(__INTEL_COMPILER)
#pragma warning pop
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
}

void State::PauseTiming() {
  // Add in time accumulated so far
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StopTimer();
}

void State::ResumeTiming() {
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StartTimer();
}

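// Marks the current run as failed: the first error message wins and is
// recorded on the shared ThreadManager, the remaining iteration budget is
// cleared so the benchmark loop exits promptly, and the timer is stopped if
// it is still running.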
void State::SkipWithError(const char* msg) {
  CHECK(msg);
  error_occurred_ = true;
  {
    MutexLock l(manager_->GetBenchmarkMutex());
    if (manager_->results.has_error_ == false) {
      manager_->results.error_message_ = msg;
      manager_->results.has_error_ = true;
    }
  }
  total_iterations_ = 0;
  if (timer_->running()) timer_->StopTimer();
}

void State::SetIterationTime(double seconds) {
  timer_->SetIterationTime(seconds);
}

void State::SetLabel(const char* label) {
  MutexLock l(manager_->GetBenchmarkMutex());
  manager_->results.report_label_ = label;
}

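// Called when a thread enters the measurement loop: marks the run as started,
// seeds the iteration countdown (zero if an error was already reported),
// synchronizes with the other threads at the barrier, and starts the timer.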
void State::StartKeepRunning() {
  CHECK(!started_ && !finished_);
  started_ = true;
  total_iterations_ = error_occurred_ ? 0 : max_iterations;
  manager_->StartStopBarrier();
  if (!error_occurred_) ResumeTiming();
}

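// Called when a thread leaves the measurement loop: stops timing (unless the
// run was aborted via SkipWithError), resets the wrapped-around iteration
// counter, marks the run as finished, and waits at the barrier again.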
void State::FinishKeepRunning() {
  CHECK(started_ && (!finished_ || error_occurred_));
  if (!error_occurred_) {
    PauseTiming();
  }
  // Total iterations has now wrapped around past 0. Fix this.
  total_iterations_ = 0;
  finished_ = true;
  manager_->StartStopBarrier();
}

namespace internal {
namespace {

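// Runs every benchmark in `benchmarks`, forwarding per-run results and any
// computed aggregates to the display reporter and, when one is supplied, to
// the file reporter.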
void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
                   BenchmarkReporter* display_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note the file_reporter can be null.
  CHECK(display_reporter != nullptr);

  // Determine the width of the name field using a minimum width of 10.
  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  size_t stat_field_width = 0;
  for (const BenchmarkInstance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name.str().size());
    might_have_aggregates |= benchmark.repetitions > 1;

    for (const auto& Stat : *benchmark.statistics)
      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
  }
  if (might_have_aggregates) name_field_width += 1 + stat_field_width;

  // Print header here
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;

  // Keep track of running times of all instances of current benchmark
  std::vector<BenchmarkReporter::Run> complexity_reports;

  // We flush streams after invoking reporter methods that write to them. This
  // ensures users get timely updates even when streams are not line-buffered.
  auto flushStreams = [](BenchmarkReporter* reporter) {
    if (!reporter) return;
    std::flush(reporter->GetOutputStream());
    std::flush(reporter->GetErrorStream());
  };

  if (display_reporter->ReportContext(context) &&
      (!file_reporter || file_reporter->ReportContext(context))) {
    flushStreams(display_reporter);
    flushStreams(file_reporter);

    for (const auto& benchmark : benchmarks) {
      RunResults run_results = RunBenchmark(benchmark, &complexity_reports);

      auto report = [&run_results](BenchmarkReporter* reporter,
                                   bool report_aggregates_only) {
        assert(reporter);
        // If there are no aggregates, do output non-aggregates.
        report_aggregates_only &= !run_results.aggregates_only.empty();
        if (!report_aggregates_only)
          reporter->ReportRuns(run_results.non_aggregates);
        if (!run_results.aggregates_only.empty())
          reporter->ReportRuns(run_results.aggregates_only);
      };

      report(display_reporter, run_results.display_report_aggregates_only);
      if (file_reporter)
        report(file_reporter, run_results.file_report_aggregates_only);

      flushStreams(display_reporter);
      flushStreams(file_reporter);
    }
  }
|
2018-08-29 19:58:54 +08:00
|
|
|
display_reporter->Finalize();
|
2016-08-03 05:12:43 +08:00
|
|
|
if (file_reporter) file_reporter->Finalize();
|
2018-08-29 19:58:54 +08:00
|
|
|
flushStreams(display_reporter);
|
2016-09-12 05:36:14 +08:00
|
|
|
flushStreams(file_reporter);
|
2013-12-19 08:55:45 +08:00
|
|
|
}

// Disable deprecated warnings temporarily because we need to reference
// CSVReporter but don't want to trigger -Werror=-Wdeprecated-declarations
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif

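// Maps a --benchmark_format / --benchmark_out_format value to a reporter
// instance; any name other than "console", "json", or "csv" aborts with an
// error.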
std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  } else {
    std::cerr << "Unexpected format: '" << name << "'\n";
    std::exit(1);
  }
}

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif

}  // end namespace

bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}
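
// Translates the --benchmark_color and --benchmark_counters_tabular flags
// (plus an explicit force_no_color override) into ConsoleReporter output
// option bits.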
ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
  int output_opts = ConsoleReporter::OO_Defaults;
  auto is_benchmark_color = [force_no_color]() -> bool {
    if (force_no_color) {
      return false;
    }
    if (FLAGS_benchmark_color == "auto") {
      return IsColorTerminal();
    }
    return IsTruthyFlagValue(FLAGS_benchmark_color);
  };
  if (is_benchmark_color()) {
    output_opts |= ConsoleReporter::OO_Color;
  } else {
    output_opts &= ~ConsoleReporter::OO_Color;
  }
  if (FLAGS_benchmark_counters_tabular) {
    output_opts |= ConsoleReporter::OO_Tabular;
  } else {
    output_opts &= ~ConsoleReporter::OO_Tabular;
  }
  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
}

}  // end namespace internal

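// A typical benchmark binary reaches these entry points via BENCHMARK_MAIN(),
// which expands to roughly the following (an illustrative sketch, not the
// literal macro body):
//
//   int main(int argc, char** argv) {
//     ::benchmark::Initialize(&argc, argv);
//     if (::benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
//     ::benchmark::RunSpecifiedBenchmarks();
//   }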
size_t RunSpecifiedBenchmarks() {
  return RunSpecifiedBenchmarks(nullptr, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
  return RunSpecifiedBenchmarks(display_reporter, nullptr);
}

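// Main entry point: resolves the --benchmark_filter spec, sets up default
// display/file reporters (and the output file) when none were supplied,
// collects the matching benchmarks, and either lists or runs them. Returns
// the number of benchmarks that matched.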
size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
                              BenchmarkReporter* file_reporter) {
  std::string spec = FLAGS_benchmark_filter;
  if (spec.empty() || spec == "all")
    spec = ".";  // Regexp that matches all benchmarks

  // Setup the reporters
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_display_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!display_reporter) {
    default_display_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    display_reporter = default_display_reporter.get();
  }
  auto& Out = display_reporter->GetOutputStream();
  auto& Err = display_reporter->GetErrorStream();

  std::string const& fname = FLAGS_benchmark_out;
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
    file_reporter->SetOutputStream(&output_file);
    file_reporter->SetErrorStream(&output_file);
  }

  std::vector<internal::BenchmarkInstance> benchmarks;
  if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0;

  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }

  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks)
      Out << benchmark.name.str() << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
  }

  return benchmarks.size();
}

void RegisterMemoryManager(MemoryManager* manager) {
  internal::memory_manager = manager;
}

namespace internal {

void PrintUsageAndExit() {
  fprintf(stdout,
          "benchmark"
          " [--benchmark_list_tests={true|false}]\n"
          " [--benchmark_filter=<regex>]\n"
          " [--benchmark_min_time=<min_time>]\n"
          " [--benchmark_repetitions=<num_repetitions>]\n"
          " [--benchmark_report_aggregates_only={true|false}]\n"
          " [--benchmark_display_aggregates_only={true|false}]\n"
          " [--benchmark_format=<console|json|csv>]\n"
          " [--benchmark_out=<filename>]\n"
          " [--benchmark_out_format=<json|console|csv>]\n"
          " [--benchmark_color={auto|true|false}]\n"
          " [--benchmark_counters_tabular={true|false}]\n"
          " [--v=<verbosity>]\n");
  exit(0);
}

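// Consumes every recognized --benchmark_* flag (and --v) from argv, compacting
// argv in place so only unrecognized arguments remain for
// ReportUnrecognizedArguments(). --help prints usage; invalid format or color
// values also abort with the usage message.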
void ParseCommandLineFlags(int* argc, char** argv) {
  using namespace benchmark;
  BenchmarkReporter::Context::executable_name =
      (argc && *argc > 0) ? argv[0] : "unknown";
  for (int i = 1; argc && i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseBoolFlag(argv[i], "benchmark_display_aggregates_only",
                      &FLAGS_benchmark_display_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_format", &FLAGS_benchmark_format) ||
        ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) ||
        // "color_print" is the deprecated name for "benchmark_color".
        // TODO: Remove this.
        ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);
      --i;
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  if (FLAGS_benchmark_color.empty()) {
    PrintUsageAndExit();
  }
}
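
// Forces construction of the standard streams (via std::ios_base::Init) so
// they are safe to use from static initializers that run before main().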
int InitializeStreams() {
  static std::ios_base::Init init;
  return 0;
}

}  // end namespace internal

void Initialize(int* argc, char** argv) {
  internal::ParseCommandLineFlags(argc, argv);
  internal::LogLevel() = FLAGS_v;
}
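
// Prints every argument left over after Initialize() has stripped the
// recognized flags as an error on stderr, and returns whether any remained.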
bool ReportUnrecognizedArguments(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }
  return argc > 1;
}

}  // end namespace benchmark