// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/benchmark.h"
#include "benchmark_api_internal.h"
#include "internal_macros.h"

#ifndef BENCHMARK_OS_WINDOWS
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <unistd.h>
#endif

#include <algorithm>
#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>

#include "check.h"
#include "colorprint.h"
#include "commandlineflags.h"
#include "complexity.h"
#include "counter.h"
#include "internal_macros.h"
#include "log.h"
#include "mutex.h"
#include "re.h"
#include "statistics.h"
#include "string_util.h"
#include "thread_manager.h"
#include "thread_timer.h"

DEFINE_bool(benchmark_list_tests, false,
            "Print a list of benchmarks. This option overrides all other "
            "options.");

DEFINE_string(benchmark_filter, ".",
              "A regular expression that specifies the set of benchmarks "
              "to execute. If this flag is empty, no benchmarks are run. "
              "If this flag is the string \"all\", all benchmarks linked "
              "into the process are run.");

DEFINE_double(benchmark_min_time, 0.5,
              "Minimum number of seconds we should run benchmark before "
              "results are considered significant. For cpu-time based "
              "tests, this is the lower bound on the total cpu time "
              "used by all threads that make up the test. For real-time "
              "based tests, this is the lower bound on the elapsed time "
              "of the benchmark execution, regardless of number of "
              "threads.");

DEFINE_int32(benchmark_repetitions, 1,
             "The number of runs of each benchmark. If greater than 1, the "
             "mean and standard deviation of the runs will be reported.");

DEFINE_bool(benchmark_report_aggregates_only, false,
            "Report the result of each benchmark repetition. When 'true' is "
            "specified, only the mean, standard deviation, and other "
            "statistics are reported for repeated benchmarks.");

DEFINE_string(benchmark_format, "console",
              "The format to use for console output. Valid values are "
              "'console', 'json', or 'csv'.");

DEFINE_string(benchmark_out_format, "json",
              "The format to use for file output. Valid values are "
              "'console', 'json', or 'csv'.");

DEFINE_string(benchmark_out, "", "The file to write additional output to");

DEFINE_string(benchmark_color, "auto",
              "Whether to use colors in the output. Valid values: "
              "'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use "
              "colors if the output is being sent to a terminal and the TERM "
              "environment variable is set to a terminal type that supports "
              "colors.");

DEFINE_bool(benchmark_counters_tabular, false,
            "Whether to use tabular format when printing user counters to "
            "the console. Valid values: 'true'/'yes'/1, 'false'/'no'/0. "
            "Defaults to false.");

DEFINE_int32(v, 0, "The level of verbose logging to output");

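// The flags above make up the command-line interface of any binary linked
// against this library. A typical invocation might look like the following
// (the binary and benchmark names are hypothetical, shown only as an example):
//
//   ./my_benchmark --benchmark_filter=BM_memcpy \
//                  --benchmark_repetitions=5 \
//                  --benchmark_report_aggregates_only=true \
//                  --benchmark_out=results.json --benchmark_out_format=json
//
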
namespace benchmark {

namespace {
static const size_t kMaxIterations = 1000000000;
}  // end namespace

namespace internal {

void UseCharPointer(char const volatile*) {}

namespace {

BenchmarkReporter::Run CreateRunReport(
    const benchmark::internal::Benchmark::Instance& b,
    const internal::ThreadManager::Result& results, size_t iters,
    double seconds) {
  // Create report about this benchmark run.
  BenchmarkReporter::Run report;

  report.benchmark_name = b.name;
  report.error_occurred = results.has_error_;
  report.error_message = results.error_message_;
  report.report_label = results.report_label_;
  // Report the total iterations across all threads.
  report.iterations = static_cast<int64_t>(iters) * b.threads;
  report.time_unit = b.time_unit;

  if (!report.error_occurred) {
    double bytes_per_second = 0;
    if (results.bytes_processed > 0 && seconds > 0.0) {
      bytes_per_second = (results.bytes_processed / seconds);
    }
    double items_per_second = 0;
    if (results.items_processed > 0 && seconds > 0.0) {
      items_per_second = (results.items_processed / seconds);
    }

    if (b.use_manual_time) {
      report.real_accumulated_time = results.manual_time_used;
    } else {
      report.real_accumulated_time = results.real_time_used;
    }
    report.cpu_accumulated_time = results.cpu_time_used;
    report.bytes_per_second = bytes_per_second;
    report.items_per_second = items_per_second;
    report.complexity_n = results.complexity_n;
    report.complexity = b.complexity;
    report.complexity_lambda = b.complexity_lambda;
    report.statistics = b.statistics;
    report.counters = results.counters;
    internal::Finish(&report.counters, seconds, b.threads);
  }
  return report;
}

// Execute one thread of benchmark b for the specified number of iterations.
// Adds the stats collected for the thread into manager->results.
void RunInThread(const benchmark::internal::Benchmark::Instance* b,
                 size_t iters, int thread_id,
                 internal::ThreadManager* manager) {
  internal::ThreadTimer timer;
  State st(iters, b->arg, thread_id, b->threads, &timer, manager);
  b->benchmark->Run(st);
  CHECK(st.iterations() >= st.max_iterations)
      << "Benchmark returned before State::KeepRunning() returned false!";
  {
    MutexLock l(manager->GetBenchmarkMutex());
    internal::ThreadManager::Result& results = manager->results;
    results.cpu_time_used += timer.cpu_time_used();
    results.real_time_used += timer.real_time_used();
    results.manual_time_used += timer.manual_time_used();
    results.bytes_processed += st.bytes_processed();
    results.items_processed += st.items_processed();
    results.complexity_n += st.complexity_length_n();
    internal::Increment(&results.counters, st.counters);
  }
  manager->NotifyThreadComplete();
}

std::vector<BenchmarkReporter::Run> RunBenchmark(
    const benchmark::internal::Benchmark::Instance& b,
    std::vector<BenchmarkReporter::Run>* complexity_reports) {
  std::vector<BenchmarkReporter::Run> reports;  // return value

  const bool has_explicit_iteration_count = b.iterations != 0;
  size_t iters = has_explicit_iteration_count ? b.iterations : 1;
  std::unique_ptr<internal::ThreadManager> manager;
  std::vector<std::thread> pool(b.threads - 1);
  const int repeats =
      b.repetitions != 0 ? b.repetitions : FLAGS_benchmark_repetitions;
  const bool report_aggregates_only =
      repeats != 1 &&
      (b.report_mode == internal::RM_Unspecified
           ? FLAGS_benchmark_report_aggregates_only
           : b.report_mode == internal::RM_ReportAggregatesOnly);
  for (int repetition_num = 0; repetition_num < repeats; repetition_num++) {
    for (;;) {
      // Try benchmark
      VLOG(2) << "Running " << b.name << " for " << iters << "\n";

      manager.reset(new internal::ThreadManager(b.threads));
      for (std::size_t ti = 0; ti < pool.size(); ++ti) {
        pool[ti] = std::thread(&RunInThread, &b, iters,
                               static_cast<int>(ti + 1), manager.get());
      }
      RunInThread(&b, iters, 0, manager.get());
      manager->WaitForAllThreads();
      for (std::thread& thread : pool) thread.join();
      internal::ThreadManager::Result results;
      {
        MutexLock l(manager->GetBenchmarkMutex());
        results = manager->results;
      }
      manager.reset();
      // Adjust real/manual time stats since they were reported per thread.
      results.real_time_used /= b.threads;
      results.manual_time_used /= b.threads;

      VLOG(2) << "Ran in " << results.cpu_time_used << "/"
              << results.real_time_used << "\n";

      // Base decisions off of real time if requested by this benchmark.
      double seconds = results.cpu_time_used;
      if (b.use_manual_time) {
        seconds = results.manual_time_used;
      } else if (b.use_real_time) {
        seconds = results.real_time_used;
      }

      const double min_time =
          !IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time;

      // Determine if this run should be reported: either it has run for a
      // sufficient amount of time, or an error was reported.
      const bool should_report = repetition_num > 0
        || has_explicit_iteration_count  // An exact iteration count was requested
        || results.has_error_
        || iters >= kMaxIterations
        || seconds >= min_time  // the elapsed time is large enough
        // CPU time is specified but the elapsed real time greatly exceeds the
        // minimum time. Note that user-provided timers are exempt from this
        // sanity check.
        || ((results.real_time_used >= 5 * min_time) && !b.use_manual_time);

      if (should_report) {
        BenchmarkReporter::Run report =
            CreateRunReport(b, results, iters, seconds);
        if (!report.error_occurred && b.complexity != oNone)
          complexity_reports->push_back(report);
        reports.push_back(report);
        break;
      }

      // See by how much the iteration count should be increased.
      // Note: Avoid division by zero with max(seconds, 1ns).
      double multiplier = min_time * 1.4 / std::max(seconds, 1e-9);
      // If our last run was at least 10% of FLAGS_benchmark_min_time then we
      // use the multiplier directly. Otherwise we use at most 10 times
      // expansion.
      // NOTE: When the last run was at least 10% of the min time the max
      // expansion should be 14x.
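      // Worked example (illustrative, assuming the default 0.5s min time): if
      // the last run took seconds = 0.1 (20% of min_time), the run counts as
      // significant and multiplier = 0.5 * 1.4 / 0.1 = 7, so the next run uses
      // roughly 7x the iterations. If it took only 0.01s (2% of min_time), the
      // raw multiplier would be 70, but it is capped at 10x because such a
      // short sample is not trusted. The 14x bound mentioned above comes from
      // the significance threshold: seconds just above 0.05 (10% of min_time)
      // gives 0.5 * 1.4 / 0.05 = 14.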
      bool is_significant = (seconds / min_time) > 0.1;
      multiplier = is_significant ? multiplier : std::min(10.0, multiplier);
      if (multiplier <= 1.0) multiplier = 2.0;
      double next_iters = std::max(multiplier * iters, iters + 1.0);
      if (next_iters > kMaxIterations) {
        next_iters = kMaxIterations;
      }
      VLOG(3) << "Next iters: " << next_iters << ", " << multiplier << "\n";
      iters = static_cast<int>(next_iters + 0.5);
    }
  }
  // Calculate additional statistics
  auto stat_reports = ComputeStats(reports);
  if ((b.complexity != oNone) && b.last_benchmark_instance) {
    auto additional_run_stats = ComputeBigO(*complexity_reports);
    stat_reports.insert(stat_reports.end(), additional_run_stats.begin(),
                        additional_run_stats.end());
    complexity_reports->clear();
  }

  if (report_aggregates_only) reports.clear();
  reports.insert(reports.end(), stat_reports.begin(), stat_reports.end());
  return reports;
}

}  // namespace
}  // namespace internal

State::State(size_t max_iters, const std::vector<int64_t>& ranges, int thread_i,
             int n_threads, internal::ThreadTimer* timer,
             internal::ThreadManager* manager)
    : total_iterations_(0),
      batch_leftover_(0),
      max_iterations(max_iters),
      started_(false),
      finished_(false),
      error_occurred_(false),
      range_(ranges),
      bytes_processed_(0),
      items_processed_(0),
      complexity_n_(0),
      counters(),
      thread_index(thread_i),
      threads(n_threads),
      timer_(timer),
      manager_(manager) {
  CHECK(max_iterations != 0) << "At least one iteration must be run";
  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";

  // Note: The use of offsetof below is technically undefined until C++17
  // because State is not a standard layout type. However, all compilers
  // currently provide well-defined behavior as an extension (which is
  // demonstrated since constexpr evaluation must diagnose all undefined
  // behavior). However, GCC and Clang also warn about this use of offsetof,
  // which must be suppressed.
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
#endif
  // Offset tests to ensure commonly accessed data is on the first cache line.
  const int cache_line_size = 64;
  static_assert(offsetof(State, error_occurred_) <=
                (cache_line_size - sizeof(error_occurred_)), "");
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
}

void State::PauseTiming() {
  // Add in time accumulated so far
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StopTimer();
}

void State::ResumeTiming() {
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StartTimer();
}
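
// Illustrative use of PauseTiming()/ResumeTiming() from user code: work done
// while the timer is paused is excluded from the measurement. The benchmark
// name and the helper functions below are hypothetical, shown only as a
// sketch.
//
//   static void BM_SetInsert(benchmark::State& state) {
//     while (state.KeepRunning()) {
//       state.PauseTiming();
//       std::set<int> data = ConstructRandomSet(state.range(0));
//       state.ResumeTiming();
//       for (int j = 0; j < state.range(1); ++j) data.insert(RandomNumber());
//     }
//   }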

void State::SkipWithError(const char* msg) {
  CHECK(msg);
  error_occurred_ = true;
  {
    MutexLock l(manager_->GetBenchmarkMutex());
    if (manager_->results.has_error_ == false) {
      manager_->results.error_message_ = msg;
      manager_->results.has_error_ = true;
    }
  }
  total_iterations_ = 0;
  if (timer_->running()) timer_->StopTimer();
}
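
// Illustrative use of SkipWithError() from a benchmark body (the benchmark and
// file name are hypothetical): after reporting the error, the user code should
// return from, or break out of, the KeepRunning loop.
//
//   static void BM_ReadFile(benchmark::State& state) {
//     std::ifstream in("data.txt");
//     if (!in.is_open()) {
//       state.SkipWithError("could not open data.txt");
//       return;
//     }
//     while (state.KeepRunning()) {
//       /* measured work */
//     }
//   }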

void State::SetIterationTime(double seconds) {
  timer_->SetIterationTime(seconds);
}
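
// Illustrative use of SetIterationTime() together with UseManualTime(): the
// user measures each iteration with a timer of their choice and reports it
// here; the reported time then drives iteration-count decisions and the
// results. The benchmark name and DoCopy() are hypothetical.
//
//   static void BM_Copy(benchmark::State& state) {
//     while (state.KeepRunning()) {
//       auto start = std::chrono::high_resolution_clock::now();
//       DoCopy();  // hypothetical workload
//       auto end = std::chrono::high_resolution_clock::now();
//       state.SetIterationTime(
//           std::chrono::duration<double>(end - start).count());
//     }
//   }
//   BENCHMARK(BM_Copy)->UseManualTime();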

void State::SetLabel(const char* label) {
  MutexLock l(manager_->GetBenchmarkMutex());
  manager_->results.report_label_ = label;
}
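
// Illustrative use of SetLabel(): the label is attached to the report line for
// this benchmark run, e.g. state.SetLabel("cache-cold"); the label text is
// arbitrary and chosen by the user.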

void State::StartKeepRunning() {
  CHECK(!started_ && !finished_);
  started_ = true;
  total_iterations_ = error_occurred_ ? 0 : max_iterations;
  manager_->StartStopBarrier();
  if (!error_occurred_) ResumeTiming();
}

void State::FinishKeepRunning() {
  CHECK(started_ && (!finished_ || error_occurred_));
  if (!error_occurred_) {
    PauseTiming();
  }
  // Total iterations has now wrapped around past 0. Fix this.
  total_iterations_ = 0;
  finished_ = true;
  manager_->StartStopBarrier();
}

namespace internal {
namespace {

void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
                   BenchmarkReporter* console_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note the file_reporter can be null.
  CHECK(console_reporter != nullptr);

  // Determine the width of the name field using a minimum width of 10.
  bool has_repetitions = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  size_t stat_field_width = 0;
  for (const Benchmark::Instance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name.size());
    has_repetitions |= benchmark.repetitions > 1;

    for (const auto& Stat : *benchmark.statistics)
      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
  }
  if (has_repetitions) name_field_width += 1 + stat_field_width;

  // Print header here
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;

  // Keep track of running times of all instances of current benchmark
  std::vector<BenchmarkReporter::Run> complexity_reports;

  // We flush streams after invoking reporter methods that write to them. This
  // ensures users get timely updates even when streams are not line-buffered.
  auto flushStreams = [](BenchmarkReporter* reporter) {
    if (!reporter) return;
    std::flush(reporter->GetOutputStream());
    std::flush(reporter->GetErrorStream());
  };

  if (console_reporter->ReportContext(context) &&
      (!file_reporter || file_reporter->ReportContext(context))) {
    flushStreams(console_reporter);
    flushStreams(file_reporter);
    for (const auto& benchmark : benchmarks) {
      std::vector<BenchmarkReporter::Run> reports =
          RunBenchmark(benchmark, &complexity_reports);
      console_reporter->ReportRuns(reports);
      if (file_reporter) file_reporter->ReportRuns(reports);
      flushStreams(console_reporter);
      flushStreams(file_reporter);
    }
  }
  console_reporter->Finalize();
  if (file_reporter) file_reporter->Finalize();
  flushStreams(console_reporter);
  flushStreams(file_reporter);
}

std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  } else {
    std::cerr << "Unexpected format: '" << name << "'\n";
    std::exit(1);
  }
}

}  // end namespace

bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}

ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
  int output_opts = ConsoleReporter::OO_Defaults;
  if ((FLAGS_benchmark_color == "auto" && IsColorTerminal()) ||
      IsTruthyFlagValue(FLAGS_benchmark_color)) {
    output_opts |= ConsoleReporter::OO_Color;
  } else {
    output_opts &= ~ConsoleReporter::OO_Color;
  }
  if (force_no_color) {
    output_opts &= ~ConsoleReporter::OO_Color;
  }
  if (FLAGS_benchmark_counters_tabular) {
    output_opts |= ConsoleReporter::OO_Tabular;
  } else {
    output_opts &= ~ConsoleReporter::OO_Tabular;
  }
  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
}

}  // end namespace internal

size_t RunSpecifiedBenchmarks() {
  return RunSpecifiedBenchmarks(nullptr, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter) {
  return RunSpecifiedBenchmarks(console_reporter, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
                              BenchmarkReporter* file_reporter) {
  std::string spec = FLAGS_benchmark_filter;
  if (spec.empty() || spec == "all")
    spec = ".";  // Regexp that matches all benchmarks

  // Setup the reporters
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_console_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!console_reporter) {
    default_console_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    console_reporter = default_console_reporter.get();
  }
  auto& Out = console_reporter->GetOutputStream();
  auto& Err = console_reporter->GetErrorStream();

  std::string const& fname = FLAGS_benchmark_out;
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
    file_reporter->SetOutputStream(&output_file);
    file_reporter->SetErrorStream(&output_file);
  }

  std::vector<internal::Benchmark::Instance> benchmarks;
  if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0;

  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }

  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks) Out << benchmark.name << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, console_reporter, file_reporter);
  }

  return benchmarks.size();
}

namespace internal {

void PrintUsageAndExit() {
  fprintf(stdout,
          "benchmark"
          " [--benchmark_list_tests={true|false}]\n"
          " [--benchmark_filter=<regex>]\n"
          " [--benchmark_min_time=<min_time>]\n"
          " [--benchmark_repetitions=<num_repetitions>]\n"
          " [--benchmark_report_aggregates_only={true|false}]\n"
          " [--benchmark_format=<console|json|csv>]\n"
          " [--benchmark_out=<filename>]\n"
          " [--benchmark_out_format=<json|console|csv>]\n"
          " [--benchmark_color={auto|true|false}]\n"
          " [--benchmark_counters_tabular={true|false}]\n"
          " [--v=<verbosity>]\n");
  exit(0);
}

void ParseCommandLineFlags(int* argc, char** argv) {
  using namespace benchmark;
  BenchmarkReporter::Context::executable_name = argv[0];
  for (int i = 1; i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_format", &FLAGS_benchmark_format) ||
        ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) ||
        // "color_print" is the deprecated name for "benchmark_color".
        // TODO: Remove this.
        ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);
      --i;
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  if (FLAGS_benchmark_color.empty()) {
    PrintUsageAndExit();
  }
}

int InitializeStreams() {
  static std::ios_base::Init init;
  return 0;
}

}  // end namespace internal

void Initialize(int* argc, char** argv) {
  internal::ParseCommandLineFlags(argc, argv);
  internal::LogLevel() = FLAGS_v;
}

bool ReportUnrecognizedArguments(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }
  return argc > 1;
}
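
// Illustrative wiring of the public entry points above in a user's main()
// (this mirrors what the BENCHMARK_MAIN() macro in the public header expands
// to, modulo details):
//
//   int main(int argc, char** argv) {
//     benchmark::Initialize(&argc, argv);
//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
//     benchmark::RunSpecifiedBenchmarks();
//   }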

}  // end namespace benchmark