[clang-tidy] autofix readability issues (#1931)

* [clang-tidy] autofix readability issues

* more modern clang format
dominic 2025-02-12 11:40:49 -08:00, committed by GitHub
parent 2d4c8dd21a · commit adbda82db3
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
37 changed files with 253 additions and 225 deletions
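Note: nearly every C++ hunk below is a mechanical rewrite produced by clang-tidy's readability checks, chiefly readability-implicit-bool-conversion: pointer truth tests become explicit != nullptr comparisons and bitmask truth tests become explicit != 0 comparisons. A minimal before/after sketch of the pattern (illustrative names only, not taken from this diff):

#include <cstdint>

struct Reporter {
  void Finalize() {}
};

constexpr uint32_t kTabular = 1U << 0;  // hypothetical flag bit

// Before: relies on implicit pointer-to-bool and int-to-bool conversions.
void FinalizeBefore(Reporter* reporter, uint32_t flags) {
  if (reporter) reporter->Finalize();
  bool tabular = flags & kTabular;
  (void)tabular;
}

// After: the same logic with the conversions spelled out, the way
// clang-tidy rewrites it.
void FinalizeAfter(Reporter* reporter, uint32_t flags) {
  if (reporter != nullptr) reporter->Finalize();
  bool tabular = (flags & kTabular) != 0;
  (void)tabular;
}

int main() {
  Reporter r;
  FinalizeBefore(&r, kTabular);
  FinalizeAfter(&r, kTabular);
  return 0;
}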

View File

@@ -10,9 +10,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
-    - uses: DoozyX/clang-format-lint-action@v0.15
+    - uses: DoozyX/clang-format-lint-action@v0.18.2
       with:
         source: './include/benchmark ./src ./test'
-        extensions: 'h,cc'
-        clangFormatVersion: 12
-        style: Google
+        clangFormatVersion: 18

View File

@@ -313,7 +313,7 @@ BENCHMARK_EXPORT std::string GetBenchmarkVersion();
 BENCHMARK_EXPORT void PrintDefaultHelp();
 BENCHMARK_EXPORT void Initialize(int* argc, char** argv,
-                                 void (*HelperPrinterf)() = PrintDefaultHelp);
+                                 void (*HelperPrintf)() = PrintDefaultHelp);
 BENCHMARK_EXPORT void Shutdown();

 // Report to stdout all arguments in 'argv' as unrecognized except the first.
@@ -1165,7 +1165,7 @@ class BENCHMARK_EXPORT Benchmark {
   // Pass this benchmark object to *func, which can customize
   // the benchmark by calling various methods like Arg, Args,
   // Threads, etc.
-  Benchmark* Apply(void (*func)(Benchmark* benchmark));
+  Benchmark* Apply(void (*custom_arguments)(Benchmark* benchmark));

   // Set the range multiplier for non-dense range. If not called, the range
   // multiplier kRangeMultiplier will be used.
@@ -1869,8 +1869,8 @@ class BENCHMARK_EXPORT ConsoleReporter : public BenchmarkReporter {
   void ReportRuns(const std::vector<Run>& reports) override;

 protected:
-  virtual void PrintRunData(const Run& report);
-  virtual void PrintHeader(const Run& report);
+  virtual void PrintRunData(const Run& result);
+  virtual void PrintHeader(const Run& run);

   OutputOptions output_options_;
   size_t name_field_width_;
@@ -1886,7 +1886,7 @@ class BENCHMARK_EXPORT JSONReporter : public BenchmarkReporter {
   void Finalize() override;

 private:
-  void PrintRunData(const Run& report);
+  void PrintRunData(const Run& run);

   bool first_report_;
 };
@@ -1900,7 +1900,7 @@ class BENCHMARK_EXPORT BENCHMARK_DEPRECATED_MSG(
   void ReportRuns(const std::vector<Run>& reports) override;

 private:
-  void PrintRunData(const Run& report);
+  void PrintRunData(const Run& run);

   bool printed_header_;
   std::set<std::string> user_counter_names_;

View File

@@ -46,7 +46,6 @@
 #include "commandlineflags.h"
 #include "complexity.h"
 #include "counter.h"
-#include "internal_macros.h"
 #include "log.h"
 #include "mutex.h"
 #include "perf_counters.h"
@@ -198,7 +197,7 @@ State::State(std::string name, IterationCount max_iters,
   // `PauseTiming`, a new `Counter` will be inserted the first time, which
   // won't have the flag. Inserting them now also reduces the allocations
   // during the benchmark.
-  if (perf_counters_measurement_) {
+  if (perf_counters_measurement_ != nullptr) {
     for (const std::string& counter_name :
          perf_counters_measurement_->names()) {
       counters[counter_name] = Counter(0.0, Counter::kAvgIterations);
@@ -247,7 +246,7 @@ void State::PauseTiming() {
   // Add in time accumulated so far
   BM_CHECK(started_ && !finished_ && !skipped());
   timer_->StopTimer();
-  if (perf_counters_measurement_) {
+  if (perf_counters_measurement_ != nullptr) {
     std::vector<std::pair<std::string, double>> measurements;
     if (!perf_counters_measurement_->Stop(measurements)) {
       BM_CHECK(false) << "Perf counters read the value failed.";
@@ -265,7 +264,7 @@ void State::PauseTiming() {
 void State::ResumeTiming() {
   BM_CHECK(started_ && !finished_ && !skipped());
   timer_->StartTimer();
-  if (perf_counters_measurement_) {
+  if (perf_counters_measurement_ != nullptr) {
     perf_counters_measurement_->Start();
   }
 }
@@ -342,7 +341,7 @@ namespace {
 // Flushes streams after invoking reporter methods that write to them. This
 // ensures users get timely updates even when streams are not line-buffered.
 void FlushStreams(BenchmarkReporter* reporter) {
-  if (!reporter) {
+  if (reporter == nullptr) {
     return;
   }
   std::flush(reporter->GetOutputStream());
@@ -367,7 +366,7 @@ void Report(BenchmarkReporter* display_reporter,
   report_one(display_reporter, run_results.display_report_aggregates_only,
              run_results);
-  if (file_reporter) {
+  if (file_reporter != nullptr) {
     report_one(file_reporter, run_results.file_report_aggregates_only,
                run_results);
   }
@@ -408,7 +407,7 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
       per_family_reports;

   if (display_reporter->ReportContext(context) &&
-      (!file_reporter || file_reporter->ReportContext(context))) {
+      ((file_reporter == nullptr) || file_reporter->ReportContext(context))) {
     FlushStreams(display_reporter);
     FlushStreams(file_reporter);
@@ -433,12 +432,12 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
     if (benchmark.complexity() != oNone) {
       reports_for_family = &per_family_reports[benchmark.family_index()];
     }
-    benchmarks_with_threads += (benchmark.threads() > 1);
+    benchmarks_with_threads += static_cast<int>(benchmark.threads() > 1);
     runners.emplace_back(benchmark, &perfcounters, reports_for_family);
     int num_repeats_of_this_instance = runners.back().GetNumRepeats();
     num_repetitions_total +=
         static_cast<size_t>(num_repeats_of_this_instance);
-    if (reports_for_family) {
+    if (reports_for_family != nullptr) {
       reports_for_family->num_runs_total += num_repeats_of_this_instance;
     }
   }
@@ -482,7 +481,7 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
       display_reporter->ReportRunsConfig(
           runner.GetMinTime(), runner.HasExplicitIters(), runner.GetIters());
-      if (file_reporter) {
+      if (file_reporter != nullptr) {
         file_reporter->ReportRunsConfig(
             runner.GetMinTime(), runner.HasExplicitIters(), runner.GetIters());
       }
@@ -506,7 +505,7 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
     }
   }
   display_reporter->Finalize();
-  if (file_reporter) {
+  if (file_reporter != nullptr) {
     file_reporter->Finalize();
   }
   FlushStreams(display_reporter);
@@ -569,7 +568,7 @@ ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
 }  // end namespace internal

 BenchmarkReporter* CreateDefaultDisplayReporter() {
-  static auto default_display_reporter =
+  static auto* default_display_reporter =
       internal::CreateReporter(FLAGS_benchmark_format,
                                internal::GetOutputOptions())
           .release();
@@ -611,7 +610,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
   std::ofstream output_file;
   std::unique_ptr<BenchmarkReporter> default_display_reporter;
   std::unique_ptr<BenchmarkReporter> default_file_reporter;
-  if (!display_reporter) {
+  if (display_reporter == nullptr) {
     default_display_reporter.reset(CreateDefaultDisplayReporter());
     display_reporter = default_display_reporter.get();
   }
@@ -619,7 +618,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
   auto& Err = display_reporter->GetErrorStream();

   std::string const& fname = FLAGS_benchmark_out;
-  if (fname.empty() && file_reporter) {
+  if (fname.empty() && (file_reporter != nullptr)) {
     Err << "A custom file reporter was provided but "
            "--benchmark_out=<file> was not specified.\n";
     Out.flush();
@@ -634,7 +633,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
     Err.flush();
     std::exit(1);
   }
-  if (!file_reporter) {
+  if (file_reporter == nullptr) {
     default_file_reporter = internal::CreateReporter(
         FLAGS_benchmark_out_format, FLAGS_benchmark_counters_tabular
                                         ? ConsoleReporter::OO_Tabular
@@ -743,8 +742,8 @@ void SetDefaultTimeUnitFromFlag(const std::string& time_unit_flag) {
 void ParseCommandLineFlags(int* argc, char** argv) {
   using namespace benchmark;
   BenchmarkReporter::Context::executable_name =
-      (argc && *argc > 0) ? argv[0] : "unknown";
-  for (int i = 1; argc && i < *argc; ++i) {
+      ((argc != nullptr) && *argc > 0) ? argv[0] : "unknown";
+  for (int i = 1; (argc != nullptr) && i < *argc; ++i) {
     if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                       &FLAGS_benchmark_list_tests) ||
         ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||

View File

@@ -101,7 +101,7 @@ State BenchmarkInstance::Run(
 }

 void BenchmarkInstance::Setup() const {
-  if (setup_) {
+  if (setup_ != nullptr) {
     State st(name_.function_name, /*iters*/ 1, args_, /*thread_id*/ 0, threads_,
              nullptr, nullptr, nullptr, nullptr);
     setup_(st);
@@ -109,7 +109,7 @@ void BenchmarkInstance::Setup() const {
 }

 void BenchmarkInstance::Teardown() const {
-  if (teardown_) {
+  if (teardown_ != nullptr) {
     State st(name_.function_name, /*iters*/ 1, args_, /*thread_id*/ 0, threads_,
              nullptr, nullptr, nullptr, nullptr);
     teardown_(st);

View File

@@ -17,9 +17,9 @@ namespace internal {
 // Information kept per benchmark we may want to run
 class BenchmarkInstance {
  public:
-  BenchmarkInstance(Benchmark* benchmark, int family_index,
-                    int per_family_instance_index,
-                    const std::vector<int64_t>& args, int threads);
+  BenchmarkInstance(Benchmark* benchmark, int family_idx,
+                    int per_family_instance_idx,
+                    const std::vector<int64_t>& args, int thread_count);

   const BenchmarkName& name() const { return name_; }
   int family_index() const { return family_index_; }

View File

@@ -14,5 +14,5 @@
 #include "benchmark/benchmark.h"

-BENCHMARK_EXPORT int main(int, char**);
+BENCHMARK_EXPORT int main(int /*argc*/, char** /*argv*/);

 BENCHMARK_MAIN();

View File

@@ -27,8 +27,8 @@ size_t size_impl(const Head& head, const Tail&... tail) {
 }

 // Join a pack of std::strings using a delimiter
-// TODO: use absl::StrJoin
-void join_impl(std::string&, char) {}
+// TODO(dominic): use absl::StrJoin
+void join_impl(std::string& /*unused*/, char /*unused*/) {}

 template <typename Head, typename... Tail>
 void join_impl(std::string& s, const char delimiter, const Head& head,

View File

@@ -53,13 +53,13 @@ namespace benchmark {

 namespace {
 // For non-dense Range, intermediate values are powers of kRangeMultiplier.
-static constexpr int kRangeMultiplier = 8;
+constexpr int kRangeMultiplier = 8;

 // The size of a benchmark family determines is the number of inputs to repeat
 // the benchmark on. If this is "large" then warn the user during configuration.
-static constexpr size_t kMaxFamilySize = 100;
+constexpr size_t kMaxFamilySize = 100;

-static constexpr char kDisabledPrefix[] = "DISABLED_";
+constexpr char kDisabledPrefix[] = "DISABLED_";
 }  // end namespace

 namespace internal {
@@ -82,7 +82,7 @@ class BenchmarkFamilies {
   // Extract the list of benchmark instances that match the specified
   // regular expression.
-  bool FindBenchmarks(std::string re,
+  bool FindBenchmarks(std::string spec,
                       std::vector<BenchmarkInstance>* benchmarks,
                       std::ostream* Err);

View File

@@ -46,7 +46,6 @@
 #include "commandlineflags.h"
 #include "complexity.h"
 #include "counter.h"
-#include "internal_macros.h"
 #include "log.h"
 #include "mutex.h"
 #include "perf_counters.h"
@@ -74,7 +73,7 @@ ProfilerManager* profiler_manager = nullptr;
 namespace {

-static constexpr IterationCount kMaxIterations = 1000000000000;
+constexpr IterationCount kMaxIterations = 1000000000000;

 const double kDefaultMinTime =
     std::strtod(::benchmark::kDefaultMinTimeStr, /*p_end*/ nullptr);
@@ -100,7 +99,7 @@ BenchmarkReporter::Run CreateRunReport(
   report.repetition_index = repetition_index;
   report.repetitions = repeats;

-  if (!report.skipped) {
+  if (report.skipped == 0u) {
     if (b.use_manual_time()) {
       report.real_accumulated_time = results.manual_time_used;
     } else {
@@ -118,7 +117,8 @@ BenchmarkReporter::Run CreateRunReport(
     assert(memory_result != nullptr);
     report.memory_result = memory_result;
     report.allocs_per_iter =
-        memory_iterations ? static_cast<double>(memory_result->num_allocs) /
-                                static_cast<double>(memory_iterations)
-                          : 0;
+        memory_iterations != 0
+            ? static_cast<double>(memory_result->num_allocs) /
+                  static_cast<double>(memory_iterations)
+            : 0;
   }
@@ -273,10 +273,11 @@ BenchmarkRunner::BenchmarkRunner(
           FLAGS_benchmark_report_aggregates_only;
   if (b.aggregation_report_mode() != internal::ARM_Unspecified) {
     run_results.display_report_aggregates_only =
-        (b.aggregation_report_mode() &
-         internal::ARM_DisplayReportAggregatesOnly);
+        ((b.aggregation_report_mode() &
+          internal::ARM_DisplayReportAggregatesOnly) != 0u);
     run_results.file_report_aggregates_only =
-        (b.aggregation_report_mode() & internal::ARM_FileReportAggregatesOnly);
+        ((b.aggregation_report_mode() &
+          internal::ARM_FileReportAggregatesOnly) != 0u);
   BM_CHECK(FLAGS_benchmark_perf_counters.empty() ||
            (perf_counters_measurement_ptr->num_counters() == 0))
       << "Perf counters were requested but could not be set up.";
@@ -364,7 +365,7 @@ bool BenchmarkRunner::ShouldReportIterationResults(
   // Determine if this run should be reported;
   // Either it has run for a sufficient amount of time
   // or because an error was reported.
-  return i.results.skipped_ || FLAGS_benchmark_dry_run ||
+  return (i.results.skipped_ != 0u) || FLAGS_benchmark_dry_run ||
          i.iters >= kMaxIterations ||  // Too many iterations already.
          i.seconds >=
             GetMinTimeToApply() ||  // The elapsed time is large enough.
@@ -528,9 +529,9 @@ void BenchmarkRunner::DoOneRepetition() {
       CreateRunReport(b, i.results, memory_iterations, memory_result, i.seconds,
                       num_repetitions_done, repeats);

-  if (reports_for_family) {
+  if (reports_for_family != nullptr) {
     ++reports_for_family->num_runs_done;
-    if (!report.skipped) {
+    if (report.skipped == 0u) {
       reports_for_family->Runs.push_back(report);
     }
   }
} }

View File

@@ -51,7 +51,7 @@ BenchTimeType ParseBenchMinTime(const std::string& value);
 class BenchmarkRunner {
  public:
   BenchmarkRunner(const benchmark::internal::BenchmarkInstance& b_,
-                  benchmark::internal::PerfCountersMeasurement* pmc_,
+                  benchmark::internal::PerfCountersMeasurement* pcm_,
                   BenchmarkReporter::PerFamilyRunReports* reports_for_family);

   int GetNumRepeats() const { return repeats; }

View File

@@ -156,7 +156,7 @@ void ColorPrintf(std::ostream& out, LogColor color, const char* fmt,
   SetConsoleTextAttribute(stdout_handle, original_color_attrs);
 #else
   const char* color_code = GetPlatformColorCode(color);
-  if (color_code) {
+  if (color_code != nullptr) {
     out << FormatString("\033[0;3%sm", color_code);
   }
   out << FormatString(fmt, args) << "\033[m";
@@ -195,7 +195,7 @@ bool IsColorTerminal() {
   bool term_supports_color = false;
   for (const char* candidate : SUPPORTED_TERM_VALUES) {
-    if (term && 0 == strcmp(term, candidate)) {
+    if ((term != nullptr) && 0 == strcmp(term, candidate)) {
       term_supports_color = true;
       break;
     }

View File

@@ -109,7 +109,7 @@ bool ParseKvPairs(const std::string& src_text, const char* str,
 // Returns the name of the environment variable corresponding to the
 // given flag. For example, FlagToEnvVar("foo") will return
 // "BENCHMARK_FOO" in the open-source version.
-static std::string FlagToEnvVar(const char* flag) {
+std::string FlagToEnvVar(const char* flag) {
   const std::string flag_str(flag);

   std::string env_var;

View File

@@ -63,7 +63,7 @@ void ConsoleReporter::PrintHeader(const Run& run) {
       FormatString("%-*s %13s %15s %12s", static_cast<int>(name_field_width_),
                    "Benchmark", "Time", "CPU", "Iterations");
   if (!run.counters.empty()) {
-    if (output_options_ & OO_Tabular) {
+    if ((output_options_ & OO_Tabular) != 0) {
       for (auto const& c : run.counters) {
         str += FormatString(" %10s", c.first.c_str());
       }
@@ -83,7 +83,7 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
     bool print_header = !printed_header_;
     // --- or if the format is tabular and this run
     //     has different fields from the prev header
-    print_header |= (output_options_ & OO_Tabular) &&
+    print_header |= ((output_options_ & OO_Tabular) != 0) &&
                     (!internal::SameNames(run.counters, prev_counters_));
     if (print_header) {
       printed_header_ = true;
@@ -97,8 +97,8 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
   }
 }

-static void IgnoreColorPrint(std::ostream& out, LogColor, const char* fmt,
-                             ...) {
+static void IgnoreColorPrint(std::ostream& out, LogColor /*unused*/,
+                             const char* fmt, ...) {
   va_list args;
   va_start(args, fmt);
   out << FormatString(fmt, args);
@@ -131,7 +131,7 @@ BENCHMARK_EXPORT
 void ConsoleReporter::PrintRunData(const Run& result) {
   typedef void(PrinterFn)(std::ostream&, LogColor, const char*, ...);
   auto& Out = GetOutputStream();
-  PrinterFn* printer = (output_options_ & OO_Color)
+  PrinterFn* printer = (output_options_ & OO_Color) != 0
                            ? static_cast<PrinterFn*>(ColorPrintf)
                            : IgnoreColorPrint;
   auto name_color =
@@ -144,7 +144,8 @@ void ConsoleReporter::PrintRunData(const Run& result) {
             result.skip_message.c_str());
     printer(Out, COLOR_DEFAULT, "\n");
     return;
-  } else if (internal::SkippedWithMessage == result.skipped) {
+  }
+  if (internal::SkippedWithMessage == result.skipped) {
     printer(Out, COLOR_WHITE, "SKIPPED: \'%s\'", result.skip_message.c_str());
     printer(Out, COLOR_DEFAULT, "\n");
     return;
@@ -178,9 +179,9 @@ void ConsoleReporter::PrintRunData(const Run& result) {
     printer(Out, COLOR_CYAN, "%10lld", result.iterations);
   }

-  for (auto& c : result.counters) {
+  for (const auto& c : result.counters) {
     const std::size_t cNameLen =
-        std::max(std::string::size_type(10), c.first.length());
+        std::max(static_cast<std::size_t>(10), c.first.length());
     std::string s;
     const char* unit = "";
     if (result.run_type == Run::RT_Aggregate &&
@@ -189,11 +190,11 @@ void ConsoleReporter::PrintRunData(const Run& result) {
       unit = "%";
     } else {
       s = HumanReadableNumber(c.second.value, c.second.oneK);
-      if (c.second.flags & Counter::kIsRate) {
-        unit = (c.second.flags & Counter::kInvert) ? "s" : "/s";
+      if ((c.second.flags & Counter::kIsRate) != 0) {
+        unit = (c.second.flags & Counter::kInvert) != 0 ? "s" : "/s";
       }
     }
-    if (output_options_ & OO_Tabular) {
+    if ((output_options_ & OO_Tabular) != 0) {
       printer(Out, COLOR_DEFAULT, " %*s%s", cNameLen - strlen(unit), s.c_str(),
               unit);
     } else {

View File

@@ -20,20 +20,20 @@ namespace internal {
 double Finish(Counter const& c, IterationCount iterations, double cpu_time,
               double num_threads) {
   double v = c.value;
-  if (c.flags & Counter::kIsRate) {
+  if ((c.flags & Counter::kIsRate) != 0) {
     v /= cpu_time;
   }
-  if (c.flags & Counter::kAvgThreads) {
+  if ((c.flags & Counter::kAvgThreads) != 0) {
     v /= num_threads;
   }
-  if (c.flags & Counter::kIsIterationInvariant) {
+  if ((c.flags & Counter::kIsIterationInvariant) != 0) {
     v *= static_cast<double>(iterations);
   }
-  if (c.flags & Counter::kAvgIterations) {
+  if ((c.flags & Counter::kAvgIterations) != 0) {
     v /= static_cast<double>(iterations);
   }
-  if (c.flags & Counter::kInvert) {  // Invert is *always* last.
+  if ((c.flags & Counter::kInvert) != 0) {  // Invert is *always* last.
     v = 1.0 / v;
   }
   return v;
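Note: the ordering above is deliberate. Because kInvert is applied after the rate division, a counter flagged kIsRate|kInvert reports seconds per item rather than items per second. A standalone sketch of that ordering (assumed flag bit values; this mirrors the Finish() logic above rather than calling the library):

#include <cstdint>
#include <iostream>

// Assumed flag bits for illustration; the real values live in benchmark.h.
constexpr uint32_t kIsRate = 1U << 0;
constexpr uint32_t kInvert = 1U << 1;

// Mirrors Finish() above: rate division first, inversion always last.
double FinishSketch(double value, uint32_t flags, double cpu_time) {
  double v = value;
  if ((flags & kIsRate) != 0) v /= cpu_time;
  if ((flags & kInvert) != 0) v = 1.0 / v;
  return v;
}

int main() {
  // 1000 items over 2 s: 500 items/s as a rate, 0.002 s/item when inverted.
  std::cout << FinishSketch(1000.0, kIsRate, 2.0) << "\n";
  std::cout << FinishSketch(1000.0, kIsRate | kInvert, 2.0) << "\n";
  return 0;
}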

View File

@@ -115,7 +115,7 @@ BENCHMARK_EXPORT
 void CSVReporter::PrintRunData(const Run& run) {
   std::ostream& Out = GetOutputStream();
   Out << CsvEscape(run.benchmark_name()) << ",";
-  if (run.skipped) {
+  if (run.skipped != 0u) {
     Out << std::string(elements.size() - 3, ',');
     Out << std::boolalpha << (internal::SkippedWithError == run.skipped) << ",";
     Out << CsvEscape(run.skip_message) << "\n";

View File

@@ -85,6 +85,10 @@ std::string FormatKV(std::string const& key, int64_t value) {
   return ss.str();
 }

+std::string FormatKV(std::string const& key, int value) {
+  return FormatKV(key, static_cast<int64_t>(value));
+}
+
 std::string FormatKV(std::string const& key, double value) {
   std::stringstream ss;
   ss << '"' << StrEscape(key) << "\": ";
@@ -122,7 +126,7 @@ bool JSONReporter::ReportContext(const Context& context) {
   out << indent << FormatKV("host_name", context.sys_info.name) << ",\n";

-  if (Context::executable_name) {
+  if (Context::executable_name != nullptr) {
     out << indent << FormatKV("executable", Context::executable_name) << ",\n";
   }
@@ -136,7 +140,7 @@ bool JSONReporter::ReportContext(const Context& context) {
   if (CPUInfo::Scaling::UNKNOWN != info.scaling) {
     out << indent
         << FormatKV("cpu_scaling_enabled",
-                    info.scaling == CPUInfo::Scaling::ENABLED ? true : false)
+                    info.scaling == CPUInfo::Scaling::ENABLED)
         << ",\n";
   }
@@ -144,7 +148,7 @@ bool JSONReporter::ReportContext(const Context& context) {
     indent = std::string(6, ' ');
     std::string cache_indent(8, ' ');
     for (size_t i = 0; i < info.caches.size(); ++i) {
-      auto& CI = info.caches[i];
+      const auto& CI = info.caches[i];
       out << indent << "{\n";
       out << cache_indent << FormatKV("type", CI.type) << ",\n";
       out << cache_indent << FormatKV("level", static_cast<int64_t>(CI.level))
@@ -183,7 +187,7 @@ bool JSONReporter::ReportContext(const Context& context) {
   out << ",\n";

   // NOTE: our json schema is not strictly tied to the library version!
-  out << indent << FormatKV("json_schema_version", int64_t(1));
+  out << indent << FormatKV("json_schema_version", 1);

   std::map<std::string, std::string>* global_context =
       internal::GetGlobalContext();
@@ -298,11 +302,11 @@ void JSONReporter::PrintRunData(Run const& run) {
     out << indent << FormatKV("rms", run.GetAdjustedCPUTime());
   }

-  for (auto& c : run.counters) {
+  for (const auto& c : run.counters) {
     out << ",\n" << indent << FormatKV(c.first, c.second);
   }

-  if (run.memory_result) {
+  if (run.memory_result != nullptr) {
     const MemoryManager::Result memory_result = *run.memory_result;
     out << ",\n" << indent << FormatKV("allocs_per_iter", run.allocs_per_iter);
     out << ",\n"

View File

@@ -42,17 +42,18 @@ void BenchmarkReporter::PrintBasicContext(std::ostream *out,
   Out << LocalDateTimeString() << "\n";
 #endif

-  if (context.executable_name) {
-    Out << "Running " << context.executable_name << "\n";
+  if (benchmark::BenchmarkReporter::Context::executable_name != nullptr) {
+    Out << "Running " << benchmark::BenchmarkReporter::Context::executable_name
+        << "\n";
   }

   const CPUInfo &info = context.cpu_info;
   Out << "Run on (" << info.num_cpus << " X "
       << (info.cycles_per_second / 1000000.0) << " MHz CPU "
       << ((info.num_cpus > 1) ? "s" : "") << ")\n";
-  if (info.caches.size() != 0) {
+  if (!info.caches.empty()) {
     Out << "CPU Caches:\n";
-    for (auto &CInfo : info.caches) {
+    for (const auto &CInfo : info.caches) {
       Out << "  L" << CInfo.level << " " << CInfo.type << " "
           << (CInfo.size / 1024) << " KiB";
       if (CInfo.num_sharing != 0) {

View File

@@ -153,7 +153,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   for (Run const& run : reports) {
     BM_CHECK_EQ(reports[0].benchmark_name(), run.benchmark_name());
     BM_CHECK_EQ(run_iterations, run.iterations);
-    if (run.skipped) {
+    if (run.skipped != 0u) {
       continue;
     }
     real_accumulated_time_stat.emplace_back(run.real_accumulated_time);
@@ -176,7 +176,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
   }

   const double iteration_rescale_factor =
-      double(reports.size()) / double(run_iterations);
+      static_cast<double>(reports.size()) / static_cast<double>(run_iterations);

   for (const auto& Stat : *reports[0].statistics) {
     // Get the data from the accumulator to BenchmarkReporter::Run's.

View File

@@ -29,7 +29,7 @@ static_assert(arraysize(kBigSIUnits) == arraysize(kBigIECUnits),
 static_assert(arraysize(kSmallSIUnits) == arraysize(kBigSIUnits),
               "Small SI and Big SI unit arrays must be the same size");

-static const int64_t kUnitsSize = arraysize(kBigSIUnits);
+const int64_t kUnitsSize = arraysize(kBigSIUnits);

 void ToExponentAndMantissa(double val, int precision, double one_k,
                            std::string* mantissa, int64_t* exponent) {

View File

@@ -76,7 +76,6 @@
 #include "benchmark/benchmark.h"
 #include "check.h"
 #include "cycleclock.h"
-#include "internal_macros.h"
 #include "log.h"
 #include "string_util.h"
 #include "timers.h"

View File

@@ -107,7 +107,7 @@ double MakeTime(struct timespec const& ts) {
 }
 #endif

-BENCHMARK_NORETURN static void DiagnoseAndExit(const char* msg) {
+BENCHMARK_NORETURN void DiagnoseAndExit(const char* msg) {
   std::cerr << "ERROR: " << msg << '\n';
   std::flush(std::cerr);
   std::exit(EXIT_FAILURE);

View File

@@ -5,7 +5,8 @@
 void BM_empty(benchmark::State& state) {
   for (auto _ : state) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
 }

View File

@@ -48,7 +48,7 @@ class BenchmarkTest : public testing::Test {
   static void TeardownHook(int /* num_threads */) { queue->push("Teardown"); }

-  void Execute(const std::string& pattern) {
+  static void Execute(const std::string& pattern) {
     queue->Clear();

     std::unique_ptr<BenchmarkReporter> reporter(new NullReporter());

View File

@@ -80,7 +80,7 @@ int fixture_setup = 0;

 class FIXTURE_BECHMARK_NAME : public ::benchmark::Fixture {
  public:
-  void SetUp(const ::benchmark::State&) override {
+  void SetUp(const ::benchmark::State& /*unused*/) override {
     fixture_interaction::fixture_setup++;
   }
@@ -92,7 +92,7 @@ BENCHMARK_F(FIXTURE_BECHMARK_NAME, BM_WithFixture)(benchmark::State& st) {
   }
 }

-static void DoSetupWithFixture(const benchmark::State&) {
+static void DoSetupWithFixture(const benchmark::State& /*unused*/) {
   fixture_interaction::setup++;
 }
@@ -110,7 +110,7 @@ namespace repetitions {
 int setup = 0;
 }

-static void DoSetupWithRepetitions(const benchmark::State&) {
+static void DoSetupWithRepetitions(const benchmark::State& /*unused*/) {
   repetitions::setup++;
 }

 static void BM_WithRep(benchmark::State& state) {

View File

@@ -46,16 +46,17 @@ void try_invalid_pause_resume(benchmark::State& state) {
 void BM_diagnostic_test(benchmark::State& state) {
   static bool called_once = false;

-  if (called_once == false) {
+  if (!called_once) {
     try_invalid_pause_resume(state);
   }

   for (auto _ : state) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }

-  if (called_once == false) {
+  if (!called_once) {
     try_invalid_pause_resume(state);
   }
@@ -66,16 +67,17 @@ BENCHMARK(BM_diagnostic_test);
 void BM_diagnostic_test_keep_running(benchmark::State& state) {
   static bool called_once = false;

-  if (called_once == false) {
+  if (!called_once) {
     try_invalid_pause_resume(state);
   }

   while (state.KeepRunning()) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }

-  if (called_once == false) {
+  if (!called_once) {
     try_invalid_pause_resume(state);
   }

View File

@@ -4,7 +4,7 @@
 namespace {
 #if defined(__GNUC__)
-std::int64_t double_up(const std::int64_t x) __attribute__((const));
+std::int64_t double_up(std::int64_t x) __attribute__((const));
 #endif
 std::int64_t double_up(const std::int64_t x) { return x * 2; }
 }  // namespace
@@ -26,7 +26,7 @@ struct BitRef {
   BitRef(int i, unsigned char& b) : index(i), byte(b) {}
 };

-int main(int, char*[]) {
+int main(int /*unused*/, char* /*unused*/[]) {
   // this test verifies compilation of DoNotOptimize() for some types
   char buffer1[1] = "";

View File

@@ -2,7 +2,8 @@
 void BM_empty(benchmark::State& state) {
   for (auto _ : state) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
 }

View File

@@ -39,7 +39,7 @@ class MapFixture : public ::benchmark::Fixture {
     m = ConstructRandomMap(static_cast<int>(st.range(0)));
   }

-  void TearDown(const ::benchmark::State&) override { m.clear(); }
+  void TearDown(const ::benchmark::State& /*unused*/) override { m.clear(); }

   std::map<int, int> m;
 };

View File

@@ -14,7 +14,8 @@ class TestMemoryManager : public benchmark::MemoryManager {
 void BM_empty(benchmark::State& state) {
   for (auto _ : state) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
 }

View File

@@ -98,7 +98,7 @@ void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
   std::string first_line;
   bool on_first = true;
   std::string line;
-  while (remaining_output.eof() == false) {
+  while (!remaining_output.eof()) {
     BM_CHECK(remaining_output.good());
     std::getline(remaining_output, line);
     if (on_first) {
@@ -149,7 +149,7 @@ class TestReporter : public benchmark::BenchmarkReporter {
   bool ReportContext(const Context& context) override {
     bool last_ret = false;
     bool first = true;
-    for (auto rep : reporters_) {
+    for (auto* rep : reporters_) {
       bool new_ret = rep->ReportContext(context);
       BM_CHECK(first || new_ret == last_ret)
           << "Reports return different values for ReportContext";
@@ -161,12 +161,12 @@ class TestReporter : public benchmark::BenchmarkReporter {
   }

   void ReportRuns(const std::vector<Run>& report) override {
-    for (auto rep : reporters_) {
+    for (auto* rep : reporters_) {
       rep->ReportRuns(report);
     }
   }

   void Finalize() override {
-    for (auto rep : reporters_) {
+    for (auto* rep : reporters_) {
       rep->Finalize();
     }
   }
@@ -206,7 +206,7 @@ class ResultsChecker {
   void SetHeader_(const std::string& csv_header);
   void SetValues_(const std::string& entry_csv_line);

-  std::vector<std::string> SplitCsv_(const std::string& line);
+  std::vector<std::string> SplitCsv_(const std::string& line) const;
 };

 // store the static ResultsChecker in a function to prevent initialization
@@ -239,7 +239,7 @@ void ResultsChecker::CheckResults(std::stringstream& output) {
   // now go over every line and publish it to the ResultsChecker
   std::string line;
   bool on_first = true;
-  while (output.eof() == false) {
+  while (!output.eof()) {
     BM_CHECK(output.good());
     std::getline(output, line);
     if (on_first) {
@@ -287,7 +287,8 @@ void ResultsChecker::SetValues_(const std::string& entry_csv_line) {
 }

 // a quick'n'dirty csv splitter (eliminating quotes)
-std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
+std::vector<std::string> ResultsChecker::SplitCsv_(
+    const std::string& line) const {
   std::vector<std::string> out;
   if (line.empty()) {
     return out;
@@ -295,8 +296,10 @@ std::vector<std::string> ResultsChecker::SplitCsv_(const std::string& line) {
   if (!field_names.empty()) {
     out.reserve(field_names.size());
   }
-  size_t prev = 0, pos = line.find_first_of(','), curr = pos;
-  while (pos != line.npos) {
+  size_t prev = 0;
+  size_t pos = line.find_first_of(',');
+  size_t curr = pos;
+  while (pos != std::string::npos) {
     BM_CHECK(curr > 0);
     if (line[prev] == '"') {
       ++prev;
@@ -330,7 +333,7 @@ size_t AddChecker(const std::string& bm_name, const ResultsCheckFn& fn) {
 int Results::NumThreads() const {
   auto pos = name.find("/threads:");
-  if (pos == name.npos) {
+  if (pos == std::string::npos) {
     return 1;
   }
   auto end = name.find('/', pos + 9);
@@ -348,7 +351,7 @@ double Results::GetTime(BenchmarkTime which) const {
   BM_CHECK(which == kCpuTime || which == kRealTime);
   const char* which_str = which == kCpuTime ? "cpu_time" : "real_time";
   double val = GetAs<double>(which_str);
-  auto unit = Get("time_unit");
+  const auto* unit = Get("time_unit");
   BM_CHECK(unit);
   if (*unit == "ns") {
     return val * 1.e-9;
@@ -517,7 +520,7 @@ static std::string GetTempFileName() {
   // create the same file at the same time. However, it still introduces races
   // similar to tmpnam.
   int retries = 3;
-  while (--retries) {
+  while (--retries != 0) {
     std::string name = GetRandomFileName();
     if (!FileExists(name)) {
       return name;
@@ -539,7 +542,7 @@ std::string GetFileReporterOutput(int argc, char* argv[]) {
   tmp += tmp_file_name;
   new_argv.emplace_back(const_cast<char*>(tmp.c_str()));

-  argc = int(new_argv.size());
+  argc = static_cast<int>(new_argv.size());

   benchmark::Initialize(&argc, new_argv.data());
   benchmark::RunSpecifiedBenchmarks();

View File

@@ -164,7 +164,7 @@ void RunTestOne() {
 // benchmarks.
 // Also test that new benchmarks can be registered and ran afterwards.
 void RunTestTwo() {
-  assert(ExpectedResults.size() != 0 &&
+  assert(!ExpectedResults.empty() &&
          "must have at least one registered benchmark");
   ExpectedResults.clear();
   benchmark::ClearRegisteredBenchmarks();

View File

@@ -96,7 +96,8 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_basic\",%csv_report$"}});
 void BM_bytes_per_second(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   state.SetBytesProcessed(1);
@@ -128,7 +129,8 @@ ADD_CASES(TC_CSVOut, {{"^\"BM_bytes_per_second\",%csv_bytes_report$"}});
 void BM_items_per_second(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   state.SetItemsProcessed(1);
@@ -409,7 +411,8 @@ ADD_CASES(TC_ConsoleOut, {{"^BM_BigArgs/1073741824 %console_report$"},
 void BM_Complexity_O1(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   state.SetComplexityN(state.range(0));

View File

@@ -97,11 +97,11 @@ BENCHMARK(BM_error_before_running_range_for);
 ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});

 void BM_error_during_running(benchmark::State& state) {
-  int first_iter = true;
+  int first_iter = 1;
   while (state.KeepRunning()) {
     if (state.range(0) == 1 && state.thread_index() <= (state.threads() / 2)) {
       assert(first_iter);
-      first_iter = false;
+      first_iter = 0;
       state.SkipWithError("error message");
     } else {
       state.PauseTiming();
@@ -143,7 +143,8 @@ ADD_CASES("BM_error_during_running_ranged_for",
 void BM_error_after_running(benchmark::State& state) {
   for (auto _ : state) {
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   if (state.thread_index() <= (state.threads() / 2)) {

View File

@@ -13,18 +13,18 @@ namespace {
 TEST(StringUtilTest, stoul) {
   {
     size_t pos = 0;
-    EXPECT_EQ(0ul, benchmark::stoul("0", &pos));
-    EXPECT_EQ(1ul, pos);
+    EXPECT_EQ(0UL, benchmark::stoul("0", &pos));
+    EXPECT_EQ(1UL, pos);
   }
   {
     size_t pos = 0;
-    EXPECT_EQ(7ul, benchmark::stoul("7", &pos));
-    EXPECT_EQ(1ul, pos);
+    EXPECT_EQ(7UL, benchmark::stoul("7", &pos));
+    EXPECT_EQ(1UL, pos);
   }
   {
     size_t pos = 0;
-    EXPECT_EQ(135ul, benchmark::stoul("135", &pos));
-    EXPECT_EQ(3ul, pos);
+    EXPECT_EQ(135UL, benchmark::stoul("135", &pos));
+    EXPECT_EQ(3UL, pos);
   }
 #if ULONG_MAX == 0xFFFFFFFFul
   {
@@ -35,35 +35,35 @@ TEST(StringUtilTest, stoul) {
 #elif ULONG_MAX == 0xFFFFFFFFFFFFFFFFul
   {
     size_t pos = 0;
-    EXPECT_EQ(0xFFFFFFFFFFFFFFFFul,
+    EXPECT_EQ(0xFFFFFFFFFFFFFFFFUL,
               benchmark::stoul("18446744073709551615", &pos));
-    EXPECT_EQ(20ul, pos);
+    EXPECT_EQ(20UL, pos);
   }
 #endif
   {
     size_t pos = 0;
-    EXPECT_EQ(10ul, benchmark::stoul("1010", &pos, 2));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(10UL, benchmark::stoul("1010", &pos, 2));
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
-    EXPECT_EQ(520ul, benchmark::stoul("1010", &pos, 8));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(520UL, benchmark::stoul("1010", &pos, 8));
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
-    EXPECT_EQ(1010ul, benchmark::stoul("1010", &pos, 10));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(1010UL, benchmark::stoul("1010", &pos, 10));
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
-    EXPECT_EQ(4112ul, benchmark::stoul("1010", &pos, 16));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4112UL, benchmark::stoul("1010", &pos, 16));
+    EXPECT_EQ(4UL, pos);
  }
   {
     size_t pos = 0;
-    EXPECT_EQ(0xBEEFul, benchmark::stoul("BEEF", &pos, 16));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(0xBEEFUL, benchmark::stoul("BEEF", &pos, 16));
+    EXPECT_EQ(4UL, pos);
   }
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
   {
@@ -73,44 +73,46 @@ TEST(StringUtilTest, stoul) {
 #endif
 }

-TEST(StringUtilTest, stoi){{size_t pos = 0;
+TEST(StringUtilTest, stoi) {
+  {
+    size_t pos = 0;
     EXPECT_EQ(0, benchmark::stoi("0", &pos));
-    EXPECT_EQ(1ul, pos);
+    EXPECT_EQ(1UL, pos);
   }  // namespace
   {
     size_t pos = 0;
     EXPECT_EQ(-17, benchmark::stoi("-17", &pos));
-    EXPECT_EQ(3ul, pos);
+    EXPECT_EQ(3UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(1357, benchmark::stoi("1357", &pos));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(10, benchmark::stoi("1010", &pos, 2));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(520, benchmark::stoi("1010", &pos, 8));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(1010, benchmark::stoi("1010", &pos, 10));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(4112, benchmark::stoi("1010", &pos, 16));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(0xBEEF, benchmark::stoi("BEEF", &pos, 16));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
   {
@@ -120,30 +122,32 @@ EXPECT_EQ(1ul, pos);
 #endif
 }

-TEST(StringUtilTest, stod){{size_t pos = 0;
+TEST(StringUtilTest, stod) {
+  {
+    size_t pos = 0;
     EXPECT_EQ(0.0, benchmark::stod("0", &pos));
-    EXPECT_EQ(1ul, pos);
+    EXPECT_EQ(1UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(-84.0, benchmark::stod("-84", &pos));
-    EXPECT_EQ(3ul, pos);
+    EXPECT_EQ(3UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(1234.0, benchmark::stod("1234", &pos));
-    EXPECT_EQ(4ul, pos);
+    EXPECT_EQ(4UL, pos);
   }
   {
     size_t pos = 0;
     EXPECT_EQ(1.5, benchmark::stod("1.5", &pos));
-    EXPECT_EQ(3ul, pos);
+    EXPECT_EQ(3UL, pos);
   }
   {
     size_t pos = 0;
     /* Note: exactly representable as double */
     EXPECT_EQ(-1.25e+9, benchmark::stod("-1.25e+9", &pos));
-    EXPECT_EQ(8ul, pos);
+    EXPECT_EQ(8UL, pos);
   }
 #ifndef BENCHMARK_HAS_NO_EXCEPTIONS
   {

View File

@@ -9,7 +9,7 @@ namespace {
 class DummyBenchmark : public Benchmark {
  public:
   DummyBenchmark() : Benchmark("dummy") {}
-  void Run(State&) override {}
+  void Run(State& /*state*/) override {}
 };

 TEST(DefaultTimeUnitTest, TimeUnitIsNotSet) {
TEST(DefaultTimeUnitTest, TimeUnitIsNotSet) { TEST(DefaultTimeUnitTest, TimeUnitIsNotSet) {

View File

@@ -64,7 +64,8 @@ ADD_CASES(TC_CSVOut, {{"%csv_header,"
 void BM_Counters_Tabular(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -375,7 +376,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_Tabular/repeats:2/threads:2$",
 void BM_CounterRates_Tabular(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;

View File

@@ -67,7 +67,8 @@ int num_calls1 = 0;
 void BM_Counters_WithBytesAndItemsPSec(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   state.counters["foo"] = 1;
@@ -119,7 +120,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_WithBytesAndItemsPSec",
 void BM_Counters_Rate(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -163,7 +165,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_Rate", &CheckRate);
 void BM_Invert(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -204,7 +207,8 @@ CHECK_BENCHMARK_RESULTS("BM_Invert", &CheckInvert);
 void BM_Counters_InvertedRate(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -333,7 +337,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_AvgThreads/threads:%int",
 void BM_Counters_AvgThreadsRate(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -421,7 +426,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_IterationInvariant",
 void BM_Counters_kIsIterationInvariantRate(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;
@@ -513,7 +519,8 @@ CHECK_BENCHMARK_RESULTS("BM_Counters_AvgIterations", &CheckAvgIterations);
 void BM_Counters_kAvgIterationsRate(benchmark::State& state) {
   for (auto _ : state) {
     // This test requires a non-zero CPU time to avoid divide-by-zero
-    auto iterations = double(state.iterations()) * double(state.iterations());
+    auto iterations = static_cast<double>(state.iterations()) *
+                      static_cast<double>(state.iterations());
     benchmark::DoNotOptimize(iterations);
   }
   namespace bm = benchmark;