[RFC] Adding API for setting/getting benchmark_filter flag? (#1254)

* [RFC] Adding API for setting/getting benchmark_filter flag?

This PR is more of a request for comments; I'm open to other ideas/suggestions as well.

Details:
This flag has different implementations (absl vs. benchmark), and since the proposal to add absl as a dependency was rejected, it would be nice to have a reliable (and less hacky) way to access this flag internally.
(Actually, reading it isn't much of a problem, but setting it is.)

Internally, we have a sizeable number of users who use absl::SetFlag to set this flag. This will not work with benchmark's own flags.

Another motivation is that not all users use the command-line flag; some prefer to set this value programmatically.
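
For example, the intended usage is roughly the following sketch (the BM_StringCopy benchmark and the filter regex are just illustrative, not part of this change):

#include <string>

#include "benchmark/benchmark.h"

// Illustrative benchmark; any registered benchmark works the same way.
static void BM_StringCopy(benchmark::State& state) {
  std::string src = "hello";
  for (auto _ : state) {
    std::string copy(src);
    benchmark::DoNotOptimize(copy);
  }
}
BENCHMARK(BM_StringCopy);

int main(int argc, char** argv) {
  benchmark::Initialize(&argc, argv);
  // Run only benchmarks matching this regex, regardless of whether
  // --benchmark_filter was passed on the command line.
  benchmark::RunSpecifiedBenchmarks("BM_String.*");
  // The flag itself stays readable through the new getter.
  const char* filter = benchmark::GetBenchmarkFilter();
  (void)filter;
  benchmark::Shutdown();
  return 0;
}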

* fixed build errors

* fix lints again

* per discussion: add additional RunSpecifiedBenchmarks instead.

* add tests

* fix up tests

* clarify comment

* fix stray : in test

* more assertion in test

* add test file to test/CMakeLists.txt

* more test

* make test ISO C++ compliant

* fix up BUILD file to pass the flag
Vy Nguyen 2021-10-27 03:52:57 -04:00 committed by GitHub
parent fca348296f
commit 4f47ed2c9a
5 changed files with 128 additions and 11 deletions


@ -289,11 +289,18 @@ void Shutdown();
// Returns true if there is at least one unrecognized argument (i.e. 'argc' > 1).
bool ReportUnrecognizedArguments(int argc, char** argv);
// Returns the current value of --benchmark_filter.
const char* GetBenchmarkFilter();
// Generate a list of benchmarks matching the specified --benchmark_filter flag
// and if --benchmark_list_tests is specified return after printing the name
// of each matching benchmark. Otherwise run each matching benchmark and
// report the results.
//
// spec : Specify the benchmarks to run. If users do not specify this arg,
//        or if it has the value NULL, then the value of FLAGS_benchmark_filter
//        will be used.
//
// The second and third overload use the specified 'display_reporter' and
// 'file_reporter' respectively. 'file_reporter' will write to the file
// specified
@ -301,10 +308,12 @@ bool ReportUnrecognizedArguments(int argc, char** argv);
// 'file_reporter' is ignored.
//
// RETURNS: The number of matching benchmarks.
-size_t RunSpecifiedBenchmarks();
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter);
+size_t RunSpecifiedBenchmarks(const char* spec = NULL);
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
-                              BenchmarkReporter* file_reporter);
+                              const char* spec = NULL);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter,
+                              const char* spec = NULL);
// If a MemoryManager is registered (via RegisterMemoryManager()),
// it can be used to collect and report allocation metrics for a run of the
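
For callers that already pass their own reporters, the spec argument simply comes last; a rough sketch of that path (the reporter choices below are illustrative), where passing NULL keeps the existing behavior of falling back to --benchmark_filter:

#include "benchmark/benchmark.h"

// Assumes benchmarks have been registered elsewhere with BENCHMARK(...).
int main(int argc, char** argv) {
  benchmark::Initialize(&argc, argv);

  benchmark::ConsoleReporter display_reporter;
  benchmark::JSONReporter file_reporter;  // only used if --benchmark_out is given

  // Explicit spec: overrides --benchmark_filter for this call.
  benchmark::RunSpecifiedBenchmarks(&display_reporter, &file_reporter, "BM_Chosen.*");

  // NULL spec (or omitting the argument): honor --benchmark_filter as before.
  benchmark::RunSpecifiedBenchmarks(&display_reporter, &file_reporter, NULL);

  benchmark::Shutdown();
  return 0;
}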


@ -429,17 +429,20 @@ ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
} // end namespace internal
-size_t RunSpecifiedBenchmarks() {
-  return RunSpecifiedBenchmarks(nullptr, nullptr);
-}
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
-  return RunSpecifiedBenchmarks(display_reporter, nullptr);
+size_t RunSpecifiedBenchmarks(const char* spec) {
+  return RunSpecifiedBenchmarks(nullptr, nullptr, spec);
 }
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
-                              BenchmarkReporter* file_reporter) {
-  std::string spec = FLAGS_benchmark_filter;
+                              const char* spec) {
+  return RunSpecifiedBenchmarks(display_reporter, nullptr, spec);
+}
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter,
+                              const char* spec_str) {
+  std::string spec =
+      spec_str != nullptr ? std::string(spec_str) : FLAGS_benchmark_filter;
if (spec.empty() || spec == "all")
spec = "."; // Regexp that matches all benchmarks
@ -495,6 +498,10 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
  return benchmarks.size();
}

const char* GetBenchmarkFilter() {
  return FLAGS_benchmark_filter.c_str();
}

void RegisterMemoryManager(MemoryManager* manager) {
  internal::memory_manager = manager;
}


@ -21,6 +21,7 @@ TEST_ARGS = ["--benchmark_min_time=0.01"]
PER_SRC_TEST_ARGS = ({
    "user_counters_tabular_test.cc": ["--benchmark_counters_tabular=true"],
    "repetitions_test.cc": [" --benchmark_repetitions=3"],
    "spec_arg_test.cc": ["--benchmark_filter=BM_NotChosen"],
})
load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")


@ -56,6 +56,9 @@ endmacro(compile_output_test)
compile_benchmark_test(benchmark_test)
add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)
compile_benchmark_test(spec_arg_test)
add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)
compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})

test/spec_arg_test.cc Normal file

@ -0,0 +1,97 @@
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <limits>
#include <string>
#include <vector>
#include "benchmark/benchmark.h"
// Tests that we can override the benchmark-spec value from FLAGS_benchmark_filter
// with the argument to RunSpecifiedBenchmarks(...).
namespace {

class TestReporter : public benchmark::ConsoleReporter {
 public:
  virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE {
    return ConsoleReporter::ReportContext(context);
  };

  virtual void ReportRuns(const std::vector<Run>& report) BENCHMARK_OVERRIDE {
    assert(report.size() == 1);
    matched_functions.push_back(report[0].run_name.function_name);
    ConsoleReporter::ReportRuns(report);
  };

  TestReporter() {}
  virtual ~TestReporter() {}

  const std::vector<std::string>& GetMatchedFunctions() const {
    return matched_functions;
  }

 private:
  std::vector<std::string> matched_functions;
};

}  // end namespace
static void BM_NotChosen(benchmark::State& state) {
  assert(false && "SHOULD NOT BE CALLED");
  for (auto _ : state) {
  }
}
BENCHMARK(BM_NotChosen);

static void BM_Chosen(benchmark::State& state) {
  for (auto _ : state) {
  }
}
BENCHMARK(BM_Chosen);
int main(int argc, char** argv) {
  const char* const flag = "BM_NotChosen";

  // Verify that argv specifies --benchmark_filter=BM_NotChosen.
  bool found = false;
  for (int i = 0; i < argc; ++i) {
    if (strcmp("--benchmark_filter=BM_NotChosen", argv[i]) == 0) {
      found = true;
      break;
    }
  }
  assert(found);

  benchmark::Initialize(&argc, argv);

  // Check that the current flag value is reported accurately via the
  // GetBenchmarkFilter() function.
  if (strcmp(flag, benchmark::GetBenchmarkFilter()) != 0) {
    std::cerr
        << "Seeing different value for flags. GetBenchmarkFilter() returns ["
        << benchmark::GetBenchmarkFilter() << "] expected flag=[" << flag
        << "]\n";
    return 1;
  }

  TestReporter test_reporter;
  const char* const spec = "BM_Chosen";
  const size_t returned_count =
      benchmark::RunSpecifiedBenchmarks(&test_reporter, spec);
  assert(returned_count == 1);

  const std::vector<std::string> matched_functions =
      test_reporter.GetMatchedFunctions();
  assert(matched_functions.size() == 1);
  if (strcmp(spec, matched_functions.front().c_str()) != 0) {
    std::cerr << "Expected benchmark [" << spec << "] to run, but got ["
              << matched_functions.front() << "]\n";
    return 2;
  }
  return 0;
}