diff --git a/include/benchmark/benchmark.h b/include/benchmark/benchmark.h
index 17145559..12e879ea 100644
--- a/include/benchmark/benchmark.h
+++ b/include/benchmark/benchmark.h
@@ -289,11 +289,18 @@ void Shutdown();
 // Returns true there is at least on unrecognized argument (i.e. 'argc' > 1).
 bool ReportUnrecognizedArguments(int argc, char** argv);
 
+// Returns the current value of --benchmark_filter.
+const char* GetBenchmarkFilter();
+
 // Generate a list of benchmarks matching the specified --benchmark_filter flag
 // and if --benchmark_list_tests is specified return after printing the name
 // of each matching benchmark. Otherwise run each matching benchmark and
 // report the results.
 //
+// spec : Specify the benchmarks to run. If users do not specify this arg,
+//        or if it is NULL, then the value of FLAGS_benchmark_filter
+//        will be used.
+//
 // The second and third overload use the specified 'display_reporter' and
 // 'file_reporter' respectively. 'file_reporter' will write to the file
 // specified
@@ -301,10 +308,12 @@ bool ReportUnrecognizedArguments(int argc, char** argv);
 // 'file_reporter' is ignored.
 //
 // RETURNS: The number of matching benchmarks.
-size_t RunSpecifiedBenchmarks();
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter);
+size_t RunSpecifiedBenchmarks(const char* spec = NULL);
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
-                              BenchmarkReporter* file_reporter);
+                              const char* spec = NULL);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter,
+                              const char* spec = NULL);
 
 // If a MemoryManager is registered (via RegisterMemoryManager()),
 // it can be used to collect and report allocation metrics for a run of the
diff --git a/src/benchmark.cc b/src/benchmark.cc
index 28d9368e..e7b96ca5 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -429,17 +429,20 @@ ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
 
 }  // end namespace internal
 
-size_t RunSpecifiedBenchmarks() {
-  return RunSpecifiedBenchmarks(nullptr, nullptr);
-}
-
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
-  return RunSpecifiedBenchmarks(display_reporter, nullptr);
+size_t RunSpecifiedBenchmarks(const char* spec) {
+  return RunSpecifiedBenchmarks(nullptr, nullptr, spec);
 }
 
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
-                              BenchmarkReporter* file_reporter) {
-  std::string spec = FLAGS_benchmark_filter;
+                              const char* spec) {
+  return RunSpecifiedBenchmarks(display_reporter, nullptr, spec);
+}
+
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
+                              BenchmarkReporter* file_reporter,
+                              const char* spec_str) {
+  std::string spec =
+      spec_str != nullptr ? std::string(spec_str) : FLAGS_benchmark_filter;
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
 
@@ -495,6 +498,10 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
   return benchmarks.size();
 }
 
+const char* GetBenchmarkFilter() {
+  return FLAGS_benchmark_filter.c_str();
+}
+
 void RegisterMemoryManager(MemoryManager* manager) {
   internal::memory_manager = manager;
 }
diff --git a/test/BUILD b/test/BUILD
index 1f27f99e..df700a7a 100644
--- a/test/BUILD
+++ b/test/BUILD
@@ -21,6 +21,7 @@ TEST_ARGS = ["--benchmark_min_time=0.01"]
 PER_SRC_TEST_ARGS = ({
     "user_counters_tabular_test.cc": ["--benchmark_counters_tabular=true"],
     "repetitions_test.cc": [" --benchmark_repetitions=3"],
+    "spec_arg_test.cc" : ["--benchmark_filter=BM_NotChosen"],
 })
 
 load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 79cdf53b..0f7b265d 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -56,6 +56,9 @@ endmacro(compile_output_test)
 compile_benchmark_test(benchmark_test)
 add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)
 
+compile_benchmark_test(spec_arg_test)
+add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)
+
 compile_benchmark_test(filter_test)
 macro(add_filter_test name filter expect)
   add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
diff --git a/test/spec_arg_test.cc b/test/spec_arg_test.cc
new file mode 100644
index 00000000..f1f0543e
--- /dev/null
+++ b/test/spec_arg_test.cc
@@ -0,0 +1,97 @@
+#include <algorithm>
+#include <cassert>
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <iostream>
+#include <limits>
+#include <string>
+#include <vector>
+
+#include "benchmark/benchmark.h"
+
+// Tests that we can override benchmark-spec value from FLAGS_benchmark_filter
+// with argument to RunSpecifiedBenchmarks(...).
+
+namespace {
+
+class TestReporter : public benchmark::ConsoleReporter {
+ public:
+  virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE {
+    return ConsoleReporter::ReportContext(context);
+  };
+
+  virtual void ReportRuns(const std::vector<Run>& report) BENCHMARK_OVERRIDE {
+    assert(report.size() == 1);
+    matched_functions.push_back(report[0].run_name.function_name);
+    ConsoleReporter::ReportRuns(report);
+  };
+
+  TestReporter() {}
+
+  virtual ~TestReporter() {}
+
+  const std::vector<std::string>& GetMatchedFunctions() const {
+    return matched_functions;
+  }
+
+ private:
+  std::vector<std::string> matched_functions;
+};
+
+}  // end namespace
+
+static void BM_NotChosen(benchmark::State& state) {
+  assert(false && "SHOULD NOT BE CALLED");
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_NotChosen);
+
+static void BM_Chosen(benchmark::State& state) {
+  for (auto _ : state) {
+  }
+}
+BENCHMARK(BM_Chosen);
+
+int main(int argc, char** argv) {
+  const char* const flag = "BM_NotChosen";
+
+  // Verify that argv specifies --benchmark_filter=BM_NotChosen.
+  bool found = false;
+  for (int i = 0; i < argc; ++i) {
+    if (strcmp("--benchmark_filter=BM_NotChosen", argv[i]) == 0) {
+      found = true;
+      break;
+    }
+  }
+  assert(found);
+
+  benchmark::Initialize(&argc, argv);
+
+  // Check that the current flag value is reported accurately via the
+  // GetBenchmarkFilter() function.
+  if (strcmp(flag, benchmark::GetBenchmarkFilter()) != 0) {
+    std::cerr
+        << "Seeing different value for flags. GetBenchmarkFilter() returns ["
+        << benchmark::GetBenchmarkFilter() << "] expected flag=[" << flag
+        << "]\n";
+    return 1;
+  }
+  TestReporter test_reporter;
+  const char* const spec = "BM_Chosen";
+  const size_t returned_count =
+      benchmark::RunSpecifiedBenchmarks(&test_reporter,
+                                        spec);
+  assert(returned_count == 1);
+  const std::vector<std::string> matched_functions =
+      test_reporter.GetMatchedFunctions();
+  assert(matched_functions.size() == 1);
+  if (strcmp(spec, matched_functions.front().c_str()) != 0) {
+    std::cerr
+        << "Expected benchmark [" << spec << "] to run, but got ["
+        << matched_functions.front() << "]\n";
+    return 2;
+  }
+  return 0;
+}