mirror of https://github.com/google/benchmark.git
Change --benchmark_list_tests to respect the benchmark filter.
This behavior mirrors how GTest handles these two flags.
parent e0de8171c6
commit 9341d705a1
@@ -160,11 +160,14 @@ class BenchmarkReporter;
 void Initialize(int* argc, char** argv);
 
-// Run all benchmarks which match the specified --benchmark_filter flag.
+// Generate a list of benchmarks matching the specified --benchmark_filter flag
+// and if --benchmark_list_tests is specified return after printing the name
+// of each matching benchmark. Otherwise run each matching benchmark and
+// report the results.
 //
 // The second overload reports the results using the specified 'reporter'.
 //
-// RETURNS: The number of benchmarks run, not including repetitions. If
-// '--benchmark_list_tests' is specified '0' is returned.
+// RETURNS: The number of matching benchmarks.
 size_t RunSpecifiedBenchmarks();
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter);
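Under the new contract, the return value of RunSpecifiedBenchmarks() is the number of benchmarks that matched --benchmark_filter, whether they were merely listed or actually run; callers that treated 0 as "list mode" must adjust. A minimal caller sketch under the new contract (the include path and the exit-code convention are illustrative, not part of this commit):

    #include "benchmark/benchmark.h"

    static void BM_Foo(benchmark::State& state) {
      while (state.KeepRunning()) {
      }
    }
    BENCHMARK(BM_Foo);

    int main(int argc, char** argv) {
      benchmark::Initialize(&argc, argv);
      // Number of benchmarks matching --benchmark_filter, listed or run.
      size_t matched = benchmark::RunSpecifiedBenchmarks();
      return matched == 0 ? 1 : 0;  // illustrative: fail when nothing matched
    }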
@@ -853,24 +853,9 @@ void State::SetLabel(const char* label) {
 namespace internal {
 namespace {
 
-void PrintBenchmarkList() {
-  std::vector<Benchmark::Instance> benchmarks;
-  auto families = BenchmarkFamilies::GetInstance();
-  if (!families->FindBenchmarks(".", &benchmarks)) return;
-
-  for (const internal::Benchmark::Instance& benchmark : benchmarks) {
-    std::cout << benchmark.name << "\n";
-  }
-}
-
-size_t RunMatchingBenchmarks(const std::string& spec,
-                             BenchmarkReporter* reporter) {
+void RunMatchingBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
+                           BenchmarkReporter* reporter) {
   CHECK(reporter != nullptr);
-  CHECK(!spec.empty());
-
-  std::vector<Benchmark::Instance> benchmarks;
-  auto families = BenchmarkFamilies::GetInstance();
-  if (!families->FindBenchmarks(spec, &benchmarks)) return 0;
 
   // Determine the width of the name field using a minimum width of 10.
   size_t name_field_width = 10;
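Benchmark selection itself is unchanged: FindBenchmarks matches registered names against the filter regex; the refactor only hoists that lookup out to the caller so listing and running share one match set. A sketch of the matching semantics, using std::regex purely for illustration (the library uses its own Regex wrapper) with names in the style of filter_test:

    #include <iostream>
    #include <regex>
    #include <string>
    #include <vector>

    int main() {
      const std::vector<std::string> names = {"BM_Foo", "BM_Bar", "BM_FooBar", "BM_FooBa"};
      const std::regex spec("Foo");  // like --benchmark_filter=Foo
      for (const std::string& name : names) {
        // Unanchored search: "Foo" matches BM_Foo, BM_FooBar, and BM_FooBa.
        if (std::regex_search(name, spec)) std::cout << name << "\n";
      }
      return 0;
    }

That unanchored match is why the filter_simple test below passes "Foo" with an expected count of 3.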
@@ -894,7 +879,6 @@ size_t RunMatchingBenchmarks(const std::string& spec,
     RunBenchmark(benchmark, reporter);
   }
-  return benchmarks.size();
 }
 
 std::unique_ptr<BenchmarkReporter> GetDefaultReporter() {
@@ -919,22 +903,27 @@ size_t RunSpecifiedBenchmarks() {
 }
 
 size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
-  if (FLAGS_benchmark_list_tests) {
-    internal::PrintBenchmarkList();
-    return 0;
-  }
   std::string spec = FLAGS_benchmark_filter;
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
 
-  std::unique_ptr<BenchmarkReporter> default_reporter;
-  if (!reporter) {
-    default_reporter = internal::GetDefaultReporter();
-    reporter = default_reporter.get();
+  std::vector<internal::Benchmark::Instance> benchmarks;
+  auto families = internal::BenchmarkFamilies::GetInstance();
+  if (!families->FindBenchmarks(spec, &benchmarks)) return 0;
+
+  if (FLAGS_benchmark_list_tests) {
+    for (auto const& benchmark : benchmarks)
+      std::cout << benchmark.name << "\n";
+  } else {
+    std::unique_ptr<BenchmarkReporter> default_reporter;
+    if (!reporter) {
+      default_reporter = internal::GetDefaultReporter();
+      reporter = default_reporter.get();
+    }
+    internal::RunMatchingBenchmarks(benchmarks, reporter);
+    reporter->Finalize();
   }
-  size_t num_run = internal::RunMatchingBenchmarks(spec, reporter);
-  reporter->Finalize();
-  return num_run;
+  return benchmarks.size();
 }
 
 namespace internal {
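Note the shape of the new flow: the match set is computed once, and in list mode the reporter is never used, so listing produces no reports. The updated filter_test checks exactly this by counting reporter callbacks. A minimal sketch of such a counting reporter, assuming this era's reporter interface and header layout (the real TestReporter in filter_test follows the same derive-from-ConsoleReporter pattern):

    #include <vector>
    #include "benchmark/reporter.h"  // assumed header location in this era

    class CountingReporter : public benchmark::ConsoleReporter {
     public:
      // Count each batch of runs reported, then defer to the console output.
      virtual void ReportRuns(const std::vector<Run>& report) {
        ++count_;
        ConsoleReporter::ReportRuns(report);
      }
      size_t GetCount() const { return count_; }

     private:
      size_t count_ = 0;
    };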
@@ -14,6 +14,7 @@ add_test(benchmark benchmark_test --benchmark_min_time=0.01)
 compile_benchmark_test(filter_test)
 macro(add_filter_test name filter expect)
   add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
+  add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
 endmacro(add_filter_test)
 
 add_filter_test(filter_simple "Foo" 3)
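Expanded, add_filter_test(filter_simple "Foo" 3) now registers two CTest entries, one that runs the matching benchmarks and one that only lists them:

    add_test(filter_simple filter_test --benchmark_min_time=0.01 --benchmark_filter=Foo 3)
    add_test(filter_simple_list_only filter_test --benchmark_list_tests --benchmark_filter=Foo 3)

so every existing filter expectation is exercised in both modes.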
@@ -68,7 +68,11 @@ BENCHMARK(BM_FooBa);
 
 
-int main(int argc, char* argv[]) {
+int main(int argc, char** argv) {
+  bool list_only = false;
+  for (int i=0; i < argc; ++i)
+    list_only |= std::string(argv[i]).find("--benchmark_list_tests") != std::string::npos;
+
   benchmark::Initialize(&argc, argv);
 
   TestReporter test_reporter;
@@ -77,15 +81,25 @@ int main(int argc, char* argv[]) {
   if (argc == 2) {
     // Make sure we ran all of the tests
     std::stringstream ss(argv[1]);
-    size_t expected;
-    ss >> expected;
+    size_t expected_return;
+    ss >> expected_return;
 
-    const size_t count = test_reporter.GetCount();
-    if (count != expected || returned_count != expected) {
-      std::cerr << "ERROR: Expected " << expected << " tests to be run but returned_count = "
-                << returned_count << " and reporter_count = " << count << std::endl;
+    if (returned_count != expected_return) {
+      std::cerr << "ERROR: Expected " << expected_return
+                << " tests to match the filter but returned_count = "
+                << returned_count << std::endl;
       return -1;
     }
+
+    const size_t expected_reports = list_only ? 0 : expected_return;
+    const size_t reports_count = test_reporter.GetCount();
+    if (reports_count != expected_reports) {
+      std::cerr << "ERROR: Expected " << expected_reports
+                << " tests to be run but reported_count = " << reports_count
+                << std::endl;
+      return -1;
+    }
   }
 
   return 0;
 }
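Putting it together: a list-only variant invokes, for example, filter_test --benchmark_list_tests --benchmark_filter=Foo 3. main() reads 3 into expected_return and requires RunSpecifiedBenchmarks() to return exactly that many matches; because list_only is true, expected_reports is 0, so the reporter must not have been invoked at all. In the ordinary variant the same count must appear both as the return value and as the number of reports.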