#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "../src/re.h"     // NOTE: re.h is for internal use only
#include <cassert>
#include <cstring>
#include <iostream>
#include <memory>
#include <sstream>
#include <vector>
#include <utility>

namespace {

// ========================================================================= //
// -------------------------- Testing Case --------------------------------- //
// ========================================================================= //

enum MatchRules {
  MR_Default,  // Skip non-matching lines until a match is found.
  MR_Next,     // Match must occur on the next line.
  MR_Not       // No line between the current position and the next match may
               // match the regex.
};
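
// For example, the (hypothetical) case list {"^A$"}, {"^B$", MR_Next},
// {"error", MR_Not}, {"^C$"} requires some line matching "^A$", the very next
// line matching "^B$", and a later line matching "^C$" with no intervening
// line containing "error".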

struct TestCase {
  std::string regex_str;
  int match_rule;
  std::shared_ptr<benchmark::Regex> regex;

  TestCase(std::string re, int rule = MR_Default)
      : regex_str(std::move(re)), match_rule(rule),
        regex(std::make_shared<benchmark::Regex>()) {
    std::string err_str;
    regex->Init(regex_str, &err_str);
    CHECK(err_str.empty()) << "Could not construct regex \"" << regex_str
                           << "\" got Error: " << err_str;
  }
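
  // Consumes lines from `remaining_output` until this case's regex matches.
  // Every consumed line is first checked against the pending MR_Not cases,
  // and an MR_Next case must match the very first line read.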
  void Check(std::stringstream& remaining_output,
             std::vector<TestCase>& not_checks) const {
    std::string line;
    while (remaining_output.eof() == false) {
      CHECK(remaining_output.good());
      std::getline(remaining_output, line);
      for (auto& NC : not_checks) {
        CHECK(!NC.regex->Match(line)) << "Unexpected match for line \"" << line
                                      << "\" for MR_Not regex \""
                                      << NC.regex_str << "\"";
      }
      if (regex->Match(line)) return;
      CHECK(match_rule != MR_Next) << "Expected line \"" << line
                                   << "\" to match regex \"" << regex_str
                                   << "\"";
    }
    CHECK(remaining_output.eof() == false)
        << "End of output reached before match for regex \"" << regex_str
        << "\" was found";
  }
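
  // Runs every case in `checks` in order against `output`. MR_Not cases are
  // not run directly; they are collected and constrain the lines consumed by
  // the next positive case, after which they are discarded.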
  static void CheckCases(std::vector<TestCase> const& checks,
                         std::stringstream& output) {
    std::vector<TestCase> not_checks;
    for (size_t i = 0; i < checks.size(); ++i) {
      const auto& TC = checks[i];
      if (TC.match_rule == MR_Not) {
        not_checks.push_back(TC);
        continue;
      }
      TC.Check(output, not_checks);
      not_checks.clear();
    }
  }
};

std::vector<TestCase> ConsoleOutputTests;
std::vector<TestCase> JSONOutputTests;
std::vector<TestCase> CSVOutputTests;

std::vector<TestCase> ConsoleErrorTests;
std::vector<TestCase> JSONErrorTests;
std::vector<TestCase> CSVErrorTests;

// ========================================================================= //
// -------------------------- Test Helpers --------------------------------- //
// ========================================================================= //

class TestReporter : public benchmark::BenchmarkReporter {
 public:
  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
      : reporters_(reps) {}

  virtual bool ReportContext(const Context& context) {
    bool last_ret = false;
    bool first = true;
    for (auto rep : reporters_) {
      bool new_ret = rep->ReportContext(context);
      CHECK(first || new_ret == last_ret)
          << "Reports return different values for ReportContext";
      first = false;
      last_ret = new_ret;
    }
    return last_ret;
  }

  virtual void ReportRuns(const std::vector<Run>& report) {
    for (auto rep : reporters_)
      rep->ReportRuns(report);
  }

  virtual void Finalize() {
    for (auto rep : reporters_)
      rep->Finalize();
  }

 private:
  std::vector<benchmark::BenchmarkReporter*> reporters_;
};
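
// TestReporter fans each callback out to all of the underlying reporters, so
// one benchmark run produces console, JSON, and CSV output side by side.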

#define CONCAT2(x, y) x##y
#define CONCAT(x, y) CONCAT2(x, y)

#define ADD_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)

int AddCases(std::vector<TestCase>* out, std::initializer_list<TestCase> const& v) {
  for (auto const& TC : v)
    out->push_back(TC);
  return 0;
}
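
// The two-level CONCAT is needed so __LINE__ is expanded before token pasting;
// each ADD_CASES use thus defines a uniquely named dummy global whose dynamic
// initializer registers its cases before main() runs.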

template <class First>
std::string join(First f) { return f; }

template <class First, class ...Args>
std::string join(First f, Args&&... args) {
  return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
}
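
// join() glues column patterns together with "[ ]+" so an entire console row
// can be matched; e.g. join("^Benchmark", "Time") yields "^Benchmark[ ]+Time".

// dec_re (below) matches an unsigned decimal number with an optional
// exponent, e.g. "42", "0.5", or "1.5e+09"; it is used for the floating-point
// columns of the CSV output.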
std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";

}  // end namespace

// ========================================================================= //
// ---------------------- Testing Prologue Output -------------------------- //
// ========================================================================= //

ADD_CASES(&ConsoleOutputTests, {
    {join("^Benchmark", "Time", "CPU", "Iterations$"), MR_Next},
    {"^[-]+$", MR_Next}
});
ADD_CASES(&CSVOutputTests, {
    {"name,iterations,real_time,cpu_time,time_unit,bytes_per_second,items_per_second,"
     "label,error_occurred,error_message"}
});

// ========================================================================= //
// ------------------------ Testing Basic Output --------------------------- //
// ========================================================================= //

void BM_basic(benchmark::State& state) {
  while (state.KeepRunning()) {}
}
BENCHMARK(BM_basic);

ADD_CASES(&ConsoleOutputTests, {
    {"^BM_basic[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
});
ADD_CASES(&JSONOutputTests, {
    {"\"name\": \"BM_basic\",$"},
    {"\"iterations\": [0-9]+,$", MR_Next},
    {"\"real_time\": [0-9]{1,5},$", MR_Next},
    {"\"cpu_time\": [0-9]{1,5},$", MR_Next},
    {"\"time_unit\": \"ns\"$", MR_Next},
    {"}", MR_Next}
});
ADD_CASES(&CSVOutputTests, {
    {"^\"BM_basic\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
});
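
// The JSON cases above chain MR_Next so the fields of a single benchmark
// object must appear consecutively and in the order the reporter emits them.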

// ========================================================================= //
// ------------------------ Testing Error Output --------------------------- //
// ========================================================================= //

void BM_error(benchmark::State& state) {
  state.SkipWithError("message");
  while (state.KeepRunning()) {}
}
BENCHMARK(BM_error);
ADD_CASES(&ConsoleOutputTests, {
    {"^BM_error[ ]+ERROR OCCURRED: 'message'$"}
});
ADD_CASES(&JSONOutputTests, {
    {"\"name\": \"BM_error\",$"},
    {"\"error_occurred\": true,$", MR_Next},
    {"\"error_message\": \"message\",$", MR_Next}
});

ADD_CASES(&CSVOutputTests, {
    {"^\"BM_error\",,,,,,,,true,\"message\"$"}
});

// ========================================================================= //
// ----------------------- Testing Complexity Output ----------------------- //
// ========================================================================= //

void BM_Complexity_O1(benchmark::State& state) {
  while (state.KeepRunning()) {}
  state.SetComplexityN(state.range(0));
}
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);

std::string bigOStr = "[0-9]+\\.[0-9]+ \\([0-9]+\\)";

ADD_CASES(&ConsoleOutputTests, {
    {join("^BM_Complexity_O1_BigO", bigOStr, bigOStr) + "[ ]*$"},
    {join("^BM_Complexity_O1_RMS", "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}
});
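
// bigOStr matches one "<coefficient> (<complexity>)" column of the _BigO row,
// e.g. "5.98 (1)" (example value); two joined copies cover the time and CPU
// columns.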

// ========================================================================= //
// ----------------------- Testing Aggregate Output ------------------------ //
// ========================================================================= //

// Test that non-aggregate data is printed by default.
void BM_Repeat(benchmark::State& state) { while (state.KeepRunning()) {} }
BENCHMARK(BM_Repeat)->Repetitions(3);
ADD_CASES(&ConsoleOutputTests, {
    {"^BM_Repeat/repeats:3[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"},
    {"^BM_Repeat/repeats:3[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"},
    {"^BM_Repeat/repeats:3[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"},
    {"^BM_Repeat/repeats:3_mean[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"},
    {"^BM_Repeat/repeats:3_stddev[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
});
ADD_CASES(&JSONOutputTests, {
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
    {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}
});
ADD_CASES(&CSVOutputTests, {
    {"^\"BM_Repeat/repeats:3\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"},
    {"^\"BM_Repeat/repeats:3\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"},
    {"^\"BM_Repeat/repeats:3\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"},
    {"^\"BM_Repeat/repeats:3_mean\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"},
    {"^\"BM_Repeat/repeats:3_stddev\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
});

// Test that a non-repeated benchmark still prints non-aggregate results even
// when only-aggregate reports have been requested.
void BM_RepeatOnce(benchmark::State& state) { while (state.KeepRunning()) {} }
BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
ADD_CASES(&ConsoleOutputTests, {
    {"^BM_RepeatOnce/repeats:1[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
});
ADD_CASES(&JSONOutputTests, {
    {"\"name\": \"BM_RepeatOnce/repeats:1\",$"}
});
ADD_CASES(&CSVOutputTests, {
    {"^\"BM_RepeatOnce/repeats:1\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
});

// Test that non-aggregate data is not reported when only-aggregate reports
// have been requested.
void BM_SummaryRepeat(benchmark::State& state) { while (state.KeepRunning()) {} }
BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
ADD_CASES(&ConsoleOutputTests, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"^BM_SummaryRepeat/repeats:3_mean[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"},
    {"^BM_SummaryRepeat/repeats:3_stddev[ ]+[0-9]{1,5} ns[ ]+[0-9]{1,5} ns[ ]+[0-9]+$"}
});
ADD_CASES(&JSONOutputTests, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
    {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}
});
ADD_CASES(&CSVOutputTests, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"^\"BM_SummaryRepeat/repeats:3_mean\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"},
    {"^\"BM_SummaryRepeat/repeats:3_stddev\",[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,$"}
});
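
// The MR_Not cases above assert that no per-repetition line appears before the
// _mean aggregate is matched.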

// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //

int main(int argc, char* argv[]) {
  benchmark::Initialize(&argc, argv);

  benchmark::ConsoleReporter CR(benchmark::ConsoleReporter::OO_None);
  benchmark::JSONReporter JR;
  benchmark::CSVReporter CSVR;

  struct ReporterTest {
    const char* name;
    std::vector<TestCase>& output_cases;
    std::vector<TestCase>& error_cases;
    benchmark::BenchmarkReporter& reporter;
    std::stringstream out_stream;
    std::stringstream err_stream;

    ReporterTest(const char* n, std::vector<TestCase>& out_tc,
                 std::vector<TestCase>& err_tc,
                 benchmark::BenchmarkReporter& br)
        : name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
      reporter.SetOutputStream(&out_stream);
      reporter.SetErrorStream(&err_stream);
    }
  } TestCases[] = {
      {"ConsoleReporter", ConsoleOutputTests, ConsoleErrorTests, CR},
      {"JSONReporter", JSONOutputTests, JSONErrorTests, JR},
      {"CSVReporter", CSVOutputTests, CSVErrorTests, CSVR}
  };
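
  // Each ReporterTest owns the string streams its reporter writes to, so every
  // format's output is captured for replay against the checks below.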

  // Create the test reporter and run the benchmarks.
  std::cout << "Running benchmarks...\n";
  TestReporter test_rep({&CR, &JR, &CSVR});
  benchmark::RunSpecifiedBenchmarks(&test_rep);

  for (auto& rep_test : TestCases) {
    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
    std::string banner(msg.size() - 1, '-');
    std::cout << banner << msg << banner << "\n";

    std::cerr << rep_test.err_stream.str();
    std::cout << rep_test.out_stream.str();

    TestCase::CheckCases(rep_test.error_cases, rep_test.err_stream);
    TestCase::CheckCases(rep_test.output_cases, rep_test.out_stream);

    std::cout << "\n";
  }
  return 0;
}