#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "../src/re.h"     // NOTE: re.h is for internal use only
#include <algorithm>
#include <cassert>
#include <cstring>
#include <iostream>
#include <sstream>
#include <utility>
#include <vector>

namespace {

// ========================================================================= //
// -------------------------- Testing Case --------------------------------- //
// ========================================================================= //

enum MatchRules {
  MR_Default,  // Skip non-matching lines until a match is found.
  MR_Next      // Match must occur on the next line.
};

struct TestCase {
  std::string regex;
  int match_rule;

  TestCase(std::string re, int rule = MR_Default)
      : regex(re), match_rule(rule) {}

  void Check(std::stringstream& remaining_output) const {
    benchmark::Regex r;
    std::string err_str;
    r.Init(regex, &err_str);
    CHECK(err_str.empty()) << "Could not construct regex \"" << regex << "\""
                           << " got Error: " << err_str;

    std::string line;
    while (remaining_output.eof() == false) {
      CHECK(remaining_output.good());
      std::getline(remaining_output, line);
      if (r.Match(line)) return;
      CHECK(match_rule != MR_Next) << "Expected line \"" << line
                                   << "\" to match regex \"" << regex << "\"";
    }

    CHECK(remaining_output.eof() == false)
        << "End of output reached before match for regex \"" << regex
        << "\" was found";
  }
};

std::vector<TestCase> ConsoleOutputTests;
std::vector<TestCase> JSONOutputTests;
std::vector<TestCase> CSVOutputTests;

// ========================================================================= //
// -------------------------- Test Helpers --------------------------------- //
// ========================================================================= //

class TestReporter : public benchmark::BenchmarkReporter {
 public:
  TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
      : reporters_(reps) {}

  virtual bool ReportContext(const Context& context) {
    bool last_ret = false;
    bool first = true;
    for (auto rep : reporters_) {
      bool new_ret = rep->ReportContext(context);
      CHECK(first || new_ret == last_ret)
          << "Reports return different values for ReportContext";
      first = false;
      last_ret = new_ret;
    }
    return last_ret;
  }

  virtual void ReportRuns(const std::vector<Run>& report) {
    for (auto rep : reporters_) rep->ReportRuns(report);
  }

  virtual void Finalize() {
    for (auto rep : reporters_) rep->Finalize();
  }

 private:
  std::vector<benchmark::BenchmarkReporter*> reporters_;
};

#define CONCAT2(x, y) x##y
#define CONCAT(x, y) CONCAT2(x, y)

#define ADD_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)

int AddCases(std::vector<TestCase>* out,
             std::initializer_list<TestCase> const& v) {
  for (auto const& TC : v) out->push_back(TC);
  return 0;
}

template <class First>
std::string join(First f) {
  return f;
}

template <class First, class... Args>
std::string join(First f, Args&&... args) {
  return std::string(std::move(f)) + "[ ]+" + join(std::forward<Args>(args)...);
}

std::string dec_re = "[0-9]+\\.[0-9]+";
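
// Illustrative only (the benchmark name below is made up): with the helpers
// above, a call such as
//   join("^BM_Foo_BigO", dec_re + " N", dec_re + " N")
// expands to the pattern
//   "^BM_Foo_BigO[ ]+[0-9]+\.[0-9]+ N[ ]+[0-9]+\.[0-9]+ N"
// i.e. a benchmark name followed by whitespace-separated "<decimal> <big-O>"
// columns, which is the console line shape the cases below are matched against.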

#define ADD_COMPLEXITY_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)

int AddComplexityTest(std::vector<TestCase>* console_out,
                      std::vector<TestCase>* json_out,
                      std::vector<TestCase>* csv_out,
                      std::string big_o_test_name, std::string rms_test_name,
                      std::string big_o) {
  std::string big_o_str = dec_re + " " + big_o;
  AddCases(console_out,
           {{join("^" + big_o_test_name + "", big_o_str, big_o_str) + "[ ]*$"},
            {join("^" + rms_test_name + "", "[0-9]+ %", "[0-9]+ %") + "[ ]*$"}});
  AddCases(json_out,
           {{"\"name\": \"" + big_o_test_name + "\",$"},
            {"\"cpu_coefficient\": [0-9]+,$", MR_Next},
            {"\"real_coefficient\": [0-9]{1,5},$", MR_Next},
            {"\"big_o\": \"" + big_o + "\",$", MR_Next},
            {"\"time_unit\": \"ns\"$", MR_Next},
            {"}", MR_Next},
            {"\"name\": \"" + rms_test_name + "\",$"},
            {"\"rms\": [0-9]+%$", MR_Next},
            {"}", MR_Next}});
  AddCases(csv_out,
           {{"^\"" + big_o_test_name + "\",," + dec_re + "," + dec_re + "," +
             big_o + ",,,,,$"},
            {"^\"" + rms_test_name + "\",," + dec_re + "," + dec_re +
             ",,,,,,$"}});
  return 0;
}

}  // end namespace

// ========================================================================= //
// --------------------------- Testing BigO O(1) --------------------------- //
// ========================================================================= //

void BM_Complexity_O1(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity(benchmark::o1);
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity([](int){return 1.0; });
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity();

std::string big_o_1_test_name = "BM_Complexity_O1_BigO";
std::string rms_o_1_test_name = "BM_Complexity_O1_RMS";
std::string enum_auto_big_o_1 = "\\([0-9]+\\)";
std::string lambda_big_o_1 = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, enum_auto_big_o_1);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, lambda_big_o_1);

// ========================================================================= //
// --------------------------- Testing BigO O(N) --------------------------- //
// ========================================================================= //

std::vector<int> ConstructRandomVector(int size) {
  std::vector<int> v;
  v.reserve(size);
  for (int i = 0; i < size; ++i) {
    v.push_back(rand() % size);
  }
  return v;
}

void BM_Complexity_O_N(benchmark::State& state) {
  auto v = ConstructRandomVector(state.range_x());
  const int item_not_in_vector =
      state.range_x() * 2;  // Test worst case scenario (item not in vector)
  while (state.KeepRunning()) {
    benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oN);
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) -> double{return n; });
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();

std::string big_o_n_test_name = "BM_Complexity_O_N_BigO";
std::string rms_o_n_test_name = "BM_Complexity_O_N_RMS";
std::string enum_auto_big_o_n = "N";
std::string lambda_big_o_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);
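
// For reference (values are invented, not taken from a real run), the JSON
// cases registered by ADD_COMPLEXITY_CASES expect a report fragment shaped
// like:
//   "name": "BM_Complexity_O_N_BigO",
//   "cpu_coefficient": 3,
//   "real_coefficient": 3,
//   "big_o": "N",
//   "time_unit": "ns"
// followed by a "BM_Complexity_O_N_RMS" entry whose "rms" field is a
// percentage, while the CSV cases expect rows such as:
//   "BM_Complexity_O_N_BigO",,2.33,2.31,N,,,,,
//   "BM_Complexity_O_N_RMS",,3.00,2.00,,,,,,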

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);

// ========================================================================= //
// ------------------------- Testing BigO O(N*lgN) ------------------------- //
// ========================================================================= //

static void BM_Complexity_O_N_log_N(benchmark::State& state) {
  auto v = ConstructRandomVector(state.range_x());
  while (state.KeepRunning()) {
    std::sort(v.begin(), v.end());
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oNLogN);
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) {return n * log2(n); });
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();

std::string big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
std::string rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
std::string enum_auto_big_o_n_lg_n = "NlgN";
std::string lambda_big_o_n_lg_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
                     enum_auto_big_o_n_lg_n);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
                     lambda_big_o_n_lg_n);

// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //

int main(int argc, char* argv[]) {
  // Add --color_print=false to argv since we don't want to match color codes.
  char new_arg[64];
  char* new_argv[64];
  std::copy(argv, argv + argc, new_argv);
  new_argv[argc++] = std::strcpy(new_arg, "--color_print=false");
  benchmark::Initialize(&argc, new_argv);

  benchmark::ConsoleReporter CR;
  benchmark::JSONReporter JR;
  benchmark::CSVReporter CSVR;

  struct ReporterTest {
    const char* name;
    std::vector<TestCase>& output_cases;
    benchmark::BenchmarkReporter& reporter;
    std::stringstream out_stream;
    std::stringstream err_stream;

    ReporterTest(const char* n, std::vector<TestCase>& out_tc,
                 benchmark::BenchmarkReporter& br)
        : name(n), output_cases(out_tc), reporter(br) {
      reporter.SetOutputStream(&out_stream);
      reporter.SetErrorStream(&err_stream);
    }
  } TestCases[] = {
      {"ConsoleReporter", ConsoleOutputTests, CR},
      {"JSONReporter", JSONOutputTests, JR},
      {"CSVReporter", CSVOutputTests, CSVR}
  };

  // Create the test reporter and run the benchmarks.
  std::cout << "Running benchmarks...\n";
  TestReporter test_rep({&CR, &JR, &CSVR});
  benchmark::RunSpecifiedBenchmarks(&test_rep);

  for (auto& rep_test : TestCases) {
    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
    std::string banner(msg.size() - 1, '-');
    std::cout << banner << msg << banner << "\n";

    std::cerr << rep_test.err_stream.str();
    std::cout << rep_test.out_stream.str();

    for (const auto& TC : rep_test.output_cases) TC.Check(rep_test.out_stream);

    std::cout << "\n";
  }
  return 0;
}