mirror of https://github.com/google/benchmark.git
synced 2024-12-26 12:30:14 +08:00

Simplify clang-format and apply to tests (#302)

This commit is contained in:
parent d1daeee4e9
commit 1100e91907
@@ -1,96 +1,5 @@
---
Language: Cpp
# BasedOnStyle: Google
AccessModifierOffset: -1
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlinesLeft: true
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: true
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
  AfterClass: false
  AfterControlStatement: false
  AfterEnum: false
  AfterFunction: false
  AfterNamespace: false
  AfterObjCDeclaration: false
  AfterStruct: false
  AfterUnion: false
  BeforeCatch: false
  BeforeElse: false
  IndentBraces: false
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Attach
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 80
CommentPragmas: '^ IWYU pragma:'
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: true
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ]
IncludeCategories:
  - Regex: '^<.*\.h>'
    Priority: 1
  - Regex: '^<.*'
    Priority: 2
  - Regex: '.*'
    Priority: 3
IncludeIsMainRegex: '([-_](test|unittest))?$'
IndentCaseLabels: true
IndentWidth: 2
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBlockIndentWidth: 2
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: false
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
ReflowComments: true
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Auto
TabWidth: 8
UseTab: Never
BasedOnStyle: Google
...
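The net effect of the hunk above is that the long explicit option list is dropped and formatting is driven entirely by clang-format's built-in Google preset. A minimal sketch of the resulting .clang-format, assuming only the lines kept above survive the hunk, would be:

# sketch only - reconstructed from the kept lines above, not verified against the repository
---
Language: Cpp
BasedOnStyle: Google
...

Running clang-format with this simplified configuration over the test sources is what produces the formatting-only churn in the hunks that follow.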
@@ -1,8 +1,7 @@

#include "benchmark/benchmark_api.h"

#define BASIC_BENCHMARK_TEST(x) \
BENCHMARK(x)->Arg(8)->Arg(512)->Arg(8192)
#define BASIC_BENCHMARK_TEST(x) BENCHMARK(x)->Arg(8)->Arg(512)->Arg(8192)

void BM_empty(benchmark::State& state) {
while (state.KeepRunning()) {
@@ -26,7 +25,7 @@ void BM_spin_pause_before(benchmark::State& state) {
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
}
while(state.KeepRunning()) {
while (state.KeepRunning()) {
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
}
@@ -35,9 +34,8 @@ void BM_spin_pause_before(benchmark::State& state) {
BASIC_BENCHMARK_TEST(BM_spin_pause_before);
BASIC_BENCHMARK_TEST(BM_spin_pause_before)->ThreadPerCpu();

void BM_spin_pause_during(benchmark::State& state) {
while(state.KeepRunning()) {
while (state.KeepRunning()) {
state.PauseTiming();
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
@@ -52,7 +50,7 @@ BASIC_BENCHMARK_TEST(BM_spin_pause_during);
BASIC_BENCHMARK_TEST(BM_spin_pause_during)->ThreadPerCpu();

void BM_pause_during(benchmark::State& state) {
while(state.KeepRunning()) {
while (state.KeepRunning()) {
state.PauseTiming();
state.ResumeTiming();
}
@@ -63,7 +61,7 @@ BENCHMARK(BM_pause_during)->UseRealTime();
BENCHMARK(BM_pause_during)->UseRealTime()->ThreadPerCpu();

void BM_spin_pause_after(benchmark::State& state) {
while(state.KeepRunning()) {
while (state.KeepRunning()) {
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
}
@@ -75,12 +73,11 @@ void BM_spin_pause_after(benchmark::State& state) {
BASIC_BENCHMARK_TEST(BM_spin_pause_after);
BASIC_BENCHMARK_TEST(BM_spin_pause_after)->ThreadPerCpu();

void BM_spin_pause_before_and_after(benchmark::State& state) {
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
}
while(state.KeepRunning()) {
while (state.KeepRunning()) {
for (int i = 0; i < state.range(0); ++i) {
benchmark::DoNotOptimize(i);
}
@@ -92,9 +89,9 @@ void BM_spin_pause_before_and_after(benchmark::State& state) {
BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after);
BASIC_BENCHMARK_TEST(BM_spin_pause_before_and_after)->ThreadPerCpu();

void BM_empty_stop_start(benchmark::State& state) {
while (state.KeepRunning()) { }
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_empty_stop_start);
BENCHMARK(BM_empty_stop_start)->ThreadPerCpu();
@@ -4,6 +4,7 @@
#include <math.h>
#include <stdint.h>

#include <chrono>
#include <cstdlib>
#include <iostream>
#include <limits>
@@ -13,15 +14,14 @@
#include <set>
#include <sstream>
#include <string>
#include <vector>
#include <chrono>
#include <thread>
#include <utility>
#include <vector>

#if defined(__GNUC__)
# define BENCHMARK_NOINLINE __attribute__((noinline))
#define BENCHMARK_NOINLINE __attribute__((noinline))
#else
# define BENCHMARK_NOINLINE
#define BENCHMARK_NOINLINE
#endif

namespace {
@@ -42,8 +42,7 @@ double CalculatePi(int depth) {

std::set<int> ConstructRandomSet(int size) {
std::set<int> s;
for (int i = 0; i < size; ++i)
s.insert(i);
for (int i = 0; i < size; ++i) s.insert(i);
return s;
}

@@ -54,8 +53,7 @@ std::vector<int>* test_vector = nullptr;

static void BM_Factorial(benchmark::State& state) {
int fac_42 = 0;
while (state.KeepRunning())
fac_42 = Factorial(8);
while (state.KeepRunning()) fac_42 = Factorial(8);
// Prevent compiler optimizations
std::stringstream ss;
ss << fac_42;
@@ -66,8 +64,7 @@ BENCHMARK(BM_Factorial)->UseRealTime();

static void BM_CalculatePiRange(benchmark::State& state) {
double pi = 0.0;
while (state.KeepRunning())
pi = CalculatePi(state.range(0));
while (state.KeepRunning()) pi = CalculatePi(state.range(0));
std::stringstream ss;
ss << pi;
state.SetLabel(ss.str());
@@ -89,27 +86,27 @@ static void BM_SetInsert(benchmark::State& state) {
state.PauseTiming();
std::set<int> data = ConstructRandomSet(state.range(0));
state.ResumeTiming();
for (int j = 0; j < state.range(1); ++j)
data.insert(rand());
for (int j = 0; j < state.range(1); ++j) data.insert(rand());
}
state.SetItemsProcessed(state.iterations() * state.range(1));
state.SetBytesProcessed(state.iterations() * state.range(1) * sizeof(int));
}
BENCHMARK(BM_SetInsert)->Ranges({{1<<10,8<<10}, {1,10}});
BENCHMARK(BM_SetInsert)->Ranges({{1 << 10, 8 << 10}, {1, 10}});

template<typename Container, typename ValueType = typename Container::value_type>
template <typename Container,
typename ValueType = typename Container::value_type>
static void BM_Sequential(benchmark::State& state) {
ValueType v = 42;
while (state.KeepRunning()) {
Container c;
for (int i = state.range(0); --i; )
c.push_back(v);
for (int i = state.range(0); --i;) c.push_back(v);
}
const size_t items_processed = state.iterations() * state.range(0);
state.SetItemsProcessed(items_processed);
state.SetBytesProcessed(items_processed * sizeof(v));
}
BENCHMARK_TEMPLATE2(BM_Sequential, std::vector<int>, int)->Range(1 << 0, 1 << 10);
BENCHMARK_TEMPLATE2(BM_Sequential, std::vector<int>, int)
->Range(1 << 0, 1 << 10);
BENCHMARK_TEMPLATE(BM_Sequential, std::list<int>)->Range(1 << 0, 1 << 10);
// Test the variadic version of BENCHMARK_TEMPLATE in C++11 and beyond.
#if __cplusplus >= 201103L
@@ -119,10 +116,9 @@ BENCHMARK_TEMPLATE(BM_Sequential, std::vector<int>, int)->Arg(512);
static void BM_StringCompare(benchmark::State& state) {
std::string s1(state.range(0), '-');
std::string s2(state.range(0), '-');
while (state.KeepRunning())
benchmark::DoNotOptimize(s1.compare(s2));
while (state.KeepRunning()) benchmark::DoNotOptimize(s1.compare(s2));
}
BENCHMARK(BM_StringCompare)->Range(1, 1<<20);
BENCHMARK(BM_StringCompare)->Range(1, 1 << 20);

static void BM_SetupTeardown(benchmark::State& state) {
if (state.thread_index == 0) {
@@ -132,7 +128,7 @@ static void BM_SetupTeardown(benchmark::State& state) {
int i = 0;
while (state.KeepRunning()) {
std::lock_guard<std::mutex> l(test_vector_mu);
if (i%2 == 0)
if (i % 2 == 0)
test_vector->push_back(i);
else
test_vector->pop_back();
@@ -151,7 +147,7 @@ static void BM_LongTest(benchmark::State& state) {
benchmark::DoNotOptimize(tracker += i);
}
}
BENCHMARK(BM_LongTest)->Range(1<<16,1<<28);
BENCHMARK(BM_LongTest)->Range(1 << 16, 1 << 28);

static void BM_ParallelMemset(benchmark::State& state) {
int size = state.range(0) / sizeof(int);
@@ -180,20 +176,18 @@ BENCHMARK(BM_ParallelMemset)->Arg(10 << 20)->ThreadRange(1, 4);
static void BM_ManualTiming(benchmark::State& state) {
size_t slept_for = 0;
int microseconds = state.range(0);
std::chrono::duration<double, std::micro> sleep_duration {
static_cast<double>(microseconds)
};
std::chrono::duration<double, std::micro> sleep_duration{
static_cast<double>(microseconds)};

while (state.KeepRunning()) {
auto start = std::chrono::high_resolution_clock::now();
auto start = std::chrono::high_resolution_clock::now();
// Simulate some useful workload with a sleep
std::this_thread::sleep_for(std::chrono::duration_cast<
std::chrono::nanoseconds>(sleep_duration));
auto end = std::chrono::high_resolution_clock::now();
std::this_thread::sleep_for(
std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
auto end = std::chrono::high_resolution_clock::now();

auto elapsed =
std::chrono::duration_cast<std::chrono::duration<double>>(
end - start);
std::chrono::duration_cast<std::chrono::duration<double>>(end - start);

state.SetIterationTime(elapsed.count());
slept_for += microseconds;
@@ -205,35 +199,37 @@ BENCHMARK(BM_ManualTiming)->Range(1, 1 << 14)->UseManualTime();

#if __cplusplus >= 201103L

template <class ...Args>
template <class... Args>
void BM_with_args(benchmark::State& state, Args&&...) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK_CAPTURE(BM_with_args, int_test, 42, 43, 44);
BENCHMARK_CAPTURE(BM_with_args, string_and_pair_test,
std::string("abc"), std::pair<int, double>(42, 3.8));
BENCHMARK_CAPTURE(BM_with_args, string_and_pair_test, std::string("abc"),
std::pair<int, double>(42, 3.8));

void BM_non_template_args(benchmark::State& state, int, double) {
while(state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK_CAPTURE(BM_non_template_args, basic_test, 0, 0);

#endif // __cplusplus >= 201103L
#endif  // __cplusplus >= 201103L

static void BM_DenseThreadRanges(benchmark::State &st) {
static void BM_DenseThreadRanges(benchmark::State& st) {
switch (st.range(0)) {
case 1:
assert(st.threads == 1 || st.threads == 2 || st.threads == 3);
break;
case 2:
assert(st.threads == 1 || st.threads == 3 || st.threads == 4);
break;
case 3:
assert(st.threads == 5 || st.threads == 8 || st.threads == 11 ||
st.threads == 14);
break;
default:
assert(false && "Invalid test case number");
case 1:
assert(st.threads == 1 || st.threads == 2 || st.threads == 3);
break;
case 2:
assert(st.threads == 1 || st.threads == 3 || st.threads == 4);
break;
case 3:
assert(st.threads == 5 || st.threads == 8 || st.threads == 11 ||
st.threads == 14);
break;
default:
assert(false && "Invalid test case number");
}
while (st.KeepRunning()) {
}
@@ -1,47 +1,41 @@
#undef NDEBUG
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdlib>
#include <vector>
#include "benchmark/benchmark.h"
#include "output_test.h"
#include <cassert>
#include <vector>
#include <algorithm>
#include <cstdlib>
#include <cmath>

namespace {

#define ADD_COMPLEXITY_CASES(...) \
int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)

int AddComplexityTest(std::string big_o_test_name,
std::string rms_test_name, std::string big_o) {
SetSubstitutions({
{"%bigo_name", big_o_test_name},
{"%rms_name", rms_test_name},
{"%bigo_str", "[ ]*" + std::string(dec_re) + " " + big_o},
{"%bigo", big_o},
{"%rms", "[ ]*[0-9]+ %"}
});
AddCases(TC_ConsoleOut, {
{"^%bigo_name %bigo_str %bigo_str[ ]*$"},
{"^%bigo_name", MR_Not}, // Assert we we didn't only matched a name.
{"^%rms_name %rms %rms[ ]*$", MR_Next}
});
AddCases(TC_JSONOut, {
{"\"name\": \"%bigo_name\",$"},
{"\"cpu_coefficient\": [0-9]+,$", MR_Next},
{"\"real_coefficient\": [0-9]{1,5},$", MR_Next},
{"\"big_o\": \"%bigo\",$", MR_Next},
{"\"time_unit\": \"ns\"$", MR_Next},
{"}", MR_Next},
{"\"name\": \"%rms_name\",$"},
{"\"rms\": [0-9]+%$", MR_Next},
{"}", MR_Next}
});
AddCases(TC_CSVOut, {
{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
{"^\"%bigo_name\"", MR_Not},
{"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}
});
int AddComplexityTest(std::string big_o_test_name, std::string rms_test_name,
std::string big_o) {
SetSubstitutions({{"%bigo_name", big_o_test_name},
{"%rms_name", rms_test_name},
{"%bigo_str", "[ ]*" + std::string(dec_re) + " " + big_o},
{"%bigo", big_o},
{"%rms", "[ ]*[0-9]+ %"}});
AddCases(
TC_ConsoleOut,
{{"^%bigo_name %bigo_str %bigo_str[ ]*$"},
{"^%bigo_name", MR_Not}, // Assert we we didn't only matched a name.
{"^%rms_name %rms %rms[ ]*$", MR_Next}});
AddCases(TC_JSONOut, {{"\"name\": \"%bigo_name\",$"},
{"\"cpu_coefficient\": [0-9]+,$", MR_Next},
{"\"real_coefficient\": [0-9]{1,5},$", MR_Next},
{"\"big_o\": \"%bigo\",$", MR_Next},
{"\"time_unit\": \"ns\"$", MR_Next},
{"}", MR_Next},
{"\"name\": \"%rms_name\",$"},
{"\"rms\": [0-9]+%$", MR_Next},
{"}", MR_Next}});
AddCases(TC_CSVOut, {{"^\"%bigo_name\",,%float,%float,%bigo,,,,,$"},
{"^\"%bigo_name\"", MR_Not},
{"^\"%rms_name\",,%float,%float,,,,,,$", MR_Next}});
return 0;
}

@@ -53,23 +47,26 @@ int AddComplexityTest(std::string big_o_test_name,

void BM_Complexity_O1(benchmark::State& state) {
while (state.KeepRunning()) {
for (int i=0; i < 1024; ++i) {
benchmark::DoNotOptimize(&i);
}
for (int i = 0; i < 1024; ++i) {
benchmark::DoNotOptimize(&i);
}
}
state.SetComplexityN(state.range(0));
}
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity(benchmark::o1);
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity();
BENCHMARK(BM_Complexity_O1) -> Range(1, 1<<18) -> Complexity([](int){return 1.0; });
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int) {
return 1.0;
});

const char* big_o_1_test_name = "BM_Complexity_O1_BigO";
const char* rms_o_1_test_name = "BM_Complexity_O1_RMS";
const char* enum_big_o_1 = "\\([0-9]+\\)";
// FIXME: Tolerate both '(1)' and 'lgN' as output when the complexity is auto deduced.
const char *big_o_1_test_name = "BM_Complexity_O1_BigO";
const char *rms_o_1_test_name = "BM_Complexity_O1_RMS";
const char *enum_big_o_1 = "\\([0-9]+\\)";
// FIXME: Tolerate both '(1)' and 'lgN' as output when the complexity is auto
// deduced.
// See https://github.com/google/benchmark/issues/272
const char* auto_big_o_1 = "(\\([0-9]+\\))|(lgN)";
const char* lambda_big_o_1 = "f\\(N\\)";
const char *auto_big_o_1 = "(\\([0-9]+\\))|(lgN)";
const char *lambda_big_o_1 = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(big_o_1_test_name, rms_o_1_test_name, enum_big_o_1);
@@ -95,20 +92,30 @@ std::vector<int> ConstructRandomVector(int size) {

void BM_Complexity_O_N(benchmark::State& state) {
auto v = ConstructRandomVector(state.range(0));
const int item_not_in_vector = state.range(0)*2; // Test worst case scenario (item not in vector)
const int item_not_in_vector =
state.range(0) * 2; // Test worst case scenario (item not in vector)
while (state.KeepRunning()) {
benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
benchmark::DoNotOptimize(std::find(v.begin(), v.end(), item_not_in_vector));
}
state.SetComplexityN(state.range(0));
}
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oN);
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) -> double{return n; });
BENCHMARK(BM_Complexity_O_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();
BENCHMARK(BM_Complexity_O_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity(benchmark::oN);
BENCHMARK(BM_Complexity_O_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity([](int n) -> double { return n; });
BENCHMARK(BM_Complexity_O_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity();

const char* big_o_n_test_name = "BM_Complexity_O_N_BigO";
const char* rms_o_n_test_name = "BM_Complexity_O_N_RMS";
const char* enum_auto_big_o_n = "N";
const char* lambda_big_o_n = "f\\(N\\)";
const char *big_o_n_test_name = "BM_Complexity_O_N_BigO";
const char *rms_o_n_test_name = "BM_Complexity_O_N_RMS";
const char *enum_auto_big_o_n = "N";
const char *lambda_big_o_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);
@@ -123,31 +130,38 @@ ADD_COMPLEXITY_CASES(big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);
static void BM_Complexity_O_N_log_N(benchmark::State& state) {
auto v = ConstructRandomVector(state.range(0));
while (state.KeepRunning()) {
std::sort(v.begin(), v.end());
std::sort(v.begin(), v.end());
}
state.SetComplexityN(state.range(0));
}
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity(benchmark::oNLogN);
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity([](int n) {return n * std::log2(n); });
BENCHMARK(BM_Complexity_O_N_log_N) -> RangeMultiplier(2) -> Range(1<<10, 1<<16) -> Complexity();
BENCHMARK(BM_Complexity_O_N_log_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity(benchmark::oNLogN);
BENCHMARK(BM_Complexity_O_N_log_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity([](int n) { return n * std::log2(n); });
BENCHMARK(BM_Complexity_O_N_log_N)
->RangeMultiplier(2)
->Range(1 << 10, 1 << 16)
->Complexity();

const char* big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
const char* rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
const char* enum_auto_big_o_n_lg_n = "NlgN";
const char* lambda_big_o_n_lg_n = "f\\(N\\)";
const char *big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
const char *rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
const char *enum_auto_big_o_n_lg_n = "NlgN";
const char *lambda_big_o_n_lg_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, enum_auto_big_o_n_lg_n);
ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
enum_auto_big_o_n_lg_n);

// Add lambda tests
ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name, lambda_big_o_n_lg_n);

ADD_COMPLEXITY_CASES(big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
lambda_big_o_n_lg_n);

// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //

int main(int argc, char* argv[]) {
RunOutputTests(argc, argv);
}

int main(int argc, char *argv[]) { RunOutputTests(argc, argv); }
@@ -1,6 +1,6 @@
#undef NDEBUG
#include <cstddef>
#include <cassert>
#include <cstddef>

#include "benchmark/benchmark.h"

@@ -9,32 +9,32 @@
#endif

void BM_empty(benchmark::State& state) {
while (state.KeepRunning()) {
volatile std::size_t x = state.iterations();
((void)x);
}
while (state.KeepRunning()) {
volatile std::size_t x = state.iterations();
((void)x);
}
}
BENCHMARK(BM_empty);

// The new C++11 interface for args/ranges requires initializer list support.
// Therefore we provide the old interface to support C++03.
void BM_old_arg_range_interface(benchmark::State& state) {
assert((state.range(0) == 1 && state.range(1) == 2) ||
(state.range(0) == 5 && state.range(1) == 6));
while (state.KeepRunning()) {
}
assert((state.range(0) == 1 && state.range(1) == 2) ||
(state.range(0) == 5 && state.range(1) == 6));
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_old_arg_range_interface)->ArgPair(1, 2)->RangePair(5, 5, 6, 6);

template <class T, class U>
void BM_template2(benchmark::State& state) {
BM_empty(state);
BM_empty(state);
}
BENCHMARK_TEMPLATE2(BM_template2, int, long);

template <class T>
void BM_template1(benchmark::State& state) {
BM_empty(state);
BM_empty(state);
}
BENCHMARK_TEMPLATE(BM_template1, long);
BENCHMARK_TEMPLATE1(BM_template1, int);
@@ -7,10 +7,11 @@
// NOTE: Users should NOT include or use src/check.h. This is only done in
// order to test library internals.

#include "benchmark/benchmark_api.h"
#include "../src/check.h"
#include <stdexcept>
#include <cstdlib>
#include <stdexcept>

#include "../src/check.h"
#include "benchmark/benchmark_api.h"

#if defined(__GNUC__) && !defined(__EXCEPTIONS)
#define TEST_HAS_NO_EXCEPTIONS
@@ -29,13 +30,15 @@ void try_invalid_pause_resume(benchmark::State& state) {
try {
state.PauseTiming();
std::abort();
} catch (std::logic_error const&) {}
} catch (std::logic_error const&) {
}
try {
state.ResumeTiming();
std::abort();
} catch (std::logic_error const&) {}
} catch (std::logic_error const&) {
}
#else
(void)state; // avoid unused warning
(void)state;  // avoid unused warning
#endif
}

@@ -54,7 +57,7 @@ void BM_diagnostic_test(benchmark::State& state) {
}
BENCHMARK(BM_diagnostic_test);

int main(int argc, char** argv) {
int main(int argc, char* argv[]) {
benchmark::internal::GetAbortHandler() = &TestHandler;
benchmark::Initialize(&argc, argv);
benchmark::RunSpecifiedBenchmarks();
@@ -4,15 +4,12 @@

namespace {
#if defined(__GNUC__)
std::uint64_t double_up(const std::uint64_t x) __attribute__ ((const));
std::uint64_t double_up(const std::uint64_t x) __attribute__((const));
#endif
std::uint64_t double_up(const std::uint64_t x) {
return x * 2;
}
std::uint64_t double_up(const std::uint64_t x) { return x * 2; }
}

int main(int, char*[]) {

// this test verifies compilation of DoNotOptimize() for some types

char buffer8[8];
@@ -27,9 +27,7 @@ class TestReporter : public benchmark::ConsoleReporter {

virtual ~TestReporter() {}

size_t GetCount() const {
return count_;
}
size_t GetCount() const { return count_; }

private:
mutable size_t count_;
@@ -37,46 +35,47 @@ class TestReporter : public benchmark::ConsoleReporter {

} // end namespace

static void NoPrefix(benchmark::State& state) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(NoPrefix);

static void BM_Foo(benchmark::State& state) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_Foo);

static void BM_Bar(benchmark::State& state) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_Bar);

static void BM_FooBar(benchmark::State& state) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_FooBar);

static void BM_FooBa(benchmark::State& state) {
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_FooBa);

int main(int argc, char** argv) {
int main(int argc, char **argv) {
bool list_only = false;
for (int i=0; i < argc; ++i)
list_only |= std::string(argv[i]).find("--benchmark_list_tests") != std::string::npos;
for (int i = 0; i < argc; ++i)
list_only |= std::string(argv[i]).find("--benchmark_list_tests") !=
std::string::npos;

benchmark::Initialize(&argc, argv);

TestReporter test_reporter;
const size_t returned_count = benchmark::RunSpecifiedBenchmarks(&test_reporter);
const size_t returned_count =
benchmark::RunSpecifiedBenchmarks(&test_reporter);

if (argc == 2) {
// Make sure we ran all of the tests
@@ -20,15 +20,12 @@ class MyFixture : public ::benchmark::Fixture {
}
}

~MyFixture() {
assert(data == nullptr);
}
~MyFixture() { assert(data == nullptr); }

std::unique_ptr<int> data;
};

BENCHMARK_F(MyFixture, Foo)(benchmark::State& st) {
BENCHMARK_F(MyFixture, Foo)(benchmark::State &st) {
assert(data.get() != nullptr);
assert(*data == 42);
while (st.KeepRunning()) {
@@ -37,9 +37,7 @@ class MapFixture : public ::benchmark::Fixture {
m = ConstructRandomMap(st.range(0));
}

void TearDown(const ::benchmark::State&) {
m.clear();
}
void TearDown(const ::benchmark::State&) { m.clear(); }

std::map<int, int> m;
};
@@ -53,6 +51,6 @@ BENCHMARK_DEFINE_F(MapFixture, Lookup)(benchmark::State& state) {
}
state.SetItemsProcessed(state.iterations() * size);
}
BENCHMARK_REGISTER_F(MapFixture, Lookup)->Range(1<<3, 1<<12);
BENCHMARK_REGISTER_F(MapFixture, Lookup)->Range(1 << 3, 1 << 12);

BENCHMARK_MAIN()
@@ -1,19 +1,30 @@
#include "benchmark/benchmark.h"

#include <set>
#include <cassert>
#include <set>

class MultipleRangesFixture : public ::benchmark::Fixture {
public:
MultipleRangesFixture()
: expectedValues({
{1, 3, 5}, {1, 3, 8}, {1, 3, 15}, {2, 3, 5}, {2, 3, 8}, {2, 3, 15},
{1, 4, 5}, {1, 4, 8}, {1, 4, 15}, {2, 4, 5}, {2, 4, 8}, {2, 4, 15},
{1, 7, 5}, {1, 7, 8}, {1, 7, 15}, {2, 7, 5}, {2, 7, 8}, {2, 7, 15},
{7, 6, 3}
})
{
}
: expectedValues({{1, 3, 5},
{1, 3, 8},
{1, 3, 15},
{2, 3, 5},
{2, 3, 8},
{2, 3, 15},
{1, 4, 5},
{1, 4, 8},
{1, 4, 15},
{2, 4, 5},
{2, 4, 8},
{2, 4, 15},
{1, 7, 5},
{1, 7, 8},
{1, 7, 15},
{2, 7, 5},
{2, 7, 8},
{2, 7, 15},
{7, 6, 3}}) {}

void SetUp(const ::benchmark::State& state) {
std::vector<int> ranges = {state.range(0), state.range(1), state.range(2)};
@@ -26,12 +37,11 @@ class MultipleRangesFixture : public ::benchmark::Fixture {
virtual ~MultipleRangesFixture() {
assert(actualValues.size() == expectedValues.size());
}

std::set<std::vector<int>> expectedValues;
std::set<std::vector<int>> actualValues;
};

BENCHMARK_DEFINE_F(MultipleRangesFixture, Empty)(benchmark::State& state) {
while (state.KeepRunning()) {
int product = state.range(0) * state.range(1) * state.range(2);
@@ -41,21 +51,24 @@ BENCHMARK_DEFINE_F(MultipleRangesFixture, Empty)(benchmark::State& state) {
}
}

BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)->RangeMultiplier(2)
->Ranges({{1, 2}, {3, 7}, {5, 15}})->Args({7, 6, 3});
BENCHMARK_REGISTER_F(MultipleRangesFixture, Empty)
->RangeMultiplier(2)
->Ranges({{1, 2}, {3, 7}, {5, 15}})
->Args({7, 6, 3});

void BM_CheckDefaultArgument(benchmark::State& state) {
// Test that the 'range()' without an argument is the same as 'range(0)'.
assert(state.range() == state.range(0));
assert(state.range() != state.range(1));
while (state.KeepRunning()) {}
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_CheckDefaultArgument)->Ranges({{1, 5}, {6, 10}});

static void BM_MultipleRanges(benchmark::State& st) {
while (st.KeepRunning()) {}
while (st.KeepRunning()) {
}
}
BENCHMARK(BM_MultipleRanges)->Ranges({{5, 5}, {6, 6}});

BENCHMARK_MAIN()
@@ -12,8 +12,7 @@ void BM_basic_slow(benchmark::State& state) {
std::chrono::milliseconds sleep_duration(state.range(0));
while (state.KeepRunning()) {
std::this_thread::sleep_for(
std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration)
);
std::chrono::duration_cast<std::chrono::nanoseconds>(sleep_duration));
}
}
@@ -2,28 +2,28 @@
#define TEST_OUTPUT_TEST_H

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/re.h"
#include <vector>
#include <string>
#include <initializer_list>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "../src/re.h"
#include "benchmark/benchmark.h"

#define CONCAT2(x, y) x##y
#define CONCAT(x, y) CONCAT2(x, y)

#define ADD_CASES(...) \
int CONCAT(dummy, __LINE__) = ::AddCases(__VA_ARGS__)
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = ::AddCases(__VA_ARGS__)

#define SET_SUBSTITUTIONS(...) \
int CONCAT(dummy, __LINE__) = ::SetSubstitutions(__VA_ARGS__)
int CONCAT(dummy, __LINE__) = ::SetSubstitutions(__VA_ARGS__)

enum MatchRules {
MR_Default, // Skip non-matching lines until a match is found.
MR_Next, // Match must occur on the next line.
MR_Not // No line between the current position and the next match matches
// the regex
MR_Default,  // Skip non-matching lines until a match is found.
MR_Next,     // Match must occur on the next line.
MR_Not  // No line between the current position and the next match matches
        // the regex
};

struct TestCase {
@@ -43,7 +43,7 @@ enum TestCaseID {
TC_CSVOut,
TC_CSVErr,

TC_NumID // PRIVATE
TC_NumID  // PRIVATE
};

// Add a list of test cases to be run against the output specified by
@@ -66,7 +66,6 @@ namespace {

const char* const dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";

} // end namespace
}  // end namespace

#endif // TEST_OUTPUT_TEST_H
#endif  // TEST_OUTPUT_TEST_H
@@ -1,16 +1,17 @@
#include "output_test.h"
#include "../src/check.h" // NOTE: check.h is for internal use only!
#include "../src/re.h" // NOTE: re.h is for internal use only
#include <memory>
#include <map>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "../src/re.h"     // NOTE: re.h is for internal use only
#include "output_test.h"

// ========================================================================= //
// ------------------------------ Internals -------------------------------- //
// ========================================================================= //
namespace internal { namespace {
namespace internal {
namespace {

using TestCaseList = std::vector<TestCase>;

@@ -22,91 +23,89 @@ using TestCaseList = std::vector<TestCase>;
using SubMap = std::vector<std::pair<std::string, std::string>>;

TestCaseList& GetTestCaseList(TestCaseID ID) {
// Uses function-local statics to ensure initialization occurs
// before first use.
static TestCaseList lists[TC_NumID];
return lists[ID];
// Uses function-local statics to ensure initialization occurs
// before first use.
static TestCaseList lists[TC_NumID];
return lists[ID];
}

SubMap& GetSubstitutions() {
// Don't use 'dec_re' from header because it may not yet be initialized.
static std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
static SubMap map = {
{"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},
{"%int", "[ ]*[0-9]+"},
{" %s ", "[ ]+"},
{"%time", "[ ]*[0-9]{1,5} ns"},
{"%console_report", "[ ]*[0-9]{1,5} ns [ ]*[0-9]{1,5} ns [ ]*[0-9]+"},
{"%csv_report", "[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,"}
};
return map;
// Don't use 'dec_re' from header because it may not yet be initialized.
static std::string dec_re = "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?";
static SubMap map = {
{"%float", "[0-9]*[.]?[0-9]+([eE][-+][0-9]+)?"},
{"%int", "[ ]*[0-9]+"},
{" %s ", "[ ]+"},
{"%time", "[ ]*[0-9]{1,5} ns"},
{"%console_report", "[ ]*[0-9]{1,5} ns [ ]*[0-9]{1,5} ns [ ]*[0-9]+"},
{"%csv_report", "[0-9]+," + dec_re + "," + dec_re + ",ns,,,,,"}};
return map;
}

std::string PerformSubstitutions(std::string source) {
SubMap const& subs = GetSubstitutions();
using SizeT = std::string::size_type;
for (auto const& KV : subs) {
SizeT pos;
SizeT next_start = 0;
while ((pos = source.find(KV.first, next_start)) != std::string::npos) {
next_start = pos + KV.second.size();
source.replace(pos, KV.first.size(), KV.second);
}
SubMap const& subs = GetSubstitutions();
using SizeT = std::string::size_type;
for (auto const& KV : subs) {
SizeT pos;
SizeT next_start = 0;
while ((pos = source.find(KV.first, next_start)) != std::string::npos) {
next_start = pos + KV.second.size();
source.replace(pos, KV.first.size(), KV.second);
}
return source;
}
return source;
}

void CheckCase(std::stringstream& remaining_output, TestCase const& TC,
TestCaseList const& not_checks)
{
std::string first_line;
bool on_first = true;
std::string line;
while (remaining_output.eof() == false) {
CHECK(remaining_output.good());
std::getline(remaining_output, line);
if (on_first) {
first_line = line;
on_first = false;
}
for (auto& NC : not_checks) {
CHECK(!NC.regex->Match(line))
<< "Unexpected match for line \"" << line
<< "\" for MR_Not regex \"" << NC.regex_str << "\""
<< "\n    actual regex string \"" << TC.substituted_regex << "\""
<< "\n    started matching near: " << first_line;
}
if (TC.regex->Match(line)) return;
CHECK(TC.match_rule != MR_Next)
<< "Expected line \"" << line << "\" to match regex \"" << TC.regex_str << "\""
<< "\n    actual regex string \"" << TC.substituted_regex << "\""
<< "\n    started matching near: " << first_line;
TestCaseList const& not_checks) {
std::string first_line;
bool on_first = true;
std::string line;
while (remaining_output.eof() == false) {
CHECK(remaining_output.good());
std::getline(remaining_output, line);
if (on_first) {
first_line = line;
on_first = false;
}
CHECK(remaining_output.eof() == false)
<< "End of output reached before match for regex \"" << TC.regex_str
<< "\" was found"
for (const auto& NC : not_checks) {
CHECK(!NC.regex->Match(line))
<< "Unexpected match for line \"" << line << "\" for MR_Not regex \""
<< NC.regex_str << "\""
<< "\n    actual regex string \"" << TC.substituted_regex << "\""
<< "\n    started matching near: " << first_line;
}
if (TC.regex->Match(line)) return;
CHECK(TC.match_rule != MR_Next)
<< "Expected line \"" << line << "\" to match regex \"" << TC.regex_str
<< "\""
<< "\n    actual regex string \"" << TC.substituted_regex << "\""
<< "\n    started matching near: " << first_line;
}
CHECK(remaining_output.eof() == false)
<< "End of output reached before match for regex \"" << TC.regex_str
<< "\" was found"
<< "\n    actual regex string \"" << TC.substituted_regex << "\""
<< "\n    started matching near: " << first_line;
}

void CheckCases(TestCaseList const& checks, std::stringstream& output) {
std::vector<TestCase> not_checks;
for (size_t i=0; i < checks.size(); ++i) {
const auto& TC = checks[i];
if (TC.match_rule == MR_Not) {
not_checks.push_back(TC);
continue;
}
CheckCase(output, TC, not_checks);
not_checks.clear();
std::vector<TestCase> not_checks;
for (size_t i = 0; i < checks.size(); ++i) {
const auto& TC = checks[i];
if (TC.match_rule == MR_Not) {
not_checks.push_back(TC);
continue;
}
CheckCase(output, TC, not_checks);
not_checks.clear();
}
}

class TestReporter : public benchmark::BenchmarkReporter {
public:
 public:
TestReporter(std::vector<benchmark::BenchmarkReporter*> reps)
: reporters_(reps) {}
: reporters_(reps) {}

virtual bool ReportContext(const Context& context) {
bool last_ret = false;
@@ -118,56 +117,61 @@ public:
first = false;
last_ret = new_ret;
}
(void)first;
return last_ret;
}

void ReportRuns(const std::vector<Run>& report)
{ for (auto rep : reporters_) rep->ReportRuns(report); }
void Finalize() { for (auto rep : reporters_) rep->Finalize(); }
void ReportRuns(const std::vector<Run>& report) {
for (auto rep : reporters_) rep->ReportRuns(report);
}
void Finalize() {
for (auto rep : reporters_) rep->Finalize();
}

private:
std::vector<benchmark::BenchmarkReporter*> reporters_;
 private:
std::vector<benchmark::BenchmarkReporter *> reporters_;
};

}} // end namespace internal
}
}  // end namespace internal

// ========================================================================= //
// -------------------------- Public API Definitions------------------------ //
// ========================================================================= //

TestCase::TestCase(std::string re, int rule)
: regex_str(std::move(re)), match_rule(rule),
: regex_str(std::move(re)),
match_rule(rule),
substituted_regex(internal::PerformSubstitutions(regex_str)),
regex(std::make_shared<benchmark::Regex>())
{
std::string err_str;
regex->Init(substituted_regex, &err_str);
CHECK(err_str.empty())
<< "Could not construct regex \"" << substituted_regex << "\""
<< "\n    originally \"" << regex_str << "\""
<< "\n    got error: " << err_str;
regex(std::make_shared<benchmark::Regex>()) {
std::string err_str;
regex->Init(substituted_regex, &err_str);
CHECK(err_str.empty()) << "Could not construct regex \"" << substituted_regex
<< "\""
<< "\n    originally \"" << regex_str << "\""
<< "\n    got error: " << err_str;
}

int AddCases(TestCaseID ID, std::initializer_list<TestCase> il) {
auto& L = internal::GetTestCaseList(ID);
L.insert(L.end(), il);
return 0;
auto& L = internal::GetTestCaseList(ID);
L.insert(L.end(), il);
return 0;
}

int SetSubstitutions(std::initializer_list<std::pair<std::string, std::string>> il) {
auto& subs = internal::GetSubstitutions();
for (auto const& KV : il) {
bool exists = false;
for (auto& EKV : subs) {
if (EKV.first == KV.first) {
EKV.second = KV.second;
exists = true;
break;
}
}
if (!exists) subs.push_back(KV);
int SetSubstitutions(
std::initializer_list<std::pair<std::string, std::string>> il) {
auto& subs = internal::GetSubstitutions();
for (auto const& KV : il) {
bool exists = false;
for (auto& EKV : subs) {
if (EKV.first == KV.first) {
EKV.second = KV.second;
exists = true;
break;
}
}
return 0;
if (!exists) subs.push_back(KV);
}
return 0;
}

void RunOutputTests(int argc, char* argv[]) {
@@ -184,21 +188,20 @@ void RunOutputTests(int argc, char* argv[]) {
std::stringstream out_stream;
std::stringstream err_stream;

ReporterTest(const char* n,
std::vector<TestCase>& out_tc,
ReporterTest(const char* n, std::vector<TestCase>& out_tc,
std::vector<TestCase>& err_tc,
benchmark::BenchmarkReporter& br)
: name(n), output_cases(out_tc), error_cases(err_tc), reporter(br) {
reporter.SetOutputStream(&out_stream);
reporter.SetErrorStream(&err_stream);
reporter.SetOutputStream(&out_stream);
reporter.SetErrorStream(&err_stream);
}
} TestCases[] = {
{"ConsoleReporter", GetTestCaseList(TC_ConsoleOut),
GetTestCaseList(TC_ConsoleErr), CR},
{"JSONReporter", GetTestCaseList(TC_JSONOut),
GetTestCaseList(TC_JSONErr), JR},
{"CSVReporter", GetTestCaseList(TC_CSVOut),
GetTestCaseList(TC_CSVErr), CSVR},
GetTestCaseList(TC_ConsoleErr), CR},
{"JSONReporter", GetTestCaseList(TC_JSONOut), GetTestCaseList(TC_JSONErr),
JR},
{"CSVReporter", GetTestCaseList(TC_CSVOut), GetTestCaseList(TC_CSVErr),
CSVR},
};

// Create the test reporter and run the benchmarks.
@@ -207,18 +210,16 @@ void RunOutputTests(int argc, char* argv[]) {
benchmark::RunSpecifiedBenchmarks(&test_rep);

for (auto& rep_test : TestCases) {
std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
std::string banner(msg.size() - 1, '-');
std::cout << banner << msg << banner << "\n";
std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
std::string banner(msg.size() - 1, '-');
std::cout << banner << msg << banner << "\n";

std::cerr << rep_test.err_stream.str();
std::cout << rep_test.out_stream.str();
std::cerr << rep_test.err_stream.str();
std::cout << rep_test.out_stream.str();

internal::CheckCases(rep_test.error_cases,rep_test.err_stream);
internal::CheckCases(rep_test.output_cases, rep_test.out_stream);
internal::CheckCases(rep_test.error_cases, rep_test.err_stream);
internal::CheckCases(rep_test.output_cases, rep_test.out_stream);

std::cout << "\n";
std::cout << "\n";
}
}
@@ -1,14 +1,15 @@

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h" // NOTE: check.h is for internal use only!
#include <cassert>
#include <vector>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "benchmark/benchmark.h"

namespace {

class TestReporter : public benchmark::ConsoleReporter {
public:
 public:
virtual void ReportRuns(const std::vector<Run>& report) {
all_runs_.insert(all_runs_.end(), begin(report), end(report));
ConsoleReporter::ReportRuns(report);
@@ -20,18 +21,19 @@ public:
struct TestCase {
std::string name;
const char* label;
TestCase(const char* xname) : name(xname), label(nullptr) {}
// Note: not explicit as we rely on it being converted through ADD_CASES.
TestCase(const char* xname) : TestCase(xname, nullptr) {}
TestCase(const char* xname, const char* xlabel)
: name(xname), label(xlabel) {}
: name(xname), label(xlabel) {}

typedef benchmark::BenchmarkReporter::Run Run;

void CheckRun(Run const& run) const {
CHECK(name == run.benchmark_name) << "expected " << name
<< " got " << run.benchmark_name;
CHECK(name == run.benchmark_name) << "expected " << name << " got "
<< run.benchmark_name;
if (label) {
CHECK(run.report_label == label) << "expected " << label
<< " got " << run.report_label;
CHECK(run.report_label == label) << "expected " << label << " got "
<< run.report_label;
} else {
CHECK(run.report_label == "");
}
@@ -49,8 +51,7 @@ int AddCases(std::initializer_list<TestCase> const& v) {

#define CONCAT(x, y) CONCAT2(x, y)
#define CONCAT2(x, y) x##y
#define ADD_CASES(...) \
int CONCAT(dummy, __LINE__) = AddCases({__VA_ARGS__})
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases({__VA_ARGS__})

} // end namespace

@@ -59,11 +60,13 @@ typedef benchmark::internal::Benchmark* ReturnVal;
//----------------------------------------------------------------------------//
// Test RegisterBenchmark with no additional arguments
//----------------------------------------------------------------------------//
void BM_function(benchmark::State& state) { while (state.KeepRunning()) {} }
void BM_function(benchmark::State& state) {
while (state.KeepRunning()) {
}
}
BENCHMARK(BM_function);
ReturnVal dummy = benchmark::RegisterBenchmark(
"BM_function_manual_registration",
BM_function);
"BM_function_manual_registration", BM_function);
ADD_CASES({"BM_function"}, {"BM_function_manual_registration"});

//----------------------------------------------------------------------------//
@@ -74,27 +77,21 @@ ADD_CASES({"BM_function"}, {"BM_function_manual_registration"});
#ifndef BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK

void BM_extra_args(benchmark::State& st, const char* label) {
while (st.KeepRunning()) {}
while (st.KeepRunning()) {
}
st.SetLabel(label);
}
int RegisterFromFunction() {
std::pair<const char*, const char*> cases[] = {
{"test1", "One"},
{"test2", "Two"},
{"test3", "Three"}
};
for (auto& c : cases)
{"test1", "One"}, {"test2", "Two"}, {"test3", "Three"}};
for (auto const& c : cases)
benchmark::RegisterBenchmark(c.first, &BM_extra_args, c.second);
return 0;
}
int dummy2 = RegisterFromFunction();
ADD_CASES(
{"test1", "One"},
{"test2", "Two"},
{"test3", "Three"}
);
ADD_CASES({"test1", "One"}, {"test2", "Two"}, {"test3", "Three"});

#endif // BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK
#endif  // BENCHMARK_HAS_NO_VARIADIC_REGISTER_BENCHMARK

//----------------------------------------------------------------------------//
// Test RegisterBenchmark with different callable types
@@ -102,7 +99,8 @@ ADD_CASES(

struct CustomFixture {
void operator()(benchmark::State& st) {
while (st.KeepRunning()) {}
while (st.KeepRunning()) {
}
}
};

@@ -118,7 +116,8 @@ void TestRegistrationAtRuntime() {
{
int x = 42;
auto capturing_lam = [=](benchmark::State& st) {
while (st.KeepRunning()) {}
while (st.KeepRunning()) {
}
st.SetLabel(std::to_string(x));
};
benchmark::RegisterBenchmark("lambda_benchmark", capturing_lam);
@ -1,69 +1,55 @@
|
||||
|
||||
#undef NDEBUG
|
||||
#include "benchmark/benchmark.h"
|
||||
#include "output_test.h"
|
||||
#include <utility>
|
||||
|
||||
#include "benchmark/benchmark.h"
|
||||
#include "output_test.h"
|
||||
|
||||
// ========================================================================= //
|
||||
// ---------------------- Testing Prologue Output -------------------------- //
|
||||
// ========================================================================= //
|
||||
|
||||
ADD_CASES(TC_ConsoleOut, {
|
||||
{"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
|
||||
{"^[-]+$", MR_Next}
|
||||
});
|
||||
ADD_CASES(TC_CSVOut, {
|
||||
{"name,iterations,real_time,cpu_time,time_unit,bytes_per_second,items_per_second,"
|
||||
"label,error_occurred,error_message"}
|
||||
});
|
||||
ADD_CASES(TC_ConsoleOut, {{"^Benchmark %s Time %s CPU %s Iterations$", MR_Next},
|
||||
{"^[-]+$", MR_Next}});
|
||||
ADD_CASES(TC_CSVOut, {{"name,iterations,real_time,cpu_time,time_unit,bytes_per_"
|
||||
"second,items_per_second,"
|
||||
"label,error_occurred,error_message"}});
|
||||
|
||||
// ========================================================================= //
|
||||
// ------------------------ Testing Basic Output --------------------------- //
|
||||
// ========================================================================= //
|
||||
|
||||
void BM_basic(benchmark::State& state) {
|
||||
while (state.KeepRunning()) {}
|
||||
while (state.KeepRunning()) {
|
||||
}
|
||||
}
|
||||
BENCHMARK(BM_basic);
|
||||
|
||||
ADD_CASES(TC_ConsoleOut, {
|
||||
{"^BM_basic %console_report$"}
|
||||
});
|
||||
ADD_CASES(TC_JSONOut, {
|
||||
{"\"name\": \"BM_basic\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %int,$", MR_Next},
|
||||
{"\"cpu_time\": %int,$", MR_Next},
|
||||
{"\"time_unit\": \"ns\"$", MR_Next},
|
||||
{"}", MR_Next}
|
||||
});
|
||||
ADD_CASES(TC_CSVOut, {
|
||||
{"^\"BM_basic\",%csv_report$"}
|
||||
});
|
||||
ADD_CASES(TC_ConsoleOut, {{"^BM_basic %console_report$"}});
|
||||
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_basic\",$"},
|
||||
{"\"iterations\": %int,$", MR_Next},
|
||||
{"\"real_time\": %int,$", MR_Next},
|
||||
{"\"cpu_time\": %int,$", MR_Next},
|
||||
{"\"time_unit\": \"ns\"$", MR_Next},
|
||||
{"}", MR_Next}});
|
||||
ADD_CASES(TC_CSVOut, {{"^\"BM_basic\",%csv_report$"}});
|
||||
|
||||
// ========================================================================= //
// ------------------------ Testing Error Output --------------------------- //
// ========================================================================= //

void BM_error(benchmark::State& state) {
  state.SkipWithError("message");
  while(state.KeepRunning()) {}
  state.SkipWithError("message");
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_error);
ADD_CASES(TC_ConsoleOut, {
    {"^BM_error[ ]+ERROR OCCURRED: 'message'$"}
});
ADD_CASES(TC_JSONOut, {
    {"\"name\": \"BM_error\",$"},
    {"\"error_occurred\": true,$", MR_Next},
    {"\"error_message\": \"message\",$", MR_Next}
});

ADD_CASES(TC_CSVOut, {
    {"^\"BM_error\",,,,,,,,true,\"message\"$"}
});
ADD_CASES(TC_ConsoleOut, {{"^BM_error[ ]+ERROR OCCURRED: 'message'$"}});
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_error\",$"},
                       {"\"error_occurred\": true,$", MR_Next},
                       {"\"error_message\": \"message\",$", MR_Next}});

ADD_CASES(TC_CSVOut, {{"^\"BM_error\",,,,,,,,true,\"message\"$"}});

// ========================================================================= //
// ----------------------- Testing Complexity Output ----------------------- //
@ -74,85 +60,68 @@ void BM_Complexity_O1(benchmark::State& state) {
  }
  state.SetComplexityN(state.range(0));
}
BENCHMARK(BM_Complexity_O1)->Range(1, 1<<18)->Complexity(benchmark::o1);
SET_SUBSTITUTIONS({
    {"%bigOStr", "[ ]*[0-9]+\\.[0-9]+ \\([0-9]+\\)"},
    {"%RMS", "[ ]*[0-9]+ %"}
});
ADD_CASES(TC_ConsoleOut, {
    {"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
    {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}
});

BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
SET_SUBSTITUTIONS({{"%bigOStr", "[ ]*[0-9]+\\.[0-9]+ \\([0-9]+\\)"},
                   {"%RMS", "[ ]*[0-9]+ %"}});
ADD_CASES(TC_ConsoleOut, {{"^BM_Complexity_O1_BigO %bigOStr %bigOStr[ ]*$"},
                          {"^BM_Complexity_O1_RMS %RMS %RMS[ ]*$"}});

// ========================================================================= //
// ----------------------- Testing Aggregate Output ------------------------ //
// ========================================================================= //

// Test that non-aggregate data is printed by default
void BM_Repeat(benchmark::State& state) { while (state.KeepRunning()) {} }
void BM_Repeat(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_Repeat)->Repetitions(3);
ADD_CASES(TC_ConsoleOut, {
    {"^BM_Repeat/repeats:3 %console_report$"},
    {"^BM_Repeat/repeats:3 %console_report$"},
    {"^BM_Repeat/repeats:3 %console_report$"},
    {"^BM_Repeat/repeats:3_mean %console_report$"},
    {"^BM_Repeat/repeats:3_stddev %console_report$"}
});
ADD_CASES(TC_JSONOut, {
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3\",$"},
    {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
    {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}
});
ADD_CASES(TC_CSVOut, {
    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
    {"^\"BM_Repeat/repeats:3\",%csv_report$"},
    {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
    {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}
});
ADD_CASES(TC_ConsoleOut, {{"^BM_Repeat/repeats:3 %console_report$"},
                          {"^BM_Repeat/repeats:3 %console_report$"},
                          {"^BM_Repeat/repeats:3 %console_report$"},
                          {"^BM_Repeat/repeats:3_mean %console_report$"},
                          {"^BM_Repeat/repeats:3_stddev %console_report$"}});
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_Repeat/repeats:3\",$"},
                       {"\"name\": \"BM_Repeat/repeats:3\",$"},
                       {"\"name\": \"BM_Repeat/repeats:3\",$"},
                       {"\"name\": \"BM_Repeat/repeats:3_mean\",$"},
                       {"\"name\": \"BM_Repeat/repeats:3_stddev\",$"}});
ADD_CASES(TC_CSVOut, {{"^\"BM_Repeat/repeats:3\",%csv_report$"},
                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
                      {"^\"BM_Repeat/repeats:3\",%csv_report$"},
                      {"^\"BM_Repeat/repeats:3_mean\",%csv_report$"},
                      {"^\"BM_Repeat/repeats:3_stddev\",%csv_report$"}});

// Test that a non-repeated test still prints non-aggregate results even when
// only-aggregate reports have been requested
void BM_RepeatOnce(benchmark::State& state) { while (state.KeepRunning()) {} }
void BM_RepeatOnce(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_RepeatOnce)->Repetitions(1)->ReportAggregatesOnly();
ADD_CASES(TC_ConsoleOut, {
    {"^BM_RepeatOnce/repeats:1 %console_report$"}
});
ADD_CASES(TC_JSONOut, {
    {"\"name\": \"BM_RepeatOnce/repeats:1\",$"}
});
ADD_CASES(TC_CSVOut, {
    {"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}
});

ADD_CASES(TC_ConsoleOut, {{"^BM_RepeatOnce/repeats:1 %console_report$"}});
ADD_CASES(TC_JSONOut, {{"\"name\": \"BM_RepeatOnce/repeats:1\",$"}});
ADD_CASES(TC_CSVOut, {{"^\"BM_RepeatOnce/repeats:1\",%csv_report$"}});

// Test that non-aggregate data is not reported
void BM_SummaryRepeat(benchmark::State& state) { while (state.KeepRunning()) {} }
void BM_SummaryRepeat(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
}
BENCHMARK(BM_SummaryRepeat)->Repetitions(3)->ReportAggregatesOnly();
ADD_CASES(TC_ConsoleOut, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"^BM_SummaryRepeat/repeats:3_mean %console_report$"},
    {"^BM_SummaryRepeat/repeats:3_stddev %console_report$"}
});
ADD_CASES(TC_JSONOut, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
    {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}
});
ADD_CASES(TC_CSVOut, {
    {".*BM_SummaryRepeat/repeats:3 ", MR_Not},
    {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
    {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}
});
ADD_CASES(TC_ConsoleOut,
          {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
           {"^BM_SummaryRepeat/repeats:3_mean %console_report$"},
           {"^BM_SummaryRepeat/repeats:3_stddev %console_report$"}});
ADD_CASES(TC_JSONOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
                       {"\"name\": \"BM_SummaryRepeat/repeats:3_mean\",$"},
                       {"\"name\": \"BM_SummaryRepeat/repeats:3_stddev\",$"}});
ADD_CASES(TC_CSVOut, {{".*BM_SummaryRepeat/repeats:3 ", MR_Not},
                      {"^\"BM_SummaryRepeat/repeats:3_mean\",%csv_report$"},
                      {"^\"BM_SummaryRepeat/repeats:3_stddev\",%csv_report$"}});

// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //


int main(int argc, char* argv[]) {
  RunOutputTests(argc, argv);
}
int main(int argc, char *argv[]) { RunOutputTests(argc, argv); }

@ -1,10 +1,11 @@

#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include <cassert>
#include <vector>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "benchmark/benchmark.h"

namespace {

class TestReporter : public benchmark::ConsoleReporter {
@ -18,7 +19,7 @@ class TestReporter : public benchmark::ConsoleReporter {
    ConsoleReporter::ReportRuns(report);
  }

  TestReporter() {}
  TestReporter() {}
  virtual ~TestReporter() {}

  mutable std::vector<Run> all_runs_;
@ -32,11 +33,12 @@ struct TestCase {
  typedef benchmark::BenchmarkReporter::Run Run;

  void CheckRun(Run const& run) const {
    CHECK(name == run.benchmark_name) << "expected " << name << " got " << run.benchmark_name;
    CHECK(name == run.benchmark_name) << "expected " << name << " got "
                                      << run.benchmark_name;
    CHECK(error_occurred == run.error_occurred);
    CHECK(error_message == run.error_message);
    if (error_occurred) {
      //CHECK(run.iterations == 0);
      // CHECK(run.iterations == 0);
    } else {
      CHECK(run.iterations != 0);
    }
@ -55,12 +57,10 @@ int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {

#define CONCAT(x, y) CONCAT2(x, y)
#define CONCAT2(x, y) x##y
#define ADD_CASES(...) \
  int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)

}  // end namespace


void BM_error_before_running(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunning()) {
@ -68,8 +68,7 @@ void BM_error_before_running(benchmark::State& state) {
  }
}
BENCHMARK(BM_error_before_running);
ADD_CASES("BM_error_before_running",
          {{"", true, "error message"}});
ADD_CASES("BM_error_before_running", {{"", true, "error message"}});

void BM_error_during_running(benchmark::State& state) {
  int first_iter = true;
@ -85,17 +84,14 @@ void BM_error_during_running(benchmark::State& state) {
  }
}
BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES(
    "BM_error_during_running",
    {{"/1/threads:1", true, "error message"},
     {"/1/threads:2", true, "error message"},
     {"/1/threads:4", true, "error message"},
     {"/1/threads:8", true, "error message"},
     {"/2/threads:1", false, ""},
     {"/2/threads:2", false, ""},
     {"/2/threads:4", false, ""},
     {"/2/threads:8", false, ""}}
);
ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
                                      {"/1/threads:2", true, "error message"},
                                      {"/1/threads:4", true, "error message"},
                                      {"/1/threads:8", true, "error message"},
                                      {"/2/threads:1", false, ""},
                                      {"/2/threads:2", false, ""},
                                      {"/2/threads:4", false, ""},
                                      {"/2/threads:8", false, ""}});

void BM_error_after_running(benchmark::State& state) {
  while (state.KeepRunning()) {
@ -105,13 +101,10 @@ void BM_error_after_running(benchmark::State& state) {
  state.SkipWithError("error message");
}
BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
ADD_CASES(
    "BM_error_after_running",
    {{"/threads:1", true, "error message"},
     {"/threads:2", true, "error message"},
     {"/threads:4", true, "error message"},
     {"/threads:8", true, "error message"}}
);
ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
                                     {"/threads:2", true, "error message"},
                                     {"/threads:4", true, "error message"},
                                     {"/threads:8", true, "error message"}});

void BM_error_while_paused(benchmark::State& state) {
  bool first_iter = true;
@ -128,18 +121,14 @@ void BM_error_while_paused(benchmark::State& state) {
  }
}
BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES(
    "BM_error_while_paused",
    {{"/1/threads:1", true, "error message"},
     {"/1/threads:2", true, "error message"},
     {"/1/threads:4", true, "error message"},
     {"/1/threads:8", true, "error message"},
     {"/2/threads:1", false, ""},
     {"/2/threads:2", false, ""},
     {"/2/threads:4", false, ""},
     {"/2/threads:8", false, ""}}
);

ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
                                    {"/1/threads:2", true, "error message"},
                                    {"/1/threads:4", true, "error message"},
                                    {"/1/threads:8", true, "error message"},
                                    {"/2/threads:1", false, ""},
                                    {"/2/threads:2", false, ""},
                                    {"/2/threads:4", false, ""},
                                    {"/2/threads:8", false, ""}});

int main(int argc, char* argv[]) {
  benchmark::Initialize(&argc, argv);